import os
import re

from django.contrib.gis.db.models import Extent3D, Union
from django.contrib.gis.db.models.functions import (
    AsGeoJSON, AsKML, Length, Perimeter, Scale, Translate,
)
from django.contrib.gis.geos import GEOSGeometry, LineString, Point, Polygon
from django.test import TestCase, skipUnlessDBFeature

from ..utils import FuncTestMixin
from .models import (
    City3D, Interstate2D, Interstate3D, InterstateProj2D, InterstateProj3D,
    MultiPoint3D, Point2D, Point3D, Polygon2D, Polygon3D,
)

data_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'data'))
city_file = os.path.join(data_path, 'cities', 'cities.shp')
vrt_file = os.path.join(data_path, 'test_vrt', 'test_vrt.vrt')

# The coordinates of each city, with Z values corresponding to their
# altitude in meters.
city_data = (
    ('Houston', (-95.363151, 29.763374, 18)),
    ('Dallas', (-96.801611, 32.782057, 147)),
    ('Oklahoma City', (-97.521157, 34.464642, 380)),
    ('Wellington', (174.783117, -41.315268, 14)),
    ('Pueblo', (-104.609252, 38.255001, 1433)),
    ('Lawrence', (-95.235060, 38.971823, 251)),
    ('Chicago', (-87.650175, 41.850385, 181)),
    ('Victoria', (-123.305196, 48.462611, 15)),
)

# Reference mapping of city name to its coordinates, with the altitude as
# the Z value.
city_dict = {name: coords for name, coords in city_data}

# 3D freeway data derived from the National Elevation Dataset:
# http://seamless.usgs.gov/products/9arc.php
interstate_data = (
    ('I-45',
     'LINESTRING(-95.3708481 29.7765870 11.339,-95.3694580 29.7787980 4.536,'
     '-95.3690305 29.7797359 9.762,-95.3691886 29.7812450 12.448,'
     '-95.3696447 29.7850144 10.457,-95.3702511 29.7868518 9.418,'
     '-95.3706724 29.7881286 14.858,-95.3711632 29.7896157 15.386,'
     '-95.3714525 29.7936267 13.168,-95.3717848 29.7955007 15.104,'
     '-95.3717719 29.7969804 16.516,-95.3717305 29.7982117 13.923,'
     '-95.3717254 29.8000778 14.385,-95.3719875 29.8013539 15.160,'
     '-95.3720575 29.8026785 15.544,-95.3721321 29.8040912 14.975,'
     '-95.3722074 29.8050998 15.688,-95.3722779 29.8060430 16.099,'
     '-95.3733818 29.8076750 15.197,-95.3741563 29.8103686 17.268,'
     '-95.3749458 29.8129927 19.857,-95.3763564 29.8144557 15.435)',
     (11.339, 4.536, 9.762, 12.448, 10.457, 9.418, 14.858, 15.386, 13.168,
      15.104, 16.516, 13.923, 14.385, 15.16, 15.544, 14.975, 15.688, 16.099,
      15.197, 17.268, 19.857, 15.435),
     ),
)

# Bounding box polygon for inner-loop of Houston (in projected coordinate
# system 32140), with elevation values from the National Elevation Dataset
# (see above).
bbox_data = (
    'POLYGON((941527.97 4225693.20,962596.48 4226349.75,963152.57 4209023.95,'
    '942051.75 4208366.38,941527.97 4225693.20))',
    (21.71, 13.21, 9.12, 16.40, 21.71)
)
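
# Illustrative sanity check (added for exposition; not part of the original
# suite): the Z values embedded in the interstate WKT above can be read back
# directly from a GEOSGeometry via its `z` property, which is exactly what
# test_3d_hasz below relies on.
def _example_z_roundtrip():
    name, line, exp_z = interstate_data[0]
    geom = GEOSGeometry(line, srid=4269)
    # A 3D LineString exposes its Z coordinates as a sequence.
    assert geom.hasz
    assert tuple(geom.z) == exp_z
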
class Geo3DLoadingHelper:
    def _load_interstate_data(self):
        # Interstate (2D / 3D and Geographic/Projected variants)
        for name, line, exp_z in interstate_data:
            line_3d = GEOSGeometry(line, srid=4269)
            line_2d = LineString([coord[:2] for coord in line_3d.coords], srid=4269)

            # Creating a geographic and projected version of the
            # interstate in both 2D and 3D.
            Interstate3D.objects.create(name=name, line=line_3d)
            InterstateProj3D.objects.create(name=name, line=line_3d)
            Interstate2D.objects.create(name=name, line=line_2d)
            InterstateProj2D.objects.create(name=name, line=line_2d)

    def _load_city_data(self):
        for name, pnt_data in city_data:
            City3D.objects.create(
                name=name, point=Point(*pnt_data, srid=4326),
                pointg=Point(*pnt_data, srid=4326),
            )

    def _load_polygon_data(self):
        bbox_wkt, bbox_z = bbox_data
        bbox_2d = GEOSGeometry(bbox_wkt, srid=32140)
        bbox_3d = Polygon(tuple((x, y, z) for (x, y), z in zip(bbox_2d[0].coords, bbox_z)), srid=32140)
        Polygon2D.objects.create(name='2D BBox', poly=bbox_2d)
        Polygon3D.objects.create(name='3D BBox', poly=bbox_3d)


@skipUnlessDBFeature("supports_3d_storage")
class Geo3DTest(Geo3DLoadingHelper, TestCase):
    """
    Only a subset of the PostGIS routines are 3D-enabled, and this TestCase
    tries to test the features that can handle 3D and that are also
    available within GeoDjango. For more information, see the PostGIS docs
    on the routines that support 3D:

    https://postgis.net/docs/PostGIS_Special_Functions_Index.html#PostGIS_3D_Functions
    """

    def test_3d_hasz(self):
        """
        Make sure data is 3D and has expected Z values -- shouldn't change
        because of coordinate system.
        """
        self._load_interstate_data()
        for name, line, exp_z in interstate_data:
            interstate = Interstate3D.objects.get(name=name)
            interstate_proj = InterstateProj3D.objects.get(name=name)
            for i in [interstate, interstate_proj]:
                self.assertTrue(i.line.hasz)
                self.assertEqual(exp_z, tuple(i.line.z))

        self._load_city_data()
        for name, pnt_data in city_data:
            city = City3D.objects.get(name=name)
            # Testing both geometry and geography fields
            self.assertTrue(city.point.hasz)
            self.assertTrue(city.pointg.hasz)
            self.assertEqual(city.point.z, pnt_data[2])
            self.assertEqual(city.pointg.z, pnt_data[2])

    def test_3d_polygons(self):
        """
        Test the creation of polygon 3D models.
        """
        self._load_polygon_data()
        p3d = Polygon3D.objects.get(name='3D BBox')
        self.assertTrue(p3d.poly.hasz)
        self.assertIsInstance(p3d.poly, Polygon)
        self.assertEqual(p3d.poly.srid, 32140)

    def test_3d_layermapping(self):
        """
        Testing LayerMapping on 3D models.
        """
        # Import here as GDAL is required for these imports.
        from django.contrib.gis.utils import LayerMapping, LayerMapError

        point_mapping = {'point': 'POINT'}
        mpoint_mapping = {'mpoint': 'MULTIPOINT'}

        # The VRT is 3D, but should still be able to map sans the Z.
        lm = LayerMapping(Point2D, vrt_file, point_mapping, transform=False)
        lm.save()
        self.assertEqual(3, Point2D.objects.count())

        # The city shapefile is 2D, and won't be able to fill the coordinates
        # in the 3D model -- thus, a LayerMapError is raised.
        with self.assertRaises(LayerMapError):
            LayerMapping(Point3D, city_file, point_mapping, transform=False)

        # 3D model should take 3D data just fine.
        lm = LayerMapping(Point3D, vrt_file, point_mapping, transform=False)
        lm.save()
        self.assertEqual(3, Point3D.objects.count())

        # Making sure LayerMapping.make_multi works right, by converting
        # a Point25D into a MultiPoint25D.
        lm = LayerMapping(MultiPoint3D, vrt_file, mpoint_mapping, transform=False)
        lm.save()
        self.assertEqual(3, MultiPoint3D.objects.count())

    @skipUnlessDBFeature("supports_3d_functions")
    def test_union(self):
        """
        Testing the Union aggregate of 3D models.
        """
        # PostGIS query that returned the reference EWKT for this test:
        # `SELECT ST_AsText(ST_Union(point)) FROM geo3d_city3d;`
        self._load_city_data()
        ref_ewkt = (
            'SRID=4326;MULTIPOINT(-123.305196 48.462611 15,-104.609252 38.255001 1433,'
            '-97.521157 34.464642 380,-96.801611 32.782057 147,-95.363151 29.763374 18,'
            '-95.23506 38.971823 251,-87.650175 41.850385 181,174.783117 -41.315268 14)'
        )
        ref_union = GEOSGeometry(ref_ewkt)
        union = City3D.objects.aggregate(Union('point'))['point__union']
        self.assertTrue(union.hasz)
        # Ordering of points in the resulting geometry may vary between
        # implementations.
        self.assertEqual({p.ewkt for p in ref_union}, {p.ewkt for p in union})

    @skipUnlessDBFeature("supports_3d_functions")
    def test_extent(self):
        """
        Testing the Extent3D aggregate for 3D models.
        """
        self._load_city_data()
        # `SELECT ST_Extent3D(point) FROM geo3d_city3d;`
        ref_extent3d = (-123.305196, -41.315268, 14, 174.783117, 48.462611, 1433)
        extent = City3D.objects.aggregate(Extent3D('point'))['point__extent3d']

        def check_extent3d(extent3d, tol=6):
            for ref_val, ext_val in zip(ref_extent3d, extent3d):
                self.assertAlmostEqual(ref_val, ext_val, tol)

        check_extent3d(extent)
        self.assertIsNone(City3D.objects.none().aggregate(Extent3D('point'))['point__extent3d'])


@skipUnlessDBFeature("supports_3d_functions")
class Geo3DFunctionsTests(FuncTestMixin, Geo3DLoadingHelper, TestCase):
    def test_kml(self):
        """
        Test KML() function with Z values.
        """
        self._load_city_data()
        h = City3D.objects.annotate(kml=AsKML('point', precision=6)).get(name='Houston')
        # KML should be 3D.
        # `SELECT ST_AsKML(point, 6) FROM geo3d_city3d WHERE name = 'Houston';`
        ref_kml_regex = re.compile(r'^<Point><coordinates>-95.363\d+,29.763\d+,18</coordinates></Point>$')
        self.assertTrue(ref_kml_regex.match(h.kml))

    def test_geojson(self):
        """
        Test GeoJSON() function with Z values.
        """
        self._load_city_data()
        h = City3D.objects.annotate(geojson=AsGeoJSON('point', precision=6)).get(name='Houston')
        # GeoJSON should be 3D
        # `SELECT ST_AsGeoJSON(point, 6) FROM geo3d_city3d WHERE name='Houston';`
        ref_json_regex = re.compile(r'^{"type":"Point","coordinates":\[-95.363151,29.763374,18(\.0+)?\]}$')
        self.assertTrue(ref_json_regex.match(h.geojson))

    def test_perimeter(self):
        """
        Testing Perimeter() function on 3D fields.
        """
        self._load_polygon_data()
        # Reference query for values below:
        # `SELECT ST_Perimeter3D(poly), ST_Perimeter2D(poly) FROM geo3d_polygon3d;`
        ref_perim_3d = 76859.2620451
        ref_perim_2d = 76859.2577803
        tol = 6
        poly2d = Polygon2D.objects.annotate(perimeter=Perimeter('poly')).get(name='2D BBox')
        self.assertAlmostEqual(ref_perim_2d, poly2d.perimeter.m, tol)
        poly3d = Polygon3D.objects.annotate(perimeter=Perimeter('poly')).get(name='3D BBox')
        self.assertAlmostEqual(ref_perim_3d, poly3d.perimeter.m, tol)

    def test_length(self):
        """
        Testing Length() function on 3D fields.
        """
        # ST_Length_Spheroid is Z-aware, and thus does not need to use
        # a separate function internally.
        # `SELECT ST_Length_Spheroid(line, 'SPHEROID["GRS 1980",6378137,298.257222101]')
        #    FROM geo3d_interstate[2d|3d];`
        self._load_interstate_data()
        tol = 3
        ref_length_2d = 4368.1721949481
        ref_length_3d = 4368.62547052088
        inter2d = Interstate2D.objects.annotate(length=Length('line')).get(name='I-45')
        self.assertAlmostEqual(ref_length_2d, inter2d.length.m, tol)
        inter3d = Interstate3D.objects.annotate(length=Length('line')).get(name='I-45')
        self.assertAlmostEqual(ref_length_3d, inter3d.length.m, tol)

        # Making sure `ST_Length3D` is used for a projected and 3D model
        # rather than `ST_Length`.
        # `SELECT ST_Length(line) FROM geo3d_interstateproj2d;`
        ref_length_2d = 4367.71564892392
        # `SELECT ST_Length3D(line) FROM geo3d_interstateproj3d;`
        ref_length_3d = 4368.16897234101
        inter2d = InterstateProj2D.objects.annotate(length=Length('line')).get(name='I-45')
        self.assertAlmostEqual(ref_length_2d, inter2d.length.m, tol)
        inter3d = InterstateProj3D.objects.annotate(length=Length('line')).get(name='I-45')
        self.assertAlmostEqual(ref_length_3d, inter3d.length.m, tol)

    def test_scale(self):
        """
        Testing Scale() function on Z values.
        """
        self._load_city_data()
        # Mapping of City name to reference Z values.
        zscales = (-3, 4, 23)
        for zscale in zscales:
            for city in City3D.objects.annotate(scale=Scale('point', 1.0, 1.0, zscale)):
                self.assertEqual(city_dict[city.name][2] * zscale, city.scale.z)

    def test_translate(self):
        """
        Testing Translate() function on Z values.
        """
        self._load_city_data()
        ztranslations = (5.23, 23, -17)
        for ztrans in ztranslations:
            for city in City3D.objects.annotate(translate=Translate('point', 0, 0, ztrans)):
                self.assertEqual(city_dict[city.name][2] + ztrans, city.translate.z)
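
# Usage sketch (illustrative; not part of the original module): how the 3D
# aggregates exercised by Geo3DTest are consumed in application code. Result
# keys follow Django's `<field>__<aggregate-name>` convention.
def _example_city_3d_summary():
    # Union of all city points; 3D inputs yield a 3D MultiPoint.
    union = City3D.objects.aggregate(Union('point'))['point__union']
    # 3D bounding box as (xmin, ymin, zmin, xmax, ymax, zmax).
    extent = City3D.objects.aggregate(Extent3D('point'))['point__extent3d']
    return union, extent
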
import ctypes
import itertools
import json
import pickle
import random
from binascii import a2b_hex
from io import BytesIO
from unittest import mock, skipIf

from django.contrib.gis import gdal
from django.contrib.gis.geos import (
    GeometryCollection, GEOSException, GEOSGeometry, LinearRing, LineString,
    MultiLineString, MultiPoint, MultiPolygon, Point, Polygon, fromfile,
    fromstr,
)
from django.contrib.gis.geos.libgeos import geos_version_tuple
from django.contrib.gis.shortcuts import numpy
from django.template import Context
from django.template.engine import Engine
from django.test import SimpleTestCase

from ..test_data import TestDataMixin
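
# Orientation sketch (added for exposition; not part of the original module):
# the main constructor inputs GEOSGeometry accepts, each exercised piecemeal
# by the tests below -- EWKT/WKT strings, hex strings, and WKB/EWKB buffers.
def _example_geosgeometry_inputs():
    pnt = Point(5, 23, srid=4326)
    # EWKT, HEXEWKB, and EWKB all carry the SRID along with the coordinates.
    for raw in ('SRID=4326;POINT (5 23)', pnt.hexewkb, pnt.ewkb):
        geom = GEOSGeometry(raw)
        assert geom == pnt and geom.srid == 4326
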
class GEOSTest(SimpleTestCase, TestDataMixin):

    def test_wkt(self):
        "Testing WKT output."
        for g in self.geometries.wkt_out:
            geom = fromstr(g.wkt)
            if geom.hasz:
                self.assertEqual(g.ewkt, geom.wkt)

    def test_hex(self):
        "Testing HEX output."
        for g in self.geometries.hex_wkt:
            geom = fromstr(g.wkt)
            self.assertEqual(g.hex, geom.hex.decode())

    def test_hexewkb(self):
        "Testing (HEX)EWKB output."
        # For testing HEX(EWKB).
        ogc_hex = b'01010000000000000000000000000000000000F03F'
        ogc_hex_3d = b'01010000800000000000000000000000000000F03F0000000000000040'
        # `SELECT ST_AsHEXEWKB(ST_GeomFromText('POINT(0 1)', 4326));`
        hexewkb_2d = b'0101000020E61000000000000000000000000000000000F03F'
        # `SELECT ST_AsHEXEWKB(ST_GeomFromEWKT('SRID=4326;POINT(0 1 2)'));`
        hexewkb_3d = b'01010000A0E61000000000000000000000000000000000F03F0000000000000040'

        pnt_2d = Point(0, 1, srid=4326)
        pnt_3d = Point(0, 1, 2, srid=4326)

        # OGC-compliant HEX will not have SRID value.
        self.assertEqual(ogc_hex, pnt_2d.hex)
        self.assertEqual(ogc_hex_3d, pnt_3d.hex)

        # HEXEWKB should be appropriate for its dimension -- have to use
        # a WKBWriter w/dimension set accordingly, else GEOS will insert
        # garbage into 3D coordinate if there is none.
        self.assertEqual(hexewkb_2d, pnt_2d.hexewkb)
        self.assertEqual(hexewkb_3d, pnt_3d.hexewkb)
        self.assertIs(GEOSGeometry(hexewkb_3d).hasz, True)

        # Same for EWKB.
        self.assertEqual(memoryview(a2b_hex(hexewkb_2d)), pnt_2d.ewkb)
        self.assertEqual(memoryview(a2b_hex(hexewkb_3d)), pnt_3d.ewkb)

        # Redundant sanity check.
        self.assertEqual(4326, GEOSGeometry(hexewkb_2d).srid)

    def test_kml(self):
        "Testing KML output."
        for tg in self.geometries.wkt_out:
            geom = fromstr(tg.wkt)
            kml = getattr(tg, 'kml', False)
            if kml:
                self.assertEqual(kml, geom.kml)

    def test_errors(self):
        "Testing the Error handlers."
        # string-based
        for err in self.geometries.errors:
            with self.assertRaises((GEOSException, ValueError)):
                fromstr(err.wkt)

        # Bad WKB
        with self.assertRaises(GEOSException):
            GEOSGeometry(memoryview(b'0'))

        class NotAGeometry:
            pass

        # Some other object
        with self.assertRaises(TypeError):
            GEOSGeometry(NotAGeometry())
        # None
        with self.assertRaises(TypeError):
            GEOSGeometry(None)

    def test_wkb(self):
        "Testing WKB output."
        for g in self.geometries.hex_wkt:
            geom = fromstr(g.wkt)
            wkb = geom.wkb
            self.assertEqual(wkb.hex().upper(), g.hex)

    def test_create_hex(self):
        "Testing creation from HEX."
        for g in self.geometries.hex_wkt:
            geom_h = GEOSGeometry(g.hex)
            # We need to do this so decimal places get normalized.
            geom_t = fromstr(g.wkt)
            self.assertEqual(geom_t.wkt, geom_h.wkt)

    def test_create_wkb(self):
        "Testing creation from WKB."
        for g in self.geometries.hex_wkt:
            wkb = memoryview(bytes.fromhex(g.hex))
            geom_h = GEOSGeometry(wkb)
            # We need to do this so decimal places get normalized.
            geom_t = fromstr(g.wkt)
            self.assertEqual(geom_t.wkt, geom_h.wkt)

    def test_ewkt(self):
        "Testing EWKT."
        srids = (-1, 32140)
        for srid in srids:
            for p in self.geometries.polygons:
                ewkt = 'SRID=%d;%s' % (srid, p.wkt)
                poly = fromstr(ewkt)
                self.assertEqual(srid, poly.srid)
                self.assertEqual(srid, poly.shell.srid)
                self.assertEqual(srid, fromstr(poly.ewkt).srid)  # Checking export

    def test_json(self):
        "Testing GeoJSON input/output (via GDAL)."
        for g in self.geometries.json_geoms:
            geom = GEOSGeometry(g.wkt)
            if not hasattr(g, 'not_equal'):
                # Loading JSON to prevent decimal differences.
                self.assertEqual(json.loads(g.json), json.loads(geom.json))
                self.assertEqual(json.loads(g.json), json.loads(geom.geojson))
            self.assertEqual(GEOSGeometry(g.wkt, 4326), GEOSGeometry(geom.json))

    def test_json_srid(self):
        geojson_data = {
            "type": "Point",
            "coordinates": [2, 49],
            "crs": {
                "type": "name",
                "properties": {
                    "name": "urn:ogc:def:crs:EPSG::4322"
                }
            }
        }
        self.assertEqual(GEOSGeometry(json.dumps(geojson_data)), Point(2, 49, srid=4322))

    def test_fromfile(self):
        "Testing the fromfile() factory."
        ref_pnt = GEOSGeometry('POINT(5 23)')

        wkt_f = BytesIO()
        wkt_f.write(ref_pnt.wkt.encode())
        wkb_f = BytesIO()
        wkb_f.write(bytes(ref_pnt.wkb))

        # Other tests use `fromfile()` on string filenames so those
        # aren't tested here.
        for fh in (wkt_f, wkb_f):
            fh.seek(0)
            pnt = fromfile(fh)
            self.assertEqual(ref_pnt, pnt)

    def test_eq(self):
        "Testing equivalence."
        p = fromstr('POINT(5 23)')
        self.assertEqual(p, p.wkt)
        self.assertNotEqual(p, 'foo')
        ls = fromstr('LINESTRING(0 0, 1 1, 5 5)')
        self.assertEqual(ls, ls.wkt)
        self.assertNotEqual(p, 'bar')
        self.assertEqual(p, 'POINT(5.0 23.0)')

        # Errors shouldn't be raised on equivalence testing with
        # an invalid type.
        for g in (p, ls):
            self.assertIsNotNone(g)
            self.assertNotEqual(g, {'foo': 'bar'})
            self.assertIsNot(g, False)

    def test_hash(self):
        point_1 = Point(5, 23)
        point_2 = Point(5, 23, srid=4326)
        point_3 = Point(5, 23, srid=32632)
        multipoint_1 = MultiPoint(point_1, srid=4326)
        multipoint_2 = MultiPoint(point_2)
        multipoint_3 = MultiPoint(point_3)

        self.assertNotEqual(hash(point_1), hash(point_2))
        self.assertNotEqual(hash(point_1), hash(point_3))
        self.assertNotEqual(hash(point_2), hash(point_3))

        self.assertNotEqual(hash(multipoint_1), hash(multipoint_2))
        self.assertEqual(hash(multipoint_2), hash(multipoint_3))

        self.assertNotEqual(hash(multipoint_1), hash(point_1))
        self.assertNotEqual(hash(multipoint_2), hash(point_2))
        self.assertNotEqual(hash(multipoint_3), hash(point_3))

    def test_eq_with_srid(self):
        "Testing non-equivalence with different srids."
        p0 = Point(5, 23)
        p1 = Point(5, 23, srid=4326)
        p2 = Point(5, 23, srid=32632)
        # GEOS
        self.assertNotEqual(p0, p1)
        self.assertNotEqual(p1, p2)
        # EWKT
        self.assertNotEqual(p0, p1.ewkt)
        self.assertNotEqual(p1, p0.ewkt)
        self.assertNotEqual(p1, p2.ewkt)
        # Equivalence with matching SRIDs
        self.assertEqual(p2, p2)
        self.assertEqual(p2, p2.ewkt)
        # WKT contains no SRID so will not equal
        self.assertNotEqual(p2, p2.wkt)
        # SRID of 0
        self.assertEqual(p0, 'SRID=0;POINT (5 23)')
        self.assertNotEqual(p1, 'SRID=0;POINT (5 23)')

    def test_points(self):
        "Testing Point objects."
        prev = fromstr('POINT(0 0)')
        for p in self.geometries.points:
            # Creating the point from the WKT
            pnt = fromstr(p.wkt)
            self.assertEqual(pnt.geom_type, 'Point')
            self.assertEqual(pnt.geom_typeid, 0)
            self.assertEqual(pnt.dims, 0)
            self.assertEqual(p.x, pnt.x)
            self.assertEqual(p.y, pnt.y)
            self.assertEqual(pnt, fromstr(p.wkt))
            self.assertIs(pnt == prev, False)  # Use assertIs() to test __eq__.

            # Making sure that the point's X, Y components are what we expect
            self.assertAlmostEqual(p.x, pnt.tuple[0], 9)
            self.assertAlmostEqual(p.y, pnt.tuple[1], 9)

            # Testing the third dimension, and getting the tuple arguments
            if hasattr(p, 'z'):
                self.assertIs(pnt.hasz, True)
                self.assertEqual(p.z, pnt.z)
                self.assertAlmostEqual(p.z, pnt.tuple[2], 9)
                tup_args = (p.x, p.y, p.z)
                set_tup1 = (2.71, 3.14, 5.23)
                set_tup2 = (5.23, 2.71, 3.14)
            else:
                self.assertIs(pnt.hasz, False)
                self.assertIsNone(pnt.z)
                tup_args = (p.x, p.y)
                set_tup1 = (2.71, 3.14)
                set_tup2 = (3.14, 2.71)

            # Centroid operation on point should be point itself
            self.assertEqual(p.centroid, pnt.centroid.tuple)

            # Now testing the different constructors
            pnt2 = Point(tup_args)   # e.g., Point((1, 2))
            pnt3 = Point(*tup_args)  # e.g., Point(1, 2)
            self.assertEqual(pnt, pnt2)
            self.assertEqual(pnt, pnt3)

            # Now testing setting the x and y
            pnt.y = 3.14
            pnt.x = 2.71
            self.assertEqual(3.14, pnt.y)
            self.assertEqual(2.71, pnt.x)

            # Setting via the tuple/coords property
            pnt.tuple = set_tup1
            self.assertEqual(set_tup1, pnt.tuple)
            pnt.coords = set_tup2
            self.assertEqual(set_tup2, pnt.coords)

            prev = pnt  # setting the previous geometry

    def test_point_reverse(self):
        point = GEOSGeometry('POINT(144.963 -37.8143)', 4326)
        self.assertEqual(point.srid, 4326)
        point.reverse()
        self.assertEqual(point.ewkt, 'SRID=4326;POINT (-37.8143 144.963)')

    def test_multipoints(self):
        "Testing MultiPoint objects."
        for mp in self.geometries.multipoints:
            mpnt = fromstr(mp.wkt)
            self.assertEqual(mpnt.geom_type, 'MultiPoint')
            self.assertEqual(mpnt.geom_typeid, 4)
            self.assertEqual(mpnt.dims, 0)

            self.assertAlmostEqual(mp.centroid[0], mpnt.centroid.tuple[0], 9)
            self.assertAlmostEqual(mp.centroid[1], mpnt.centroid.tuple[1], 9)

            with self.assertRaises(IndexError):
                mpnt.__getitem__(len(mpnt))
            self.assertEqual(mp.centroid, mpnt.centroid.tuple)
            self.assertEqual(mp.coords, tuple(m.tuple for m in mpnt))
            for p in mpnt:
                self.assertEqual(p.geom_type, 'Point')
                self.assertEqual(p.geom_typeid, 0)
                self.assertIs(p.empty, False)
                self.assertIs(p.valid, True)

    def test_linestring(self):
        "Testing LineString objects."
        prev = fromstr('POINT(0 0)')
        for line in self.geometries.linestrings:
            ls = fromstr(line.wkt)
            self.assertEqual(ls.geom_type, 'LineString')
            self.assertEqual(ls.geom_typeid, 1)
            self.assertEqual(ls.dims, 1)
            self.assertIs(ls.empty, False)
            self.assertIs(ls.ring, False)
            if hasattr(line, 'centroid'):
                self.assertEqual(line.centroid, ls.centroid.tuple)
            if hasattr(line, 'tup'):
                self.assertEqual(line.tup, ls.tuple)

            self.assertEqual(ls, fromstr(line.wkt))
            self.assertIs(ls == prev, False)  # Use assertIs() to test __eq__.
            with self.assertRaises(IndexError):
                ls.__getitem__(len(ls))
            prev = ls

            # Creating a LineString from a tuple, list, and numpy array
            self.assertEqual(ls, LineString(ls.tuple))  # tuple
            self.assertEqual(ls, LineString(*ls.tuple))  # as individual arguments
            self.assertEqual(ls, LineString([list(tup) for tup in ls.tuple]))  # as list
            # Point individual arguments
            self.assertEqual(ls.wkt, LineString(*tuple(Point(tup) for tup in ls.tuple)).wkt)
            if numpy:
                self.assertEqual(ls, LineString(numpy.array(ls.tuple)))  # as numpy array

        with self.assertRaisesMessage(TypeError, 'Each coordinate should be a sequence (list or tuple)'):
            LineString((0, 0))

        with self.assertRaisesMessage(ValueError, 'LineString requires at least 2 points, got 1.'):
            LineString([(0, 0)])

        if numpy:
            with self.assertRaisesMessage(ValueError, 'LineString requires at least 2 points, got 1.'):
                LineString(numpy.array([(0, 0)]))

        with mock.patch('django.contrib.gis.geos.linestring.numpy', False):
            with self.assertRaisesMessage(TypeError, 'Invalid initialization input for LineStrings.'):
                LineString('wrong input')

        # Test __iter__().
        self.assertEqual(list(LineString((0, 0), (1, 1), (2, 2))), [(0, 0), (1, 1), (2, 2)])

    def test_linestring_reverse(self):
        line = GEOSGeometry('LINESTRING(144.963 -37.8143,151.2607 -33.887)', 4326)
        self.assertEqual(line.srid, 4326)
        line.reverse()
        self.assertEqual(line.ewkt, 'SRID=4326;LINESTRING (151.2607 -33.887, 144.963 -37.8143)')

    def _test_is_counterclockwise(self):
        lr = LinearRing((0, 0), (1, 0), (0, 1), (0, 0))
        self.assertIs(lr.is_counterclockwise, True)
        lr.reverse()
        self.assertIs(lr.is_counterclockwise, False)
        msg = 'Orientation of an empty LinearRing cannot be determined.'
        with self.assertRaisesMessage(ValueError, msg):
            LinearRing().is_counterclockwise

    @skipIf(geos_version_tuple() < (3, 7), 'GEOS >= 3.7.0 is required')
    def test_is_counterclockwise(self):
        self._test_is_counterclockwise()

    @skipIf(geos_version_tuple() < (3, 7), 'GEOS >= 3.7.0 is required')
    def test_is_counterclockwise_geos_error(self):
        with mock.patch('django.contrib.gis.geos.prototypes.cs_is_ccw') as mocked:
            mocked.return_value = 0
            mocked.func_name = 'GEOSCoordSeq_isCCW'
            msg = 'Error encountered in GEOS C function "GEOSCoordSeq_isCCW".'
            with self.assertRaisesMessage(GEOSException, msg):
                LinearRing((0, 0), (1, 0), (0, 1), (0, 0)).is_counterclockwise

    @mock.patch('django.contrib.gis.geos.libgeos.geos_version', lambda: b'3.6.9')
    def test_is_counterclockwise_fallback(self):
        self._test_is_counterclockwise()

    def test_multilinestring(self):
        "Testing MultiLineString objects."
        prev = fromstr('POINT(0 0)')
        for line in self.geometries.multilinestrings:
            ml = fromstr(line.wkt)
            self.assertEqual(ml.geom_type, 'MultiLineString')
            self.assertEqual(ml.geom_typeid, 5)
            self.assertEqual(ml.dims, 1)

            self.assertAlmostEqual(line.centroid[0], ml.centroid.x, 9)
            self.assertAlmostEqual(line.centroid[1], ml.centroid.y, 9)

            self.assertEqual(ml, fromstr(line.wkt))
            self.assertIs(ml == prev, False)  # Use assertIs() to test __eq__.
            prev = ml

            for ls in ml:
                self.assertEqual(ls.geom_type, 'LineString')
                self.assertEqual(ls.geom_typeid, 1)
                self.assertIs(ls.empty, False)

            with self.assertRaises(IndexError):
                ml.__getitem__(len(ml))
            self.assertEqual(ml.wkt, MultiLineString(*tuple(s.clone() for s in ml)).wkt)
            self.assertEqual(ml, MultiLineString(*tuple(LineString(s.tuple) for s in ml)))

    def test_linearring(self):
        "Testing LinearRing objects."
        for rr in self.geometries.linearrings:
            lr = fromstr(rr.wkt)
            self.assertEqual(lr.geom_type, 'LinearRing')
            self.assertEqual(lr.geom_typeid, 2)
            self.assertEqual(lr.dims, 1)
            self.assertEqual(rr.n_p, len(lr))
            self.assertIs(lr.valid, True)
            self.assertIs(lr.empty, False)

            # Creating a LinearRing from a tuple, list, and numpy array
            self.assertEqual(lr, LinearRing(lr.tuple))
            self.assertEqual(lr, LinearRing(*lr.tuple))
            self.assertEqual(lr, LinearRing([list(tup) for tup in lr.tuple]))
            if numpy:
                self.assertEqual(lr, LinearRing(numpy.array(lr.tuple)))

        with self.assertRaisesMessage(ValueError, 'LinearRing requires at least 4 points, got 3.'):
            LinearRing((0, 0), (1, 1), (0, 0))

        with self.assertRaisesMessage(ValueError, 'LinearRing requires at least 4 points, got 1.'):
            LinearRing([(0, 0)])

        if numpy:
            with self.assertRaisesMessage(ValueError, 'LinearRing requires at least 4 points, got 1.'):
                LinearRing(numpy.array([(0, 0)]))

    def test_linearring_json(self):
        self.assertJSONEqual(
            LinearRing((0, 0), (0, 1), (1, 1), (0, 0)).json,
            '{"coordinates": [[0, 0], [0, 1], [1, 1], [0, 0]], "type": "LineString"}',
        )

    def test_polygons_from_bbox(self):
        "Testing `from_bbox` class method."
        bbox = (-180, -90, 180, 90)
        p = Polygon.from_bbox(bbox)
        self.assertEqual(bbox, p.extent)

        # Testing numerical precision
        x = 3.14159265358979323
        bbox = (0, 0, 1, x)
        p = Polygon.from_bbox(bbox)
        y = p.extent[-1]
        self.assertEqual(format(x, '.13f'), format(y, '.13f'))

    def test_polygons(self):
        "Testing Polygon objects."

        prev = fromstr('POINT(0 0)')
        for p in self.geometries.polygons:
            # Creating the Polygon, testing its properties.
            poly = fromstr(p.wkt)
            self.assertEqual(poly.geom_type, 'Polygon')
            self.assertEqual(poly.geom_typeid, 3)
            self.assertEqual(poly.dims, 2)
            self.assertIs(poly.empty, False)
            self.assertIs(poly.ring, False)
            self.assertEqual(p.n_i, poly.num_interior_rings)
            self.assertEqual(p.n_i + 1, len(poly))  # Testing __len__
            self.assertEqual(p.n_p, poly.num_points)

            # Area & Centroid
            self.assertAlmostEqual(p.area, poly.area, 9)
            self.assertAlmostEqual(p.centroid[0], poly.centroid.tuple[0], 9)
            self.assertAlmostEqual(p.centroid[1], poly.centroid.tuple[1], 9)

            # Testing the geometry equivalence
            self.assertEqual(poly, fromstr(p.wkt))
            # Should not be equal to previous geometry
            self.assertIs(poly == prev, False)  # Use assertIs() to test __eq__.
            self.assertIs(poly != prev, True)   # Use assertIs() to test __ne__.

            # Testing the exterior ring
            ring = poly.exterior_ring
            self.assertEqual(ring.geom_type, 'LinearRing')
            self.assertEqual(ring.geom_typeid, 2)
            if p.ext_ring_cs:
                self.assertEqual(p.ext_ring_cs, ring.tuple)
                self.assertEqual(p.ext_ring_cs, poly[0].tuple)  # Testing __getitem__

            # Testing __getitem__ and __setitem__ on invalid indices
            with self.assertRaises(IndexError):
                poly.__getitem__(len(poly))
            with self.assertRaises(IndexError):
                poly.__setitem__(len(poly), False)
            with self.assertRaises(IndexError):
                poly.__getitem__(-1 * len(poly) - 1)

            # Testing __iter__
            for r in poly:
                self.assertEqual(r.geom_type, 'LinearRing')
                self.assertEqual(r.geom_typeid, 2)

            # Testing polygon construction.
            with self.assertRaises(TypeError):
                Polygon(0, [1, 2, 3])
            with self.assertRaises(TypeError):
                Polygon('foo')

            # Polygon(shell, (hole1, ... holeN))
            ext_ring, *int_rings = poly
            self.assertEqual(poly, Polygon(ext_ring, int_rings))

            # Polygon(shell_tuple, hole_tuple1, ... , hole_tupleN)
            ring_tuples = tuple(r.tuple for r in poly)
            self.assertEqual(poly, Polygon(*ring_tuples))

            # Constructing with tuples of LinearRings.
            self.assertEqual(poly.wkt, Polygon(*tuple(r for r in poly)).wkt)
            self.assertEqual(poly.wkt, Polygon(*tuple(LinearRing(r.tuple) for r in poly)).wkt)

    def test_polygons_templates(self):
        # Accessing Polygon attributes in templates should work.
        engine = Engine()
        template = engine.from_string('{{ polygons.0.wkt }}')
        polygons = [fromstr(p.wkt) for p in self.geometries.multipolygons[:2]]
        content = template.render(Context({'polygons': polygons}))
        self.assertIn('MULTIPOLYGON (((100', content)

    def test_polygon_comparison(self):
        p1 = Polygon(((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)))
        p2 = Polygon(((0, 0), (0, 1), (1, 0), (0, 0)))
        self.assertGreater(p1, p2)
        self.assertLess(p2, p1)

        p3 = Polygon(((0, 0), (0, 1), (1, 1), (2, 0), (0, 0)))
        p4 = Polygon(((0, 0), (0, 1), (2, 2), (1, 0), (0, 0)))
        self.assertGreater(p4, p3)
        self.assertLess(p3, p4)

    def test_multipolygons(self):
        "Testing MultiPolygon objects."
        fromstr('POINT (0 0)')
        for mp in self.geometries.multipolygons:
            mpoly = fromstr(mp.wkt)
            self.assertEqual(mpoly.geom_type, 'MultiPolygon')
            self.assertEqual(mpoly.geom_typeid, 6)
            self.assertEqual(mpoly.dims, 2)
            self.assertEqual(mp.valid, mpoly.valid)

            if mp.valid:
                self.assertEqual(mp.num_geom, mpoly.num_geom)
                self.assertEqual(mp.n_p, mpoly.num_coords)
                self.assertEqual(mp.num_geom, len(mpoly))
                with self.assertRaises(IndexError):
                    mpoly.__getitem__(len(mpoly))
                for p in mpoly:
                    self.assertEqual(p.geom_type, 'Polygon')
                    self.assertEqual(p.geom_typeid, 3)
                    self.assertIs(p.valid, True)
                self.assertEqual(mpoly.wkt, MultiPolygon(*tuple(poly.clone() for poly in mpoly)).wkt)

    def test_memory_hijinks(self):
        "Testing Geometry __del__() on rings and polygons."
        # #### Memory issues with rings and polygons

        # These tests are needed to ensure sanity with writable geometries.

        # Getting a polygon with interior rings, and pulling out the interior rings
        poly = fromstr(self.geometries.polygons[1].wkt)
        ring1 = poly[0]
        ring2 = poly[1]

        # These deletes should be 'harmless' since they are done on child geometries
        del ring1
        del ring2
        ring1 = poly[0]
        ring2 = poly[1]

        # Deleting the polygon
        del poly

        # Access to these rings is OK since they are clones.
        str(ring1)
        str(ring2)

    def test_coord_seq(self):
        "Testing Coordinate Sequence objects."
        for p in self.geometries.polygons:
            if p.ext_ring_cs:
                # Constructing the polygon and getting the coordinate sequence
                poly = fromstr(p.wkt)
                cs = poly.exterior_ring.coord_seq

                self.assertEqual(p.ext_ring_cs, cs.tuple)  # done in the Polygon test too.
                self.assertEqual(len(p.ext_ring_cs), len(cs))  # Making sure __len__ works

                # Checks __getitem__ and __setitem__
                for i in range(len(p.ext_ring_cs)):
                    c1 = p.ext_ring_cs[i]  # Expected value
                    c2 = cs[i]  # Value from coordseq
                    self.assertEqual(c1, c2)

                    # Constructing the test value to set the coordinate sequence with
                    if len(c1) == 2:
                        tset = (5, 23)
                    else:
                        tset = (5, 23, 8)
                    cs[i] = tset

                    # Making sure every set point matches what we expect
                    for j in range(len(tset)):
                        cs[i] = tset
                        self.assertEqual(tset[j], cs[i][j])

    def test_relate_pattern(self):
        "Testing relate() and relate_pattern()."
        g = fromstr('POINT (0 0)')
        with self.assertRaises(GEOSException):
            g.relate_pattern(0, 'invalid pattern, yo')
        for rg in self.geometries.relate_geoms:
            a = fromstr(rg.wkt_a)
            b = fromstr(rg.wkt_b)
            self.assertEqual(rg.result, a.relate_pattern(b, rg.pattern))
            self.assertEqual(rg.pattern, a.relate(b))

    def test_intersection(self):
        "Testing intersects() and intersection()."
        for i in range(len(self.geometries.topology_geoms)):
            a = fromstr(self.geometries.topology_geoms[i].wkt_a)
            b = fromstr(self.geometries.topology_geoms[i].wkt_b)
            i1 = fromstr(self.geometries.intersect_geoms[i].wkt)
            self.assertIs(a.intersects(b), True)
            i2 = a.intersection(b)
            self.assertEqual(i1, i2)
            self.assertEqual(i1, a & b)  # __and__ is intersection operator
            a &= b  # testing __iand__
            self.assertEqual(i1, a)

    def test_union(self):
        "Testing union()."
        for i in range(len(self.geometries.topology_geoms)):
            a = fromstr(self.geometries.topology_geoms[i].wkt_a)
            b = fromstr(self.geometries.topology_geoms[i].wkt_b)
            u1 = fromstr(self.geometries.union_geoms[i].wkt)
            u2 = a.union(b)
            self.assertEqual(u1, u2)
            self.assertEqual(u1, a | b)  # __or__ is union operator
            a |= b  # testing __ior__
            self.assertEqual(u1, a)

    def test_unary_union(self):
        "Testing unary_union."
        for i in range(len(self.geometries.topology_geoms)):
            a = fromstr(self.geometries.topology_geoms[i].wkt_a)
            b = fromstr(self.geometries.topology_geoms[i].wkt_b)
            u1 = fromstr(self.geometries.union_geoms[i].wkt)
            u2 = GeometryCollection(a, b).unary_union
            self.assertTrue(u1.equals(u2))

    def test_difference(self):
        "Testing difference()."
        for i in range(len(self.geometries.topology_geoms)):
            a = fromstr(self.geometries.topology_geoms[i].wkt_a)
            b = fromstr(self.geometries.topology_geoms[i].wkt_b)
            d1 = fromstr(self.geometries.diff_geoms[i].wkt)
            d2 = a.difference(b)
            self.assertEqual(d1, d2)
            self.assertEqual(d1, a - b)  # __sub__ is difference operator
            a -= b  # testing __isub__
            self.assertEqual(d1, a)

    def test_symdifference(self):
        "Testing sym_difference()."
        for i in range(len(self.geometries.topology_geoms)):
            a = fromstr(self.geometries.topology_geoms[i].wkt_a)
            b = fromstr(self.geometries.topology_geoms[i].wkt_b)
            d1 = fromstr(self.geometries.sdiff_geoms[i].wkt)
            d2 = a.sym_difference(b)
            self.assertEqual(d1, d2)
            self.assertEqual(d1, a ^ b)  # __xor__ is symmetric difference operator
            a ^= b  # testing __ixor__
            self.assertEqual(d1, a)

    def test_buffer(self):
        bg = self.geometries.buffer_geoms[0]
        g = fromstr(bg.wkt)

        # Can't use a floating-point for the number of quadsegs.
        with self.assertRaises(ctypes.ArgumentError):
            g.buffer(bg.width, quadsegs=1.1)

        self._test_buffer(self.geometries.buffer_geoms, 'buffer')

    def test_buffer_with_style(self):
        bg = self.geometries.buffer_with_style_geoms[0]
        g = fromstr(bg.wkt)

        # Can't use a floating-point for the number of quadsegs.
        with self.assertRaises(ctypes.ArgumentError):
            g.buffer_with_style(bg.width, quadsegs=1.1)

        # Can't use a floating-point for the end cap style.
        with self.assertRaises(ctypes.ArgumentError):
            g.buffer_with_style(bg.width, end_cap_style=1.2)
        # Can't use an end cap style that is not in the enum.
        with self.assertRaises(GEOSException):
            g.buffer_with_style(bg.width, end_cap_style=55)

        # Can't use a floating-point for the join style.
        with self.assertRaises(ctypes.ArgumentError):
            g.buffer_with_style(bg.width, join_style=1.3)
        # Can't use a join style that is not in the enum.
        with self.assertRaises(GEOSException):
            g.buffer_with_style(bg.width, join_style=66)

        self._test_buffer(
            itertools.chain(self.geometries.buffer_geoms, self.geometries.buffer_with_style_geoms),
            'buffer_with_style',
        )

    def _test_buffer(self, geometries, buffer_method_name):
        for bg in geometries:
            g = fromstr(bg.wkt)

            # The buffer we expect
            exp_buf = fromstr(bg.buffer_wkt)

            # Constructing our buffer
            buf_kwargs = {
                kwarg_name: getattr(bg, kwarg_name)
                for kwarg_name in ('width', 'quadsegs', 'end_cap_style', 'join_style', 'mitre_limit')
                if hasattr(bg, kwarg_name)
            }
            buf = getattr(g, buffer_method_name)(**buf_kwargs)
            self.assertEqual(exp_buf.num_coords, buf.num_coords)
            self.assertEqual(len(exp_buf), len(buf))

            # Now assuring that each point in the buffer is almost equal
            for j in range(len(exp_buf)):
                exp_ring = exp_buf[j]
                buf_ring = buf[j]
                self.assertEqual(len(exp_ring), len(buf_ring))
                for k in range(len(exp_ring)):
                    # Asserting the X, Y of each point are almost equal (due
                    # to floating point imprecision).
                    self.assertAlmostEqual(exp_ring[k][0], buf_ring[k][0], 9)
                    self.assertAlmostEqual(exp_ring[k][1], buf_ring[k][1], 9)

    def test_covers(self):
        poly = Polygon(((0, 0), (0, 10), (10, 10), (10, 0), (0, 0)))
        self.assertTrue(poly.covers(Point(5, 5)))
        self.assertFalse(poly.covers(Point(100, 100)))

    def test_closed(self):
        ls_closed = LineString((0, 0), (1, 1), (0, 0))
        ls_not_closed = LineString((0, 0), (1, 1))
        self.assertFalse(ls_not_closed.closed)
        self.assertTrue(ls_closed.closed)

    def test_srid(self):
        "Testing the SRID property and keyword."
        # Testing SRID keyword on Point
        pnt = Point(5, 23, srid=4326)
        self.assertEqual(4326, pnt.srid)
        pnt.srid = 3084
        self.assertEqual(3084, pnt.srid)
        with self.assertRaises(ctypes.ArgumentError):
            pnt.srid = '4326'

        # Testing SRID keyword on fromstr(), and on Polygon rings.
        poly = fromstr(self.geometries.polygons[1].wkt, srid=4269)
        self.assertEqual(4269, poly.srid)
        for ring in poly:
            self.assertEqual(4269, ring.srid)
        poly.srid = 4326
        self.assertEqual(4326, poly.shell.srid)

        # Testing SRID keyword on GeometryCollection
        gc = GeometryCollection(Point(5, 23), LineString((0, 0), (1.5, 1.5), (3, 3)), srid=32021)
        self.assertEqual(32021, gc.srid)
        for i in range(len(gc)):
            self.assertEqual(32021, gc[i].srid)

        # GEOS may get the SRID from HEXEWKB
        # 'POINT(5 23)' at SRID=4326 in hex form -- obtained from PostGIS
        # using `SELECT GeomFromText('POINT (5 23)', 4326);`.
        hex = '0101000020E610000000000000000014400000000000003740'
        p1 = fromstr(hex)
        self.assertEqual(4326, p1.srid)

        p2 = fromstr(p1.hex)
        self.assertIsNone(p2.srid)
        p3 = fromstr(p1.hex, srid=-1)  # -1 is intended.
        self.assertEqual(-1, p3.srid)

        # Testing that geometry SRID could be set to its own value
        pnt_wo_srid = Point(1, 1)
        pnt_wo_srid.srid = pnt_wo_srid.srid

        # Input geometries that have an SRID.
        self.assertEqual(GEOSGeometry(pnt.ewkt, srid=pnt.srid).srid, pnt.srid)
        self.assertEqual(GEOSGeometry(pnt.ewkb, srid=pnt.srid).srid, pnt.srid)
        with self.assertRaisesMessage(ValueError, 'Input geometry already has SRID: %d.' % pnt.srid):
            GEOSGeometry(pnt.ewkt, srid=1)
        with self.assertRaisesMessage(ValueError, 'Input geometry already has SRID: %d.' % pnt.srid):
            GEOSGeometry(pnt.ewkb, srid=1)

    def test_custom_srid(self):
        """Test with a null srid and a srid unknown to GDAL."""
        for srid in [None, 999999]:
            pnt = Point(111200, 220900, srid=srid)
            self.assertTrue(pnt.ewkt.startswith(("SRID=%s;" % srid if srid else '') + "POINT (111200"))
            self.assertIsInstance(pnt.ogr, gdal.OGRGeometry)
            self.assertIsNone(pnt.srs)

            # Test conversion from custom to a known srid
            c2w = gdal.CoordTransform(
                gdal.SpatialReference(
                    '+proj=mill +lat_0=0 +lon_0=0 +x_0=0 +y_0=0 +R_A +ellps=WGS84 '
                    '+datum=WGS84 +units=m +no_defs'
                ),
                gdal.SpatialReference(4326))
            new_pnt = pnt.transform(c2w, clone=True)
            self.assertEqual(new_pnt.srid, 4326)
            self.assertAlmostEqual(new_pnt.x, 1, 1)
            self.assertAlmostEqual(new_pnt.y, 2, 1)

    def test_mutable_geometries(self):
        "Testing the mutability of Polygons and Geometry Collections."
        # ### Testing the mutability of Polygons ###
        for p in self.geometries.polygons:
            poly = fromstr(p.wkt)

            # Should only be able to use __setitem__ with LinearRing geometries.
            with self.assertRaises(TypeError):
                poly.__setitem__(0, LineString((1, 1), (2, 2)))

            # Constructing the new shell by adding 500 to every point in the old shell.
            shell_tup = poly.shell.tuple
            new_coords = []
            for point in shell_tup:
                new_coords.append((point[0] + 500., point[1] + 500.))
            new_shell = LinearRing(*tuple(new_coords))

            # Assigning polygon's exterior ring w/the new shell
            poly.exterior_ring = new_shell
            str(new_shell)  # new shell is still accessible
            self.assertEqual(poly.exterior_ring, new_shell)
            self.assertEqual(poly[0], new_shell)

        # ### Testing the mutability of Geometry Collections
        for tg in self.geometries.multipoints:
            mp = fromstr(tg.wkt)
            for i in range(len(mp)):
                # Creating a random point.
                pnt = mp[i]
                new = Point(random.randint(21, 100), random.randint(21, 100))
                # Testing the assignment
                mp[i] = new
                str(new)  # what was used for the assignment is still accessible
                self.assertEqual(mp[i], new)
                self.assertEqual(mp[i].wkt, new.wkt)
                self.assertNotEqual(pnt, mp[i])

        # MultiPolygons involve much more memory management because each
        # Polygon w/in the collection has its own rings.
        for tg in self.geometries.multipolygons:
            mpoly = fromstr(tg.wkt)
            for i in range(len(mpoly)):
                poly = mpoly[i]
                old_poly = mpoly[i]
                # Offsetting each ring in the polygon by 500.
                for j in range(len(poly)):
                    r = poly[j]
                    for k in range(len(r)):
                        r[k] = (r[k][0] + 500., r[k][1] + 500.)
                    poly[j] = r

                self.assertNotEqual(mpoly[i], poly)
                # Testing the assignment
                mpoly[i] = poly
                str(poly)  # Still accessible
                self.assertEqual(mpoly[i], poly)
                self.assertNotEqual(mpoly[i], old_poly)

        # Extreme (!!) __setitem__ -- no longer works, have to detect
        # in the first object that __setitem__ is called in the subsequent
        # objects -- maybe mpoly[0, 0, 0] = (3.14, 2.71)?
        # mpoly[0][0][0] = (3.14, 2.71)
        # self.assertEqual((3.14, 2.71), mpoly[0][0][0])
        # Doing it more slowly..
        # self.assertEqual((3.14, 2.71), mpoly[0].shell[0])
        # del mpoly

    def test_point_list_assignment(self):
        p = Point(0, 0)

        p[:] = (1, 2, 3)
        self.assertEqual(p, Point(1, 2, 3))

        p[:] = ()
        self.assertEqual(p.wkt, Point())

        p[:] = (1, 2)
        self.assertEqual(p.wkt, Point(1, 2))

        with self.assertRaises(ValueError):
            p[:] = (1,)
        with self.assertRaises(ValueError):
            p[:] = (1, 2, 3, 4, 5)

    def test_linestring_list_assignment(self):
        ls = LineString((0, 0), (1, 1))

        ls[:] = ()
        self.assertEqual(ls, LineString())

        ls[:] = ((0, 0), (1, 1), (2, 2))
        self.assertEqual(ls, LineString((0, 0), (1, 1), (2, 2)))

        with self.assertRaises(ValueError):
            ls[:] = (1,)

    def test_linearring_list_assignment(self):
        ls = LinearRing((0, 0), (0, 1), (1, 1), (0, 0))

        ls[:] = ()
        self.assertEqual(ls, LinearRing())

        ls[:] = ((0, 0), (0, 1), (1, 1), (1, 0), (0, 0))
        self.assertEqual(ls, LinearRing((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)))

        with self.assertRaises(ValueError):
            ls[:] = ((0, 0), (1, 1), (2, 2))

    def test_polygon_list_assignment(self):
        pol = Polygon()

        pol[:] = (((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)),)
        self.assertEqual(pol, Polygon(((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)),))

        pol[:] = ()
        self.assertEqual(pol, Polygon())

    def test_geometry_collection_list_assignment(self):
        p = Point()
        gc = GeometryCollection()

        gc[:] = [p]
        self.assertEqual(gc, GeometryCollection(p))

        gc[:] = ()
        self.assertEqual(gc, GeometryCollection())

    def test_threed(self):
        "Testing three-dimensional geometries."
        # Testing a 3D Point
        pnt = Point(2, 3, 8)
        self.assertEqual((2., 3., 8.), pnt.coords)
        with self.assertRaises(TypeError):
            pnt.tuple = (1., 2.)
        pnt.coords = (1., 2., 3.)
        self.assertEqual((1., 2., 3.), pnt.coords)

        # Testing a 3D LineString
        ls = LineString((2., 3., 8.), (50., 250., -117.))
        self.assertEqual(((2., 3., 8.), (50., 250., -117.)), ls.tuple)
        with self.assertRaises(TypeError):
            ls.__setitem__(0, (1., 2.))
        ls[0] = (1., 2., 3.)
        self.assertEqual((1., 2., 3.), ls[0])

    def test_distance(self):
        "Testing the distance() function."
        # Distance to self should be 0.
        pnt = Point(0, 0)
        self.assertEqual(0.0, pnt.distance(Point(0, 0)))

        # Distance should be 1
        self.assertEqual(1.0, pnt.distance(Point(0, 1)))

        # Distance should be ~ sqrt(2)
        self.assertAlmostEqual(1.41421356237, pnt.distance(Point(1, 1)), 11)

        # Distances are from the closest vertex in each geometry --
        # should be 3 (distance from (2, 2) to (5, 2)).
        ls1 = LineString((0, 0), (1, 1), (2, 2))
        ls2 = LineString((5, 2), (6, 1), (7, 0))
        self.assertEqual(3, ls1.distance(ls2))

    def test_length(self):
        "Testing the length property."
        # Points have 0 length.
        pnt = Point(0, 0)
        self.assertEqual(0.0, pnt.length)

        # Should be ~ sqrt(2)
        ls = LineString((0, 0), (1, 1))
        self.assertAlmostEqual(1.41421356237, ls.length, 11)

        # Should be circumference of Polygon
        poly = Polygon(LinearRing((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)))
        self.assertEqual(4.0, poly.length)

        # Should be sum of each element's length in collection.
        mpoly = MultiPolygon(poly.clone(), poly)
        self.assertEqual(8.0, mpoly.length)

    def test_emptyCollections(self):
        "Testing empty geometries and collections."
        geoms = [
            GeometryCollection([]),
            fromstr('GEOMETRYCOLLECTION EMPTY'),
            GeometryCollection(),
            fromstr('POINT EMPTY'),
            Point(),
            fromstr('LINESTRING EMPTY'),
            LineString(),
            fromstr('POLYGON EMPTY'),
            Polygon(),
            fromstr('MULTILINESTRING EMPTY'),
            MultiLineString(),
            fromstr('MULTIPOLYGON EMPTY'),
            MultiPolygon(()),
            MultiPolygon(),
        ]

        if numpy:
            geoms.append(LineString(numpy.array([])))

        for g in geoms:
            self.assertIs(g.empty, True)

            # Testing len() and num_geom.
            if isinstance(g, Polygon):
                self.assertEqual(1, len(g))  # Has one empty linear ring
                self.assertEqual(1, g.num_geom)
                self.assertEqual(0, len(g[0]))
            elif isinstance(g, (Point, LineString)):
                self.assertEqual(1, g.num_geom)
                self.assertEqual(0, len(g))
            else:
                self.assertEqual(0, g.num_geom)
                self.assertEqual(0, len(g))

            # Testing __getitem__ (doesn't work on Point or Polygon)
            if isinstance(g, Point):
                with self.assertRaises(IndexError):
                    g.x
            elif isinstance(g, Polygon):
                lr = g.shell
                self.assertEqual('LINEARRING EMPTY', lr.wkt)
                self.assertEqual(0, len(lr))
                self.assertIs(lr.empty, True)
                with self.assertRaises(IndexError):
                    lr.__getitem__(0)
            else:
                with self.assertRaises(IndexError):
                    g.__getitem__(0)

    def test_collection_dims(self):
        gc = GeometryCollection([])
        self.assertEqual(gc.dims, -1)

        gc = GeometryCollection(Point(0, 0))
        self.assertEqual(gc.dims, 0)

        gc = GeometryCollection(LineString((0, 0), (1, 1)), Point(0, 0))
        self.assertEqual(gc.dims, 1)

        gc = GeometryCollection(LineString((0, 0), (1, 1)), Polygon(((0, 0), (0, 1), (1, 1), (0, 0))), Point(0, 0))
        self.assertEqual(gc.dims, 2)

    def test_collections_of_collections(self):
        "Testing GeometryCollection handling of other collections."
        # Creating a GeometryCollection WKT string composed of other
        # collections and polygons.
        coll = [mp.wkt for mp in self.geometries.multipolygons if mp.valid]
        coll.extend(mls.wkt for mls in self.geometries.multilinestrings)
        coll.extend(p.wkt for p in self.geometries.polygons)
        coll.extend(mp.wkt for mp in self.geometries.multipoints)
        gc_wkt = 'GEOMETRYCOLLECTION(%s)' % ','.join(coll)

        # Should construct ok from WKT
        gc1 = GEOSGeometry(gc_wkt)

        # Should also construct ok from individual geometry arguments.
        gc2 = GeometryCollection(*tuple(g for g in gc1))

        # And, they should be equal.
        self.assertEqual(gc1, gc2)

    def test_gdal(self):
        "Testing `ogr` and `srs` properties."
        g1 = fromstr('POINT(5 23)')
        self.assertIsInstance(g1.ogr, gdal.OGRGeometry)
        self.assertIsNone(g1.srs)

        g1_3d = fromstr('POINT(5 23 8)')
        self.assertIsInstance(g1_3d.ogr, gdal.OGRGeometry)
        self.assertEqual(g1_3d.ogr.z, 8)

        g2 = fromstr('LINESTRING(0 0, 5 5, 23 23)', srid=4326)
        self.assertIsInstance(g2.ogr, gdal.OGRGeometry)
        self.assertIsInstance(g2.srs, gdal.SpatialReference)
        self.assertEqual(g2.hex, g2.ogr.hex)
        self.assertEqual('WGS 84', g2.srs.name)

    def test_copy(self):
        "Testing use with the Python `copy` module."
        import copy
        poly = GEOSGeometry('POLYGON((0 0, 0 23, 23 23, 23 0, 0 0), (5 5, 5 10, 10 10, 10 5, 5 5))')
        cpy1 = copy.copy(poly)
        cpy2 = copy.deepcopy(poly)
        self.assertNotEqual(poly._ptr, cpy1._ptr)
        self.assertNotEqual(poly._ptr, cpy2._ptr)

    def test_transform(self):
        "Testing `transform` method."
        orig = GEOSGeometry('POINT (-104.609 38.255)', 4326)
        trans = GEOSGeometry('POINT (992385.4472045 481455.4944650)', 2774)

        # Using a srid, a SpatialReference object, and a CoordTransform object
        # for transformations.
        t1, t2, t3 = orig.clone(), orig.clone(), orig.clone()
        t1.transform(trans.srid)
        t2.transform(gdal.SpatialReference('EPSG:2774'))
        ct = gdal.CoordTransform(gdal.SpatialReference('WGS84'), gdal.SpatialReference(2774))
        t3.transform(ct)

        # Testing use of the `clone` keyword.
        k1 = orig.clone()
        k2 = k1.transform(trans.srid, clone=True)
        self.assertEqual(k1, orig)
        self.assertNotEqual(k1, k2)

        prec = 3
        for p in (t1, t2, t3, k2):
            self.assertAlmostEqual(trans.x, p.x, prec)
            self.assertAlmostEqual(trans.y, p.y, prec)

    def test_transform_3d(self):
        p3d = GEOSGeometry('POINT (5 23 100)', 4326)
        p3d.transform(2774)
        self.assertAlmostEqual(p3d.z, 100, 3)

    def test_transform_noop(self):
        """ Testing `transform` method (SRID match) """
        # transform() should no-op if source & dest SRIDs match,
        # regardless of whether GDAL is available.
        g = GEOSGeometry('POINT (-104.609 38.255)', 4326)
        gt = g.tuple
        g.transform(4326)
        self.assertEqual(g.tuple, gt)
        self.assertEqual(g.srid, 4326)

        g = GEOSGeometry('POINT (-104.609 38.255)', 4326)
        g1 = g.transform(4326, clone=True)
        self.assertEqual(g1.tuple, g.tuple)
        self.assertEqual(g1.srid, 4326)
        self.assertIsNot(g1, g, "Clone didn't happen")

    def test_transform_nosrid(self):
        """ Testing `transform` method (no SRID or negative SRID) """
        g = GEOSGeometry('POINT (-104.609 38.255)', srid=None)
        with self.assertRaises(GEOSException):
            g.transform(2774)

        g = GEOSGeometry('POINT (-104.609 38.255)', srid=None)
        with self.assertRaises(GEOSException):
            g.transform(2774, clone=True)

        g = GEOSGeometry('POINT (-104.609 38.255)', srid=-1)
        with self.assertRaises(GEOSException):
            g.transform(2774)

        g = GEOSGeometry('POINT (-104.609 38.255)', srid=-1)
        with self.assertRaises(GEOSException):
            g.transform(2774, clone=True)

    def test_extent(self):
        "Testing `extent` method."
        # The xmin, ymin, xmax, ymax of the MultiPoint should be returned.
        mp = MultiPoint(Point(5, 23), Point(0, 0), Point(10, 50))
        self.assertEqual((0.0, 0.0, 10.0, 50.0), mp.extent)
        pnt = Point(5.23, 17.8)
        # Extent of points is just the point itself repeated.
        self.assertEqual((5.23, 17.8, 5.23, 17.8), pnt.extent)
        # Testing on the 'real world' Polygon.
        poly = fromstr(self.geometries.polygons[3].wkt)
        ring = poly.shell
        x, y = ring.x, ring.y
        xmin, ymin = min(x), min(y)
        xmax, ymax = max(x), max(y)
        self.assertEqual((xmin, ymin, xmax, ymax), poly.extent)

    def test_pickle(self):
        "Testing pickling and unpickling support."
        # Creating a list of test geometries for pickling,
        # and setting the SRID on some of them.
        def get_geoms(lst, srid=None):
            return [GEOSGeometry(tg.wkt, srid) for tg in lst]
        tgeoms = get_geoms(self.geometries.points)
        tgeoms.extend(get_geoms(self.geometries.multilinestrings, 4326))
        tgeoms.extend(get_geoms(self.geometries.polygons, 3084))
        tgeoms.extend(get_geoms(self.geometries.multipolygons, 3857))
        tgeoms.append(Point(srid=4326))
        tgeoms.append(Point())
        for geom in tgeoms:
            s1 = pickle.dumps(geom)
            g1 = pickle.loads(s1)
            self.assertEqual(geom, g1)
            self.assertEqual(geom.srid, g1.srid)

    def test_prepared(self):
        "Testing PreparedGeometry support."
        # Creating a simple multipolygon and getting a prepared version.
        mpoly = GEOSGeometry('MULTIPOLYGON(((0 0,0 5,5 5,5 0,0 0)),((5 5,5 10,10 10,10 5,5 5)))')
        prep = mpoly.prepared

        # A set of test points.
        pnts = [Point(5, 5), Point(7.5, 7.5), Point(2.5, 7.5)]
        for pnt in pnts:
            # Results should be the same (but faster)
            self.assertEqual(mpoly.contains(pnt), prep.contains(pnt))
            self.assertEqual(mpoly.intersects(pnt), prep.intersects(pnt))
            self.assertEqual(mpoly.covers(pnt), prep.covers(pnt))

        self.assertTrue(prep.crosses(fromstr('LINESTRING(1 1, 15 15)')))
        self.assertTrue(prep.disjoint(Point(-5, -5)))
        poly = Polygon(((-1, -1), (1, 1), (1, 0), (-1, -1)))
        self.assertTrue(prep.overlaps(poly))
        poly = Polygon(((-5, 0), (-5, 5), (0, 5), (-5, 0)))
        self.assertTrue(prep.touches(poly))
        poly = Polygon(((-1, -1), (-1, 11), (11, 11), (11, -1), (-1, -1)))
        self.assertTrue(prep.within(poly))

        # Original geometry deletion should not crash the prepared one (#21662)
        del mpoly
        self.assertTrue(prep.covers(Point(5, 5)))

    def test_line_merge(self):
        "Testing line merge support"
        ref_geoms = (
            fromstr('LINESTRING(1 1, 1 1, 3 3)'),
            fromstr('MULTILINESTRING((1 1, 3 3), (3 3, 4 2))'),
        )
        ref_merged = (
            fromstr('LINESTRING(1 1, 3 3)'),
            fromstr('LINESTRING (1 1, 3 3, 4 2)'),
        )
        for geom, merged in zip(ref_geoms, ref_merged):
            self.assertEqual(merged, geom.merged)

    def test_valid_reason(self):
        "Testing IsValidReason support"

        g = GEOSGeometry("POINT(0 0)")
        self.assertTrue(g.valid)
        self.assertIsInstance(g.valid_reason, str)
        self.assertEqual(g.valid_reason, "Valid Geometry")

        g = GEOSGeometry("LINESTRING(0 0, 0 0)")

        self.assertFalse(g.valid)
        self.assertIsInstance(g.valid_reason, str)
        self.assertTrue(g.valid_reason.startswith("Too few points in geometry component"))

    def test_linearref(self):
        "Testing linear referencing"

        ls = fromstr('LINESTRING(0 0, 0 10, 10 10, 10 0)')
        mls = fromstr('MULTILINESTRING((0 0, 0 10), (10 0, 10 10))')

        self.assertEqual(ls.project(Point(0, 20)), 10.0)
        self.assertEqual(ls.project(Point(7, 6)), 24)
        self.assertEqual(ls.project_normalized(Point(0, 20)), 1.0 / 3)

        self.assertEqual(ls.interpolate(10), Point(0, 10))
        self.assertEqual(ls.interpolate(24), Point(10, 6))
        self.assertEqual(ls.interpolate_normalized(1.0 / 3), Point(0, 10))

        self.assertEqual(mls.project(Point(0, 20)), 10)
        self.assertEqual(mls.project(Point(7, 6)), 16)

        self.assertEqual(mls.interpolate(9), Point(0, 9))
        self.assertEqual(mls.interpolate(17), Point(10, 7))

    def test_deconstructible(self):
        """
        Geometry classes should be deconstructible.
        """
        point = Point(4.337844, 50.827537, srid=4326)
        path, args, kwargs = point.deconstruct()
        self.assertEqual(path, 'django.contrib.gis.geos.point.Point')
        self.assertEqual(args, (4.337844, 50.827537))
        self.assertEqual(kwargs, {'srid': 4326})

        ls = LineString(((0, 0), (1, 1)))
        path, args, kwargs = ls.deconstruct()
        self.assertEqual(path, 'django.contrib.gis.geos.linestring.LineString')
        self.assertEqual(args, (((0, 0), (1, 1)),))
        self.assertEqual(kwargs, {})

        ls2 = LineString([Point(0, 0), Point(1, 1)], srid=4326)
        path, args, kwargs = ls2.deconstruct()
        self.assertEqual(path, 'django.contrib.gis.geos.linestring.LineString')
        self.assertEqual(args, ([Point(0, 0), Point(1, 1)],))
        self.assertEqual(kwargs, {'srid': 4326})

        ext_coords = ((0, 0), (0, 1), (1, 1), (1, 0), (0, 0))
        int_coords = ((0.4, 0.4), (0.4, 0.6), (0.6, 0.6), (0.6, 0.4), (0.4, 0.4))
        poly = Polygon(ext_coords, int_coords)
        path, args, kwargs = poly.deconstruct()
        self.assertEqual(path, 'django.contrib.gis.geos.polygon.Polygon')
        self.assertEqual(args, (ext_coords, int_coords))
        self.assertEqual(kwargs, {})

        lr = LinearRing((0, 0), (0, 1), (1, 1), (0, 0))
        path, args, kwargs = lr.deconstruct()
        self.assertEqual(path, 'django.contrib.gis.geos.linestring.LinearRing')
        self.assertEqual(args, ((0, 0), (0, 1), (1, 1), (0, 0)))
        self.assertEqual(kwargs, {})

        mp = MultiPoint(Point(0, 0), Point(1, 1))
        path, args, kwargs = mp.deconstruct()
        self.assertEqual(path, 'django.contrib.gis.geos.collections.MultiPoint')
        self.assertEqual(args, (Point(0, 0), Point(1, 1)))
        self.assertEqual(kwargs, {})

        ls1 = LineString((0, 0), (1, 1))
        ls2 = LineString((2, 2), (3, 3))
        mls = MultiLineString(ls1, ls2)
        path, args, kwargs = mls.deconstruct()
        self.assertEqual(path, 'django.contrib.gis.geos.collections.MultiLineString')
        self.assertEqual(args, (ls1, ls2))
        self.assertEqual(kwargs, {})

        p1 = Polygon(((0, 0), (0, 1), (1, 1), (0, 0)))
        p2 = Polygon(((1, 1), (1, 2), (2, 2), (1, 1)))
        mp = MultiPolygon(p1, p2)
        path, args, kwargs = mp.deconstruct()
        self.assertEqual(path, 'django.contrib.gis.geos.collections.MultiPolygon')
        self.assertEqual(args, (p1, p2))
        self.assertEqual(kwargs, {})

        poly = Polygon(((0, 0), (0, 1), (1, 1), (0, 0)))
        gc = GeometryCollection(Point(0, 0), MultiPoint(Point(0, 0), Point(1, 1)), poly)
        path, args, kwargs = gc.deconstruct()
        self.assertEqual(path, 'django.contrib.gis.geos.collections.GeometryCollection')
        self.assertEqual(args, (Point(0, 0), MultiPoint(Point(0, 0), Point(1, 1)), poly))
        self.assertEqual(kwargs, {})

    def test_subclassing(self):
        """
        GEOSGeometry subclass may itself be subclassed without being forced-cast
        to the parent class during `__init__`.
        """
        class ExtendedPolygon(Polygon):
            def __init__(self, *args, data=0, **kwargs):
                super().__init__(*args, **kwargs)
                self._data = data

            def __str__(self):
                return "EXT_POLYGON - data: %d - %s" % (self._data, self.wkt)

        ext_poly = ExtendedPolygon(((0, 0), (0, 1), (1, 1), (0, 0)), data=3)
        self.assertEqual(type(ext_poly), ExtendedPolygon)
        # ExtendedPolygon.__str__ should be called (instead of Polygon.__str__).
        self.assertEqual(str(ext_poly), "EXT_POLYGON - data: 3 - POLYGON ((0 0, 0 1, 1 1, 0 0))")
        self.assertJSONEqual(
            ext_poly.json,
            '{"coordinates": [[[0, 0], [0, 1], [1, 1], [0, 0]]], "type": "Polygon"}',
        )

    def test_geos_version_tuple(self):
        versions = (
            (b'3.0.0rc4-CAPI-1.3.3', (3, 0, 0)),
            (b'3.0.0-CAPI-1.4.1', (3, 0, 0)),
            (b'3.4.0dev-CAPI-1.8.0', (3, 4, 0)),
            (b'3.4.0dev-CAPI-1.8.0 r0', (3, 4, 0)),
            (b'3.6.2-CAPI-1.10.2 4d2925d6', (3, 6, 2)),
        )
        for version_string, version_tuple in versions:
            with self.subTest(version_string=version_string):
                with mock.patch('django.contrib.gis.geos.libgeos.geos_version', lambda: version_string):
                    self.assertEqual(geos_version_tuple(), version_tuple)

    def test_from_gml(self):
        self.assertEqual(
            GEOSGeometry('POINT(0 0)'),
            GEOSGeometry.from_gml(
                '<gml:Point gml:id="p21" srsName="http://www.opengis.net/def/crs/EPSG/0/4326">'
                ' <gml:pos srsDimension="2">0 0</gml:pos>'
                '</gml:Point>'
            ),
        )

    def test_from_ewkt(self):
        self.assertEqual(GEOSGeometry.from_ewkt('SRID=1;POINT(1 1)'), Point(1, 1, srid=1))
        self.assertEqual(GEOSGeometry.from_ewkt('POINT(1 1)'), Point(1, 1))

    def test_from_ewkt_empty_string(self):
        msg = 'Expected WKT but got an empty string.'
        with self.assertRaisesMessage(ValueError, msg):
            GEOSGeometry.from_ewkt('')
        with self.assertRaisesMessage(ValueError, msg):
            GEOSGeometry.from_ewkt('SRID=1;')

    def test_from_ewkt_invalid_srid(self):
        msg = 'EWKT has invalid SRID part.'
        with self.assertRaisesMessage(ValueError, msg):
            GEOSGeometry.from_ewkt('SRUD=1;POINT(1 1)')
        with self.assertRaisesMessage(ValueError, msg):
            GEOSGeometry.from_ewkt('SRID=WGS84;POINT(1 1)')

    def test_fromstr_scientific_wkt(self):
        self.assertEqual(GEOSGeometry('POINT(1.0e-1 1.0e+1)'), Point(.1, 10))

    def test_normalize(self):
        g = MultiPoint(Point(0, 0), Point(2, 2), Point(1, 1))
        self.assertIsNone(g.normalize())
        self.assertTrue(g.equals_exact(MultiPoint(Point(2, 2), Point(1, 1), Point(0, 0))))

    def test_empty_point(self):
        p = Point(srid=4326)
        self.assertEqual(p.ogr.ewkt, p.ewkt)

        self.assertEqual(p.transform(2774, clone=True), Point(srid=2774))
        p.transform(2774)
        self.assertEqual(p, Point(srid=2774))

    def test_linestring_iter(self):
        ls = LineString((0, 0), (1, 1))
        it = iter(ls)
        # Step into CoordSeq iterator.
        next(it)
        ls[:] = []
        with self.assertRaises(IndexError):
            next(it)
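
# Usage sketch (illustrative; not part of the original module): the three ways
# test_transform drives GEOSGeometry.transform() -- by SRID, by a
# gdal.SpatialReference, and by a prebuilt gdal.CoordTransform -- here with
# `clone=True` so the original geometry is left untouched.
def _example_transform_variants():
    orig = GEOSGeometry('POINT (-104.609 38.255)', 4326)
    by_srid = orig.transform(2774, clone=True)
    by_srs = orig.transform(gdal.SpatialReference('EPSG:2774'), clone=True)
    ct = gdal.CoordTransform(gdal.SpatialReference('WGS84'), gdal.SpatialReference(2774))
    by_ct = orig.transform(ct, clone=True)
    return by_srid, by_srs, by_ct
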
5013e65f742b6b5a191dbcda966b8805652d10a59eb0cb0599ebed1b5b77fc19
import os import shutil import struct import tempfile from django.contrib.gis.gdal import GDAL_VERSION, GDALRaster from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.gdal.raster.band import GDALBand from django.contrib.gis.shortcuts import numpy from django.test import SimpleTestCase from ..data.rasters.textrasters import JSON_RASTER class GDALRasterTests(SimpleTestCase): """ Test a GDALRaster instance created from a file (GeoTiff). """ def setUp(self): self.rs_path = os.path.join(os.path.dirname(__file__), '../data/rasters/raster.tif') self.rs = GDALRaster(self.rs_path) def test_rs_name_repr(self): self.assertEqual(self.rs_path, self.rs.name) self.assertRegex(repr(self.rs), r"<Raster object at 0x\w+>") def test_rs_driver(self): self.assertEqual(self.rs.driver.name, 'GTiff') def test_rs_size(self): self.assertEqual(self.rs.width, 163) self.assertEqual(self.rs.height, 174) def test_rs_srs(self): self.assertEqual(self.rs.srs.srid, 3086) self.assertEqual(self.rs.srs.units, (1.0, 'metre')) def test_rs_srid(self): rast = GDALRaster({ 'width': 16, 'height': 16, 'srid': 4326, }) self.assertEqual(rast.srid, 4326) rast.srid = 3086 self.assertEqual(rast.srid, 3086) def test_geotransform_and_friends(self): # Assert correct values for file based raster self.assertEqual( self.rs.geotransform, [511700.4680706557, 100.0, 0.0, 435103.3771231986, 0.0, -100.0] ) self.assertEqual(self.rs.origin, [511700.4680706557, 435103.3771231986]) self.assertEqual(self.rs.origin.x, 511700.4680706557) self.assertEqual(self.rs.origin.y, 435103.3771231986) self.assertEqual(self.rs.scale, [100.0, -100.0]) self.assertEqual(self.rs.scale.x, 100.0) self.assertEqual(self.rs.scale.y, -100.0) self.assertEqual(self.rs.skew, [0, 0]) self.assertEqual(self.rs.skew.x, 0) self.assertEqual(self.rs.skew.y, 0) # Create in-memory rasters and change gtvalues rsmem = GDALRaster(JSON_RASTER) # geotransform accepts both floats and ints rsmem.geotransform = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0] self.assertEqual(rsmem.geotransform, [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) rsmem.geotransform = range(6) self.assertEqual(rsmem.geotransform, [float(x) for x in range(6)]) self.assertEqual(rsmem.origin, [0, 3]) self.assertEqual(rsmem.origin.x, 0) self.assertEqual(rsmem.origin.y, 3) self.assertEqual(rsmem.scale, [1, 5]) self.assertEqual(rsmem.scale.x, 1) self.assertEqual(rsmem.scale.y, 5) self.assertEqual(rsmem.skew, [2, 4]) self.assertEqual(rsmem.skew.x, 2) self.assertEqual(rsmem.skew.y, 4) self.assertEqual(rsmem.width, 5) self.assertEqual(rsmem.height, 5) def test_geotransform_bad_inputs(self): rsmem = GDALRaster(JSON_RASTER) error_geotransforms = [ [1, 2], [1, 2, 3, 4, 5, 'foo'], [1, 2, 3, 4, 5, 6, 'foo'], ] msg = 'Geotransform must consist of 6 numeric values.' 
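        # Each malformed input above (wrong length or a non-numeric entry)
        # must be rejected by the geotransform setter with this message.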
for geotransform in error_geotransforms: with self.subTest(i=geotransform), self.assertRaisesMessage(ValueError, msg): rsmem.geotransform = geotransform def test_rs_extent(self): self.assertEqual( self.rs.extent, (511700.4680706557, 417703.3771231986, 528000.4680706557, 435103.3771231986) ) def test_rs_bands(self): self.assertEqual(len(self.rs.bands), 1) self.assertIsInstance(self.rs.bands[0], GDALBand) def test_memory_based_raster_creation(self): # Create uint8 raster with full pixel data range (0-255) rast = GDALRaster({ 'datatype': 1, 'width': 16, 'height': 16, 'srid': 4326, 'bands': [{ 'data': range(256), 'nodata_value': 255, }], }) # Get array from raster result = rast.bands[0].data() if numpy: result = result.flatten().tolist() # Assert data is same as original input self.assertEqual(result, list(range(256))) def test_file_based_raster_creation(self): # Prepare tempfile rstfile = tempfile.NamedTemporaryFile(suffix='.tif') # Create file-based raster from scratch GDALRaster({ 'datatype': self.rs.bands[0].datatype(), 'driver': 'tif', 'name': rstfile.name, 'width': 163, 'height': 174, 'nr_of_bands': 1, 'srid': self.rs.srs.wkt, 'origin': (self.rs.origin.x, self.rs.origin.y), 'scale': (self.rs.scale.x, self.rs.scale.y), 'skew': (self.rs.skew.x, self.rs.skew.y), 'bands': [{ 'data': self.rs.bands[0].data(), 'nodata_value': self.rs.bands[0].nodata_value, }], }) # Reload newly created raster from file restored_raster = GDALRaster(rstfile.name) # Presence of TOWGS84 depend on GDAL/Proj versions. self.assertEqual( restored_raster.srs.wkt.replace('TOWGS84[0,0,0,0,0,0,0],', ''), self.rs.srs.wkt.replace('TOWGS84[0,0,0,0,0,0,0],', '') ) self.assertEqual(restored_raster.geotransform, self.rs.geotransform) if numpy: numpy.testing.assert_equal( restored_raster.bands[0].data(), self.rs.bands[0].data() ) else: self.assertEqual(restored_raster.bands[0].data(), self.rs.bands[0].data()) def test_nonexistent_file(self): msg = 'Unable to read raster source input "nonexistent.tif".' with self.assertRaisesMessage(GDALException, msg): GDALRaster('nonexistent.tif') def test_vsi_raster_creation(self): # Open a raster as a file object. with open(self.rs_path, 'rb') as dat: # Instantiate a raster from the file binary buffer. vsimem = GDALRaster(dat.read()) # The data of the in-memory file is equal to the source file. result = vsimem.bands[0].data() target = self.rs.bands[0].data() if numpy: result = result.flatten().tolist() target = target.flatten().tolist() self.assertEqual(result, target) def test_vsi_raster_deletion(self): path = '/vsimem/raster.tif' # Create a vsi-based raster from scratch. vsimem = GDALRaster({ 'name': path, 'driver': 'tif', 'width': 4, 'height': 4, 'srid': 4326, 'bands': [{ 'data': range(16), }], }) # The virtual file exists. rst = GDALRaster(path) self.assertEqual(rst.width, 4) # Delete GDALRaster. del vsimem del rst # The virtual file has been removed. msg = 'Could not open the datasource at "/vsimem/raster.tif"' with self.assertRaisesMessage(GDALException, msg): GDALRaster(path) def test_vsi_invalid_buffer_error(self): msg = 'Failed creating VSI raster from the input buffer.' with self.assertRaisesMessage(GDALException, msg): GDALRaster(b'not-a-raster-buffer') def test_vsi_buffer_property(self): # Create a vsi-based raster from scratch. rast = GDALRaster({ 'name': '/vsimem/raster.tif', 'driver': 'tif', 'width': 4, 'height': 4, 'srid': 4326, 'bands': [{ 'data': range(16), }], }) # Do a round trip from raster to buffer to raster. 
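        # (vsi_buffer exposes the in-memory file as a bytes object, and
        # GDALRaster accepts such a buffer directly as input, as
        # test_vsi_raster_creation shows.)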
result = GDALRaster(rast.vsi_buffer).bands[0].data() if numpy: result = result.flatten().tolist() # Band data is equal to nodata value except on input block of ones. self.assertEqual(result, list(range(16))) # The vsi buffer is None for rasters that are not vsi based. self.assertIsNone(self.rs.vsi_buffer) def test_offset_size_and_shape_on_raster_creation(self): rast = GDALRaster({ 'datatype': 1, 'width': 4, 'height': 4, 'srid': 4326, 'bands': [{ 'data': (1,), 'offset': (1, 1), 'size': (2, 2), 'shape': (1, 1), 'nodata_value': 2, }], }) # Get array from raster. result = rast.bands[0].data() if numpy: result = result.flatten().tolist() # Band data is equal to nodata value except on input block of ones. self.assertEqual( result, [2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 1, 2, 2, 2, 2, 2] ) def test_set_nodata_value_on_raster_creation(self): # Create raster filled with nodata values. rast = GDALRaster({ 'datatype': 1, 'width': 2, 'height': 2, 'srid': 4326, 'bands': [{'nodata_value': 23}], }) # Get array from raster. result = rast.bands[0].data() if numpy: result = result.flatten().tolist() # All band data is equal to nodata value. self.assertEqual(result, [23] * 4) def test_set_nodata_none_on_raster_creation(self): if GDAL_VERSION < (2, 1): self.skipTest("GDAL >= 2.1 is required for this test.") # Create raster without data and without nodata value. rast = GDALRaster({ 'datatype': 1, 'width': 2, 'height': 2, 'srid': 4326, 'bands': [{'nodata_value': None}], }) # Get array from raster. result = rast.bands[0].data() if numpy: result = result.flatten().tolist() # Band data is equal to zero because no nodata value has been specified. self.assertEqual(result, [0] * 4) def test_raster_metadata_property(self): data = self.rs.metadata self.assertEqual(data['DEFAULT'], {'AREA_OR_POINT': 'Area'}) self.assertEqual(data['IMAGE_STRUCTURE'], {'INTERLEAVE': 'BAND'}) # Create file-based raster from scratch source = GDALRaster({ 'datatype': 1, 'width': 2, 'height': 2, 'srid': 4326, 'bands': [{'data': range(4), 'nodata_value': 99}], }) # Set metadata on raster and on a band. metadata = { 'DEFAULT': {'OWNER': 'Django', 'VERSION': '1.0', 'AREA_OR_POINT': 'Point'}, } source.metadata = metadata source.bands[0].metadata = metadata self.assertEqual(source.metadata['DEFAULT'], metadata['DEFAULT']) self.assertEqual(source.bands[0].metadata['DEFAULT'], metadata['DEFAULT']) # Update metadata on raster. metadata = { 'DEFAULT': {'VERSION': '2.0'}, } source.metadata = metadata self.assertEqual(source.metadata['DEFAULT']['VERSION'], '2.0') # Remove metadata on raster. metadata = { 'DEFAULT': {'OWNER': None}, } source.metadata = metadata self.assertNotIn('OWNER', source.metadata['DEFAULT']) def test_raster_info_accessor(self): if GDAL_VERSION < (2, 1): msg = 'GDAL ≥ 2.1 is required for using the info property.' 
with self.assertRaisesMessage(ValueError, msg): self.rs.info return infos = self.rs.info # Data info_lines = [line.strip() for line in infos.split('\n') if line.strip() != ''] for line in [ 'Driver: GTiff/GeoTIFF', 'Files: {}'.format(self.rs_path), 'Size is 163, 174', 'Origin = (511700.468070655711927,435103.377123198588379)', 'Pixel Size = (100.000000000000000,-100.000000000000000)', 'Metadata:', 'AREA_OR_POINT=Area', 'Image Structure Metadata:', 'INTERLEAVE=BAND', 'Band 1 Block=163x50 Type=Byte, ColorInterp=Gray', 'NoData Value=15' ]: self.assertIn(line, info_lines) for line in [ r'Upper Left \( 511700.468, 435103.377\) \( 82d51\'46.1\d"W, 27d55\' 1.5\d"N\)', r'Lower Left \( 511700.468, 417703.377\) \( 82d51\'52.0\d"W, 27d45\'37.5\d"N\)', r'Upper Right \( 528000.468, 435103.377\) \( 82d41\'48.8\d"W, 27d54\'56.3\d"N\)', r'Lower Right \( 528000.468, 417703.377\) \( 82d41\'55.5\d"W, 27d45\'32.2\d"N\)', r'Center \( 519850.468, 426403.377\) \( 82d46\'50.6\d"W, 27d50\'16.9\d"N\)', ]: self.assertRegex(infos, line) # CRS (skip the name because string depends on the GDAL/Proj versions). self.assertIn("NAD83 / Florida GDL Albers", infos) def test_compressed_file_based_raster_creation(self): rstfile = tempfile.NamedTemporaryFile(suffix='.tif') # Make a compressed copy of an existing raster. compressed = self.rs.warp({'papsz_options': {'compress': 'packbits'}, 'name': rstfile.name}) # Check physically if compression worked. self.assertLess(os.path.getsize(compressed.name), os.path.getsize(self.rs.name)) # Create file-based raster with options from scratch. compressed = GDALRaster({ 'datatype': 1, 'driver': 'tif', 'name': rstfile.name, 'width': 40, 'height': 40, 'srid': 3086, 'origin': (500000, 400000), 'scale': (100, -100), 'skew': (0, 0), 'bands': [{ 'data': range(40 ^ 2), 'nodata_value': 255, }], 'papsz_options': { 'compress': 'packbits', 'pixeltype': 'signedbyte', 'blockxsize': 23, 'blockysize': 23, } }) # Check if options used on creation are stored in metadata. # Reopening the raster ensures that all metadata has been written # to the file. 
compressed = GDALRaster(compressed.name) self.assertEqual(compressed.metadata['IMAGE_STRUCTURE']['COMPRESSION'], 'PACKBITS',) self.assertEqual(compressed.bands[0].metadata['IMAGE_STRUCTURE']['PIXELTYPE'], 'SIGNEDBYTE') if GDAL_VERSION >= (2, 1): self.assertIn('Block=40x23', compressed.info) def test_raster_warp(self): # Create in memory raster source = GDALRaster({ 'datatype': 1, 'driver': 'MEM', 'name': 'sourceraster', 'width': 4, 'height': 4, 'nr_of_bands': 1, 'srid': 3086, 'origin': (500000, 400000), 'scale': (100, -100), 'skew': (0, 0), 'bands': [{ 'data': range(16), 'nodata_value': 255, }], }) # Test altering the scale, width, and height of a raster data = { 'scale': [200, -200], 'width': 2, 'height': 2, } target = source.warp(data) self.assertEqual(target.width, data['width']) self.assertEqual(target.height, data['height']) self.assertEqual(target.scale, data['scale']) self.assertEqual(target.bands[0].datatype(), source.bands[0].datatype()) self.assertEqual(target.name, 'sourceraster_copy.MEM') result = target.bands[0].data() if numpy: result = result.flatten().tolist() self.assertEqual(result, [5, 7, 13, 15]) # Test altering the name and datatype (to float) data = { 'name': '/path/to/targetraster.tif', 'datatype': 6, } target = source.warp(data) self.assertEqual(target.bands[0].datatype(), 6) self.assertEqual(target.name, '/path/to/targetraster.tif') self.assertEqual(target.driver.name, 'MEM') result = target.bands[0].data() if numpy: result = result.flatten().tolist() self.assertEqual( result, [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0] ) def test_raster_warp_nodata_zone(self): # Create in memory raster. source = GDALRaster({ 'datatype': 1, 'driver': 'MEM', 'width': 4, 'height': 4, 'srid': 3086, 'origin': (500000, 400000), 'scale': (100, -100), 'skew': (0, 0), 'bands': [{ 'data': range(16), 'nodata_value': 23, }], }) # Warp raster onto a location that does not cover any pixels of the original. result = source.warp({'origin': (200000, 200000)}).bands[0].data() if numpy: result = result.flatten().tolist() # The result is an empty raster filled with the correct nodata value. self.assertEqual(result, [23] * 16) def test_raster_transform(self): # Prepare tempfile and nodata value rstfile = tempfile.NamedTemporaryFile(suffix='.tif') ndv = 99 # Create in file based raster source = GDALRaster({ 'datatype': 1, 'driver': 'tif', 'name': rstfile.name, 'width': 5, 'height': 5, 'nr_of_bands': 1, 'srid': 4326, 'origin': (-5, 5), 'scale': (2, -2), 'skew': (0, 0), 'bands': [{ 'data': range(25), 'nodata_value': ndv, }], }) # Transform raster into srid 4326. target = source.transform(3086) # Reload data from disk target = GDALRaster(target.name) self.assertEqual(target.srs.srid, 3086) self.assertEqual(target.width, 7) self.assertEqual(target.height, 7) self.assertEqual(target.bands[0].datatype(), source.bands[0].datatype()) self.assertAlmostEqual(target.origin[0], 9124842.791079799, 3) self.assertAlmostEqual(target.origin[1], 1589911.6476407414, 3) self.assertAlmostEqual(target.scale[0], 223824.82664250192, 3) self.assertAlmostEqual(target.scale[1], -223824.82664250192, 3) self.assertEqual(target.skew, [0, 0]) result = target.bands[0].data() if numpy: result = result.flatten().tolist() # The reprojection of a raster that spans over a large area # skews the data matrix and might introduce nodata values. 
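        # The 5x5 source grid becomes a 7x7 target grid; cells outside the
        # warped footprint are filled with the nodata value.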
self.assertEqual( result, [ ndv, ndv, ndv, ndv, 4, ndv, ndv, ndv, ndv, 2, 3, 9, ndv, ndv, ndv, 1, 2, 8, 13, 19, ndv, 0, 6, 6, 12, 18, 18, 24, ndv, 10, 11, 16, 22, 23, ndv, ndv, ndv, 15, 21, 22, ndv, ndv, ndv, ndv, 20, ndv, ndv, ndv, ndv, ] ) class GDALBandTests(SimpleTestCase): rs_path = os.path.join(os.path.dirname(__file__), '../data/rasters/raster.tif') def test_band_data(self): rs = GDALRaster(self.rs_path) band = rs.bands[0] self.assertEqual(band.width, 163) self.assertEqual(band.height, 174) self.assertEqual(band.description, '') self.assertEqual(band.datatype(), 1) self.assertEqual(band.datatype(as_string=True), 'GDT_Byte') self.assertEqual(band.color_interp(), 1) self.assertEqual(band.color_interp(as_string=True), 'GCI_GrayIndex') self.assertEqual(band.nodata_value, 15) if numpy: data = band.data() assert_array = numpy.loadtxt( os.path.join(os.path.dirname(__file__), '../data/rasters/raster.numpy.txt') ) numpy.testing.assert_equal(data, assert_array) self.assertEqual(data.shape, (band.height, band.width)) def test_band_statistics(self): with tempfile.TemporaryDirectory() as tmp_dir: rs_path = os.path.join(tmp_dir, 'raster.tif') shutil.copyfile(self.rs_path, rs_path) rs = GDALRaster(rs_path) band = rs.bands[0] pam_file = rs_path + '.aux.xml' smin, smax, smean, sstd = band.statistics(approximate=True) self.assertEqual(smin, 0) self.assertEqual(smax, 9) self.assertAlmostEqual(smean, 2.842331288343558) self.assertAlmostEqual(sstd, 2.3965567248965356) smin, smax, smean, sstd = band.statistics(approximate=False, refresh=True) self.assertEqual(smin, 0) self.assertEqual(smax, 9) self.assertAlmostEqual(smean, 2.828326634228898) self.assertAlmostEqual(sstd, 2.4260526986669095) self.assertEqual(band.min, 0) self.assertEqual(band.max, 9) self.assertAlmostEqual(band.mean, 2.828326634228898) self.assertAlmostEqual(band.std, 2.4260526986669095) # Statistics are persisted into PAM file on band close rs = band = None self.assertTrue(os.path.isfile(pam_file)) def test_read_mode_error(self): # Open raster in read mode rs = GDALRaster(self.rs_path, write=False) band = rs.bands[0] # Setting attributes in write mode raises exception in the _flush method with self.assertRaises(GDALException): setattr(band, 'nodata_value', 10) def test_band_data_setters(self): # Create in-memory raster and get band rsmem = GDALRaster({ 'datatype': 1, 'driver': 'MEM', 'name': 'mem_rst', 'width': 10, 'height': 10, 'nr_of_bands': 1, 'srid': 4326, }) bandmem = rsmem.bands[0] # Set nodata value bandmem.nodata_value = 99 self.assertEqual(bandmem.nodata_value, 99) # Set data for entire dataset bandmem.data(range(100)) if numpy: numpy.testing.assert_equal(bandmem.data(), numpy.arange(100).reshape(10, 10)) else: self.assertEqual(bandmem.data(), list(range(100))) # Prepare data for setting values in subsequent tests block = list(range(100, 104)) packed_block = struct.pack('<' + 'B B B B', *block) # Set data from list bandmem.data(block, (1, 1), (2, 2)) result = bandmem.data(offset=(1, 1), size=(2, 2)) if numpy: numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2)) else: self.assertEqual(result, block) # Set data from packed block bandmem.data(packed_block, (1, 1), (2, 2)) result = bandmem.data(offset=(1, 1), size=(2, 2)) if numpy: numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2)) else: self.assertEqual(result, block) # Set data from bytes bandmem.data(bytes(packed_block), (1, 1), (2, 2)) result = bandmem.data(offset=(1, 1), size=(2, 2)) if numpy: numpy.testing.assert_equal(result, 
numpy.array(block).reshape(2, 2)) else: self.assertEqual(result, block) # Set data from bytearray bandmem.data(bytearray(packed_block), (1, 1), (2, 2)) result = bandmem.data(offset=(1, 1), size=(2, 2)) if numpy: numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2)) else: self.assertEqual(result, block) # Set data from memoryview bandmem.data(memoryview(packed_block), (1, 1), (2, 2)) result = bandmem.data(offset=(1, 1), size=(2, 2)) if numpy: numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2)) else: self.assertEqual(result, block) # Set data from numpy array if numpy: bandmem.data(numpy.array(block, dtype='int8').reshape(2, 2), (1, 1), (2, 2)) numpy.testing.assert_equal( bandmem.data(offset=(1, 1), size=(2, 2)), numpy.array(block).reshape(2, 2) ) # Test json input data rsmemjson = GDALRaster(JSON_RASTER) bandmemjson = rsmemjson.bands[0] if numpy: numpy.testing.assert_equal( bandmemjson.data(), numpy.array(range(25)).reshape(5, 5) ) else: self.assertEqual(bandmemjson.data(), list(range(25))) def test_band_statistics_automatic_refresh(self): rsmem = GDALRaster({ 'srid': 4326, 'width': 2, 'height': 2, 'bands': [{'data': [0] * 4, 'nodata_value': 99}], }) band = rsmem.bands[0] # Populate statistics cache self.assertEqual(band.statistics(), (0, 0, 0, 0)) # Change data band.data([1, 1, 0, 0]) # Statistics are properly updated self.assertEqual(band.statistics(), (0.0, 1.0, 0.5, 0.5)) # Change nodata_value band.nodata_value = 0 # Statistics are properly updated self.assertEqual(band.statistics(), (1.0, 1.0, 1.0, 0.0)) def test_band_statistics_empty_band(self): rsmem = GDALRaster({ 'srid': 4326, 'width': 1, 'height': 1, 'bands': [{'data': [0], 'nodata_value': 0}], }) self.assertEqual(rsmem.bands[0].statistics(), (None, None, None, None)) def test_band_delete_nodata(self): rsmem = GDALRaster({ 'srid': 4326, 'width': 1, 'height': 1, 'bands': [{'data': [0], 'nodata_value': 1}], }) if GDAL_VERSION < (2, 1): msg = 'GDAL >= 2.1 required to delete nodata values.' with self.assertRaisesMessage(ValueError, msg): rsmem.bands[0].nodata_value = None else: rsmem.bands[0].nodata_value = None self.assertIsNone(rsmem.bands[0].nodata_value) def test_band_data_replication(self): band = GDALRaster({ 'srid': 4326, 'width': 3, 'height': 3, 'bands': [{'data': range(10, 19), 'nodata_value': 0}], }).bands[0] # Variations for input (data, shape, expected result). combos = ( ([1], (1, 1), [1] * 9), (range(3), (1, 3), [0, 0, 0, 1, 1, 1, 2, 2, 2]), (range(3), (3, 1), [0, 1, 2, 0, 1, 2, 0, 1, 2]), ) for combo in combos: band.data(combo[0], shape=combo[1]) if numpy: numpy.testing.assert_equal(band.data(), numpy.array(combo[2]).reshape(3, 3)) else: self.assertEqual(band.data(), list(combo[2]))
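

# A minimal illustrative sketch (editor's addition, not part of the upstream
# suite) condensing the band.data() semantics exercised above: `shape`
# describes the input block, `offset` and `size` select the target window,
# and a block smaller than the window is replicated across it. The name
# `_replication_example` is hypothetical.
def _replication_example():
    rast = GDALRaster({
        'srid': 4326,
        'width': 4,
        'height': 4,
        'datatype': 1,
        'bands': [{'data': range(16)}],
    })
    band = rast.bands[0]
    # Replicate a single value over the inner 2x2 window.
    band.data([7], offset=(1, 1), size=(2, 2), shape=(1, 1))
    return band.data(offset=(1, 1), size=(2, 2))  # Four pixels, all 7.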
ec2916aaf09a605f0e1055d7e29c4904d0c1bebc4c640dfcc9f9902c60c45d37
from unittest import skipIf from django.contrib.gis.gdal import ( GDAL_VERSION, AxisOrder, CoordTransform, GDALException, SpatialReference, SRSException, ) from django.contrib.gis.geos import GEOSGeometry from django.test import SimpleTestCase class TestSRS: def __init__(self, wkt, **kwargs): self.wkt = wkt for key, value in kwargs.items(): setattr(self, key, value) WGS84_proj = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs ' # Some Spatial Reference examples srlist = ( TestSRS( 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,' 'AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],' 'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",' '0.0174532925199433,AUTHORITY["EPSG","9122"]],AXIS["Latitude",NORTH],' 'AXIS["Longitude",EAST],AUTHORITY["EPSG","4326"]]', epsg=4326, projected=False, geographic=True, local=False, lin_name='unknown', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199, auth={'GEOGCS': ('EPSG', '4326'), 'spheroid': ('EPSG', '7030')}, attr=(('DATUM', 'WGS_1984'), (('SPHEROID', 1), '6378137'), ('primem|authority', 'EPSG'),), ), TestSRS( 'PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",' 'SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],' 'AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],' 'UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],' 'AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],' 'PARAMETER["standard_parallel_1",30.2833333333333],' 'PARAMETER["standard_parallel_2",28.3833333333333],' 'PARAMETER["latitude_of_origin",27.8333333333333],' 'PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],' 'PARAMETER["false_northing",4000000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],' 'AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","32140"]]', epsg=32140, projected=True, geographic=False, local=False, lin_name='metre', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199, auth={'PROJCS': ('EPSG', '32140'), 'spheroid': ('EPSG', '7019'), 'unit': ('EPSG', '9001')}, attr=( ('DATUM', 'North_American_Datum_1983'), (('SPHEROID', 2), '298.257222101'), ('PROJECTION', 'Lambert_Conformal_Conic_2SP'), ), ), TestSRS( 'PROJCS["NAD83 / Texas South Central (ftUS)",' 'GEOGCS["NAD83",DATUM["North_American_Datum_1983",' 'SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],' 'PRIMEM["Greenwich",0],' 'UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],' 'PARAMETER["false_easting",1968500],PARAMETER["false_northing",13123333.3333333],' 'PARAMETER["central_meridian",-99],PARAMETER["standard_parallel_1",28.3833333333333],' 'PARAMETER["standard_parallel_2",30.2833333333333],PARAMETER["latitude_of_origin",27.8333333333333],' 'UNIT["US survey foot",0.304800609601219],AXIS["Easting",EAST],AXIS["Northing",NORTH]]', epsg=None, projected=True, geographic=False, local=False, lin_name='US survey foot', ang_name='Degree', lin_units=0.3048006096012192, ang_units=0.0174532925199, auth={'PROJCS': (None, None)}, attr=(('PROJCS|GeOgCs|spheroid', 'GRS 1980'), (('projcs', 9), 'UNIT'), (('projcs', 11), 'AXIS'),), ), # This is really ESRI format, not WKT -- but the import should work the same TestSRS( 'LOCAL_CS["Non-Earth (Meter)",LOCAL_DATUM["Local Datum",32767],' 'UNIT["Meter",1],AXIS["X",EAST],AXIS["Y",NORTH]]', esri=True, epsg=None, projected=False, geographic=False, local=True, lin_name='Meter', ang_name='degree', lin_units=1.0, 
ang_units=0.0174532925199, attr=(('LOCAL_DATUM', 'Local Datum'),), ), ) # Well-Known Names well_known = ( TestSRS( 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,' 'AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],' 'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,' 'AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]', wk='WGS84', name='WGS 84', attrs=(('GEOGCS|AUTHORITY', 1, '4326'), ('SPHEROID', 'WGS 84')), ), TestSRS( 'GEOGCS["WGS 72",DATUM["WGS_1972",SPHEROID["WGS 72",6378135,298.26,' 'AUTHORITY["EPSG","7043"]],AUTHORITY["EPSG","6322"]],' 'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],' 'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],' 'AUTHORITY["EPSG","4322"]]', wk='WGS72', name='WGS 72', attrs=(('GEOGCS|AUTHORITY', 1, '4322'), ('SPHEROID', 'WGS 72')), ), TestSRS( 'GEOGCS["NAD27",DATUM["North_American_Datum_1927",' 'SPHEROID["Clarke 1866",6378206.4,294.9786982138982,' 'AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],' 'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],' 'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],' 'AUTHORITY["EPSG","4267"]]', wk='NAD27', name='NAD27', attrs=(('GEOGCS|AUTHORITY', 1, '4267'), ('SPHEROID', 'Clarke 1866')) ), TestSRS( 'GEOGCS["NAD83",DATUM["North_American_Datum_1983",' 'SPHEROID["GRS 1980",6378137,298.257222101,' 'AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],' 'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],' 'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],' 'AUTHORITY["EPSG","4269"]]', wk='NAD83', name='NAD83', attrs=(('GEOGCS|AUTHORITY', 1, '4269'), ('SPHEROID', 'GRS 1980')), ), TestSRS( 'PROJCS["NZGD49 / Karamea Circuit",GEOGCS["NZGD49",' 'DATUM["New_Zealand_Geodetic_Datum_1949",' 'SPHEROID["International 1924",6378388,297,' 'AUTHORITY["EPSG","7022"]],' 'TOWGS84[59.47,-5.04,187.44,0.47,-0.1,1.024,-4.5993],' 'AUTHORITY["EPSG","6272"]],PRIMEM["Greenwich",0,' 'AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,' 'AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4272"]],' 'PROJECTION["Transverse_Mercator"],' 'PARAMETER["latitude_of_origin",-41.28991152777778],' 'PARAMETER["central_meridian",172.1090281944444],' 'PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],' 'PARAMETER["false_northing",700000],' 'UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","27216"]]', wk='EPSG:27216', name='NZGD49 / Karamea Circuit', attrs=(('PROJECTION', 'Transverse_Mercator'), ('SPHEROID', 'International 1924')), ), ) bad_srlist = ( 'Foobar', 'OOJCS["NAD83 / Texas South Central",GEOGCS["NAD83",' 'DATUM["North_American_Datum_1983",' 'SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],' 'AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],' 'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],' 'AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],' 'PARAMETER["standard_parallel_1",30.28333333333333],' 'PARAMETER["standard_parallel_2",28.38333333333333],' 'PARAMETER["latitude_of_origin",27.83333333333333],' 'PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],' 'PARAMETER["false_northing",4000000],UNIT["metre",1,' 'AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32140"]]', ) class SpatialRefTest(SimpleTestCase): def test01_wkt(self): "Testing initialization on valid OGC WKT." for s in srlist: SpatialReference(s.wkt) def test02_bad_wkt(self): "Testing initialization on invalid WKT." 
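        # SpatialReference may raise at construction time or only once
        # validate() is called, so both calls sit inside the try block.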
for bad in bad_srlist: try: srs = SpatialReference(bad) srs.validate() except (SRSException, GDALException): pass else: self.fail('Should not have initialized on bad WKT "%s"!') def test03_get_wkt(self): "Testing getting the WKT." for s in srlist: srs = SpatialReference(s.wkt) # GDAL 3 strips UNIT part in the last occurrence. self.assertEqual( s.wkt.replace(',UNIT["Meter",1]', ''), srs.wkt.replace(',UNIT["Meter",1]', ''), ) def test04_proj(self): """PROJ import and export.""" proj_parts = [ '+proj=longlat', '+ellps=WGS84', '+towgs84=0,0,0,0,0,0,0', '+datum=WGS84', '+no_defs' ] srs1 = SpatialReference(srlist[0].wkt) srs2 = SpatialReference(WGS84_proj) self.assertTrue(all(part in proj_parts for part in srs1.proj.split())) self.assertTrue(all(part in proj_parts for part in srs2.proj.split())) def test05_epsg(self): "Test EPSG import." for s in srlist: if s.epsg: srs1 = SpatialReference(s.wkt) srs2 = SpatialReference(s.epsg) srs3 = SpatialReference(str(s.epsg)) srs4 = SpatialReference('EPSG:%d' % s.epsg) for srs in (srs1, srs2, srs3, srs4): for attr, expected in s.attr: self.assertEqual(expected, srs[attr]) def test07_boolean_props(self): "Testing the boolean properties." for s in srlist: srs = SpatialReference(s.wkt) self.assertEqual(s.projected, srs.projected) self.assertEqual(s.geographic, srs.geographic) def test08_angular_linear(self): "Testing the linear and angular units routines." for s in srlist: srs = SpatialReference(s.wkt) self.assertEqual(s.ang_name, srs.angular_name) self.assertEqual(s.lin_name, srs.linear_name) self.assertAlmostEqual(s.ang_units, srs.angular_units, 9) self.assertAlmostEqual(s.lin_units, srs.linear_units, 9) def test09_authority(self): "Testing the authority name & code routines." for s in srlist: if hasattr(s, 'auth'): srs = SpatialReference(s.wkt) for target, tup in s.auth.items(): self.assertEqual(tup[0], srs.auth_name(target)) self.assertEqual(tup[1], srs.auth_code(target)) def test10_attributes(self): "Testing the attribute retrieval routines." for s in srlist: srs = SpatialReference(s.wkt) for tup in s.attr: att = tup[0] # Attribute to test exp = tup[1] # Expected result self.assertEqual(exp, srs[att]) def test11_wellknown(self): "Testing Well Known Names of Spatial References." for s in well_known: srs = SpatialReference(s.wk) self.assertEqual(s.name, srs.name) for tup in s.attrs: if len(tup) == 2: key = tup[0] exp = tup[1] elif len(tup) == 3: key = tup[:2] exp = tup[2] self.assertEqual(srs[key], exp) def test12_coordtransform(self): "Testing initialization of a CoordTransform." target = SpatialReference('WGS84') CoordTransform(SpatialReference(srlist[0].wkt), target) def test13_attr_value(self): "Testing the attr_value() method." 
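        # __getitem__ accepts a node name or a (name, index) tuple; other key
        # types raise TypeError, and unknown node names return None.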
s1 = SpatialReference('WGS84') with self.assertRaises(TypeError): s1.__getitem__(0) with self.assertRaises(TypeError): s1.__getitem__(('GEOGCS', 'foo')) self.assertEqual('WGS 84', s1['GEOGCS']) self.assertEqual('WGS_1984', s1['DATUM']) self.assertEqual('EPSG', s1['AUTHORITY']) self.assertEqual(4326, int(s1['AUTHORITY', 1])) self.assertIsNone(s1['FOOBAR']) def test_unicode(self): wkt = ( 'PROJCS["DHDN / Soldner 39 Langschoß",' 'GEOGCS["DHDN",DATUM["Deutsches_Hauptdreiecksnetz",' 'SPHEROID["Bessel 1841",6377397.155,299.1528128,AUTHORITY["EPSG","7004"]],AUTHORITY["EPSG","6314"]],' 'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],' 'UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],' 'AUTHORITY["EPSG","4314"]],PROJECTION["Cassini_Soldner"],' 'PARAMETER["latitude_of_origin",50.66738711],PARAMETER["central_meridian",6.28935703],' 'PARAMETER["false_easting",0],PARAMETER["false_northing",0],' 'UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["X",NORTH],AXIS["Y",EAST],AUTHORITY["mj10777.de","187939"]]' ) srs = SpatialReference(wkt) srs_list = [srs, srs.clone()] srs.import_wkt(wkt) for srs in srs_list: self.assertEqual(srs.name, 'DHDN / Soldner 39 Langschoß') self.assertEqual(srs.wkt, wkt) self.assertIn('Langschoß', srs.pretty_wkt) self.assertIn('Langschoß', srs.xml) @skipIf(GDAL_VERSION < (3, 0), 'GDAL >= 3.0 is required') def test_axis_order(self): wgs84_trad = SpatialReference(4326, axis_order=AxisOrder.TRADITIONAL) wgs84_auth = SpatialReference(4326, axis_order=AxisOrder.AUTHORITY) # Coordinate interpretation may depend on the srs axis predicate. pt = GEOSGeometry('POINT (992385.4472045 481455.4944650)', 2774) pt_trad = pt.transform(wgs84_trad, clone=True) self.assertAlmostEqual(pt_trad.x, -104.609, 3) self.assertAlmostEqual(pt_trad.y, 38.255, 3) pt_auth = pt.transform(wgs84_auth, clone=True) self.assertAlmostEqual(pt_auth.x, 38.255, 3) self.assertAlmostEqual(pt_auth.y, -104.609, 3) # clone() preserves the axis order. pt_auth = pt.transform(wgs84_auth.clone(), clone=True) self.assertAlmostEqual(pt_auth.x, 38.255, 3) self.assertAlmostEqual(pt_auth.y, -104.609, 3) def test_axis_order_invalid(self): msg = 'SpatialReference.axis_order must be an AxisOrder instance.' with self.assertRaisesMessage(ValueError, msg): SpatialReference(4326, axis_order='other') @skipIf(GDAL_VERSION > (3, 0), "GDAL < 3.0 doesn't support authority.") def test_axis_order_non_traditional_invalid(self): msg = 'AxisOrder.AUTHORITY is not supported in GDAL < 3.0.' with self.assertRaisesMessage(ValueError, msg): SpatialReference(4326, axis_order=AxisOrder.AUTHORITY)
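

# A minimal illustrative sketch (editor's addition, not part of the upstream
# suite) showing the workflow test12_coordtransform sets up: build the two
# SpatialReference objects once, then reuse the CoordTransform for any number
# of geometries. The function name `_transform_example` is hypothetical.
def _transform_example():
    ct = CoordTransform(SpatialReference('WGS84'), SpatialReference(32140))
    pnt = GEOSGeometry('POINT(-95.3 29.7)', srid=4326)
    pnt.transform(ct)  # In-place reprojection to NAD83 / Texas South Central.
    return pnt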
3a397edaededb03610ab99975bb08d9b8326f832a785970ecd6bc15e1e03f6a6
import copy import inspect import warnings from functools import partialmethod from itertools import chain from django.apps import apps from django.conf import settings from django.core import checks from django.core.exceptions import ( NON_FIELD_ERRORS, FieldDoesNotExist, FieldError, MultipleObjectsReturned, ObjectDoesNotExist, ValidationError, ) from django.db import ( DEFAULT_DB_ALIAS, DJANGO_VERSION_PICKLE_KEY, DatabaseError, connection, connections, router, transaction, ) from django.db.models import ( NOT_PROVIDED, ExpressionWrapper, IntegerField, Max, Value, ) from django.db.models.constants import LOOKUP_SEP from django.db.models.constraints import CheckConstraint, UniqueConstraint from django.db.models.deletion import CASCADE, Collector from django.db.models.fields.related import ( ForeignObjectRel, OneToOneField, lazy_related_operation, resolve_relation, ) from django.db.models.functions import Coalesce from django.db.models.manager import Manager from django.db.models.options import Options from django.db.models.query import Q from django.db.models.signals import ( class_prepared, post_init, post_save, pre_init, pre_save, ) from django.db.models.utils import make_model_tuple from django.utils.encoding import force_str from django.utils.hashable import make_hashable from django.utils.text import capfirst, get_text_list from django.utils.translation import gettext_lazy as _ from django.utils.version import get_version class Deferred: def __repr__(self): return '<Deferred field>' def __str__(self): return '<Deferred field>' DEFERRED = Deferred() def subclass_exception(name, bases, module, attached_to): """ Create exception subclass. Used by ModelBase below. The exception is created in a way that allows it to be pickled, assuming that the returned exception class will be added as an attribute to the 'attached_to' class. """ return type(name, bases, { '__module__': module, '__qualname__': '%s.%s' % (attached_to.__qualname__, name), }) def _has_contribute_to_class(value): # Only call contribute_to_class() if it's bound. return not inspect.isclass(value) and hasattr(value, 'contribute_to_class') class ModelBase(type): """Metaclass for all models.""" def __new__(cls, name, bases, attrs, **kwargs): super_new = super().__new__ # Also ensure initialization is only performed for subclasses of Model # (excluding Model class itself). parents = [b for b in bases if isinstance(b, ModelBase)] if not parents: return super_new(cls, name, bases, attrs) # Create the class. module = attrs.pop('__module__') new_attrs = {'__module__': module} classcell = attrs.pop('__classcell__', None) if classcell is not None: new_attrs['__classcell__'] = classcell attr_meta = attrs.pop('Meta', None) # Pass all attrs without a (Django-specific) contribute_to_class() # method to type.__new__() so that they're properly initialized # (i.e. __set_name__()). contributable_attrs = {} for obj_name, obj in list(attrs.items()): if _has_contribute_to_class(obj): contributable_attrs[obj_name] = obj else: new_attrs[obj_name] = obj new_class = super_new(cls, name, bases, new_attrs, **kwargs) abstract = getattr(attr_meta, 'abstract', False) meta = attr_meta or getattr(new_class, 'Meta', None) base_meta = getattr(new_class, '_meta', None) app_label = None # Look for an application configuration to attach the model to. 
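        # (Abstract models may legitimately live outside INSTALLED_APPS;
        # concrete models without an app configuration raise RuntimeError
        # below.)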
app_config = apps.get_containing_app_config(module) if getattr(meta, 'app_label', None) is None: if app_config is None: if not abstract: raise RuntimeError( "Model class %s.%s doesn't declare an explicit " "app_label and isn't in an application in " "INSTALLED_APPS." % (module, name) ) else: app_label = app_config.label new_class.add_to_class('_meta', Options(meta, app_label)) if not abstract: new_class.add_to_class( 'DoesNotExist', subclass_exception( 'DoesNotExist', tuple( x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract ) or (ObjectDoesNotExist,), module, attached_to=new_class)) new_class.add_to_class( 'MultipleObjectsReturned', subclass_exception( 'MultipleObjectsReturned', tuple( x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract ) or (MultipleObjectsReturned,), module, attached_to=new_class)) if base_meta and not base_meta.abstract: # Non-abstract child classes inherit some attributes from their # non-abstract parent (unless an ABC comes before it in the # method resolution order). if not hasattr(meta, 'ordering'): new_class._meta.ordering = base_meta.ordering if not hasattr(meta, 'get_latest_by'): new_class._meta.get_latest_by = base_meta.get_latest_by is_proxy = new_class._meta.proxy # If the model is a proxy, ensure that the base class # hasn't been swapped out. if is_proxy and base_meta and base_meta.swapped: raise TypeError("%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped)) # Add remaining attributes (those with a contribute_to_class() method) # to the class. for obj_name, obj in contributable_attrs.items(): new_class.add_to_class(obj_name, obj) # All the fields of any type declared on this model new_fields = chain( new_class._meta.local_fields, new_class._meta.local_many_to_many, new_class._meta.private_fields ) field_names = {f.name for f in new_fields} # Basic setup for proxy models. if is_proxy: base = None for parent in [kls for kls in parents if hasattr(kls, '_meta')]: if parent._meta.abstract: if parent._meta.fields: raise TypeError( "Abstract base class containing model fields not " "permitted for proxy model '%s'." % name ) else: continue if base is None: base = parent elif parent._meta.concrete_model is not base._meta.concrete_model: raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name) if base is None: raise TypeError("Proxy model '%s' has no non-abstract model base class." % name) new_class._meta.setup_proxy(base) new_class._meta.concrete_model = base._meta.concrete_model else: new_class._meta.concrete_model = new_class # Collect the parent links for multi-table inheritance. parent_links = {} for base in reversed([new_class] + parents): # Conceptually equivalent to `if base is Model`. if not hasattr(base, '_meta'): continue # Skip concrete parent classes. if base != new_class and not base._meta.abstract: continue # Locate OneToOneField instances. for field in base._meta.local_fields: if isinstance(field, OneToOneField) and field.remote_field.parent_link: related = resolve_relation(new_class, field.remote_field.model) parent_links[make_model_tuple(related)] = field # Track fields inherited from base models. inherited_attributes = set() # Do the appropriate setup for any model parents. for base in new_class.mro(): if base not in parents or not hasattr(base, '_meta'): # Things without _meta aren't functional models, so they're # uninteresting parents. 
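                # Their attribute names are still recorded, so abstract
                # fields shadowed by ordinary class attributes aren't copied
                # in below.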
inherited_attributes.update(base.__dict__) continue parent_fields = base._meta.local_fields + base._meta.local_many_to_many if not base._meta.abstract: # Check for clashes between locally declared fields and those # on the base classes. for field in parent_fields: if field.name in field_names: raise FieldError( 'Local field %r in class %r clashes with field of ' 'the same name from base class %r.' % ( field.name, name, base.__name__, ) ) else: inherited_attributes.add(field.name) # Concrete classes... base = base._meta.concrete_model base_key = make_model_tuple(base) if base_key in parent_links: field = parent_links[base_key] elif not is_proxy: attr_name = '%s_ptr' % base._meta.model_name field = OneToOneField( base, on_delete=CASCADE, name=attr_name, auto_created=True, parent_link=True, ) if attr_name in field_names: raise FieldError( "Auto-generated field '%s' in class %r for " "parent_link to base class %r clashes with " "declared field of the same name." % ( attr_name, name, base.__name__, ) ) # Only add the ptr field if it's not already present; # e.g. migrations will already have it specified if not hasattr(new_class, attr_name): new_class.add_to_class(attr_name, field) else: field = None new_class._meta.parents[base] = field else: base_parents = base._meta.parents.copy() # Add fields from abstract base class if it wasn't overridden. for field in parent_fields: if (field.name not in field_names and field.name not in new_class.__dict__ and field.name not in inherited_attributes): new_field = copy.deepcopy(field) new_class.add_to_class(field.name, new_field) # Replace parent links defined on this base by the new # field. It will be appropriately resolved if required. if field.one_to_one: for parent, parent_link in base_parents.items(): if field == parent_link: base_parents[parent] = new_field # Pass any non-abstract parent classes onto child. new_class._meta.parents.update(base_parents) # Inherit private fields (like GenericForeignKey) from the parent # class for field in base._meta.private_fields: if field.name in field_names: if not base._meta.abstract: raise FieldError( 'Local field %r in class %r clashes with field of ' 'the same name from base class %r.' % ( field.name, name, base.__name__, ) ) else: field = copy.deepcopy(field) if not base._meta.abstract: field.mti_inherited = True new_class.add_to_class(field.name, field) # Copy indexes so that index names are unique when models extend an # abstract model. new_class._meta.indexes = [copy.deepcopy(idx) for idx in new_class._meta.indexes] if abstract: # Abstract base models can't be instantiated and don't appear in # the list of models for an app. We do the final setup for them a # little differently from normal models. attr_meta.abstract = False new_class.Meta = attr_meta return new_class new_class._prepare() new_class._meta.apps.register_model(new_class._meta.app_label, new_class) return new_class def add_to_class(cls, name, value): if _has_contribute_to_class(value): value.contribute_to_class(cls, name) else: setattr(cls, name, value) def _prepare(cls): """Create some methods once self._meta has been populated.""" opts = cls._meta opts._prepare(cls) if opts.order_with_respect_to: cls.get_next_in_order = partialmethod(cls._get_next_or_previous_in_order, is_next=True) cls.get_previous_in_order = partialmethod(cls._get_next_or_previous_in_order, is_next=False) # Defer creating accessors on the foreign class until it has been # created and registered. 
If remote_field is None, we're ordering # with respect to a GenericForeignKey and don't know what the # foreign class is - we'll add those accessors later in # contribute_to_class(). if opts.order_with_respect_to.remote_field: wrt = opts.order_with_respect_to remote = wrt.remote_field.model lazy_related_operation(make_foreign_order_accessors, cls, remote) # Give the class a docstring -- its definition. if cls.__doc__ is None: cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join(f.name for f in opts.fields)) get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get(opts.label_lower) if get_absolute_url_override: setattr(cls, 'get_absolute_url', get_absolute_url_override) if not opts.managers: if any(f.name == 'objects' for f in opts.fields): raise ValueError( "Model %s must specify a custom Manager, because it has a " "field named 'objects'." % cls.__name__ ) manager = Manager() manager.auto_created = True cls.add_to_class('objects', manager) # Set the name of _meta.indexes. This can't be done in # Options.contribute_to_class() because fields haven't been added to # the model at that point. for index in cls._meta.indexes: if not index.name: index.set_name_with_model(cls) class_prepared.send(sender=cls) @property def _base_manager(cls): return cls._meta.base_manager @property def _default_manager(cls): return cls._meta.default_manager class ModelStateFieldsCacheDescriptor: def __get__(self, instance, cls=None): if instance is None: return self res = instance.fields_cache = {} return res class ModelState: """Store model instance state.""" db = None # If true, uniqueness validation checks will consider this a new, unsaved # object. Necessary for correct validation of new instances of objects with # explicit (non-auto) PKs. This impacts validation only; it has no effect # on the actual save. adding = True fields_cache = ModelStateFieldsCacheDescriptor() class Model(metaclass=ModelBase): def __init__(self, *args, **kwargs): # Alias some things as locals to avoid repeat global lookups cls = self.__class__ opts = self._meta _setattr = setattr _DEFERRED = DEFERRED pre_init.send(sender=cls, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. if len(args) > len(opts.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(opts.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): if val is _DEFERRED: continue _setattr(self, field.attname, val) else: # Slower, kwargs-ready version. fields_iter = iter(opts.fields) for val, field in zip(args, fields_iter): if val is _DEFERRED: continue _setattr(self, field.attname, val) kwargs.pop(field.name, None) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. 
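        # (zip() above consumed exactly len(args) entries from fields_iter,
        # so iteration resumes at the first field without a positional value.)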
for field in fields_iter: is_related_object = False # Virtual field if field.attname not in kwargs and field.column is None: continue if kwargs: if isinstance(field.remote_field, ForeignObjectRel): try: # Assume object instance was passed in. rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. if rel_obj is not _DEFERRED: _setattr(self, field.name, rel_obj) else: if val is not _DEFERRED: _setattr(self, field.attname, val) if kwargs: property_names = opts._property_names for prop in tuple(kwargs): try: # Any remaining kwargs must correspond to properties or # virtual fields. if prop in property_names or opts.get_field(prop): if kwargs[prop] is not _DEFERRED: _setattr(self, prop, kwargs[prop]) del kwargs[prop] except (AttributeError, FieldDoesNotExist): pass for kwarg in kwargs: raise TypeError("%s() got an unexpected keyword argument '%s'" % (cls.__name__, kwarg)) super().__init__() post_init.send(sender=cls, instance=self) @classmethod def from_db(cls, db, field_names, values): if len(values) != len(cls._meta.concrete_fields): values_iter = iter(values) values = [ next(values_iter) if f.attname in field_names else DEFERRED for f in cls._meta.concrete_fields ] new = cls(*values) new._state.adding = False new._state.db = db return new def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def __str__(self): return '%s object (%s)' % (self.__class__.__name__, self.pk) def __eq__(self, other): if not isinstance(other, Model): return NotImplemented if self._meta.concrete_model != other._meta.concrete_model: return False my_pk = self.pk if my_pk is None: return self is other return my_pk == other.pk def __hash__(self): if self.pk is None: raise TypeError("Model instances without primary key value are unhashable") return hash(self.pk) def __reduce__(self): data = self.__getstate__() data[DJANGO_VERSION_PICKLE_KEY] = get_version() class_id = self._meta.app_label, self._meta.object_name return model_unpickle, (class_id,), data def __getstate__(self): """Hook to allow choosing the attributes to pickle.""" return self.__dict__ def __setstate__(self, state): msg = None pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY) if pickled_version: current_version = get_version() if current_version != pickled_version: msg = ( "Pickled model instance's Django version %s does not match " "the current version %s." % (pickled_version, current_version) ) else: msg = "Pickled model instance's Django version is not specified." 
if msg: warnings.warn(msg, RuntimeWarning, stacklevel=2) self.__dict__.update(state) def _get_pk_val(self, meta=None): meta = meta or self._meta return getattr(self, meta.pk.attname) def _set_pk_val(self, value): for parent_link in self._meta.parents.values(): if parent_link and parent_link != self._meta.pk: setattr(self, parent_link.target_field.attname, value) return setattr(self, self._meta.pk.attname, value) pk = property(_get_pk_val, _set_pk_val) def get_deferred_fields(self): """ Return a set containing names of deferred fields for this instance. """ return { f.attname for f in self._meta.concrete_fields if f.attname not in self.__dict__ } def refresh_from_db(self, using=None, fields=None): """ Reload field values from the database. By default, the reloading happens from the database this instance was loaded from, or by the read router if this instance wasn't loaded from any database. The using parameter will override the default. Fields can be used to specify which fields to reload. The fields should be an iterable of field attnames. If fields is None, then all non-deferred fields are reloaded. When accessing deferred fields of an instance, the deferred loading of the field will call this method. """ if fields is None: self._prefetched_objects_cache = {} else: prefetched_objects_cache = getattr(self, '_prefetched_objects_cache', ()) for field in fields: if field in prefetched_objects_cache: del prefetched_objects_cache[field] fields.remove(field) if not fields: return if any(LOOKUP_SEP in f for f in fields): raise ValueError( 'Found "%s" in fields argument. Relations and transforms ' 'are not allowed in fields.' % LOOKUP_SEP) hints = {'instance': self} db_instance_qs = self.__class__._base_manager.db_manager(using, hints=hints).filter(pk=self.pk) # Use provided fields, if not set then reload all non-deferred fields. deferred_fields = self.get_deferred_fields() if fields is not None: fields = list(fields) db_instance_qs = db_instance_qs.only(*fields) elif deferred_fields: fields = [f.attname for f in self._meta.concrete_fields if f.attname not in deferred_fields] db_instance_qs = db_instance_qs.only(*fields) db_instance = db_instance_qs.get() non_loaded_fields = db_instance.get_deferred_fields() for field in self._meta.concrete_fields: if field.attname in non_loaded_fields: # This field wasn't refreshed - skip ahead. continue setattr(self, field.attname, getattr(db_instance, field.attname)) # Clear cached foreign keys. if field.is_relation and field.is_cached(self): field.delete_cached_value(self) # Clear cached relations. for field in self._meta.related_objects: if field.is_cached(self): field.delete_cached_value(self) self._state.db = db_instance._state.db def serializable_value(self, field_name): """ Return the value of the field name for this instance. If the field is a foreign key, return the id value instead of the object. If there's no Field object with this name on the model, return the model attribute's value. Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method. """ try: field = self._meta.get_field(field_name) except FieldDoesNotExist: return getattr(self, field_name) return getattr(self, field.attname) def save(self, force_insert=False, force_update=False, using=None, update_fields=None): """ Save the current instance. Override this in a subclass if you want to control the saving process. 
The 'force_insert' and 'force_update' parameters can be used to insist that the "save" must be an SQL insert or update (or equivalent for non-SQL backends), respectively. Normally, they should not be set. """ # Ensure that a model instance without a PK hasn't been assigned to # a ForeignKey or OneToOneField on this model. If the field is # nullable, allowing the save() would result in silent data loss. for field in self._meta.concrete_fields: # If the related field isn't cached, then an instance hasn't # been assigned and there's no need to worry about this check. if field.is_relation and field.is_cached(self): obj = getattr(self, field.name, None) if not obj: continue # A pk may have been assigned manually to a model instance not # saved to the database (or auto-generated in a case like # UUIDField), but we allow the save to proceed and rely on the # database to raise an IntegrityError if applicable. If # constraints aren't supported by the database, there's the # unavoidable risk of data corruption. if obj.pk is None: # Remove the object from a related instance cache. if not field.remote_field.multiple: field.remote_field.delete_cached_value(obj) raise ValueError( "save() prohibited to prevent data loss due to " "unsaved related object '%s'." % field.name ) elif getattr(self, field.attname) is None: # Use pk from related object if it has been saved after # an assignment. setattr(self, field.attname, obj.pk) # If the relationship's pk/to_field was changed, clear the # cached relationship. if getattr(obj, field.target_field.attname) != getattr(self, field.attname): field.delete_cached_value(self) using = using or router.db_for_write(self.__class__, instance=self) if force_insert and (force_update or update_fields): raise ValueError("Cannot force both insert and updating in model saving.") deferred_fields = self.get_deferred_fields() if update_fields is not None: # If update_fields is empty, skip the save. We do also check for # no-op saves later on for inheritance cases. This bailout is # still needed for skipping signal sending. if not update_fields: return update_fields = frozenset(update_fields) field_names = set() for field in self._meta.fields: if not field.primary_key: field_names.add(field.name) if field.name != field.attname: field_names.add(field.attname) non_model_fields = update_fields.difference(field_names) if non_model_fields: raise ValueError("The following fields do not exist in this " "model or are m2m fields: %s" % ', '.join(non_model_fields)) # If saving to the same database, and this model is deferred, then # automatically do an "update_fields" save on the loaded fields. elif not force_insert and deferred_fields and using == self._state.db: field_names = set() for field in self._meta.concrete_fields: if not field.primary_key and not hasattr(field, 'through'): field_names.add(field.attname) loaded_fields = field_names.difference(deferred_fields) if loaded_fields: update_fields = frozenset(loaded_fields) self.save_base(using=using, force_insert=force_insert, force_update=force_update, update_fields=update_fields) save.alters_data = True def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handle the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. 
""" using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or update_fields cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: pre_save.send( sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields, ) # A transaction isn't needed if one query is issued. if meta.parents: context_manager = transaction.atomic(using=using, savepoint=False) else: context_manager = transaction.mark_for_rollback_on_error(using=using) with context_manager: parent_inserted = False if not raw: parent_inserted = self._save_parents(cls, using, update_fields) updated = self._save_table( raw, cls, force_insert or parent_inserted, force_update, using, update_fields, ) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. self._state.adding = False # Signal that the save is complete if not meta.auto_created: post_save.send( sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using, ) save_base.alters_data = True def _save_parents(self, cls, using, update_fields): """Save all the parents of cls using values from self.""" meta = cls._meta inserted = False for parent, field in meta.parents.items(): # Make sure the link fields are synced between parent and self. if (field and getattr(self, parent._meta.pk.attname) is None and getattr(self, field.attname) is not None): setattr(self, parent._meta.pk.attname, getattr(self, field.attname)) parent_inserted = self._save_parents(cls=parent, using=using, update_fields=update_fields) updated = self._save_table( cls=parent, using=using, update_fields=update_fields, force_insert=parent_inserted, ) if not updated: inserted = True # Set the parent's PK value to self. if field: setattr(self, field.attname, self._get_pk_val(parent._meta)) # Since we didn't have an instance of the parent handy set # attname directly, bypassing the descriptor. Invalidate # the related object cache, in case it's been accidentally # populated. A fresh instance will be re-built from the # database if necessary. if field.is_cached(self): field.delete_cached_value(self) return inserted def _save_table(self, raw=False, cls=None, force_insert=False, force_update=False, using=None, update_fields=None): """ Do the heavy-lifting involved in saving. Update or insert the data for a single table. """ meta = cls._meta non_pks = [f for f in meta.local_concrete_fields if not f.primary_key] if update_fields: non_pks = [f for f in non_pks if f.name in update_fields or f.attname in update_fields] pk_val = self._get_pk_val(meta) if pk_val is None: pk_val = meta.pk.get_pk_value_on_save(self) setattr(self, meta.pk.attname, pk_val) pk_set = pk_val is not None if not pk_set and (force_update or update_fields): raise ValueError("Cannot force an update in save() with no primary key.") updated = False # Skip an UPDATE when adding an instance and primary key has a default. if ( not raw and not force_insert and self._state.adding and meta.pk.default and meta.pk.default is not NOT_PROVIDED ): force_insert = True # If possible, try an UPDATE. If that doesn't update anything, do an INSERT. 
if pk_set and not force_insert: base_qs = cls._base_manager.using(using) values = [(f, None, (getattr(self, f.attname) if raw else f.pre_save(self, False))) for f in non_pks] forced_update = update_fields or force_update updated = self._do_update(base_qs, using, pk_val, values, update_fields, forced_update) if force_update and not updated: raise DatabaseError("Forced update did not affect any rows.") if update_fields and not updated: raise DatabaseError("Save with update_fields did not affect any rows.") if not updated: if meta.order_with_respect_to: # If this is a model with an order_with_respect_to # autopopulate the _order field field = meta.order_with_respect_to filter_args = field.get_filter_kwargs_for_object(self) self._order = cls._base_manager.using(using).filter(**filter_args).aggregate( _order__max=Coalesce( ExpressionWrapper(Max('_order') + Value(1), output_field=IntegerField()), Value(0), ), )['_order__max'] fields = meta.local_concrete_fields if not pk_set: fields = [f for f in fields if f is not meta.auto_field] returning_fields = meta.db_returning_fields results = self._do_insert(cls._base_manager, using, fields, returning_fields, raw) if results: for value, field in zip(results[0], returning_fields): setattr(self, field.attname, value) return updated def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update): """ Try to update the model. Return True if the model was updated (if an update query was done and a matching row was found in the DB). """ filtered = base_qs.filter(pk=pk_val) if not values: # We can end up here when saving a model in inheritance chain where # update_fields doesn't target any field in current model. In that # case we just say the update succeeded. Another case ending up here # is a model with just PK - in that case check that the PK still # exists. return update_fields is not None or filtered.exists() if self._meta.select_on_save and not forced_update: return ( filtered.exists() and # It may happen that the object is deleted from the DB right after # this check, causing the subsequent UPDATE to return zero matching # rows. The same result can occur in some rare cases when the # database returns zero despite the UPDATE being executed # successfully (a row is matched and updated). In order to # distinguish these two cases, the object's existence in the # database is again checked for if the UPDATE query returns 0. (filtered._update(values) > 0 or filtered.exists()) ) return filtered._update(values) > 0 def _do_insert(self, manager, using, fields, returning_fields, raw): """ Do an INSERT. If returning_fields is defined then this method should return the newly created data for the model. """ return manager._insert( [self], fields=fields, returning_fields=returning_fields, using=using, raw=raw, ) def delete(self, using=None, keep_parents=False): using = using or router.db_for_write(self.__class__, instance=self) assert self.pk is not None, ( "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname) ) collector = Collector(using=using) collector.collect([self], keep_parents=keep_parents) return collector.delete() delete.alters_data = True def _get_FIELD_display(self, field): value = getattr(self, field.attname) choices_dict = dict(make_hashable(field.flatchoices)) # force_str() to coerce lazy strings. 
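        # (Illustrative, with hypothetical choices=[(1, 'First')]:
        # get_foo_display() returns 'First' for the value 1 and falls back to
        # the raw value for anything not present in the mapping.)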
return force_str(choices_dict.get(make_hashable(value), value), strings_only=True) def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs): if not self.pk: raise ValueError("get_next/get_previous cannot be used on unsaved objects.") op = 'gt' if is_next else 'lt' order = '' if is_next else '-' param = getattr(self, field.attname) q = Q(**{'%s__%s' % (field.name, op): param}) q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk}) qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by( '%s%s' % (order, field.name), '%spk' % order ) try: return qs[0] except IndexError: raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name) def _get_next_or_previous_in_order(self, is_next): cachename = "__%s_order_cache" % is_next if not hasattr(self, cachename): op = 'gt' if is_next else 'lt' order = '_order' if is_next else '-_order' order_field = self._meta.order_with_respect_to filter_args = order_field.get_filter_kwargs_for_object(self) obj = self.__class__._default_manager.filter(**filter_args).filter(**{ '_order__%s' % op: self.__class__._default_manager.values('_order').filter(**{ self._meta.pk.name: self.pk }) }).order_by(order)[:1].get() setattr(self, cachename, obj) return getattr(self, cachename) def prepare_database_save(self, field): if self.pk is None: raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self) return getattr(self, field.remote_field.get_related_field().attname) def clean(self): """ Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. """ pass def validate_unique(self, exclude=None): """ Check unique constraints on the model and raise ValidationError if any failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) errors = self._perform_unique_checks(unique_checks) date_errors = self._perform_date_checks(date_checks) for k, v in date_errors.items(): errors.setdefault(k, []).extend(v) if errors: raise ValidationError(errors) def _get_unique_checks(self, exclude=None): """ Return a list of checks to perform. Since validate_unique() could be called from a ModelForm, some fields may have been excluded; we can't perform a unique check on a model that is missing fields involved in that check. Fields that did not validate should also be excluded, but they need to be passed in via the exclude argument. """ if exclude is None: exclude = [] unique_checks = [] unique_togethers = [(self.__class__, self._meta.unique_together)] constraints = [(self.__class__, self._meta.total_unique_constraints)] for parent_class in self._meta.get_parent_list(): if parent_class._meta.unique_together: unique_togethers.append((parent_class, parent_class._meta.unique_together)) if parent_class._meta.total_unique_constraints: constraints.append( (parent_class, parent_class._meta.total_unique_constraints) ) for model_class, unique_together in unique_togethers: for check in unique_together: if not any(name in exclude for name in check): # Add the check if the field isn't excluded. 
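                    # (Each entry is a (model_class, field_names) pair; a
                    # hypothetical unique_together = [('site', 'slug')] yields
                    # (model_class, ('site', 'slug')).)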
unique_checks.append((model_class, tuple(check))) for model_class, model_constraints in constraints: for constraint in model_constraints: if not any(name in exclude for name in constraint.fields): unique_checks.append((model_class, constraint.fields)) # These are checks for the unique_for_<date/year/month>. date_checks = [] # Gather a list of checks for fields declared as unique and add them to # the list of checks. fields_with_class = [(self.__class__, self._meta.local_fields)] for parent_class in self._meta.get_parent_list(): fields_with_class.append((parent_class, parent_class._meta.local_fields)) for model_class, fields in fields_with_class: for f in fields: name = f.name if name in exclude: continue if f.unique: unique_checks.append((model_class, (name,))) if f.unique_for_date and f.unique_for_date not in exclude: date_checks.append((model_class, 'date', name, f.unique_for_date)) if f.unique_for_year and f.unique_for_year not in exclude: date_checks.append((model_class, 'year', name, f.unique_for_year)) if f.unique_for_month and f.unique_for_month not in exclude: date_checks.append((model_class, 'month', name, f.unique_for_month)) return unique_checks, date_checks def _perform_unique_checks(self, unique_checks): errors = {} for model_class, unique_check in unique_checks: # Try to look up an existing object with the same values as this # object's values for all the unique field. lookup_kwargs = {} for field_name in unique_check: f = self._meta.get_field(field_name) lookup_value = getattr(self, f.attname) # TODO: Handle multiple backends with different feature flags. if (lookup_value is None or (lookup_value == '' and connection.features.interprets_empty_strings_as_nulls)): # no value, skip the lookup continue if f.primary_key and not self._state.adding: # no need to check for unique primary key when editing continue lookup_kwargs[str(field_name)] = lookup_value # some fields were skipped, no reason to do the check if len(unique_check) != len(lookup_kwargs): continue qs = model_class._default_manager.filter(**lookup_kwargs) # Exclude the current object from the query if we are editing an # instance (as opposed to creating a new one) # Note that we need to use the pk as defined by model_class, not # self.pk. These can be different fields because model inheritance # allows single model to have effectively multiple primary keys. # Refs #17615. 
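            # (Sketch: with multi-table inheritance a child row carries both
            # its own pk and the parent link column, so the exclusion must use
            # the pk as model_class defines it.)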
model_class_pk = self._get_pk_val(model_class._meta) if not self._state.adding and model_class_pk is not None: qs = qs.exclude(pk=model_class_pk) if qs.exists(): if len(unique_check) == 1: key = unique_check[0] else: key = NON_FIELD_ERRORS errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check)) return errors def _perform_date_checks(self, date_checks): errors = {} for model_class, lookup_type, field, unique_for in date_checks: lookup_kwargs = {} # there's a ticket to add a date lookup, we can remove this special # case if that makes it's way in date = getattr(self, unique_for) if date is None: continue if lookup_type == 'date': lookup_kwargs['%s__day' % unique_for] = date.day lookup_kwargs['%s__month' % unique_for] = date.month lookup_kwargs['%s__year' % unique_for] = date.year else: lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type) lookup_kwargs[field] = getattr(self, field) qs = model_class._default_manager.filter(**lookup_kwargs) # Exclude the current object from the query if we are editing an # instance (as opposed to creating a new one) if not self._state.adding and self.pk is not None: qs = qs.exclude(pk=self.pk) if qs.exists(): errors.setdefault(field, []).append( self.date_error_message(lookup_type, field, unique_for) ) return errors def date_error_message(self, lookup_type, field_name, unique_for): opts = self._meta field = opts.get_field(field_name) return ValidationError( message=field.error_messages['unique_for_date'], code='unique_for_date', params={ 'model': self, 'model_name': capfirst(opts.verbose_name), 'lookup_type': lookup_type, 'field': field_name, 'field_label': capfirst(field.verbose_name), 'date_field': unique_for, 'date_field_label': capfirst(opts.get_field(unique_for).verbose_name), } ) def unique_error_message(self, model_class, unique_check): opts = model_class._meta params = { 'model': self, 'model_class': model_class, 'model_name': capfirst(opts.verbose_name), 'unique_check': unique_check, } # A unique field if len(unique_check) == 1: field = opts.get_field(unique_check[0]) params['field_label'] = capfirst(field.verbose_name) return ValidationError( message=field.error_messages['unique'], code='unique', params=params, ) # unique_together else: field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check] params['field_labels'] = get_text_list(field_labels, _('and')) return ValidationError( message=_("%(model_name)s with this %(field_labels)s already exists."), code='unique_together', params=params, ) def full_clean(self, exclude=None, validate_unique=True): """ Call clean_fields(), clean(), and validate_unique() on the model. Raise a ValidationError for any errors that occur. """ errors = {} if exclude is None: exclude = [] else: exclude = list(exclude) try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. 
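        # (Field names that already gathered errors are appended to `exclude`
        # below, so validate_unique() never queries with invalid values.)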
if validate_unique: for name in errors: if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors) def clean_fields(self, exclude=None): """ Clean all fields and raise a ValidationError containing a dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors) @classmethod def check(cls, **kwargs): errors = [*cls._check_swappable(), *cls._check_model(), *cls._check_managers(**kwargs)] if not cls._meta.swapped: databases = kwargs.get('databases') or [] errors += [ *cls._check_fields(**kwargs), *cls._check_m2m_through_same_relationship(), *cls._check_long_column_names(databases), ] clash_errors = ( *cls._check_id_field(), *cls._check_field_name_clashes(), *cls._check_model_name_db_lookup_clashes(), *cls._check_property_name_related_field_accessor_clashes(), *cls._check_single_primary_key(), ) errors.extend(clash_errors) # If there are field name clashes, hide consequent column name # clashes. if not clash_errors: errors.extend(cls._check_column_name_clashes()) errors += [ *cls._check_index_together(), *cls._check_unique_together(), *cls._check_indexes(databases), *cls._check_ordering(), *cls._check_constraints(databases), ] return errors @classmethod def _check_swappable(cls): """Check if the swapped model exists.""" errors = [] if cls._meta.swapped: try: apps.get_model(cls._meta.swapped) except ValueError: errors.append( checks.Error( "'%s' is not of the form 'app_label.app_name'." % cls._meta.swappable, id='models.E001', ) ) except LookupError: app_label, model_name = cls._meta.swapped.split('.') errors.append( checks.Error( "'%s' references '%s.%s', which has not been " "installed, or is abstract." % ( cls._meta.swappable, app_label, model_name ), id='models.E002', ) ) return errors @classmethod def _check_model(cls): errors = [] if cls._meta.proxy: if cls._meta.local_fields or cls._meta.local_many_to_many: errors.append( checks.Error( "Proxy model '%s' contains model fields." % cls.__name__, id='models.E017', ) ) return errors @classmethod def _check_managers(cls, **kwargs): """Perform all manager checks.""" errors = [] for manager in cls._meta.managers: errors.extend(manager.check(**kwargs)) return errors @classmethod def _check_fields(cls, **kwargs): """Perform all field checks.""" errors = [] for field in cls._meta.local_fields: errors.extend(field.check(**kwargs)) for field in cls._meta.local_many_to_many: errors.extend(field.check(from_model=cls, **kwargs)) return errors @classmethod def _check_m2m_through_same_relationship(cls): """ Check if no relationship model is used by more than one m2m field. """ errors = [] seen_intermediary_signatures = [] fields = cls._meta.local_many_to_many # Skip when the target model wasn't found. fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase)) # Skip when the relationship model wasn't found. 
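        # (An unresolved lazy reference leaves remote_field.through as a str
        # rather than a model class, so such fields are filtered out next.)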
fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase)) for f in fields: signature = (f.remote_field.model, cls, f.remote_field.through, f.remote_field.through_fields) if signature in seen_intermediary_signatures: errors.append( checks.Error( "The model has two identical many-to-many relations " "through the intermediate model '%s'." % f.remote_field.through._meta.label, obj=cls, id='models.E003', ) ) else: seen_intermediary_signatures.append(signature) return errors @classmethod def _check_id_field(cls): """Check if `id` field is a primary key.""" fields = [f for f in cls._meta.local_fields if f.name == 'id' and f != cls._meta.pk] # fields is empty or consists of the invalid "id" field if fields and not fields[0].primary_key and cls._meta.pk.name == 'id': return [ checks.Error( "'id' can only be used as a field name if the field also " "sets 'primary_key=True'.", obj=cls, id='models.E004', ) ] else: return [] @classmethod def _check_field_name_clashes(cls): """Forbid field shadowing in multi-table inheritance.""" errors = [] used_fields = {} # name or attname -> field # Check that multi-inheritance doesn't cause field name shadowing. for parent in cls._meta.get_parent_list(): for f in parent._meta.local_fields: clash = used_fields.get(f.name) or used_fields.get(f.attname) or None if clash: errors.append( checks.Error( "The field '%s' from parent model " "'%s' clashes with the field '%s' " "from parent model '%s'." % ( clash.name, clash.model._meta, f.name, f.model._meta ), obj=cls, id='models.E005', ) ) used_fields[f.name] = f used_fields[f.attname] = f # Check that fields defined in the model don't clash with fields from # parents, including auto-generated fields like multi-table inheritance # child accessors. for parent in cls._meta.get_parent_list(): for f in parent._meta.get_fields(): if f not in used_fields: used_fields[f.name] = f for f in cls._meta.local_fields: clash = used_fields.get(f.name) or used_fields.get(f.attname) or None # Note that we may detect clash between user-defined non-unique # field "id" and automatically added unique field "id", both # defined at the same model. This special case is considered in # _check_id_field and here we ignore it. id_conflict = f.name == "id" and clash and clash.name == "id" and clash.model == cls if clash and not id_conflict: errors.append( checks.Error( "The field '%s' clashes with the field '%s' " "from model '%s'." % ( f.name, clash.name, clash.model._meta ), obj=f, id='models.E006', ) ) used_fields[f.name] = f used_fields[f.attname] = f return errors @classmethod def _check_column_name_clashes(cls): # Store a list of column names which have already been used by other fields. used_column_names = [] errors = [] for f in cls._meta.local_fields: _, column_name = f.get_attname_column() # Ensure the column name is not already in use. if column_name and column_name in used_column_names: errors.append( checks.Error( "Field '%s' has column name '%s' that is used by " "another field." % (f.name, column_name), hint="Specify a 'db_column' for the field.", obj=cls, id='models.E007' ) ) else: used_column_names.append(column_name) return errors @classmethod def _check_model_name_db_lookup_clashes(cls): errors = [] model_name = cls.__name__ if model_name.startswith('_') or model_name.endswith('_'): errors.append( checks.Error( "The model name '%s' cannot start or end with an underscore " "as it collides with the query lookup syntax." 
% model_name, obj=cls, id='models.E023' ) ) elif LOOKUP_SEP in model_name: errors.append( checks.Error( "The model name '%s' cannot contain double underscores as " "it collides with the query lookup syntax." % model_name, obj=cls, id='models.E024' ) ) return errors @classmethod def _check_property_name_related_field_accessor_clashes(cls): errors = [] property_names = cls._meta._property_names related_field_accessors = ( f.get_attname() for f in cls._meta._get_fields(reverse=False) if f.is_relation and f.related_model is not None ) for accessor in related_field_accessors: if accessor in property_names: errors.append( checks.Error( "The property '%s' clashes with a related field " "accessor." % accessor, obj=cls, id='models.E025', ) ) return errors @classmethod def _check_single_primary_key(cls): errors = [] if sum(1 for f in cls._meta.local_fields if f.primary_key) > 1: errors.append( checks.Error( "The model cannot have more than one field with " "'primary_key=True'.", obj=cls, id='models.E026', ) ) return errors @classmethod def _check_index_together(cls): """Check the value of "index_together" option.""" if not isinstance(cls._meta.index_together, (tuple, list)): return [ checks.Error( "'index_together' must be a list or tuple.", obj=cls, id='models.E008', ) ] elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.index_together): return [ checks.Error( "All 'index_together' elements must be lists or tuples.", obj=cls, id='models.E009', ) ] else: errors = [] for fields in cls._meta.index_together: errors.extend(cls._check_local_fields(fields, "index_together")) return errors @classmethod def _check_unique_together(cls): """Check the value of "unique_together" option.""" if not isinstance(cls._meta.unique_together, (tuple, list)): return [ checks.Error( "'unique_together' must be a list or tuple.", obj=cls, id='models.E010', ) ] elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.unique_together): return [ checks.Error( "All 'unique_together' elements must be lists or tuples.", obj=cls, id='models.E011', ) ] else: errors = [] for fields in cls._meta.unique_together: errors.extend(cls._check_local_fields(fields, "unique_together")) return errors @classmethod def _check_indexes(cls, databases): """Check fields, names, and conditions of indexes.""" errors = [] for index in cls._meta.indexes: # Index name can't start with an underscore or a number, restricted # for cross-database compatibility with Oracle. if index.name[0] == '_' or index.name[0].isdigit(): errors.append( checks.Error( "The index name '%s' cannot start with an underscore " "or a number." % index.name, obj=cls, id='models.E033', ), ) if len(index.name) > index.max_name_length: errors.append( checks.Error( "The index name '%s' cannot be longer than %d " "characters." % (index.name, index.max_name_length), obj=cls, id='models.E034', ), ) for db in databases: if not router.allow_migrate_model(db, cls): continue connection = connections[db] if ( connection.features.supports_partial_indexes or 'supports_partial_indexes' in cls._meta.required_db_features ): continue if any(index.condition is not None for index in cls._meta.indexes): errors.append( checks.Warning( '%s does not support indexes with conditions.' % connection.display_name, hint=( "Conditions will be ignored. Silence this warning " "if you don't care about it." 
), obj=cls, id='models.W037', ) ) fields = [field for index in cls._meta.indexes for field, _ in index.fields_orders] errors.extend(cls._check_local_fields(fields, 'indexes')) return errors @classmethod def _check_local_fields(cls, fields, option): from django.db import models # In order to avoid hitting the relation tree prematurely, we use our # own fields_map instead of using get_field() forward_fields_map = {} for field in cls._meta._get_fields(reverse=False): forward_fields_map[field.name] = field if hasattr(field, 'attname'): forward_fields_map[field.attname] = field errors = [] for field_name in fields: try: field = forward_fields_map[field_name] except KeyError: errors.append( checks.Error( "'%s' refers to the nonexistent field '%s'." % ( option, field_name, ), obj=cls, id='models.E012', ) ) else: if isinstance(field.remote_field, models.ManyToManyRel): errors.append( checks.Error( "'%s' refers to a ManyToManyField '%s', but " "ManyToManyFields are not permitted in '%s'." % ( option, field_name, option, ), obj=cls, id='models.E013', ) ) elif field not in cls._meta.local_fields: errors.append( checks.Error( "'%s' refers to field '%s' which is not local to model '%s'." % (option, field_name, cls._meta.object_name), hint="This issue may be caused by multi-table inheritance.", obj=cls, id='models.E016', ) ) return errors @classmethod def _check_ordering(cls): """ Check "ordering" option -- is it a list of strings and do all fields exist? """ if cls._meta._ordering_clash: return [ checks.Error( "'ordering' and 'order_with_respect_to' cannot be used together.", obj=cls, id='models.E021', ), ] if cls._meta.order_with_respect_to or not cls._meta.ordering: return [] if not isinstance(cls._meta.ordering, (list, tuple)): return [ checks.Error( "'ordering' must be a tuple or list (even if you want to order by only one field).", obj=cls, id='models.E014', ) ] errors = [] fields = cls._meta.ordering # Skip expressions and '?' fields. fields = (f for f in fields if isinstance(f, str) and f != '?') # Convert "-field" to "field". fields = ((f[1:] if f.startswith('-') else f) for f in fields) # Separate related fields and non-related fields. _fields = [] related_fields = [] for f in fields: if LOOKUP_SEP in f: related_fields.append(f) else: _fields.append(f) fields = _fields # Check related fields. for field in related_fields: _cls = cls fld = None for part in field.split(LOOKUP_SEP): try: # pk is an alias that won't be found by opts.get_field. if part == 'pk': fld = _cls._meta.pk else: fld = _cls._meta.get_field(part) if fld.is_relation: _cls = fld.get_path_info()[-1].to_opts.model else: _cls = None except (FieldDoesNotExist, AttributeError): if fld is None or fld.get_transform(part) is None: errors.append( checks.Error( "'ordering' refers to the nonexistent field, " "related field, or lookup '%s'." % field, obj=cls, id='models.E015', ) ) # Skip ordering on pk. This is always a valid order_by field # but is an alias and therefore won't be found by opts.get_field. fields = {f for f in fields if f != 'pk'} # Check for invalid or nonexistent fields in ordering. invalid_fields = [] # Any field name that is not present in field_names does not exist. # Also, ordering by m2m fields is not allowed. 
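        # (valid_fields below gathers each forward field's name and attname,
        # plus related_query_name() for reverse relations -- the names that
        # order_by() will accept.)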
opts = cls._meta valid_fields = set(chain.from_iterable( (f.name, f.attname) if not (f.auto_created and not f.concrete) else (f.field.related_query_name(),) for f in chain(opts.fields, opts.related_objects) )) invalid_fields.extend(fields - valid_fields) for invalid_field in invalid_fields: errors.append( checks.Error( "'ordering' refers to the nonexistent field, related " "field, or lookup '%s'." % invalid_field, obj=cls, id='models.E015', ) ) return errors @classmethod def _check_long_column_names(cls, databases): """ Check that any auto-generated column names are shorter than the limits for each database in which the model will be created. """ if not databases: return [] errors = [] allowed_len = None db_alias = None # Find the minimum max allowed length among all specified db_aliases. for db in databases: # skip databases where the model won't be created if not router.allow_migrate_model(db, cls): continue connection = connections[db] max_name_length = connection.ops.max_name_length() if max_name_length is None or connection.features.truncates_names: continue else: if allowed_len is None: allowed_len = max_name_length db_alias = db elif max_name_length < allowed_len: allowed_len = max_name_length db_alias = db if allowed_len is None: return errors for f in cls._meta.local_fields: _, column_name = f.get_attname_column() # Check if auto-generated name for the field is too long # for the database. if f.db_column is None and column_name is not None and len(column_name) > allowed_len: errors.append( checks.Error( 'Autogenerated column name too long for field "%s". ' 'Maximum length is "%s" for database "%s".' % (column_name, allowed_len, db_alias), hint="Set the column name manually using 'db_column'.", obj=cls, id='models.E018', ) ) for f in cls._meta.local_many_to_many: # Skip nonexistent models. if isinstance(f.remote_field.through, str): continue # Check if auto-generated name for the M2M field is too long # for the database. for m2m in f.remote_field.through._meta.local_fields: _, rel_name = m2m.get_attname_column() if m2m.db_column is None and rel_name is not None and len(rel_name) > allowed_len: errors.append( checks.Error( 'Autogenerated column name too long for M2M field ' '"%s". Maximum length is "%s" for database "%s".' % (rel_name, allowed_len, db_alias), hint=( "Use 'through' to create a separate model for " "M2M and then set column_name using 'db_column'." ), obj=cls, id='models.E019', ) ) return errors @classmethod def _check_constraints(cls, databases): errors = [] for db in databases: if not router.allow_migrate_model(db, cls): continue connection = connections[db] if not ( connection.features.supports_table_check_constraints or 'supports_table_check_constraints' in cls._meta.required_db_features ) and any( isinstance(constraint, CheckConstraint) for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support check constraints.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W027', ) ) if not ( connection.features.supports_partial_indexes or 'supports_partial_indexes' in cls._meta.required_db_features ) and any( isinstance(constraint, UniqueConstraint) and constraint.condition is not None for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support unique constraints with ' 'conditions.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." 
), obj=cls, id='models.W036', ) ) if not ( connection.features.supports_deferrable_unique_constraints or 'supports_deferrable_unique_constraints' in cls._meta.required_db_features ) and any( isinstance(constraint, UniqueConstraint) and constraint.deferrable is not None for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support deferrable unique constraints.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W038', ) ) return errors ############################################ # HELPER FUNCTIONS (CURRIED MODEL METHODS) # ############################################ # ORDERING METHODS ######################### def method_set_order(self, ordered_obj, id_list, using=None): if using is None: using = DEFAULT_DB_ALIAS order_wrt = ordered_obj._meta.order_with_respect_to filter_args = order_wrt.get_forward_related_filter(self) ordered_obj.objects.db_manager(using).filter(**filter_args).bulk_update([ ordered_obj(pk=pk, _order=order) for order, pk in enumerate(id_list) ], ['_order']) def method_get_order(self, ordered_obj): order_wrt = ordered_obj._meta.order_with_respect_to filter_args = order_wrt.get_forward_related_filter(self) pk_name = ordered_obj._meta.pk.name return ordered_obj.objects.filter(**filter_args).values_list(pk_name, flat=True) def make_foreign_order_accessors(model, related_model): setattr( related_model, 'get_%s_order' % model.__name__.lower(), partialmethod(method_get_order, model) ) setattr( related_model, 'set_%s_order' % model.__name__.lower(), partialmethod(method_set_order, model) ) ######## # MISC # ######## def model_unpickle(model_id): """Used to unpickle Model subclasses with deferred fields.""" if isinstance(model_id, tuple): model = apps.get_model(*model_id) else: # Backwards compat - the model was cached directly in earlier versions. model = model_id return model.__new__(model) model_unpickle.__safe_for_unpickle__ = True
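

# An illustrative sketch of the accessors wired up above (assuming a
# hypothetical Answer model with Meta.order_with_respect_to = 'question'):
#
#     question.get_answer_order()           # -> QuerySet of Answer pks
#     question.set_answer_order([3, 1, 2])  # bulk-updates Answer._order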
import collections.abc import copy import datetime import decimal import operator import uuid import warnings from base64 import b64decode, b64encode from functools import partialmethod, total_ordering from django import forms from django.apps import apps from django.conf import settings from django.core import checks, exceptions, validators from django.db import connection, connections, router from django.db.models.constants import LOOKUP_SEP from django.db.models.query_utils import DeferredAttribute, RegisterLookupMixin from django.utils import timezone from django.utils.datastructures import DictWrapper from django.utils.dateparse import ( parse_date, parse_datetime, parse_duration, parse_time, ) from django.utils.duration import duration_microseconds, duration_string from django.utils.functional import Promise, cached_property from django.utils.ipv6 import clean_ipv6_address from django.utils.itercompat import is_iterable from django.utils.text import capfirst from django.utils.translation import gettext_lazy as _ __all__ = [ 'AutoField', 'BLANK_CHOICE_DASH', 'BigAutoField', 'BigIntegerField', 'BinaryField', 'BooleanField', 'CharField', 'CommaSeparatedIntegerField', 'DateField', 'DateTimeField', 'DecimalField', 'DurationField', 'EmailField', 'Empty', 'Field', 'FilePathField', 'FloatField', 'GenericIPAddressField', 'IPAddressField', 'IntegerField', 'NOT_PROVIDED', 'NullBooleanField', 'PositiveBigIntegerField', 'PositiveIntegerField', 'PositiveSmallIntegerField', 'SlugField', 'SmallAutoField', 'SmallIntegerField', 'TextField', 'TimeField', 'URLField', 'UUIDField', ] class Empty: pass class NOT_PROVIDED: pass # The values to use for "blank" in SelectFields. Will be appended to the start # of most "choices" lists. BLANK_CHOICE_DASH = [("", "---------")] def _load_field(app_label, model_name, field_name): return apps.get_model(app_label, model_name)._meta.get_field(field_name) # A guide to Field parameters: # # * name: The name of the field specified in the model. # * attname: The attribute to use on the model object. This is the same as # "name", except in the case of ForeignKeys, where "_id" is # appended. # * db_column: The db_column specified in the model (or None). # * column: The database column for this field. This is the same as # "attname", except if db_column is specified. # # Code that introspects values, or does other dynamic things, should use # attname. For example, this gets the primary key value of object "obj": # # getattr(obj, opts.pk.attname) def _empty(of_cls): new = Empty() new.__class__ = of_cls return new def return_None(): return None @total_ordering class Field(RegisterLookupMixin): """Base class for all field types""" # Designates whether empty strings fundamentally are allowed at the # database level. empty_strings_allowed = True empty_values = list(validators.EMPTY_VALUES) # These track each time a Field instance is created. Used to retain order. # The auto_creation_counter is used for fields that Django implicitly # creates, creation_counter is used for all user-specified fields. creation_counter = 0 auto_creation_counter = -1 default_validators = [] # Default set of validators default_error_messages = { 'invalid_choice': _('Value %(value)r is not a valid choice.'), 'null': _('This field cannot be null.'), 'blank': _('This field cannot be blank.'), 'unique': _('%(model_name)s with this %(field_label)s ' 'already exists.'), # Translators: The 'lookup_type' is one of 'date', 'year' or 'month'. 
# Eg: "Title must be unique for pub_date year" 'unique_for_date': _("%(field_label)s must be unique for " "%(date_field_label)s %(lookup_type)s."), } system_check_deprecated_details = None system_check_removed_details = None # Field flags hidden = False many_to_many = None many_to_one = None one_to_many = None one_to_one = None related_model = None descriptor_class = DeferredAttribute # Generic field type description, usually overridden by subclasses def _description(self): return _('Field of type: %(field_type)s') % { 'field_type': self.__class__.__name__ } description = property(_description) def __init__(self, verbose_name=None, name=None, primary_key=False, max_length=None, unique=False, blank=False, null=False, db_index=False, rel=None, default=NOT_PROVIDED, editable=True, serialize=True, unique_for_date=None, unique_for_month=None, unique_for_year=None, choices=None, help_text='', db_column=None, db_tablespace=None, auto_created=False, validators=(), error_messages=None): self.name = name self.verbose_name = verbose_name # May be set by set_attributes_from_name self._verbose_name = verbose_name # Store original for deconstruction self.primary_key = primary_key self.max_length, self._unique = max_length, unique self.blank, self.null = blank, null self.remote_field = rel self.is_relation = self.remote_field is not None self.default = default self.editable = editable self.serialize = serialize self.unique_for_date = unique_for_date self.unique_for_month = unique_for_month self.unique_for_year = unique_for_year if isinstance(choices, collections.abc.Iterator): choices = list(choices) self.choices = choices self.help_text = help_text self.db_index = db_index self.db_column = db_column self._db_tablespace = db_tablespace self.auto_created = auto_created # Adjust the appropriate creation counter, and save our local copy. if auto_created: self.creation_counter = Field.auto_creation_counter Field.auto_creation_counter -= 1 else: self.creation_counter = Field.creation_counter Field.creation_counter += 1 self._validators = list(validators) # Store for deconstruction later messages = {} for c in reversed(self.__class__.__mro__): messages.update(getattr(c, 'default_error_messages', {})) messages.update(error_messages or {}) self._error_messages = error_messages # Store for deconstruction later self.error_messages = messages def __str__(self): """ Return "app_label.model_label.field_name" for fields attached to models. """ if not hasattr(self, 'model'): return super().__str__() model = self.model app = model._meta.app_label return '%s.%s.%s' % (app, model._meta.object_name, self.name) def __repr__(self): """Display the module, class, and name of the field.""" path = '%s.%s' % (self.__class__.__module__, self.__class__.__qualname__) name = getattr(self, 'name', None) if name is not None: return '<%s: %s>' % (path, name) return '<%s>' % path def check(self, **kwargs): return [ *self._check_field_name(), *self._check_choices(), *self._check_db_index(), *self._check_null_allowed_for_primary_keys(), *self._check_backend_specific_checks(**kwargs), *self._check_validators(), *self._check_deprecation_details(), ] def _check_field_name(self): """ Check if field name is valid, i.e. 1) does not end with an underscore, 2) does not contain "__" and 3) is not "pk". """ if self.name.endswith('_'): return [ checks.Error( 'Field names must not end with an underscore.', obj=self, id='fields.E001', ) ] elif LOOKUP_SEP in self.name: return [ checks.Error( 'Field names must not contain "%s".' 
% LOOKUP_SEP, obj=self, id='fields.E002', ) ] elif self.name == 'pk': return [ checks.Error( "'pk' is a reserved word that cannot be used as a field name.", obj=self, id='fields.E003', ) ] else: return [] @classmethod def _choices_is_value(cls, value): return isinstance(value, (str, Promise)) or not is_iterable(value) def _check_choices(self): if not self.choices: return [] if not is_iterable(self.choices) or isinstance(self.choices, str): return [ checks.Error( "'choices' must be an iterable (e.g., a list or tuple).", obj=self, id='fields.E004', ) ] choice_max_length = 0 # Expect [group_name, [value, display]] for choices_group in self.choices: try: group_name, group_choices = choices_group except (TypeError, ValueError): # Containing non-pairs break try: if not all( self._choices_is_value(value) and self._choices_is_value(human_name) for value, human_name in group_choices ): break if self.max_length is not None and group_choices: choice_max_length = max([ choice_max_length, *(len(value) for value, _ in group_choices if isinstance(value, str)), ]) except (TypeError, ValueError): # No groups, choices in the form [value, display] value, human_name = group_name, group_choices if not self._choices_is_value(value) or not self._choices_is_value(human_name): break if self.max_length is not None and isinstance(value, str): choice_max_length = max(choice_max_length, len(value)) # Special case: choices=['ab'] if isinstance(choices_group, str): break else: if self.max_length is not None and choice_max_length > self.max_length: return [ checks.Error( "'max_length' is too small to fit the longest value " "in 'choices' (%d characters)." % choice_max_length, obj=self, id='fields.E009', ), ] return [] return [ checks.Error( "'choices' must be an iterable containing " "(actual value, human readable name) tuples.", obj=self, id='fields.E005', ) ] def _check_db_index(self): if self.db_index not in (None, True, False): return [ checks.Error( "'db_index' must be None, True or False.", obj=self, id='fields.E006', ) ] else: return [] def _check_null_allowed_for_primary_keys(self): if (self.primary_key and self.null and not connection.features.interprets_empty_strings_as_nulls): # We cannot reliably check this for backends like Oracle which # consider NULL and '' to be equal (and thus set up # character-based fields a little differently). return [ checks.Error( 'Primary keys must not have null=True.', hint=('Set null=False on the field, or ' 'remove primary_key=True argument.'), obj=self, id='fields.E007', ) ] else: return [] def _check_backend_specific_checks(self, databases=None, **kwargs): if databases is None: return [] app_label = self.model._meta.app_label errors = [] for alias in databases: if router.allow_migrate(alias, app_label, model_name=self.model._meta.model_name): errors.extend(connections[alias].validation.check_field(self, **kwargs)) return errors def _check_validators(self): errors = [] for i, validator in enumerate(self.validators): if not callable(validator): errors.append( checks.Error( "All 'validators' must be callable.", hint=( "validators[{i}] ({repr}) isn't a function or " "instance of a validator class.".format( i=i, repr=repr(validator), ) ), obj=self, id='fields.E008', ) ) return errors def _check_deprecation_details(self): if self.system_check_removed_details is not None: return [ checks.Error( self.system_check_removed_details.get( 'msg', '%s has been removed except for support in historical ' 'migrations.' 
% self.__class__.__name__ ), hint=self.system_check_removed_details.get('hint'), obj=self, id=self.system_check_removed_details.get('id', 'fields.EXXX'), ) ] elif self.system_check_deprecated_details is not None: return [ checks.Warning( self.system_check_deprecated_details.get( 'msg', '%s has been deprecated.' % self.__class__.__name__ ), hint=self.system_check_deprecated_details.get('hint'), obj=self, id=self.system_check_deprecated_details.get('id', 'fields.WXXX'), ) ] return [] def get_col(self, alias, output_field=None): if output_field is None: output_field = self if alias != self.model._meta.db_table or output_field != self: from django.db.models.expressions import Col return Col(alias, self, output_field) else: return self.cached_col @cached_property def cached_col(self): from django.db.models.expressions import Col return Col(self.model._meta.db_table, self) def select_format(self, compiler, sql, params): """ Custom format for select clauses. For example, GIS columns need to be selected as AsText(table.col) on MySQL as the table.col data can't be used by Django. """ return sql, params def deconstruct(self): """ Return enough information to recreate the field as a 4-tuple: * The name of the field on the model, if contribute_to_class() has been run. * The import path of the field, including the class:e.g. django.db.models.IntegerField This should be the most portable version, so less specific may be better. * A list of positional arguments. * A dict of keyword arguments. Note that the positional or keyword arguments must contain values of the following types (including inner values of collection types): * None, bool, str, int, float, complex, set, frozenset, list, tuple, dict * UUID * datetime.datetime (naive), datetime.date * top-level classes, top-level functions - will be referenced by their full import path * Storage instances - these have their own deconstruct() method This is because the values here must be serialized into a text format (possibly new Python code, possibly JSON) and these are the only types with encoding handlers defined. There's no need to return the exact way the field was instantiated this time, just ensure that the resulting field is the same - prefer keyword arguments over positional ones, and omit parameters with their default values. 
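
        For example (a sketch): a CharField(max_length=100) attached to a
        model as "name" deconstructs to roughly
        ('name', 'django.db.models.CharField', [], {'max_length': 100}).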
""" # Short-form way of fetching all the default parameters keywords = {} possibles = { "verbose_name": None, "primary_key": False, "max_length": None, "unique": False, "blank": False, "null": False, "db_index": False, "default": NOT_PROVIDED, "editable": True, "serialize": True, "unique_for_date": None, "unique_for_month": None, "unique_for_year": None, "choices": None, "help_text": '', "db_column": None, "db_tablespace": None, "auto_created": False, "validators": [], "error_messages": None, } attr_overrides = { "unique": "_unique", "error_messages": "_error_messages", "validators": "_validators", "verbose_name": "_verbose_name", "db_tablespace": "_db_tablespace", } equals_comparison = {"choices", "validators"} for name, default in possibles.items(): value = getattr(self, attr_overrides.get(name, name)) # Unroll anything iterable for choices into a concrete list if name == "choices" and isinstance(value, collections.abc.Iterable): value = list(value) # Do correct kind of comparison if name in equals_comparison: if value != default: keywords[name] = value else: if value is not default: keywords[name] = value # Work out path - we shorten it for known Django core fields path = "%s.%s" % (self.__class__.__module__, self.__class__.__qualname__) if path.startswith("django.db.models.fields.related"): path = path.replace("django.db.models.fields.related", "django.db.models") elif path.startswith("django.db.models.fields.files"): path = path.replace("django.db.models.fields.files", "django.db.models") elif path.startswith("django.db.models.fields.proxy"): path = path.replace("django.db.models.fields.proxy", "django.db.models") elif path.startswith("django.db.models.fields"): path = path.replace("django.db.models.fields", "django.db.models") # Return basic info - other fields should override this. return (self.name, path, [], keywords) def clone(self): """ Uses deconstruct() to clone a new copy of this Field. Will not preserve any class attachments/attribute names. """ name, path, args, kwargs = self.deconstruct() return self.__class__(*args, **kwargs) def __eq__(self, other): # Needed for @total_ordering if isinstance(other, Field): return self.creation_counter == other.creation_counter return NotImplemented def __lt__(self, other): # This is needed because bisect does not take a comparison function. if isinstance(other, Field): return self.creation_counter < other.creation_counter return NotImplemented def __hash__(self): return hash(self.creation_counter) def __deepcopy__(self, memodict): # We don't have to deepcopy very much here, since most things are not # intended to be altered after initial creation. obj = copy.copy(self) if self.remote_field: obj.remote_field = copy.copy(self.remote_field) if hasattr(self.remote_field, 'field') and self.remote_field.field is self: obj.remote_field.field = obj memodict[id(self)] = obj return obj def __copy__(self): # We need to avoid hitting __reduce__, so define this # slightly weird copy construct. obj = Empty() obj.__class__ = self.__class__ obj.__dict__ = self.__dict__.copy() return obj def __reduce__(self): """ Pickling should return the model._meta.fields instance of the field, not a new copy of that field. So, use the app registry to load the model and then the field back. """ if not hasattr(self, 'model'): # Fields are sometimes used without attaching them to models (for # example in aggregation). In this case give back a plain field # instance. 
The code below will create a new empty instance of # class self.__class__, then update its dict with self.__dict__ # values - so, this is very close to normal pickle. state = self.__dict__.copy() # The _get_default cached_property can't be pickled due to lambda # usage. state.pop('_get_default', None) return _empty, (self.__class__,), state return _load_field, (self.model._meta.app_label, self.model._meta.object_name, self.name) def get_pk_value_on_save(self, instance): """ Hook to generate new PK values on save. This method is called when saving instances with no primary key value set. If this method returns something else than None, then the returned value is used when saving the new instance. """ if self.default: return self.get_default() return None def to_python(self, value): """ Convert the input value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Return the converted value. Subclasses should override this. """ return value @cached_property def validators(self): """ Some validators can't be created at field initialization time. This method provides a way to delay their creation until required. """ return [*self.default_validators, *self._validators] def run_validators(self, value): if value in self.empty_values: return errors = [] for v in self.validators: try: v(value) except exceptions.ValidationError as e: if hasattr(e, 'code') and e.code in self.error_messages: e.message = self.error_messages[e.code] errors.extend(e.error_list) if errors: raise exceptions.ValidationError(errors) def validate(self, value, model_instance): """ Validate value and raise ValidationError if necessary. Subclasses should override this to provide validation logic. """ if not self.editable: # Skip validation for non-editable fields. return if self.choices is not None and value not in self.empty_values: for option_key, option_value in self.choices: if isinstance(option_value, (list, tuple)): # This is an optgroup, so look inside the group for # options. for optgroup_key, optgroup_value in option_value: if value == optgroup_key: return elif value == option_key: return raise exceptions.ValidationError( self.error_messages['invalid_choice'], code='invalid_choice', params={'value': value}, ) if value is None and not self.null: raise exceptions.ValidationError(self.error_messages['null'], code='null') if not self.blank and value in self.empty_values: raise exceptions.ValidationError(self.error_messages['blank'], code='blank') def clean(self, value, model_instance): """ Convert the value's type and run validation. Validation errors from to_python() and validate() are propagated. Return the correct value if no error is raised. """ value = self.to_python(value) self.validate(value, model_instance) self.run_validators(value) return value def db_type_parameters(self, connection): return DictWrapper(self.__dict__, connection.ops.quote_name, 'qn_') def db_check(self, connection): """ Return the database column check constraint for this field, for the provided connection. Works the same way as db_type() for the case that get_internal_type() does not map to a preexisting model field. """ data = self.db_type_parameters(connection) try: return connection.data_type_check_constraints[self.get_internal_type()] % data except KeyError: return None def db_type(self, connection): """ Return the database column data type for this field, for the provided connection. 
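
        A sketch (PostgreSQL): a CharField(max_length=100) resolves through
        data_types['CharField'] = 'varchar(%(max_length)s)' to 'varchar(100)'.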
""" # The default implementation of this method looks at the # backend-specific data_types dictionary, looking up the field by its # "internal type". # # A Field class can implement the get_internal_type() method to specify # which *preexisting* Django Field class it's most similar to -- i.e., # a custom field might be represented by a TEXT column type, which is # the same as the TextField Django field type, which means the custom # field's get_internal_type() returns 'TextField'. # # But the limitation of the get_internal_type() / data_types approach # is that it cannot handle database column types that aren't already # mapped to one of the built-in Django field types. In this case, you # can implement db_type() instead of get_internal_type() to specify # exactly which wacky database column type you want to use. data = self.db_type_parameters(connection) try: return connection.data_types[self.get_internal_type()] % data except KeyError: return None def rel_db_type(self, connection): """ Return the data type that a related field pointing to this field should use. For example, this method is called by ForeignKey and OneToOneField to determine its data type. """ return self.db_type(connection) def cast_db_type(self, connection): """Return the data type to use in the Cast() function.""" db_type = connection.ops.cast_data_types.get(self.get_internal_type()) if db_type: return db_type % self.db_type_parameters(connection) return self.db_type(connection) def db_parameters(self, connection): """ Extension of db_type(), providing a range of different return values (type, checks). This will look at db_type(), allowing custom model fields to override it. """ type_string = self.db_type(connection) check_string = self.db_check(connection) return { "type": type_string, "check": check_string, } def db_type_suffix(self, connection): return connection.data_types_suffix.get(self.get_internal_type()) def get_db_converters(self, connection): if hasattr(self, 'from_db_value'): return [self.from_db_value] return [] @property def unique(self): return self._unique or self.primary_key @property def db_tablespace(self): return self._db_tablespace or settings.DEFAULT_INDEX_TABLESPACE @property def db_returning(self): """ Private API intended only to be used by Django itself. Currently only the PostgreSQL backend supports returning multiple fields on a model. """ return False def set_attributes_from_name(self, name): self.name = self.name or name self.attname, self.column = self.get_attname_column() self.concrete = self.column is not None if self.verbose_name is None and self.name: self.verbose_name = self.name.replace('_', ' ') def contribute_to_class(self, cls, name, private_only=False): """ Register the field with the model class it belongs to. If private_only is True, create a separate instance of this field for every subclass of cls, even if cls is not an abstract model. """ self.set_attributes_from_name(name) self.model = cls cls._meta.add_field(self, private=private_only) if self.column: # Don't override classmethods with the descriptor. This means that # if you have a classmethod and a field with the same name, then # such fields can't be deferred (we don't have a check for this). if not getattr(cls, self.attname, None): setattr(cls, self.attname, self.descriptor_class(self)) if self.choices is not None: # Don't override a get_FOO_display() method defined explicitly on # this class, but don't check methods derived from inheritance, to # allow overriding inherited choices. 
For more complex inheritance # structures users should override contribute_to_class(). if 'get_%s_display' % self.name not in cls.__dict__: setattr( cls, 'get_%s_display' % self.name, partialmethod(cls._get_FIELD_display, field=self), ) def get_filter_kwargs_for_object(self, obj): """ Return a dict that when passed as kwargs to self.model.filter(), would yield all instances having the same value for this field as obj has. """ return {self.name: getattr(obj, self.attname)} def get_attname(self): return self.name def get_attname_column(self): attname = self.get_attname() column = self.db_column or attname return attname, column def get_internal_type(self): return self.__class__.__name__ def pre_save(self, model_instance, add): """Return field's value just before saving.""" return getattr(model_instance, self.attname) def get_prep_value(self, value): """Perform preliminary non-db specific value checks and conversions.""" if isinstance(value, Promise): value = value._proxy____cast() return value def get_db_prep_value(self, value, connection, prepared=False): """ Return field's value prepared for interacting with the database backend. Used by the default implementations of get_db_prep_save(). """ if not prepared: value = self.get_prep_value(value) return value def get_db_prep_save(self, value, connection): """Return field's value prepared for saving into a database.""" return self.get_db_prep_value(value, connection=connection, prepared=False) def has_default(self): """Return a boolean of whether this field has a default value.""" return self.default is not NOT_PROVIDED def get_default(self): """Return the default value for this field.""" return self._get_default() @cached_property def _get_default(self): if self.has_default(): if callable(self.default): return self.default return lambda: self.default if not self.empty_strings_allowed or self.null and not connection.features.interprets_empty_strings_as_nulls: return return_None return str # return empty string def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH, limit_choices_to=None, ordering=()): """ Return choices with a default blank choices included, for use as <select> choices for this field. """ if self.choices is not None: choices = list(self.choices) if include_blank: blank_defined = any(choice in ('', None) for choice, _ in self.flatchoices) if not blank_defined: choices = blank_choice + choices return choices rel_model = self.remote_field.model limit_choices_to = limit_choices_to or self.get_limit_choices_to() choice_func = operator.attrgetter( self.remote_field.get_related_field().attname if hasattr(self.remote_field, 'get_related_field') else 'pk' ) qs = rel_model._default_manager.complex_filter(limit_choices_to) if ordering: qs = qs.order_by(*ordering) return (blank_choice if include_blank else []) + [ (choice_func(x), str(x)) for x in qs ] def value_to_string(self, obj): """ Return a string value of this field from the passed obj. This is used by the serialization framework. 
""" return str(self.value_from_object(obj)) def _get_flatchoices(self): """Flattened version of choices tuple.""" if self.choices is None: return [] flat = [] for choice, value in self.choices: if isinstance(value, (list, tuple)): flat.extend(value) else: flat.append((choice, value)) return flat flatchoices = property(_get_flatchoices) def save_form_data(self, instance, data): setattr(instance, self.name, data) def formfield(self, form_class=None, choices_form_class=None, **kwargs): """Return a django.forms.Field instance for this field.""" defaults = { 'required': not self.blank, 'label': capfirst(self.verbose_name), 'help_text': self.help_text, } if self.has_default(): if callable(self.default): defaults['initial'] = self.default defaults['show_hidden_initial'] = True else: defaults['initial'] = self.get_default() if self.choices is not None: # Fields with choices get special treatment. include_blank = (self.blank or not (self.has_default() or 'initial' in kwargs)) defaults['choices'] = self.get_choices(include_blank=include_blank) defaults['coerce'] = self.to_python if self.null: defaults['empty_value'] = None if choices_form_class is not None: form_class = choices_form_class else: form_class = forms.TypedChoiceField # Many of the subclass-specific formfield arguments (min_value, # max_value) don't apply for choice fields, so be sure to only pass # the values that TypedChoiceField will understand. for k in list(kwargs): if k not in ('coerce', 'empty_value', 'choices', 'required', 'widget', 'label', 'initial', 'help_text', 'error_messages', 'show_hidden_initial', 'disabled'): del kwargs[k] defaults.update(kwargs) if form_class is None: form_class = forms.CharField return form_class(**defaults) def value_from_object(self, obj): """Return the value of this field in the given model instance.""" return getattr(obj, self.attname) class BooleanField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be either True or False.'), 'invalid_nullable': _('“%(value)s” value must be either True, False, or None.'), } description = _("Boolean (Either True or False)") def get_internal_type(self): return "BooleanField" def to_python(self, value): if self.null and value in self.empty_values: return None if value in (True, False): # 1/0 are equal to True/False. bool() converts former to latter. return bool(value) if value in ('t', 'True', '1'): return True if value in ('f', 'False', '0'): return False raise exceptions.ValidationError( self.error_messages['invalid_nullable' if self.null else 'invalid'], code='invalid', params={'value': value}, ) def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None return self.to_python(value) def formfield(self, **kwargs): if self.choices is not None: include_blank = not (self.has_default() or 'initial' in kwargs) defaults = {'choices': self.get_choices(include_blank=include_blank)} else: form_class = forms.NullBooleanField if self.null else forms.BooleanField # In HTML checkboxes, 'required' means "must be checked" which is # different from the choices case ("must select some value"). # required=False allows unchecked checkboxes. 
defaults = {'form_class': form_class, 'required': False} return super().formfield(**{**defaults, **kwargs}) class CharField(Field): description = _("String (up to %(max_length)s)") def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.validators.append(validators.MaxLengthValidator(self.max_length)) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_max_length_attribute(**kwargs), ] def _check_max_length_attribute(self, **kwargs): if self.max_length is None: return [ checks.Error( "CharFields must define a 'max_length' attribute.", obj=self, id='fields.E120', ) ] elif (not isinstance(self.max_length, int) or isinstance(self.max_length, bool) or self.max_length <= 0): return [ checks.Error( "'max_length' must be a positive integer.", obj=self, id='fields.E121', ) ] else: return [] def cast_db_type(self, connection): if self.max_length is None: return connection.ops.cast_char_field_without_max_length return super().cast_db_type(connection) def get_internal_type(self): return "CharField" def to_python(self, value): if isinstance(value, str) or value is None: return value return str(value) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def formfield(self, **kwargs): # Passing max_length to forms.CharField means that the value's length # will be validated twice. This is considered acceptable since we want # the value in the form field (to pass into widget for example). defaults = {'max_length': self.max_length} # TODO: Handle multiple backends with different feature flags. if self.null and not connection.features.interprets_empty_strings_as_nulls: defaults['empty_value'] = None defaults.update(kwargs) return super().formfield(**defaults) class CommaSeparatedIntegerField(CharField): default_validators = [validators.validate_comma_separated_integer_list] description = _("Comma-separated integers") system_check_removed_details = { 'msg': ( 'CommaSeparatedIntegerField is removed except for support in ' 'historical migrations.' ), 'hint': ( 'Use CharField(validators=[validate_comma_separated_integer_list]) ' 'instead.' ), 'id': 'fields.E901', } class DateTimeCheckMixin: def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_mutually_exclusive_options(), *self._check_fix_default_value(), ] def _check_mutually_exclusive_options(self): # auto_now, auto_now_add, and default are mutually exclusive # options. The use of more than one of these options together # will trigger an Error mutually_exclusive_options = [self.auto_now_add, self.auto_now, self.has_default()] enabled_options = [option not in (None, False) for option in mutually_exclusive_options].count(True) if enabled_options > 1: return [ checks.Error( "The options auto_now, auto_now_add, and default " "are mutually exclusive. Only one of these options " "may be present.", obj=self, id='fields.E160', ) ] else: return [] def _check_fix_default_value(self): return [] class DateField(DateTimeCheckMixin, Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid date format. 
It must be ' 'in YYYY-MM-DD format.'), 'invalid_date': _('“%(value)s” value has the correct format (YYYY-MM-DD) ' 'but it is an invalid date.'), } description = _("Date (without time)") def __init__(self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs): self.auto_now, self.auto_now_add = auto_now, auto_now_add if auto_now or auto_now_add: kwargs['editable'] = False kwargs['blank'] = True super().__init__(verbose_name, name, **kwargs) def _check_fix_default_value(self): """ Warn that using an actual date or datetime value is probably wrong; it's only evaluated on server startup. """ if not self.has_default(): return [] now = timezone.now() if not timezone.is_naive(now): now = timezone.make_naive(now, timezone.utc) value = self.default if isinstance(value, datetime.datetime): if not timezone.is_naive(value): value = timezone.make_naive(value, timezone.utc) value = value.date() elif isinstance(value, datetime.date): # Nothing to do, as dates don't have tz information pass else: # No explicit date / datetime value -- no checks necessary return [] offset = datetime.timedelta(days=1) lower = (now - offset).date() upper = (now + offset).date() if lower <= value <= upper: return [ checks.Warning( 'Fixed default value provided.', hint='It seems you set a fixed date / time / datetime ' 'value as default for this field. This may not be ' 'what you want. If you want to have the current date ' 'as default, use `django.utils.timezone.now`', obj=self, id='fields.W161', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.auto_now: kwargs['auto_now'] = True if self.auto_now_add: kwargs['auto_now_add'] = True if self.auto_now or self.auto_now_add: del kwargs['editable'] del kwargs['blank'] return name, path, args, kwargs def get_internal_type(self): return "DateField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.datetime): if settings.USE_TZ and timezone.is_aware(value): # Convert aware datetimes to the default time zone # before casting them to dates (#17742). 
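                # For example, with TIME_ZONE='UTC' an aware value such as
                # 2020-01-01 02:00+05:00 first becomes 2019-12-31 21:00 UTC,
                # so the resulting date is 2019-12-31, not 2020-01-01.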
default_timezone = timezone.get_default_timezone() value = timezone.make_naive(value, default_timezone) return value.date() if isinstance(value, datetime.date): return value try: parsed = parse_date(value) if parsed is not None: return parsed except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_date'], code='invalid_date', params={'value': value}, ) raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = datetime.date.today() setattr(model_instance, self.attname, value) return value else: return super().pre_save(model_instance, add) def contribute_to_class(self, cls, name, **kwargs): super().contribute_to_class(cls, name, **kwargs) if not self.null: setattr( cls, 'get_next_by_%s' % self.name, partialmethod(cls._get_next_or_previous_by_FIELD, field=self, is_next=True) ) setattr( cls, 'get_previous_by_%s' % self.name, partialmethod(cls._get_next_or_previous_by_FIELD, field=self, is_next=False) ) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): # Casts dates into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_datefield_value(value) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.DateField, **kwargs, }) class DateTimeField(DateField): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid format. It must be in ' 'YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format.'), 'invalid_date': _("“%(value)s” value has the correct format " "(YYYY-MM-DD) but it is an invalid date."), 'invalid_datetime': _('“%(value)s” value has the correct format ' '(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) ' 'but it is an invalid date/time.'), } description = _("Date (with time)") # __init__ is inherited from DateField def _check_fix_default_value(self): """ Warn that using an actual date or datetime value is probably wrong; it's only evaluated on server startup. """ if not self.has_default(): return [] now = timezone.now() if not timezone.is_naive(now): now = timezone.make_naive(now, timezone.utc) value = self.default if isinstance(value, datetime.datetime): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset upper = now + second_offset if timezone.is_aware(value): value = timezone.make_naive(value, timezone.utc) elif isinstance(value, datetime.date): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset lower = datetime.datetime(lower.year, lower.month, lower.day) upper = now + second_offset upper = datetime.datetime(upper.year, upper.month, upper.day) value = datetime.datetime(value.year, value.month, value.day) else: # No explicit date / datetime value -- no checks necessary return [] if lower <= value <= upper: return [ checks.Warning( 'Fixed default value provided.', hint='It seems you set a fixed date / time / datetime ' 'value as default for this field. This may not be ' 'what you want. 
If you want to have the current date ' 'as default, use `django.utils.timezone.now`', obj=self, id='fields.W161', ) ] return [] def get_internal_type(self): return "DateTimeField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.datetime): return value if isinstance(value, datetime.date): value = datetime.datetime(value.year, value.month, value.day) if settings.USE_TZ: # For backwards compatibility, interpret naive datetimes in # local time. This won't work during DST change, but we can't # do much about it, so we let the exceptions percolate up the # call stack. warnings.warn("DateTimeField %s.%s received a naive datetime " "(%s) while time zone support is active." % (self.model.__name__, self.name, value), RuntimeWarning) default_timezone = timezone.get_default_timezone() value = timezone.make_aware(value, default_timezone) return value try: parsed = parse_datetime(value) if parsed is not None: return parsed except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_datetime'], code='invalid_datetime', params={'value': value}, ) try: parsed = parse_date(value) if parsed is not None: return datetime.datetime(parsed.year, parsed.month, parsed.day) except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_date'], code='invalid_date', params={'value': value}, ) raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = timezone.now() setattr(model_instance, self.attname, value) return value else: return super().pre_save(model_instance, add) # contribute_to_class is inherited from DateField, it registers # get_next_by_FOO and get_prev_by_FOO def get_prep_value(self, value): value = super().get_prep_value(value) value = self.to_python(value) if value is not None and settings.USE_TZ and timezone.is_naive(value): # For backwards compatibility, interpret naive datetimes in local # time. This won't work during DST change, but we can't do much # about it, so we let the exceptions percolate up the call stack. try: name = '%s.%s' % (self.model.__name__, self.name) except AttributeError: name = '(unbound)' warnings.warn("DateTimeField %s received a naive datetime (%s)" " while time zone support is active." 
% (name, value), RuntimeWarning) default_timezone = timezone.get_default_timezone() value = timezone.make_aware(value, default_timezone) return value def get_db_prep_value(self, value, connection, prepared=False): # Casts datetimes into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_datetimefield_value(value) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.DateTimeField, **kwargs, }) class DecimalField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be a decimal number.'), } description = _("Decimal number") def __init__(self, verbose_name=None, name=None, max_digits=None, decimal_places=None, **kwargs): self.max_digits, self.decimal_places = max_digits, decimal_places super().__init__(verbose_name, name, **kwargs) def check(self, **kwargs): errors = super().check(**kwargs) digits_errors = [ *self._check_decimal_places(), *self._check_max_digits(), ] if not digits_errors: errors.extend(self._check_decimal_places_and_max_digits(**kwargs)) else: errors.extend(digits_errors) return errors def _check_decimal_places(self): try: decimal_places = int(self.decimal_places) if decimal_places < 0: raise ValueError() except TypeError: return [ checks.Error( "DecimalFields must define a 'decimal_places' attribute.", obj=self, id='fields.E130', ) ] except ValueError: return [ checks.Error( "'decimal_places' must be a non-negative integer.", obj=self, id='fields.E131', ) ] else: return [] def _check_max_digits(self): try: max_digits = int(self.max_digits) if max_digits <= 0: raise ValueError() except TypeError: return [ checks.Error( "DecimalFields must define a 'max_digits' attribute.", obj=self, id='fields.E132', ) ] except ValueError: return [ checks.Error( "'max_digits' must be a positive integer.", obj=self, id='fields.E133', ) ] else: return [] def _check_decimal_places_and_max_digits(self, **kwargs): if int(self.decimal_places) > int(self.max_digits): return [ checks.Error( "'max_digits' must be greater or equal to 'decimal_places'.", obj=self, id='fields.E134', ) ] return [] @cached_property def validators(self): return super().validators + [ validators.DecimalValidator(self.max_digits, self.decimal_places) ] @cached_property def context(self): return decimal.Context(prec=self.max_digits) def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.max_digits is not None: kwargs['max_digits'] = self.max_digits if self.decimal_places is not None: kwargs['decimal_places'] = self.decimal_places return name, path, args, kwargs def get_internal_type(self): return "DecimalField" def to_python(self, value): if value is None: return value if isinstance(value, float): return self.context.create_decimal_from_float(value) try: return decimal.Decimal(value) except decimal.InvalidOperation: raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def get_db_prep_save(self, value, connection): return connection.ops.adapt_decimalfield_value(self.to_python(value), self.max_digits, self.decimal_places) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def formfield(self, **kwargs): return super().formfield(**{ 'max_digits': self.max_digits, 'decimal_places': self.decimal_places, 'form_class': forms.DecimalField, **kwargs, }) class 
DurationField(Field): """ Store timedelta objects. Use interval on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and bigint of microseconds on other databases. """ empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid format. It must be in ' '[DD] [[HH:]MM:]ss[.uuuuuu] format.') } description = _("Duration") def get_internal_type(self): return "DurationField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.timedelta): return value try: parsed = parse_duration(value) except ValueError: pass else: if parsed is not None: return parsed raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def get_db_prep_value(self, value, connection, prepared=False): if connection.features.has_native_duration_field: return value if value is None: return None return duration_microseconds(value) def get_db_converters(self, connection): converters = [] if not connection.features.has_native_duration_field: converters.append(connection.ops.convert_durationfield_value) return converters + super().get_db_converters(connection) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else duration_string(val) def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.DurationField, **kwargs, }) class EmailField(CharField): default_validators = [validators.validate_email] description = _("Email address") def __init__(self, *args, **kwargs): # max_length=254 to be compliant with RFCs 3696 and 5321 kwargs.setdefault('max_length', 254) super().__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() # We do not exclude max_length if it matches default as we want to change # the default in future. return name, path, args, kwargs def formfield(self, **kwargs): # As with CharField, this will cause email validation to be performed # twice. 
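        # (forms.EmailField runs validate_email itself via its own
        # default_validators, and the model field runs it again.)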
return super().formfield(**{ 'form_class': forms.EmailField, **kwargs, }) class FilePathField(Field): description = _("File path") def __init__(self, verbose_name=None, name=None, path='', match=None, recursive=False, allow_files=True, allow_folders=False, **kwargs): self.path, self.match, self.recursive = path, match, recursive self.allow_files, self.allow_folders = allow_files, allow_folders kwargs.setdefault('max_length', 100) super().__init__(verbose_name, name, **kwargs) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_allowing_files_or_folders(**kwargs), ] def _check_allowing_files_or_folders(self, **kwargs): if not self.allow_files and not self.allow_folders: return [ checks.Error( "FilePathFields must have either 'allow_files' or 'allow_folders' set to True.", obj=self, id='fields.E140', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.path != '': kwargs['path'] = self.path if self.match is not None: kwargs['match'] = self.match if self.recursive is not False: kwargs['recursive'] = self.recursive if self.allow_files is not True: kwargs['allow_files'] = self.allow_files if self.allow_folders is not False: kwargs['allow_folders'] = self.allow_folders if kwargs.get("max_length") == 100: del kwargs["max_length"] return name, path, args, kwargs def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None return str(value) def formfield(self, **kwargs): return super().formfield(**{ 'path': self.path() if callable(self.path) else self.path, 'match': self.match, 'recursive': self.recursive, 'form_class': forms.FilePathField, 'allow_files': self.allow_files, 'allow_folders': self.allow_folders, **kwargs, }) def get_internal_type(self): return "FilePathField" class FloatField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be a float.'), } description = _("Floating point number") def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None try: return float(value) except (TypeError, ValueError) as e: raise e.__class__( "Field '%s' expected a number but got %r." % (self.name, value), ) from e def get_internal_type(self): return "FloatField" def to_python(self, value): if value is None: return value try: return float(value) except (TypeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.FloatField, **kwargs, }) class IntegerField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be an integer.'), } description = _("Integer") def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_max_length_warning(), ] def _check_max_length_warning(self): if self.max_length is not None: return [ checks.Warning( "'max_length' is ignored when used with %s." % self.__class__.__name__, hint="Remove 'max_length' from field", obj=self, id='fields.W122', ) ] return [] @cached_property def validators(self): # These validators can't be added at field initialization time since # they're based on values retrieved from `connection`. 
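        # For example, the base backend maps 'IntegerField' to the 32-bit
        # range (-2147483648, 2147483647), so matching MinValueValidator and
        # MaxValueValidator instances are appended below unless equally
        # strict ones are already present.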
validators_ = super().validators internal_type = self.get_internal_type() min_value, max_value = connection.ops.integer_field_range(internal_type) if min_value is not None and not any( ( isinstance(validator, validators.MinValueValidator) and ( validator.limit_value() if callable(validator.limit_value) else validator.limit_value ) >= min_value ) for validator in validators_ ): validators_.append(validators.MinValueValidator(min_value)) if max_value is not None and not any( ( isinstance(validator, validators.MaxValueValidator) and ( validator.limit_value() if callable(validator.limit_value) else validator.limit_value ) <= max_value ) for validator in validators_ ): validators_.append(validators.MaxValueValidator(max_value)) return validators_ def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None try: return int(value) except (TypeError, ValueError) as e: raise e.__class__( "Field '%s' expected a number but got %r." % (self.name, value), ) from e def get_internal_type(self): return "IntegerField" def to_python(self, value): if value is None: return value try: return int(value) except (TypeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.IntegerField, **kwargs, }) class BigIntegerField(IntegerField): description = _("Big (8 byte) integer") MAX_BIGINT = 9223372036854775807 def get_internal_type(self): return "BigIntegerField" def formfield(self, **kwargs): return super().formfield(**{ 'min_value': -BigIntegerField.MAX_BIGINT - 1, 'max_value': BigIntegerField.MAX_BIGINT, **kwargs, }) class IPAddressField(Field): empty_strings_allowed = False description = _("IPv4 address") system_check_removed_details = { 'msg': ( 'IPAddressField has been removed except for support in ' 'historical migrations.' 
), 'hint': 'Use GenericIPAddressField instead.', 'id': 'fields.E900', } def __init__(self, *args, **kwargs): kwargs['max_length'] = 15 super().__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['max_length'] return name, path, args, kwargs def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None return str(value) def get_internal_type(self): return "IPAddressField" class GenericIPAddressField(Field): empty_strings_allowed = False description = _("IP address") default_error_messages = {} def __init__(self, verbose_name=None, name=None, protocol='both', unpack_ipv4=False, *args, **kwargs): self.unpack_ipv4 = unpack_ipv4 self.protocol = protocol self.default_validators, invalid_error_message = \ validators.ip_address_validators(protocol, unpack_ipv4) self.default_error_messages['invalid'] = invalid_error_message kwargs['max_length'] = 39 super().__init__(verbose_name, name, *args, **kwargs) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_blank_and_null_values(**kwargs), ] def _check_blank_and_null_values(self, **kwargs): if not getattr(self, 'null', False) and getattr(self, 'blank', False): return [ checks.Error( 'GenericIPAddressFields cannot have blank=True if null=False, ' 'as blank values are stored as nulls.', obj=self, id='fields.E150', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.unpack_ipv4 is not False: kwargs['unpack_ipv4'] = self.unpack_ipv4 if self.protocol != "both": kwargs['protocol'] = self.protocol if kwargs.get("max_length") == 39: del kwargs['max_length'] return name, path, args, kwargs def get_internal_type(self): return "GenericIPAddressField" def to_python(self, value): if value is None: return None if not isinstance(value, str): value = str(value) value = value.strip() if ':' in value: return clean_ipv6_address(value, self.unpack_ipv4, self.error_messages['invalid']) return value def get_db_prep_value(self, value, connection, prepared=False): if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_ipaddressfield_value(value) def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None if value and ':' in value: try: return clean_ipv6_address(value, self.unpack_ipv4) except exceptions.ValidationError: pass return str(value) def formfield(self, **kwargs): return super().formfield(**{ 'protocol': self.protocol, 'form_class': forms.GenericIPAddressField, **kwargs, }) class NullBooleanField(BooleanField): default_error_messages = { 'invalid': _('“%(value)s” value must be either None, True or False.'), 'invalid_nullable': _('“%(value)s” value must be either None, True or False.'), } description = _("Boolean (Either True, False or None)") system_check_deprecated_details = { 'msg': ( 'NullBooleanField is deprecated. Support for it (except in ' 'historical migrations) will be removed in Django 4.0.' ), 'hint': 'Use BooleanField(null=True) instead.', 'id': 'fields.W903', } def __init__(self, *args, **kwargs): kwargs['null'] = True kwargs['blank'] = True super().__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['null'] del kwargs['blank'] return name, path, args, kwargs def get_internal_type(self): return "NullBooleanField" class PositiveIntegerRelDbTypeMixin: def rel_db_type(self, connection): """ Return the data type that a related field pointing to this field should use. 
In most cases, a foreign key pointing to a positive integer primary key will have an integer column data type but some databases (e.g. MySQL) have an unsigned integer type. In that case (related_fields_match_type=True), the primary key should return its db_type. """ if connection.features.related_fields_match_type: return self.db_type(connection) else: return IntegerField().db_type(connection=connection) class PositiveBigIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField): description = _('Positive big integer') def get_internal_type(self): return 'PositiveBigIntegerField' def formfield(self, **kwargs): return super().formfield(**{ 'min_value': 0, **kwargs, }) class PositiveIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField): description = _("Positive integer") def get_internal_type(self): return "PositiveIntegerField" def formfield(self, **kwargs): return super().formfield(**{ 'min_value': 0, **kwargs, }) class PositiveSmallIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField): description = _("Positive small integer") def get_internal_type(self): return "PositiveSmallIntegerField" def formfield(self, **kwargs): return super().formfield(**{ 'min_value': 0, **kwargs, }) class SlugField(CharField): default_validators = [validators.validate_slug] description = _("Slug (up to %(max_length)s)") def __init__(self, *args, max_length=50, db_index=True, allow_unicode=False, **kwargs): self.allow_unicode = allow_unicode if self.allow_unicode: self.default_validators = [validators.validate_unicode_slug] super().__init__(*args, max_length=max_length, db_index=db_index, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() if kwargs.get("max_length") == 50: del kwargs['max_length'] if self.db_index is False: kwargs['db_index'] = False else: del kwargs['db_index'] if self.allow_unicode is not False: kwargs['allow_unicode'] = self.allow_unicode return name, path, args, kwargs def get_internal_type(self): return "SlugField" def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.SlugField, 'allow_unicode': self.allow_unicode, **kwargs, }) class SmallIntegerField(IntegerField): description = _("Small integer") def get_internal_type(self): return "SmallIntegerField" class TextField(Field): description = _("Text") def get_internal_type(self): return "TextField" def to_python(self, value): if isinstance(value, str) or value is None: return value return str(value) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def formfield(self, **kwargs): # Passing max_length to forms.CharField means that the value's length # will be validated twice. This is considered acceptable since we want # the value in the form field (to pass into widget for example). return super().formfield(**{ 'max_length': self.max_length, **({} if self.choices is not None else {'widget': forms.Textarea}), **kwargs, }) class TimeField(DateTimeCheckMixin, Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid format. 
It must be in ' 'HH:MM[:ss[.uuuuuu]] format.'), 'invalid_time': _('“%(value)s” value has the correct format ' '(HH:MM[:ss[.uuuuuu]]) but it is an invalid time.'), } description = _("Time") def __init__(self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs): self.auto_now, self.auto_now_add = auto_now, auto_now_add if auto_now or auto_now_add: kwargs['editable'] = False kwargs['blank'] = True super().__init__(verbose_name, name, **kwargs) def _check_fix_default_value(self): """ Warn that using an actual date or datetime value is probably wrong; it's only evaluated on server startup. """ if not self.has_default(): return [] now = timezone.now() if not timezone.is_naive(now): now = timezone.make_naive(now, timezone.utc) value = self.default if isinstance(value, datetime.datetime): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset upper = now + second_offset if timezone.is_aware(value): value = timezone.make_naive(value, timezone.utc) elif isinstance(value, datetime.time): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset upper = now + second_offset value = datetime.datetime.combine(now.date(), value) if timezone.is_aware(value): value = timezone.make_naive(value, timezone.utc).time() else: # No explicit time / datetime value -- no checks necessary return [] if lower <= value <= upper: return [ checks.Warning( 'Fixed default value provided.', hint='It seems you set a fixed date / time / datetime ' 'value as default for this field. This may not be ' 'what you want. If you want to have the current date ' 'as default, use `django.utils.timezone.now`', obj=self, id='fields.W161', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.auto_now is not False: kwargs["auto_now"] = self.auto_now if self.auto_now_add is not False: kwargs["auto_now_add"] = self.auto_now_add if self.auto_now or self.auto_now_add: del kwargs['blank'] del kwargs['editable'] return name, path, args, kwargs def get_internal_type(self): return "TimeField" def to_python(self, value): if value is None: return None if isinstance(value, datetime.time): return value if isinstance(value, datetime.datetime): # Not usually a good idea to pass in a datetime here (it loses # information), but this can be a side-effect of interacting with a # database backend (e.g. Oracle), so we'll be accommodating. 
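            # (Oracle, for instance, has no time-only column type -- its
            # backend maps TimeField to TIMESTAMP -- so values come back as
            # full datetimes and only the time portion is kept here.)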
return value.time() try: parsed = parse_time(value) if parsed is not None: return parsed except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_time'], code='invalid_time', params={'value': value}, ) raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = datetime.datetime.now().time() setattr(model_instance, self.attname, value) return value else: return super().pre_save(model_instance, add) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): # Casts times into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_timefield_value(value) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.TimeField, **kwargs, }) class URLField(CharField): default_validators = [validators.URLValidator()] description = _("URL") def __init__(self, verbose_name=None, name=None, **kwargs): kwargs.setdefault('max_length', 200) super().__init__(verbose_name, name, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() if kwargs.get("max_length") == 200: del kwargs['max_length'] return name, path, args, kwargs def formfield(self, **kwargs): # As with CharField, this will cause URL validation to be performed # twice. return super().formfield(**{ 'form_class': forms.URLField, **kwargs, }) class BinaryField(Field): description = _("Raw binary data") empty_values = [None, b''] def __init__(self, *args, **kwargs): kwargs.setdefault('editable', False) super().__init__(*args, **kwargs) if self.max_length is not None: self.validators.append(validators.MaxLengthValidator(self.max_length)) def check(self, **kwargs): return [*super().check(**kwargs), *self._check_str_default_value()] def _check_str_default_value(self): if self.has_default() and isinstance(self.default, str): return [ checks.Error( "BinaryField's default cannot be a string. 
Use bytes " "content instead.", obj=self, id='fields.E170', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.editable: kwargs['editable'] = True else: del kwargs['editable'] return name, path, args, kwargs def get_internal_type(self): return "BinaryField" def get_placeholder(self, value, compiler, connection): return connection.ops.binary_placeholder_sql(value) def get_default(self): if self.has_default() and not callable(self.default): return self.default default = super().get_default() if default == '': return b'' return default def get_db_prep_value(self, value, connection, prepared=False): value = super().get_db_prep_value(value, connection, prepared) if value is not None: return connection.Database.Binary(value) return value def value_to_string(self, obj): """Binary data is serialized as base64""" return b64encode(self.value_from_object(obj)).decode('ascii') def to_python(self, value): # If it's a string, it should be base64-encoded data if isinstance(value, str): return memoryview(b64decode(value.encode('ascii'))) return value class UUIDField(Field): default_error_messages = { 'invalid': _('“%(value)s” is not a valid UUID.'), } description = _('Universally unique identifier') empty_strings_allowed = False def __init__(self, verbose_name=None, **kwargs): kwargs['max_length'] = 32 super().__init__(verbose_name, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['max_length'] return name, path, args, kwargs def get_internal_type(self): return "UUIDField" def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): if value is None: return None if not isinstance(value, uuid.UUID): value = self.to_python(value) if connection.features.has_native_uuid_field: return value return value.hex def to_python(self, value): if value is not None and not isinstance(value, uuid.UUID): input_form = 'int' if isinstance(value, int) else 'hex' try: return uuid.UUID(**{input_form: value}) except (AttributeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) return value def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.UUIDField, **kwargs, }) class AutoFieldMixin: db_returning = True def __init__(self, *args, **kwargs): kwargs['blank'] = True super().__init__(*args, **kwargs) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_primary_key(), ] def _check_primary_key(self): if not self.primary_key: return [ checks.Error( 'AutoFields must set primary_key=True.', obj=self, id='fields.E100', ), ] else: return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['blank'] kwargs['primary_key'] = True return name, path, args, kwargs def validate(self, value, model_instance): pass def get_db_prep_value(self, value, connection, prepared=False): if not prepared: value = self.get_prep_value(value) value = connection.ops.validate_autopk_value(value) return value def contribute_to_class(self, cls, name, **kwargs): assert not cls._meta.auto_field, ( "Model %s can't have more than one auto-generated field." % cls._meta.label ) super().contribute_to_class(cls, name, **kwargs) cls._meta.auto_field = self def formfield(self, **kwargs): return None class AutoFieldMeta(type): """ Metaclass to maintain backward inheritance compatibility for AutoField. 
It is intended that AutoFieldMixin become public API when it is possible to create a non-integer automatically-generated field using column defaults stored in the database. In many areas Django also relies on using isinstance() to check for an automatically-generated field as a subclass of AutoField. A new flag needs to be implemented on Field to be used instead. When these issues have been addressed, this metaclass could be used to deprecate inheritance from AutoField and use of isinstance() with AutoField for detecting automatically-generated fields. """ @property def _subclasses(self): return (BigAutoField, SmallAutoField) def __instancecheck__(self, instance): return isinstance(instance, self._subclasses) or super().__instancecheck__(instance) def __subclasscheck__(self, subclass): return subclass in self._subclasses or super().__subclasscheck__(subclass) class AutoField(AutoFieldMixin, IntegerField, metaclass=AutoFieldMeta): def get_internal_type(self): return 'AutoField' def rel_db_type(self, connection): return IntegerField().db_type(connection=connection) class BigAutoField(AutoFieldMixin, BigIntegerField): def get_internal_type(self): return 'BigAutoField' def rel_db_type(self, connection): return BigIntegerField().db_type(connection=connection) class SmallAutoField(AutoFieldMixin, SmallIntegerField): def get_internal_type(self): return 'SmallAutoField' def rel_db_type(self, connection): return SmallIntegerField().db_type(connection=connection)
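# ---------------------------------------------------------------------------
# Illustrative sketch, not part of Django: the hooks documented above --
# get_internal_type() to reuse a built-in column type, to_python() and
# get_prep_value() for value conversion -- are the contract a custom field
# implements. The class below is a hypothetical example tying them together.


class LowercaseCharField(CharField):
    """A CharField that normalizes values to lowercase (example only)."""

    def get_internal_type(self):
        # Reuse CharField's column mapping (e.g. varchar(max_length)) rather
        # than overriding db_type() directly.
        return 'CharField'

    def to_python(self, value):
        # Called by clean() and deserialization; normalize after the parent
        # has coerced the value to str (or passed None through).
        value = super().to_python(value)
        return value.lower() if isinstance(value, str) else value

    def get_prep_value(self, value):
        # Ensure values are normalized before they reach the database too.
        value = super().get_prep_value(value)
        return value.lower() if isinstance(value, str) else value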
from django.db import InterfaceError
from django.db.backends.base.features import BaseDatabaseFeatures


class DatabaseFeatures(BaseDatabaseFeatures):
    interprets_empty_strings_as_nulls = True
    has_select_for_update = True
    has_select_for_update_nowait = True
    has_select_for_update_skip_locked = True
    has_select_for_update_of = True
    select_for_update_of_column = True
    can_return_columns_from_insert = True
    can_introspect_autofield = True
    supports_subqueries_in_group_by = False
    supports_transactions = True
    supports_timezones = False
    has_native_duration_field = True
    can_defer_constraint_checks = True
    supports_partially_nullable_unique_constraints = False
    supports_deferrable_unique_constraints = True
    truncates_names = True
    supports_tablespaces = True
    supports_sequence_reset = False
    can_introspect_materialized_views = True
    can_introspect_time_field = False
    atomic_transactions = False
    supports_combined_alters = False
    nulls_order_largest = True
    requires_literal_defaults = True
    closed_cursor_error_class = InterfaceError
    bare_select_suffix = " FROM DUAL"
    # select for update with limit can be achieved on Oracle, but not with
    # the current backend.
    supports_select_for_update_with_limit = False
    supports_temporal_subtraction = True
    # Oracle doesn't ignore quoted identifiers case but the current backend
    # does by uppercasing all identifiers.
    ignores_table_name_case = True
    supports_index_on_text_field = False
    has_case_insensitive_like = False
    create_test_procedure_without_params_sql = """
        CREATE PROCEDURE "TEST_PROCEDURE" AS
            V_I INTEGER;
        BEGIN
            V_I := 1;
        END;
    """
    create_test_procedure_with_int_param_sql = """
        CREATE PROCEDURE "TEST_PROCEDURE" (P_I INTEGER) AS
            V_I INTEGER;
        BEGIN
            V_I := P_I;
        END;
    """
    supports_callproc_kwargs = True
    supports_over_clause = True
    supports_frame_range_fixed_distance = True
    supports_ignore_conflicts = False
    max_query_params = 2**16 - 1
    supports_partial_indexes = False
    supports_slicing_ordering_in_compound = True
    allows_multiple_constraints_on_same_fields = False
    supports_boolean_expr_in_select_clause = False
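# Illustrative sketch, not part of Django: flags like the ones above are
# consumed at runtime through connection.features (and in the test suite
# through the skipUnlessDBFeature decorator). The helper below is
# hypothetical.


def _example_can_lock_rows_without_waiting():
    # Hypothetical: ask the active connection's feature flags instead of
    # checking connection.vendor.
    from django.db import connection
    return connection.features.has_select_for_update_nowait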
""" Oracle database backend for Django. Requires cx_Oracle: https://oracle.github.io/python-cx_Oracle/ """ import datetime import decimal import os import platform from contextlib import contextmanager from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.db import IntegrityError from django.db.backends.base.base import BaseDatabaseWrapper from django.utils.asyncio import async_unsafe from django.utils.encoding import force_bytes, force_str from django.utils.functional import cached_property def _setup_environment(environ): # Cygwin requires some special voodoo to set the environment variables # properly so that Oracle will see them. if platform.system().upper().startswith('CYGWIN'): try: import ctypes except ImportError as e: raise ImproperlyConfigured("Error loading ctypes: %s; " "the Oracle backend requires ctypes to " "operate correctly under Cygwin." % e) kernel32 = ctypes.CDLL('kernel32') for name, value in environ: kernel32.SetEnvironmentVariableA(name, value) else: os.environ.update(environ) _setup_environment([ # Oracle takes client-side character set encoding from the environment. ('NLS_LANG', '.AL32UTF8'), # This prevents Unicode from getting mangled by getting encoded into the # potentially non-Unicode database character set. ('ORA_NCHAR_LITERAL_REPLACE', 'TRUE'), ]) try: import cx_Oracle as Database except ImportError as e: raise ImproperlyConfigured("Error loading cx_Oracle module: %s" % e) # Some of these import cx_Oracle, so import them after checking if it's installed. from .client import DatabaseClient # NOQA isort:skip from .creation import DatabaseCreation # NOQA isort:skip from .features import DatabaseFeatures # NOQA isort:skip from .introspection import DatabaseIntrospection # NOQA isort:skip from .operations import DatabaseOperations # NOQA isort:skip from .schema import DatabaseSchemaEditor # NOQA isort:skip from .utils import Oracle_datetime # NOQA isort:skip from .validation import DatabaseValidation # NOQA isort:skip @contextmanager def wrap_oracle_errors(): try: yield except Database.DatabaseError as e: # cx_Oracle raises a cx_Oracle.DatabaseError exception with the # following attributes and values: # code = 2091 # message = 'ORA-02091: transaction rolled back # 'ORA-02291: integrity constraint (TEST_DJANGOTEST.SYS # _C00102056) violated - parent key not found' # or: # 'ORA-00001: unique constraint (DJANGOTEST.DEFERRABLE_ # PINK_CONSTRAINT) violated # Convert that case to Django's IntegrityError exception. x = e.args[0] if ( hasattr(x, 'code') and hasattr(x, 'message') and x.code == 2091 and ('ORA-02291' in x.message or 'ORA-00001' in x.message) ): raise IntegrityError(*tuple(e.args)) raise class _UninitializedOperatorsDescriptor: def __get__(self, instance, cls=None): # If connection.operators is looked up before a connection has been # created, transparently initialize connection.operators to avert an # AttributeError. if instance is None: raise AttributeError("operators not available as class attribute") # Creating a cursor will initialize the operators. instance.cursor().close() return instance.__dict__['operators'] class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'oracle' display_name = 'Oracle' # This dictionary maps Field objects to their associated Oracle column # types, as strings. Column-type strings can contain format strings; they'll # be interpolated against the values of Field.__dict__ before being output. # If a column type is set to None, it won't be included in the output. 
# # Any format strings starting with "qn_" are quoted before being used in the # output (the "qn_" prefix is stripped before the lookup is performed. data_types = { 'AutoField': 'NUMBER(11) GENERATED BY DEFAULT ON NULL AS IDENTITY', 'BigAutoField': 'NUMBER(19) GENERATED BY DEFAULT ON NULL AS IDENTITY', 'BinaryField': 'BLOB', 'BooleanField': 'NUMBER(1)', 'CharField': 'NVARCHAR2(%(max_length)s)', 'DateField': 'DATE', 'DateTimeField': 'TIMESTAMP', 'DecimalField': 'NUMBER(%(max_digits)s, %(decimal_places)s)', 'DurationField': 'INTERVAL DAY(9) TO SECOND(6)', 'FileField': 'NVARCHAR2(%(max_length)s)', 'FilePathField': 'NVARCHAR2(%(max_length)s)', 'FloatField': 'DOUBLE PRECISION', 'IntegerField': 'NUMBER(11)', 'BigIntegerField': 'NUMBER(19)', 'IPAddressField': 'VARCHAR2(15)', 'GenericIPAddressField': 'VARCHAR2(39)', 'NullBooleanField': 'NUMBER(1)', 'OneToOneField': 'NUMBER(11)', 'PositiveBigIntegerField': 'NUMBER(19)', 'PositiveIntegerField': 'NUMBER(11)', 'PositiveSmallIntegerField': 'NUMBER(11)', 'SlugField': 'NVARCHAR2(%(max_length)s)', 'SmallAutoField': 'NUMBER(5) GENERATED BY DEFAULT ON NULL AS IDENTITY', 'SmallIntegerField': 'NUMBER(11)', 'TextField': 'NCLOB', 'TimeField': 'TIMESTAMP', 'URLField': 'VARCHAR2(%(max_length)s)', 'UUIDField': 'VARCHAR2(32)', } data_type_check_constraints = { 'BooleanField': '%(qn_column)s IN (0,1)', 'NullBooleanField': '%(qn_column)s IN (0,1)', 'PositiveBigIntegerField': '%(qn_column)s >= 0', 'PositiveIntegerField': '%(qn_column)s >= 0', 'PositiveSmallIntegerField': '%(qn_column)s >= 0', } # Oracle doesn't support a database index on these columns. _limited_data_types = ('clob', 'nclob', 'blob') operators = _UninitializedOperatorsDescriptor() _standard_operators = { 'exact': '= %s', 'iexact': '= UPPER(%s)', 'contains': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", 'icontains': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", 'gt': '> %s', 'gte': '>= %s', 'lt': '< %s', 'lte': '<= %s', 'startswith': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", 'endswith': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", 'istartswith': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", 'iendswith': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)", } _likec_operators = { **_standard_operators, 'contains': "LIKEC %s ESCAPE '\\'", 'icontains': "LIKEC UPPER(%s) ESCAPE '\\'", 'startswith': "LIKEC %s ESCAPE '\\'", 'endswith': "LIKEC %s ESCAPE '\\'", 'istartswith': "LIKEC UPPER(%s) ESCAPE '\\'", 'iendswith': "LIKEC UPPER(%s) ESCAPE '\\'", } # The patterns below are used to generate SQL pattern lookup clauses when # the right-hand side of the lookup isn't a raw string (it might be an expression # or the result of a bilateral transformation). # In those cases, special characters for LIKE operators (e.g. \, %, _) # should be escaped on the database side. # # Note: we use str.format() here for readability as '%' is used as a wildcard for # the LIKE operator. 
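    # For a 'contains' lookup against an expression, for example, the escaped
    # expression built by pattern_esc below is wrapped first in the
    # _pattern_ops template and then in the LIKE TRANSLATE form, yielding
    # roughly:
    #   LIKE TRANSLATE('%' || REPLACE(...) || '%' USING NCHAR_CS)
    #       ESCAPE TRANSLATE('\' USING NCHAR_CS)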
pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')" _pattern_ops = { 'contains': "'%%' || {} || '%%'", 'icontains': "'%%' || UPPER({}) || '%%'", 'startswith': "{} || '%%'", 'istartswith': "UPPER({}) || '%%'", 'endswith': "'%%' || {}", 'iendswith': "'%%' || UPPER({})", } _standard_pattern_ops = {k: "LIKE TRANSLATE( " + v + " USING NCHAR_CS)" " ESCAPE TRANSLATE('\\' USING NCHAR_CS)" for k, v in _pattern_ops.items()} _likec_pattern_ops = {k: "LIKEC " + v + " ESCAPE '\\'" for k, v in _pattern_ops.items()} Database = Database SchemaEditorClass = DatabaseSchemaEditor # Classes instantiated in __init__(). client_class = DatabaseClient creation_class = DatabaseCreation features_class = DatabaseFeatures introspection_class = DatabaseIntrospection ops_class = DatabaseOperations validation_class = DatabaseValidation def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) use_returning_into = self.settings_dict["OPTIONS"].get('use_returning_into', True) self.features.can_return_columns_from_insert = use_returning_into def _dsn(self): settings_dict = self.settings_dict if not settings_dict['HOST'].strip(): settings_dict['HOST'] = 'localhost' if settings_dict['PORT']: return Database.makedsn(settings_dict['HOST'], int(settings_dict['PORT']), settings_dict['NAME']) return settings_dict['NAME'] def _connect_string(self): return '%s/"%s"@%s' % (self.settings_dict['USER'], self.settings_dict['PASSWORD'], self._dsn()) def get_connection_params(self): conn_params = self.settings_dict['OPTIONS'].copy() if 'use_returning_into' in conn_params: del conn_params['use_returning_into'] return conn_params @async_unsafe def get_new_connection(self, conn_params): return Database.connect( user=self.settings_dict['USER'], password=self.settings_dict['PASSWORD'], dsn=self._dsn(), **conn_params, ) def init_connection_state(self): cursor = self.create_cursor() # Set the territory first. The territory overrides NLS_DATE_FORMAT # and NLS_TIMESTAMP_FORMAT to the territory default. When all of # these are set in single statement it isn't clear what is supposed # to happen. cursor.execute("ALTER SESSION SET NLS_TERRITORY = 'AMERICA'") # Set Oracle date to ANSI date format. This only needs to execute # once when we create a new connection. We also set the Territory # to 'AMERICA' which forces Sunday to evaluate to a '1' in # TO_CHAR(). cursor.execute( "ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS'" " NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'" + (" TIME_ZONE = 'UTC'" if settings.USE_TZ else '') ) cursor.close() if 'operators' not in self.__dict__: # Ticket #14149: Check whether our LIKE implementation will # work for this connection or we need to fall back on LIKEC. # This check is performed only once per DatabaseWrapper # instance per thread, since subsequent connections will use # the same settings. cursor = self.create_cursor() try: cursor.execute("SELECT 1 FROM DUAL WHERE DUMMY %s" % self._standard_operators['contains'], ['X']) except Database.DatabaseError: self.operators = self._likec_operators self.pattern_ops = self._likec_pattern_ops else: self.operators = self._standard_operators self.pattern_ops = self._standard_pattern_ops cursor.close() self.connection.stmtcachesize = 20 # Ensure all changes are preserved even when AUTOCOMMIT is False. 
if not self.get_autocommit(): self.commit() @async_unsafe def create_cursor(self, name=None): return FormatStylePlaceholderCursor(self.connection) def _commit(self): if self.connection is not None: with wrap_oracle_errors(): return self.connection.commit() # Oracle doesn't support releasing savepoints. But we fake them when query # logging is enabled to keep query counts consistent with other backends. def _savepoint_commit(self, sid): if self.queries_logged: self.queries_log.append({ 'sql': '-- RELEASE SAVEPOINT %s (faked)' % self.ops.quote_name(sid), 'time': '0.000', }) def _set_autocommit(self, autocommit): with self.wrap_database_errors: self.connection.autocommit = autocommit def check_constraints(self, table_names=None): """ Check constraints by setting them to immediate. Return them to deferred afterward. """ with self.cursor() as cursor: cursor.execute('SET CONSTRAINTS ALL IMMEDIATE') cursor.execute('SET CONSTRAINTS ALL DEFERRED') def is_usable(self): try: self.connection.ping() except Database.Error: return False else: return True @cached_property def oracle_version(self): with self.temporary_connection(): return tuple(int(x) for x in self.connection.version.split('.')) class OracleParam: """ Wrapper object for formatting parameters for Oracle. If the string representation of the value is large enough (greater than 4000 characters) the input size needs to be set as CLOB. Alternatively, if the parameter has an `input_size` attribute, then the value of the `input_size` attribute will be used instead. Otherwise, no input size will be set for the parameter when executing the query. """ def __init__(self, param, cursor, strings_only=False): # With raw SQL queries, datetimes can reach this function # without being converted by DateTimeField.get_db_prep_value. if settings.USE_TZ and (isinstance(param, datetime.datetime) and not isinstance(param, Oracle_datetime)): param = Oracle_datetime.from_datetime(param) string_size = 0 # Oracle doesn't recognize True and False correctly. if param is True: param = 1 elif param is False: param = 0 if hasattr(param, 'bind_parameter'): self.force_bytes = param.bind_parameter(cursor) elif isinstance(param, (Database.Binary, datetime.timedelta)): self.force_bytes = param else: # To transmit to the database, we need Unicode if supported # To get size right, we must consider bytes. self.force_bytes = force_str(param, cursor.charset, strings_only) if isinstance(self.force_bytes, str): # We could optimize by only converting up to 4000 bytes here string_size = len(force_bytes(param, cursor.charset, strings_only)) if hasattr(param, 'input_size'): # If parameter has `input_size` attribute, use that. self.input_size = param.input_size elif string_size > 4000: # Mark any string param greater than 4000 characters as a CLOB. self.input_size = Database.CLOB elif isinstance(param, datetime.datetime): self.input_size = Database.TIMESTAMP else: self.input_size = None class VariableWrapper: """ An adapter class for cursor variables that prevents the wrapped object from being converted into a string when used to instantiate an OracleParam. This can be used generally for any other object that should be passed into Cursor.execute as-is. """ def __init__(self, var): self.var = var def bind_parameter(self, cursor): return self.var def __getattr__(self, key): return getattr(self.var, key) def __setattr__(self, key, value): if key == 'var': self.__dict__[key] = value else: setattr(self.var, key, value) class FormatStylePlaceholderCursor: """ Django uses "format" (e.g. 
    '%s') style placeholders, but Oracle uses ":var" style. This fixes it --
    but note that if you want to use a literal "%s" in a query, you'll need
    to use "%%s".
    """
    charset = 'utf-8'

    def __init__(self, connection):
        self.cursor = connection.cursor()
        self.cursor.outputtypehandler = self._output_type_handler

    @staticmethod
    def _output_number_converter(value):
        return decimal.Decimal(value) if '.' in value else int(value)

    @staticmethod
    def _get_decimal_converter(precision, scale):
        if scale == 0:
            return int
        context = decimal.Context(prec=precision)
        quantize_value = decimal.Decimal(1).scaleb(-scale)
        return lambda v: decimal.Decimal(v).quantize(quantize_value, context=context)

    @staticmethod
    def _output_type_handler(cursor, name, defaultType, length, precision, scale):
        """
        Called for each db column fetched from cursors. Return numbers as the
        appropriate Python type.
        """
        if defaultType == Database.NUMBER:
            if scale == -127:
                if precision == 0:
                    # NUMBER column: decimal-precision floating point.
                    # This will normally be an integer from a sequence,
                    # but it could be a decimal value.
                    outconverter = FormatStylePlaceholderCursor._output_number_converter
                else:
                    # FLOAT column: binary-precision floating point.
                    # This comes from FloatField columns.
                    outconverter = float
            elif precision > 0:
                # NUMBER(p,s) column: decimal-precision fixed point.
                # This comes from IntegerField and DecimalField columns.
                outconverter = FormatStylePlaceholderCursor._get_decimal_converter(precision, scale)
            else:
                # No type information. This normally comes from a
                # mathematical expression in the SELECT list. Guess int
                # or Decimal based on whether it has a decimal point.
                outconverter = FormatStylePlaceholderCursor._output_number_converter
            return cursor.var(
                Database.STRING,
                size=255,
                arraysize=cursor.arraysize,
                outconverter=outconverter,
            )

    def _format_params(self, params):
        try:
            return {k: OracleParam(v, self, True) for k, v in params.items()}
        except AttributeError:
            return tuple(OracleParam(p, self, True) for p in params)

    def _guess_input_sizes(self, params_list):
        # Try dict handling; if that fails, treat as sequence.
        if hasattr(params_list[0], 'keys'):
            sizes = {}
            for params in params_list:
                for k, value in params.items():
                    if value.input_size:
                        sizes[k] = value.input_size
            if sizes:
                self.setinputsizes(**sizes)
        else:
            # It's not a list of dicts; it's a list of sequences.
            sizes = [None] * len(params_list[0])
            for params in params_list:
                for i, value in enumerate(params):
                    if value.input_size:
                        sizes[i] = value.input_size
            if sizes:
                self.setinputsizes(*sizes)

    def _param_generator(self, params):
        # Try dict handling; if that fails, treat as sequence.
        if hasattr(params, 'items'):
            return {k: v.force_bytes for k, v in params.items()}
        else:
            return [p.force_bytes for p in params]

    def _fix_for_params(self, query, params, unify_by_values=False):
        # cx_Oracle wants no trailing ';' for SQL statements. For PL/SQL, it
        # does want a trailing ';' but not a trailing '/'. However, these
        # characters must be included in the original query in case the query
        # is being passed to SQL*Plus.
        if query.endswith(';') or query.endswith('/'):
            query = query[:-1]
        if params is None:
            params = []
        elif hasattr(params, 'keys'):
            # Handle params as dict.
            args = {k: ":%s" % k for k in params}
            query = query % args
        elif unify_by_values and params:
            # Handle params as a dict with unified query parameters, keyed by
            # their values. It can be used only in single-query execute()
            # because executemany() shares the formatted query with each of
            # the params list. e.g. for input params = [0.75, 2, 0.75, 'sth', 0.75]
            # params_dict = {0.75: ':arg0', 2: ':arg1', 'sth': ':arg2'}
            # args = [':arg0', ':arg1', ':arg0', ':arg2', ':arg0']
            # params = {':arg0': 0.75, ':arg1': 2, ':arg2': 'sth'}
            params_dict = {
                param: ':arg%d' % i
                for i, param in enumerate(dict.fromkeys(params))
            }
            args = [params_dict[param] for param in params]
            params = {value: key for key, value in params_dict.items()}
            query = query % tuple(args)
        else:
            # Handle params as sequence.
            args = [(':arg%d' % i) for i in range(len(params))]
            query = query % tuple(args)
        return query, self._format_params(params)

    def execute(self, query, params=None):
        query, params = self._fix_for_params(query, params, unify_by_values=True)
        self._guess_input_sizes([params])
        with wrap_oracle_errors():
            return self.cursor.execute(query, self._param_generator(params))

    def executemany(self, query, params=None):
        if not params:
            # No params given, nothing to do.
            return None
        # Uniform treatment for sequences and iterables.
        params_iter = iter(params)
        query, firstparams = self._fix_for_params(query, next(params_iter))
        # We build a list of formatted params; as we're going to traverse it
        # more than once, we can't make it lazy by using a generator.
        formatted = [firstparams] + [self._format_params(p) for p in params_iter]
        self._guess_input_sizes(formatted)
        with wrap_oracle_errors():
            return self.cursor.executemany(query, [self._param_generator(p) for p in formatted])

    def close(self):
        try:
            self.cursor.close()
        except Database.InterfaceError:
            # The cursor is already closed.
            pass

    def var(self, *args):
        return VariableWrapper(self.cursor.var(*args))

    def arrayvar(self, *args):
        return VariableWrapper(self.cursor.arrayvar(*args))

    def __getattr__(self, attr):
        return getattr(self.cursor, attr)

    def __iter__(self):
        return iter(self.cursor)
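
# ---------------------------------------------------------------------------
# Editor's note: a minimal, self-contained sketch (not Django source) of the
# parameter-unification idea documented in _fix_for_params() above. Repeated
# parameter values collapse into shared ':argN' placeholders; the function
# name below is illustrative only.
def _demo_unify_by_values(query, params):
    # Deduplicate values in first-seen order and assign each a named arg.
    params_dict = {param: ':arg%d' % i for i, param in enumerate(dict.fromkeys(params))}
    # Substitute every '%s' with the named arg chosen for that value.
    args = [params_dict[param] for param in params]
    return query % tuple(args), {value: key for key, value in params_dict.items()}


# Expected behavior under the assumptions above:
#   _demo_unify_by_values('SELECT %s, %s, %s FROM DUAL', [0.75, 2, 0.75])
#   -> ('SELECT :arg0, :arg1, :arg0 FROM DUAL', {':arg0': 0.75, ':arg1': 2})
# ---------------------------------------------------------------------------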
from django.db import ProgrammingError
from django.utils.functional import cached_property


class BaseDatabaseFeatures:
    gis_enabled = False
    allows_group_by_pk = False
    allows_group_by_selected_pks = False
    empty_fetchmany_value = []
    update_can_self_select = True

    # Does the backend distinguish between '' and None?
    interprets_empty_strings_as_nulls = False

    # Does the backend allow inserting duplicate NULL rows in a nullable
    # unique field? All core backends implement this correctly, but other
    # databases such as SQL Server do not.
    supports_nullable_unique_constraints = True

    # Does the backend allow inserting duplicate rows when a unique_together
    # constraint exists and some fields are nullable but not all of them?
    supports_partially_nullable_unique_constraints = True
    # Does the backend support initially deferrable unique constraints?
    supports_deferrable_unique_constraints = False

    can_use_chunked_reads = True
    can_return_columns_from_insert = False
    can_return_rows_from_bulk_insert = False
    has_bulk_insert = True
    uses_savepoints = True
    can_release_savepoints = False

    # If True, don't use integer foreign keys referring to, e.g., positive
    # integer primary keys.
    related_fields_match_type = False
    allow_sliced_subqueries_with_in = True
    has_select_for_update = False
    has_select_for_update_nowait = False
    has_select_for_update_skip_locked = False
    has_select_for_update_of = False
    # Does the database's SELECT FOR UPDATE OF syntax require a column rather
    # than a table?
    select_for_update_of_column = False

    # Does the default test database allow multiple connections?
    # Usually an indication that the test database is in-memory.
    test_db_allows_multiple_connections = True

    # Can an object be saved without an explicit primary key?
    supports_unspecified_pk = False

    # Can a fixture contain forward references? i.e., are
    # FK constraints checked at the end of a transaction, or
    # at the end of each save operation?
    supports_forward_references = True

    # Does the backend truncate names properly when they are too long?
    truncates_names = False

    # Is there a REAL datatype in addition to floats/doubles?
    has_real_datatype = False
    supports_subqueries_in_group_by = True

    # Is there a true datatype for uuid?
    has_native_uuid_field = False

    # Is there a true datatype for timedeltas?
    has_native_duration_field = False

    # Does the database driver support same-type temporal data subtraction
    # by returning the type used to store duration fields?
    supports_temporal_subtraction = False

    # Does the __regex lookup support backreferencing and grouping?
    supports_regex_backreferencing = True

    # Can date/datetime lookups be performed using a string?
    supports_date_lookup_using_string = True

    # Can datetimes with timezones be used?
    supports_timezones = True

    # Does the database have a copy of the zoneinfo database?
    has_zoneinfo_database = True

    # When performing a GROUP BY, is an ORDER BY NULL required
    # to remove any ordering?
    requires_explicit_null_ordering_when_grouping = False

    # Does the backend order NULL values as largest or smallest?
    nulls_order_largest = False

    # Does the backend support NULLS FIRST and NULLS LAST in ORDER BY?
    supports_order_by_nulls_modifier = True

    # Does the backend order NULLS FIRST by default?
    order_by_nulls_first = False

    # The database's limit on the number of query parameters.
    max_query_params = None

    # Can an object have an autoincrement primary key of 0? MySQL says No.
    allows_auto_pk_0 = True

    # Do we need to NULL a ForeignKey out, or can the constraint check be
    # deferred?
    can_defer_constraint_checks = False

    # date_interval_sql can properly handle mixed Date/DateTime fields and
    # timedeltas.
    supports_mixed_date_datetime_comparisons = True

    # Does the backend support tablespaces? Default to False because it isn't
    # in the SQL standard.
    supports_tablespaces = False

    # Does the backend reset sequences between tests?
    supports_sequence_reset = True

    # Can the backend introspect the default value of a column?
    can_introspect_default = True

    # Confirm support for introspected foreign keys.
    # Every database can do this reliably, except MySQL, which can't do it
    # for MyISAM tables.
    can_introspect_foreign_keys = True

    # Can the backend introspect an AutoField, instead of an IntegerField?
    can_introspect_autofield = False

    # Can the backend introspect a BigIntegerField, instead of an IntegerField?
    can_introspect_big_integer_field = True

    # Can the backend introspect a BinaryField, instead of a TextField?
    can_introspect_binary_field = True

    # Can the backend introspect a DecimalField, instead of a FloatField?
    can_introspect_decimal_field = True

    # Can the backend introspect a DurationField, instead of a BigIntegerField?
    can_introspect_duration_field = True

    # Can the backend introspect an IPAddressField, instead of a CharField?
    can_introspect_ip_address_field = False

    # Can the backend introspect a PositiveIntegerField, instead of an IntegerField?
    can_introspect_positive_integer_field = False

    # Can the backend introspect a SmallIntegerField, instead of an IntegerField?
    can_introspect_small_integer_field = False

    # Can the backend introspect a TimeField, instead of a DateTimeField?
    can_introspect_time_field = True

    # Some backends may not be able to differentiate BigAutoField or
    # SmallAutoField from other fields such as AutoField.
    introspected_big_auto_field_type = 'BigAutoField'
    introspected_small_auto_field_type = 'SmallAutoField'

    # Some backends may not be able to differentiate BooleanField from other
    # fields such as IntegerField.
    introspected_boolean_field_type = 'BooleanField'

    # Can the backend introspect the column order (ASC/DESC) for indexes?
    supports_index_column_ordering = True

    # Does the backend support introspection of materialized views?
    can_introspect_materialized_views = False

    # Support for the DISTINCT ON clause.
    can_distinct_on_fields = False

    # Does the backend prevent running SQL queries in broken transactions?
    atomic_transactions = True

    # Can we roll back DDL in a transaction?
    can_rollback_ddl = False

    # Does it support operations that require renaming references in a
    # transaction?
    supports_atomic_references_rename = True

    # Can we issue more than one ALTER COLUMN clause in an ALTER TABLE?
    supports_combined_alters = False

    # Does it support foreign keys?
    supports_foreign_keys = True

    # Can it create foreign key constraints inline when adding columns?
    can_create_inline_fk = True

    # Does it support CHECK constraints?
    supports_column_check_constraints = True
    supports_table_check_constraints = True
    # Does the backend support introspection of CHECK constraints?
    can_introspect_check_constraints = True

    # Does the backend support 'pyformat' style ("... %(name)s ...", {'name': value})
    # parameter passing? Note this can be provided by the backend even if not
    # supported by the Python driver.
    supports_paramstyle_pyformat = True

    # Does the backend require literal defaults, rather than parameterized ones?
    requires_literal_defaults = False

    # Does the backend require a connection reset after each material schema change?
    connection_persists_old_columns = False

    # What kind of error does the backend throw when accessing a closed cursor?
    closed_cursor_error_class = ProgrammingError

    # Does 'a' LIKE 'A' match?
    has_case_insensitive_like = True

    # Suffix for backends that don't support "SELECT xxx;" queries.
    bare_select_suffix = ''

    # Is NULL implied on columns without needing to be explicitly specified?
    implied_column_null = False

    # Does the backend support "select for update" queries with limit (and offset)?
    supports_select_for_update_with_limit = True

    # Does the backend ignore null expressions in GREATEST and LEAST queries
    # unless every expression is null?
    greatest_least_ignores_nulls = False

    # Can the backend clone databases for parallel test execution?
    # Defaults to False to allow third-party backends to opt-in.
    can_clone_databases = False

    # Does the backend consider table names with different casing to
    # be equal?
    ignores_table_name_case = False

    # Place FOR UPDATE right after FROM clause. Used on MSSQL.
    for_update_after_from = False

    # Combinatorial flags.
    supports_select_union = True
    supports_select_intersection = True
    supports_select_difference = True
    supports_slicing_ordering_in_compound = False
    supports_parentheses_in_compound = True

    # Does the database support SQL 2003 FILTER (WHERE ...) in aggregate
    # expressions?
    supports_aggregate_filter_clause = False

    # Does the backend support indexing a TextField?
    supports_index_on_text_field = True

    # Does the backend support window expressions (expression OVER (...))?
    supports_over_clause = False
    supports_frame_range_fixed_distance = False
    only_supports_unbounded_with_preceding_and_following = False

    # Does the backend support CAST with precision?
    supports_cast_with_precision = True

    # How many fractional-second digits does the database return when casting
    # a value to a type with a time component?
    time_cast_precision = 6

    # SQL to create a procedure for use by the Django test suite. The
    # functionality of the procedure isn't important.
    create_test_procedure_without_params_sql = None
    create_test_procedure_with_int_param_sql = None

    # Does the backend support keyword parameters for cursor.callproc()?
    supports_callproc_kwargs = False

    # Convert CharField results from bytes to str in database functions.
    db_functions_convert_bytes_to_str = False

    # What formats does the backend EXPLAIN syntax support?
    supported_explain_formats = set()

    # Does DatabaseOperations.explain_query_prefix() raise ValueError if
    # unknown kwargs are passed to QuerySet.explain()?
    validates_explain_options = True

    # Does the backend support the default parameter in lead() and lag()?
    supports_default_in_lead_lag = True

    # Does the backend support ignoring constraint or uniqueness errors during
    # INSERT?
    supports_ignore_conflicts = True

    # Does this backend require casting the results of CASE expressions used
    # in UPDATE statements to ensure the expression has the correct type?
    requires_casted_case_in_updates = False

    # Does the backend support partial indexes (CREATE INDEX ... WHERE ...)?
    supports_partial_indexes = True
    supports_functions_in_partial_indexes = True

    # Does the database allow more than one constraint or index on the same
    # field(s)?
    allows_multiple_constraints_on_same_fields = True

    # Does the backend support boolean expressions in the SELECT clause?
supports_boolean_expr_in_select_clause = True def __init__(self, connection): self.connection = connection @cached_property def supports_explaining_query_execution(self): """Does this backend support explaining query execution?""" return self.connection.ops.explain_prefix is not None @cached_property def supports_transactions(self): """Confirm support for transactions.""" with self.connection.cursor() as cursor: cursor.execute('CREATE TABLE ROLLBACK_TEST (X INT)') self.connection.set_autocommit(False) cursor.execute('INSERT INTO ROLLBACK_TEST (X) VALUES (8)') self.connection.rollback() self.connection.set_autocommit(True) cursor.execute('SELECT COUNT(X) FROM ROLLBACK_TEST') count, = cursor.fetchone() cursor.execute('DROP TABLE ROLLBACK_TEST') return count == 0 def allows_group_by_selected_pks_on_model(self, model): if not self.allows_group_by_selected_pks: return False return model._meta.managed
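
# ---------------------------------------------------------------------------
# Editor's note: a hedged sketch (not Django source) of how a backend
# consumes BaseDatabaseFeatures: subclass it and override only the flags
# that differ from the defaults. The backend name is hypothetical.
class HypotheticalBackendFeatures(BaseDatabaseFeatures):
    # This fictional database lacks savepoints and truncates long names.
    uses_savepoints = False
    truncates_names = True
    # A plain class attribute shadows the cached_property probe defined on
    # the base class, so a backend that knows the answer statically can skip
    # the runtime CREATE TABLE/ROLLBACK check entirely.
    supports_transactions = True
# ---------------------------------------------------------------------------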
import operator from django.db import InterfaceError from django.db.backends.base.features import BaseDatabaseFeatures from django.utils.functional import cached_property class DatabaseFeatures(BaseDatabaseFeatures): allows_group_by_selected_pks = True can_return_columns_from_insert = True can_return_rows_from_bulk_insert = True has_real_datatype = True has_native_uuid_field = True has_native_duration_field = True can_defer_constraint_checks = True has_select_for_update = True has_select_for_update_nowait = True has_select_for_update_of = True has_select_for_update_skip_locked = True can_release_savepoints = True supports_tablespaces = True supports_transactions = True can_introspect_autofield = True can_introspect_ip_address_field = True can_introspect_materialized_views = True can_introspect_small_integer_field = True can_distinct_on_fields = True can_rollback_ddl = True supports_combined_alters = True nulls_order_largest = True closed_cursor_error_class = InterfaceError has_case_insensitive_like = False greatest_least_ignores_nulls = True can_clone_databases = True supports_temporal_subtraction = True supports_slicing_ordering_in_compound = True create_test_procedure_without_params_sql = """ CREATE FUNCTION test_procedure () RETURNS void AS $$ DECLARE V_I INTEGER; BEGIN V_I := 1; END; $$ LANGUAGE plpgsql;""" create_test_procedure_with_int_param_sql = """ CREATE FUNCTION test_procedure (P_I INTEGER) RETURNS void AS $$ DECLARE V_I INTEGER; BEGIN V_I := P_I; END; $$ LANGUAGE plpgsql;""" requires_casted_case_in_updates = True supports_over_clause = True only_supports_unbounded_with_preceding_and_following = True supports_aggregate_filter_clause = True supported_explain_formats = {'JSON', 'TEXT', 'XML', 'YAML'} validates_explain_options = False # A query will error on invalid options. supports_deferrable_unique_constraints = True @cached_property def is_postgresql_9_6(self): return self.connection.pg_version >= 90600 @cached_property def is_postgresql_10(self): return self.connection.pg_version >= 100000 @cached_property def is_postgresql_11(self): return self.connection.pg_version >= 110000 @cached_property def is_postgresql_12(self): return self.connection.pg_version >= 120000 has_bloom_index = property(operator.attrgetter('is_postgresql_9_6')) has_brin_autosummarize = property(operator.attrgetter('is_postgresql_10')) has_phraseto_tsquery = property(operator.attrgetter('is_postgresql_9_6')) has_websearch_to_tsquery = property(operator.attrgetter('is_postgresql_11')) supports_table_partitions = property(operator.attrgetter('is_postgresql_10'))
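
# ---------------------------------------------------------------------------
# Editor's note: the `property(operator.attrgetter(...))` lines above alias
# one feature flag to another boolean property. Below is a standalone,
# runnable sketch of the same pattern; the class and attribute names are
# illustrative, not part of Django.
class _VersionGatedSketch:
    pg_version = 110000

    @property
    def is_v11(self):
        return self.pg_version >= 110000

    # Reads as: "has_websearch is whatever is_v11 evaluates to."
    has_websearch = property(operator.attrgetter('is_v11'))


assert _VersionGatedSketch().has_websearch is True
# ---------------------------------------------------------------------------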
import json from django import forms from django.core.exceptions import ValidationError from django.utils.translation import gettext_lazy as _ __all__ = ['JSONField'] class InvalidJSONInput(str): pass class JSONString(str): pass class JSONField(forms.CharField): default_error_messages = { 'invalid': _('“%(value)s” value must be valid JSON.'), } widget = forms.Textarea def to_python(self, value): if self.disabled: return value if value in self.empty_values: return None elif isinstance(value, (list, dict, int, float, JSONString)): return value try: converted = json.loads(value) except json.JSONDecodeError: raise ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) if isinstance(converted, str): return JSONString(converted) else: return converted def bound_data(self, data, initial): if self.disabled: return initial try: return json.loads(data) except json.JSONDecodeError: return InvalidJSONInput(data) def prepare_value(self, value): if isinstance(value, InvalidJSONInput): return value return json.dumps(value) def has_changed(self, initial, data): if super().has_changed(initial, data): return True # For purposes of seeing whether something has changed, True isn't the # same as 1 and the order of keys doesn't matter. data = self.to_python(data) return json.dumps(initial, sort_keys=True) != json.dumps(data, sort_keys=True)
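
# ---------------------------------------------------------------------------
# Editor's note: a hedged usage sketch for the form field above, assuming a
# configured Django environment. It illustrates the round trip that
# to_python()/prepare_value() implement, with bare JSON strings wrapped in
# JSONString so has_changed() can tell decoded output from raw input:
#
#   field = JSONField(required=False)
#   field.clean('{"a": 1}')                        # -> {'a': 1}
#   isinstance(field.clean('"hi"'), JSONString)    # -> True
#   field.prepare_value({'a': 1})                  # -> '{"a": 1}'
# ---------------------------------------------------------------------------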
import datetime from unittest import mock from django.db import IntegrityError, connection, transaction from django.db.models import CheckConstraint, Deferrable, F, Func, Q from django.utils import timezone from . import PostgreSQLTestCase from .models import HotelReservation, RangesModel, Room try: from django.contrib.postgres.constraints import ExclusionConstraint from django.contrib.postgres.fields import DateTimeRangeField, RangeBoundary, RangeOperators from psycopg2.extras import DateRange, NumericRange except ImportError: pass class SchemaTests(PostgreSQLTestCase): def get_constraints(self, table): """Get the constraints on the table using a new cursor.""" with connection.cursor() as cursor: return connection.introspection.get_constraints(cursor, table) def test_check_constraint_range_value(self): constraint_name = 'ints_between' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = CheckConstraint( check=Q(ints__contained_by=NumericRange(10, 30)), name=constraint_name, ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) with self.assertRaises(IntegrityError), transaction.atomic(): RangesModel.objects.create(ints=(20, 50)) RangesModel.objects.create(ints=(10, 30)) def test_check_constraint_daterange_contains(self): constraint_name = 'dates_contains' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = CheckConstraint( check=Q(dates__contains=F('dates_inner')), name=constraint_name, ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) date_1 = datetime.date(2016, 1, 1) date_2 = datetime.date(2016, 1, 4) with self.assertRaises(IntegrityError), transaction.atomic(): RangesModel.objects.create( dates=(date_1, date_2), dates_inner=(date_1, date_2.replace(day=5)), ) RangesModel.objects.create( dates=(date_1, date_2), dates_inner=(date_1, date_2), ) def test_check_constraint_datetimerange_contains(self): constraint_name = 'timestamps_contains' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = CheckConstraint( check=Q(timestamps__contains=F('timestamps_inner')), name=constraint_name, ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) datetime_1 = datetime.datetime(2016, 1, 1) datetime_2 = datetime.datetime(2016, 1, 2, 12) with self.assertRaises(IntegrityError), transaction.atomic(): RangesModel.objects.create( timestamps=(datetime_1, datetime_2), timestamps_inner=(datetime_1, datetime_2.replace(hour=13)), ) RangesModel.objects.create( timestamps=(datetime_1, datetime_2), timestamps_inner=(datetime_1, datetime_2), ) class ExclusionConstraintTests(PostgreSQLTestCase): def get_constraints(self, table): """Get the constraints on the table using a new cursor.""" with connection.cursor() as cursor: return connection.introspection.get_constraints(cursor, table) def test_invalid_condition(self): msg = 'ExclusionConstraint.condition must be a Q instance.' 
with self.assertRaisesMessage(ValueError, msg): ExclusionConstraint( index_type='GIST', name='exclude_invalid_condition', expressions=[(F('datespan'), RangeOperators.OVERLAPS)], condition=F('invalid'), ) def test_invalid_index_type(self): msg = 'Exclusion constraints only support GiST or SP-GiST indexes.' with self.assertRaisesMessage(ValueError, msg): ExclusionConstraint( index_type='gin', name='exclude_invalid_index_type', expressions=[(F('datespan'), RangeOperators.OVERLAPS)], ) def test_invalid_expressions(self): msg = 'The expressions must be a list of 2-tuples.' for expressions in (['foo'], [('foo')], [('foo_1', 'foo_2', 'foo_3')]): with self.subTest(expressions), self.assertRaisesMessage(ValueError, msg): ExclusionConstraint( index_type='GIST', name='exclude_invalid_expressions', expressions=expressions, ) def test_empty_expressions(self): msg = 'At least one expression is required to define an exclusion constraint.' for empty_expressions in (None, []): with self.subTest(empty_expressions), self.assertRaisesMessage(ValueError, msg): ExclusionConstraint( index_type='GIST', name='exclude_empty_expressions', expressions=empty_expressions, ) def test_invalid_deferrable(self): msg = 'ExclusionConstraint.deferrable must be a Deferrable instance.' with self.assertRaisesMessage(ValueError, msg): ExclusionConstraint( name='exclude_invalid_deferrable', expressions=[(F('datespan'), RangeOperators.OVERLAPS)], deferrable='invalid', ) def test_deferrable_with_condition(self): msg = 'ExclusionConstraint with conditions cannot be deferred.' with self.assertRaisesMessage(ValueError, msg): ExclusionConstraint( name='exclude_invalid_condition', expressions=[(F('datespan'), RangeOperators.OVERLAPS)], condition=Q(cancelled=False), deferrable=Deferrable.DEFERRED, ) def test_repr(self): constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[ (F('datespan'), RangeOperators.OVERLAPS), (F('room'), RangeOperators.EQUAL), ], ) self.assertEqual( repr(constraint), "<ExclusionConstraint: index_type=GIST, expressions=[" "(F(datespan), '&&'), (F(room), '=')]>", ) constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[(F('datespan'), RangeOperators.ADJACENT_TO)], condition=Q(cancelled=False), index_type='SPGiST', ) self.assertEqual( repr(constraint), "<ExclusionConstraint: index_type=SPGiST, expressions=[" "(F(datespan), '-|-')], condition=(AND: ('cancelled', False))>", ) constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[(F('datespan'), RangeOperators.ADJACENT_TO)], deferrable=Deferrable.IMMEDIATE, ) self.assertEqual( repr(constraint), "<ExclusionConstraint: index_type=GIST, expressions=[" "(F(datespan), '-|-')], deferrable=Deferrable.IMMEDIATE>", ) def test_eq(self): constraint_1 = ExclusionConstraint( name='exclude_overlapping', expressions=[ (F('datespan'), RangeOperators.OVERLAPS), (F('room'), RangeOperators.EQUAL), ], condition=Q(cancelled=False), ) constraint_2 = ExclusionConstraint( name='exclude_overlapping', expressions=[ ('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL), ], ) constraint_3 = ExclusionConstraint( name='exclude_overlapping', expressions=[('datespan', RangeOperators.OVERLAPS)], condition=Q(cancelled=False), ) constraint_4 = ExclusionConstraint( name='exclude_overlapping', expressions=[ ('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL), ], deferrable=Deferrable.DEFERRED, ) constraint_5 = ExclusionConstraint( name='exclude_overlapping', expressions=[ ('datespan', RangeOperators.OVERLAPS), 
('room', RangeOperators.EQUAL), ], deferrable=Deferrable.IMMEDIATE, ) self.assertEqual(constraint_1, constraint_1) self.assertEqual(constraint_1, mock.ANY) self.assertNotEqual(constraint_1, constraint_2) self.assertNotEqual(constraint_1, constraint_3) self.assertNotEqual(constraint_1, constraint_4) self.assertNotEqual(constraint_2, constraint_3) self.assertNotEqual(constraint_2, constraint_4) self.assertNotEqual(constraint_4, constraint_5) self.assertNotEqual(constraint_1, object()) def test_deconstruct(self): constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)], ) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'name': 'exclude_overlapping', 'expressions': [('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)], }) def test_deconstruct_index_type(self): constraint = ExclusionConstraint( name='exclude_overlapping', index_type='SPGIST', expressions=[('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)], ) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'name': 'exclude_overlapping', 'index_type': 'SPGIST', 'expressions': [('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)], }) def test_deconstruct_condition(self): constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)], condition=Q(cancelled=False), ) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'name': 'exclude_overlapping', 'expressions': [('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)], 'condition': Q(cancelled=False), }) def test_deconstruct_deferrable(self): constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[('datespan', RangeOperators.OVERLAPS)], deferrable=Deferrable.DEFERRED, ) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'name': 'exclude_overlapping', 'expressions': [('datespan', RangeOperators.OVERLAPS)], 'deferrable': Deferrable.DEFERRED, }) def _test_range_overlaps(self, constraint): # Create exclusion constraint. self.assertNotIn(constraint.name, self.get_constraints(HotelReservation._meta.db_table)) with connection.schema_editor() as editor: editor.add_constraint(HotelReservation, constraint) self.assertIn(constraint.name, self.get_constraints(HotelReservation._meta.db_table)) # Add initial reservations. room101 = Room.objects.create(number=101) room102 = Room.objects.create(number=102) datetimes = [ timezone.datetime(2018, 6, 20), timezone.datetime(2018, 6, 24), timezone.datetime(2018, 6, 26), timezone.datetime(2018, 6, 28), timezone.datetime(2018, 6, 29), ] HotelReservation.objects.create( datespan=DateRange(datetimes[0].date(), datetimes[1].date()), start=datetimes[0], end=datetimes[1], room=room102, ) HotelReservation.objects.create( datespan=DateRange(datetimes[1].date(), datetimes[3].date()), start=datetimes[1], end=datetimes[3], room=room102, ) # Overlap dates. 
with self.assertRaises(IntegrityError), transaction.atomic(): reservation = HotelReservation( datespan=(datetimes[1].date(), datetimes[2].date()), start=datetimes[1], end=datetimes[2], room=room102, ) reservation.save() # Valid range. HotelReservation.objects.bulk_create([ # Other room. HotelReservation( datespan=(datetimes[1].date(), datetimes[2].date()), start=datetimes[1], end=datetimes[2], room=room101, ), # Cancelled reservation. HotelReservation( datespan=(datetimes[1].date(), datetimes[1].date()), start=datetimes[1], end=datetimes[2], room=room102, cancelled=True, ), # Other adjacent dates. HotelReservation( datespan=(datetimes[3].date(), datetimes[4].date()), start=datetimes[3], end=datetimes[4], room=room102, ), ]) def test_range_overlaps_custom(self): class TsTzRange(Func): function = 'TSTZRANGE' output_field = DateTimeRangeField() constraint = ExclusionConstraint( name='exclude_overlapping_reservations_custom', expressions=[ (TsTzRange('start', 'end', RangeBoundary()), RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL) ], condition=Q(cancelled=False), ) self._test_range_overlaps(constraint) def test_range_overlaps(self): constraint = ExclusionConstraint( name='exclude_overlapping_reservations', expressions=[ (F('datespan'), RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL) ], condition=Q(cancelled=False), ) self._test_range_overlaps(constraint) def test_range_adjacent(self): constraint_name = 'ints_adjacent' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) RangesModel.objects.create(ints=(20, 50)) with self.assertRaises(IntegrityError), transaction.atomic(): RangesModel.objects.create(ints=(10, 20)) RangesModel.objects.create(ints=(10, 19)) RangesModel.objects.create(ints=(51, 60)) def test_range_adjacent_initially_deferred(self): constraint_name = 'ints_adjacent_deferred' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], deferrable=Deferrable.DEFERRED, ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) RangesModel.objects.create(ints=(20, 50)) adjacent_range = RangesModel.objects.create(ints=(10, 20)) # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(constraint_name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) # Remove adjacent range before the end of transaction. adjacent_range.delete() RangesModel.objects.create(ints=(10, 19)) RangesModel.objects.create(ints=(51, 60))
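
# ---------------------------------------------------------------------------
# Editor's note: the tests above install constraints through the schema
# editor; in application code the same constraint is usually declared in the
# model's Meta. A hedged sketch (model and field names are illustrative):
#
#   from django.contrib.postgres.fields import DateRangeField
#
#   class Reservation(models.Model):
#       room = models.ForeignKey(Room, models.CASCADE)
#       datespan = DateRangeField()
#       cancelled = models.BooleanField(default=False)
#
#       class Meta:
#           constraints = [
#               ExclusionConstraint(
#                   name='exclude_overlapping_reservations',
#                   expressions=[
#                       ('datespan', RangeOperators.OVERLAPS),
#                       ('room', RangeOperators.EQUAL),
#                   ],
#                   condition=Q(cancelled=False),
#               ),
#           ]
# ---------------------------------------------------------------------------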
""" Various complex queries that have been problematic in the past. """ import threading from django.db import models from django.db.models.functions import Now class DumbCategory(models.Model): pass class ProxyCategory(DumbCategory): class Meta: proxy = True class NamedCategory(DumbCategory): name = models.CharField(max_length=10) def __str__(self): return self.name class Tag(models.Model): name = models.CharField(max_length=10) parent = models.ForeignKey( 'self', models.SET_NULL, blank=True, null=True, related_name='children', ) category = models.ForeignKey(NamedCategory, models.SET_NULL, null=True, default=None) class Meta: ordering = ['name'] def __str__(self): return self.name class Note(models.Model): note = models.CharField(max_length=100) misc = models.CharField(max_length=10) tag = models.ForeignKey(Tag, models.SET_NULL, blank=True, null=True) class Meta: ordering = ['note'] def __str__(self): return self.note def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # Regression for #13227 -- having an attribute that # is unpicklable doesn't stop you from cloning queries # that use objects of that type as an argument. self.lock = threading.Lock() class Annotation(models.Model): name = models.CharField(max_length=10) tag = models.ForeignKey(Tag, models.CASCADE) notes = models.ManyToManyField(Note) def __str__(self): return self.name class DateTimePK(models.Model): date = models.DateTimeField(primary_key=True, auto_now_add=True) class ExtraInfo(models.Model): info = models.CharField(max_length=100) note = models.ForeignKey(Note, models.CASCADE, null=True) value = models.IntegerField(null=True) date = models.ForeignKey(DateTimePK, models.SET_NULL, null=True) class Meta: ordering = ['info'] def __str__(self): return self.info class Author(models.Model): name = models.CharField(max_length=10) num = models.IntegerField(unique=True) extra = models.ForeignKey(ExtraInfo, models.CASCADE) class Meta: ordering = ['name'] def __str__(self): return self.name class Item(models.Model): name = models.CharField(max_length=10) created = models.DateTimeField() modified = models.DateTimeField(blank=True, null=True) tags = models.ManyToManyField(Tag, blank=True) creator = models.ForeignKey(Author, models.CASCADE) note = models.ForeignKey(Note, models.CASCADE) class Meta: ordering = ['-note', 'name'] def __str__(self): return self.name class Report(models.Model): name = models.CharField(max_length=10) creator = models.ForeignKey(Author, models.SET_NULL, to_field='num', null=True) def __str__(self): return self.name class ReportComment(models.Model): report = models.ForeignKey(Report, models.CASCADE) class Ranking(models.Model): rank = models.IntegerField() author = models.ForeignKey(Author, models.CASCADE) class Meta: # A complex ordering specification. Should stress the system a bit. ordering = ('author__extra__note', 'author__name', 'rank') def __str__(self): return '%d: %s' % (self.rank, self.author.name) class Cover(models.Model): title = models.CharField(max_length=50) item = models.ForeignKey(Item, models.CASCADE) class Meta: ordering = ['item'] def __str__(self): return self.title class Number(models.Model): num = models.IntegerField() other_num = models.IntegerField(null=True) def __str__(self): return str(self.num) # Symmetrical m2m field with a normal field using the reverse accessor name # ("valid"). 
class Valid(models.Model): valid = models.CharField(max_length=10) parent = models.ManyToManyField('self') class Meta: ordering = ['valid'] # Some funky cross-linked models for testing a couple of infinite recursion # cases. class X(models.Model): y = models.ForeignKey('Y', models.CASCADE) class Y(models.Model): x1 = models.ForeignKey(X, models.CASCADE, related_name='y1') # Some models with a cycle in the default ordering. This would be bad if we # didn't catch the infinite loop. class LoopX(models.Model): y = models.ForeignKey('LoopY', models.CASCADE) class Meta: ordering = ['y'] class LoopY(models.Model): x = models.ForeignKey(LoopX, models.CASCADE) class Meta: ordering = ['x'] class LoopZ(models.Model): z = models.ForeignKey('self', models.CASCADE) class Meta: ordering = ['z'] # A model and custom default manager combination. class CustomManager(models.Manager): def get_queryset(self): qs = super().get_queryset() return qs.filter(public=True, tag__name='t1') class ManagedModel(models.Model): data = models.CharField(max_length=10) tag = models.ForeignKey(Tag, models.CASCADE) public = models.BooleanField(default=True) objects = CustomManager() normal_manager = models.Manager() def __str__(self): return self.data # An inter-related setup with multiple paths from Child to Detail. class Detail(models.Model): data = models.CharField(max_length=10) class MemberManager(models.Manager): def get_queryset(self): return super().get_queryset().select_related("details") class Member(models.Model): name = models.CharField(max_length=10) details = models.OneToOneField(Detail, models.CASCADE, primary_key=True) objects = MemberManager() class Child(models.Model): person = models.OneToOneField(Member, models.CASCADE, primary_key=True) parent = models.ForeignKey(Member, models.CASCADE, related_name="children") # Custom primary keys interfered with ordering in the past. class CustomPk(models.Model): name = models.CharField(max_length=10, primary_key=True) extra = models.CharField(max_length=10) class Meta: ordering = ['name', 'extra'] class Related(models.Model): custom = models.ForeignKey(CustomPk, models.CASCADE, null=True) class CustomPkTag(models.Model): id = models.CharField(max_length=20, primary_key=True) custom_pk = models.ManyToManyField(CustomPk) tag = models.CharField(max_length=20) # An inter-related setup with a model subclass that has a nullable # path to another model, and a return path from that model. class Celebrity(models.Model): name = models.CharField("Name", max_length=20) greatest_fan = models.ForeignKey("Fan", models.SET_NULL, null=True, unique=True) def __str__(self): return self.name class TvChef(Celebrity): pass class Fan(models.Model): fan_of = models.ForeignKey(Celebrity, models.CASCADE) # Multiple foreign keys class LeafA(models.Model): data = models.CharField(max_length=10) def __str__(self): return self.data class LeafB(models.Model): data = models.CharField(max_length=10) class Join(models.Model): a = models.ForeignKey(LeafA, models.CASCADE) b = models.ForeignKey(LeafB, models.CASCADE) class ReservedName(models.Model): name = models.CharField(max_length=20) order = models.IntegerField() def __str__(self): return self.name # A simpler shared-foreign-key setup that can expose some problems. 
class SharedConnection(models.Model): data = models.CharField(max_length=10) def __str__(self): return self.data class PointerA(models.Model): connection = models.ForeignKey(SharedConnection, models.CASCADE) class PointerB(models.Model): connection = models.ForeignKey(SharedConnection, models.CASCADE) # Multi-layer ordering class SingleObject(models.Model): name = models.CharField(max_length=10) class Meta: ordering = ['name'] def __str__(self): return self.name class RelatedObject(models.Model): single = models.ForeignKey(SingleObject, models.SET_NULL, null=True) f = models.IntegerField(null=True) class Meta: ordering = ['single'] class Plaything(models.Model): name = models.CharField(max_length=10) others = models.ForeignKey(RelatedObject, models.SET_NULL, null=True) class Meta: ordering = ['others'] def __str__(self): return self.name class Article(models.Model): name = models.CharField(max_length=20) created = models.DateTimeField() def __str__(self): return self.name class Food(models.Model): name = models.CharField(max_length=20, unique=True) def __str__(self): return self.name class Eaten(models.Model): food = models.ForeignKey(Food, models.SET_NULL, to_field="name", null=True) meal = models.CharField(max_length=20) def __str__(self): return "%s at %s" % (self.food, self.meal) class Node(models.Model): num = models.IntegerField(unique=True) parent = models.ForeignKey("self", models.SET_NULL, to_field="num", null=True) def __str__(self): return str(self.num) # Bug #12252 class ObjectA(models.Model): name = models.CharField(max_length=50) def __str__(self): return self.name def __iter__(self): # Ticket #23721 assert False, 'type checking should happen without calling model __iter__' class ProxyObjectA(ObjectA): class Meta: proxy = True class ChildObjectA(ObjectA): pass class ObjectB(models.Model): name = models.CharField(max_length=50) objecta = models.ForeignKey(ObjectA, models.CASCADE) num = models.PositiveSmallIntegerField() def __str__(self): return self.name class ProxyObjectB(ObjectB): class Meta: proxy = True class ObjectC(models.Model): name = models.CharField(max_length=50) objecta = models.ForeignKey(ObjectA, models.SET_NULL, null=True) objectb = models.ForeignKey(ObjectB, models.SET_NULL, null=True) childobjecta = models.ForeignKey(ChildObjectA, models.SET_NULL, null=True, related_name='ca_pk') def __str__(self): return self.name class SimpleCategory(models.Model): name = models.CharField(max_length=15) def __str__(self): return self.name class SpecialCategory(SimpleCategory): special_name = models.CharField(max_length=15) def __str__(self): return self.name + " " + self.special_name class CategoryItem(models.Model): category = models.ForeignKey(SimpleCategory, models.CASCADE) def __str__(self): return "category item: " + str(self.category) class MixedCaseFieldCategoryItem(models.Model): CaTeGoRy = models.ForeignKey(SimpleCategory, models.CASCADE) class MixedCaseDbColumnCategoryItem(models.Model): category = models.ForeignKey(SimpleCategory, models.CASCADE, db_column='CaTeGoRy_Id') class OneToOneCategory(models.Model): new_name = models.CharField(max_length=15) category = models.OneToOneField(SimpleCategory, models.CASCADE) def __str__(self): return "one2one " + self.new_name class CategoryRelationship(models.Model): first = models.ForeignKey(SimpleCategory, models.CASCADE, related_name='first_rel') second = models.ForeignKey(SimpleCategory, models.CASCADE, related_name='second_rel') class CommonMixedCaseForeignKeys(models.Model): category = models.ForeignKey(CategoryItem, 
models.CASCADE) mixed_case_field_category = models.ForeignKey(MixedCaseFieldCategoryItem, models.CASCADE) mixed_case_db_column_category = models.ForeignKey(MixedCaseDbColumnCategoryItem, models.CASCADE) class NullableName(models.Model): name = models.CharField(max_length=20, null=True) class Meta: ordering = ['id'] class ModelD(models.Model): name = models.TextField() class ModelC(models.Model): name = models.TextField() class ModelB(models.Model): name = models.TextField() c = models.ForeignKey(ModelC, models.CASCADE) class ModelA(models.Model): name = models.TextField() b = models.ForeignKey(ModelB, models.SET_NULL, null=True) d = models.ForeignKey(ModelD, models.CASCADE) class Job(models.Model): name = models.CharField(max_length=20, unique=True) def __str__(self): return self.name class JobResponsibilities(models.Model): job = models.ForeignKey(Job, models.CASCADE, to_field='name') responsibility = models.ForeignKey('Responsibility', models.CASCADE, to_field='description') class Responsibility(models.Model): description = models.CharField(max_length=20, unique=True) jobs = models.ManyToManyField(Job, through=JobResponsibilities, related_name='responsibilities') def __str__(self): return self.description # Models for disjunction join promotion low level testing. class FK1(models.Model): f1 = models.TextField() f2 = models.TextField() class FK2(models.Model): f1 = models.TextField() f2 = models.TextField() class FK3(models.Model): f1 = models.TextField() f2 = models.TextField() class BaseA(models.Model): a = models.ForeignKey(FK1, models.SET_NULL, null=True) b = models.ForeignKey(FK2, models.SET_NULL, null=True) c = models.ForeignKey(FK3, models.SET_NULL, null=True) class Identifier(models.Model): name = models.CharField(max_length=100) def __str__(self): return self.name class Program(models.Model): identifier = models.OneToOneField(Identifier, models.CASCADE) class Channel(models.Model): programs = models.ManyToManyField(Program) identifier = models.OneToOneField(Identifier, models.CASCADE) class Book(models.Model): title = models.TextField() chapter = models.ForeignKey('Chapter', models.CASCADE) class Chapter(models.Model): title = models.TextField() paragraph = models.ForeignKey('Paragraph', models.CASCADE) class Paragraph(models.Model): text = models.TextField() page = models.ManyToManyField('Page') class Page(models.Model): text = models.TextField() class MyObject(models.Model): parent = models.ForeignKey('self', models.SET_NULL, null=True, blank=True, related_name='children') data = models.CharField(max_length=100) created_at = models.DateTimeField(auto_now_add=True) # Models for #17600 regressions class Order(models.Model): id = models.IntegerField(primary_key=True) name = models.CharField(max_length=12, null=True, default='') class Meta: ordering = ('pk',) def __str__(self): return str(self.pk) class OrderItem(models.Model): order = models.ForeignKey(Order, models.CASCADE, related_name='items') status = models.IntegerField() class Meta: ordering = ('pk',) def __str__(self): return str(self.pk) class BaseUser(models.Model): pass class Task(models.Model): title = models.CharField(max_length=10) owner = models.ForeignKey(BaseUser, models.CASCADE, related_name='owner') creator = models.ForeignKey(BaseUser, models.CASCADE, related_name='creator') def __str__(self): return self.title class Staff(models.Model): name = models.CharField(max_length=10) def __str__(self): return self.name class StaffUser(BaseUser): staff = models.OneToOneField(Staff, models.CASCADE, related_name='user') 
    def __str__(self):
        return self.staff.name


class Ticket21203Parent(models.Model):
    parentid = models.AutoField(primary_key=True)
    parent_bool = models.BooleanField(default=True)
    created = models.DateTimeField(auto_now=True)


class Ticket21203Child(models.Model):
    childid = models.AutoField(primary_key=True)
    parent = models.ForeignKey(Ticket21203Parent, models.CASCADE)


class Person(models.Model):
    name = models.CharField(max_length=128)


class Company(models.Model):
    name = models.CharField(max_length=128)
    employees = models.ManyToManyField(Person, related_name='employers', through='Employment')

    def __str__(self):
        return self.name


class Employment(models.Model):
    employer = models.ForeignKey(Company, models.CASCADE)
    employee = models.ForeignKey(Person, models.CASCADE)
    title = models.CharField(max_length=128)


class School(models.Model):
    pass


class Student(models.Model):
    school = models.ForeignKey(School, models.CASCADE)


class Classroom(models.Model):
    name = models.CharField(max_length=20)
    has_blackboard = models.BooleanField(null=True)
    school = models.ForeignKey(School, models.CASCADE)
    students = models.ManyToManyField(Student, related_name='classroom')


class Teacher(models.Model):
    schools = models.ManyToManyField(School)
    friends = models.ManyToManyField('self')


class Ticket23605AParent(models.Model):
    pass


class Ticket23605A(Ticket23605AParent):
    pass


class Ticket23605B(models.Model):
    modela_fk = models.ForeignKey(Ticket23605A, models.CASCADE)
    modelc_fk = models.ForeignKey("Ticket23605C", models.CASCADE)
    field_b0 = models.IntegerField(null=True)
    field_b1 = models.BooleanField(default=False)


class Ticket23605C(models.Model):
    field_c0 = models.FloatField()


# db_table names have capital letters to ensure they are quoted in queries.
class Individual(models.Model):
    alive = models.BooleanField()

    class Meta:
        db_table = 'Individual'


class RelatedIndividual(models.Model):
    related = models.ForeignKey(Individual, models.CASCADE, related_name='related_individual')

    class Meta:
        db_table = 'RelatedIndividual'


class CustomDbColumn(models.Model):
    custom_column = models.IntegerField(db_column='custom_name', null=True)
    ip_address = models.GenericIPAddressField(null=True)


class CreatedField(models.DateTimeField):
    db_returning = True

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('default', Now)
        super().__init__(*args, **kwargs)


class ReturningModel(models.Model):
    created = CreatedField(editable=False)


class NonIntegerPKReturningModel(models.Model):
    created = CreatedField(editable=False, primary_key=True)
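
# ---------------------------------------------------------------------------
# Editor's note: a hedged usage sketch for CreatedField above. Because the
# field sets db_returning = True, a backend with
# can_return_columns_from_insert populates the attribute from the
# database-computed Now() as part of the INSERT, with no extra query:
#
#   obj = ReturningModel.objects.create()
#   obj.created is not None   # -> True, read back via INSERT ... RETURNING
# ---------------------------------------------------------------------------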
import unittest from django.core.checks import Error, Warning from django.core.checks.model_checks import _check_lazy_references from django.db import connection, connections, models from django.db.models.functions import Lower from django.db.models.signals import post_init from django.test import SimpleTestCase, TestCase from django.test.utils import isolate_apps, override_settings, register_lookup class EmptyRouter: pass def get_max_column_name_length(): allowed_len = None db_alias = None for db in ('default', 'other'): connection = connections[db] max_name_length = connection.ops.max_name_length() if max_name_length is not None and not connection.features.truncates_names: if allowed_len is None or max_name_length < allowed_len: allowed_len = max_name_length db_alias = db return (allowed_len, db_alias) @isolate_apps('invalid_models_tests') class IndexTogetherTests(SimpleTestCase): def test_non_iterable(self): class Model(models.Model): class Meta: index_together = 42 self.assertEqual(Model.check(), [ Error( "'index_together' must be a list or tuple.", obj=Model, id='models.E008', ), ]) def test_non_list(self): class Model(models.Model): class Meta: index_together = 'not-a-list' self.assertEqual(Model.check(), [ Error( "'index_together' must be a list or tuple.", obj=Model, id='models.E008', ), ]) def test_list_containing_non_iterable(self): class Model(models.Model): class Meta: index_together = [('a', 'b'), 42] self.assertEqual(Model.check(), [ Error( "All 'index_together' elements must be lists or tuples.", obj=Model, id='models.E009', ), ]) def test_pointing_to_missing_field(self): class Model(models.Model): class Meta: index_together = [['missing_field']] self.assertEqual(Model.check(), [ Error( "'index_together' refers to the nonexistent field 'missing_field'.", obj=Model, id='models.E012', ), ]) def test_pointing_to_non_local_field(self): class Foo(models.Model): field1 = models.IntegerField() class Bar(Foo): field2 = models.IntegerField() class Meta: index_together = [['field2', 'field1']] self.assertEqual(Bar.check(), [ Error( "'index_together' refers to field 'field1' which is not " "local to model 'Bar'.", hint='This issue may be caused by multi-table inheritance.', obj=Bar, id='models.E016', ), ]) def test_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: index_together = [['m2m']] self.assertEqual(Model.check(), [ Error( "'index_together' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'index_together'.", obj=Model, id='models.E013', ), ]) def test_pointing_to_fk(self): class Foo(models.Model): pass class Bar(models.Model): foo_1 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_1') foo_2 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_2') class Meta: index_together = [['foo_1_id', 'foo_2']] self.assertEqual(Bar.check(), []) # unique_together tests are very similar to index_together tests. 
@isolate_apps('invalid_models_tests') class UniqueTogetherTests(SimpleTestCase): def test_non_iterable(self): class Model(models.Model): class Meta: unique_together = 42 self.assertEqual(Model.check(), [ Error( "'unique_together' must be a list or tuple.", obj=Model, id='models.E010', ), ]) def test_list_containing_non_iterable(self): class Model(models.Model): one = models.IntegerField() two = models.IntegerField() class Meta: unique_together = [('a', 'b'), 42] self.assertEqual(Model.check(), [ Error( "All 'unique_together' elements must be lists or tuples.", obj=Model, id='models.E011', ), ]) def test_non_list(self): class Model(models.Model): class Meta: unique_together = 'not-a-list' self.assertEqual(Model.check(), [ Error( "'unique_together' must be a list or tuple.", obj=Model, id='models.E010', ), ]) def test_valid_model(self): class Model(models.Model): one = models.IntegerField() two = models.IntegerField() class Meta: # unique_together can be a simple tuple unique_together = ('one', 'two') self.assertEqual(Model.check(), []) def test_pointing_to_missing_field(self): class Model(models.Model): class Meta: unique_together = [['missing_field']] self.assertEqual(Model.check(), [ Error( "'unique_together' refers to the nonexistent field 'missing_field'.", obj=Model, id='models.E012', ), ]) def test_pointing_to_m2m(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: unique_together = [['m2m']] self.assertEqual(Model.check(), [ Error( "'unique_together' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'unique_together'.", obj=Model, id='models.E013', ), ]) def test_pointing_to_fk(self): class Foo(models.Model): pass class Bar(models.Model): foo_1 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_1') foo_2 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_2') class Meta: unique_together = [['foo_1_id', 'foo_2']] self.assertEqual(Bar.check(), []) @isolate_apps('invalid_models_tests') class IndexesTests(TestCase): def test_pointing_to_missing_field(self): class Model(models.Model): class Meta: indexes = [models.Index(fields=['missing_field'], name='name')] self.assertEqual(Model.check(), [ Error( "'indexes' refers to the nonexistent field 'missing_field'.", obj=Model, id='models.E012', ), ]) def test_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: indexes = [models.Index(fields=['m2m'], name='name')] self.assertEqual(Model.check(), [ Error( "'indexes' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'indexes'.", obj=Model, id='models.E013', ), ]) def test_pointing_to_non_local_field(self): class Foo(models.Model): field1 = models.IntegerField() class Bar(Foo): field2 = models.IntegerField() class Meta: indexes = [models.Index(fields=['field2', 'field1'], name='name')] self.assertEqual(Bar.check(), [ Error( "'indexes' refers to field 'field1' which is not local to " "model 'Bar'.", hint='This issue may be caused by multi-table inheritance.', obj=Bar, id='models.E016', ), ]) def test_pointing_to_fk(self): class Foo(models.Model): pass class Bar(models.Model): foo_1 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_1') foo_2 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_2') class Meta: indexes = [models.Index(fields=['foo_1_id', 'foo_2'], name='index_name')] self.assertEqual(Bar.check(), []) def test_name_constraints(self): class Model(models.Model): class Meta: 
indexes = [ models.Index(fields=['id'], name='_index_name'), models.Index(fields=['id'], name='5index_name'), ] self.assertEqual(Model.check(), [ Error( "The index name '%sindex_name' cannot start with an " "underscore or a number." % prefix, obj=Model, id='models.E033', ) for prefix in ('_', '5') ]) def test_max_name_length(self): index_name = 'x' * 31 class Model(models.Model): class Meta: indexes = [models.Index(fields=['id'], name=index_name)] self.assertEqual(Model.check(), [ Error( "The index name '%s' cannot be longer than 30 characters." % index_name, obj=Model, id='models.E034', ), ]) def test_index_with_condition(self): class Model(models.Model): age = models.IntegerField() class Meta: indexes = [ models.Index( fields=['age'], name='index_age_gte_10', condition=models.Q(age__gte=10), ), ] errors = Model.check(databases=self.databases) expected = [] if connection.features.supports_partial_indexes else [ Warning( '%s does not support indexes with conditions.' % connection.display_name, hint=( "Conditions will be ignored. Silence this warning if you " "don't care about it." ), obj=Model, id='models.W037', ) ] self.assertEqual(errors, expected) def test_index_with_condition_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {'supports_partial_indexes'} indexes = [ models.Index( fields=['age'], name='index_age_gte_10', condition=models.Q(age__gte=10), ), ] self.assertEqual(Model.check(databases=self.databases), []) @isolate_apps('invalid_models_tests') class FieldNamesTests(TestCase): databases = {'default', 'other'} def test_ending_with_underscore(self): class Model(models.Model): field_ = models.CharField(max_length=10) m2m_ = models.ManyToManyField('self') self.assertEqual(Model.check(), [ Error( 'Field names must not end with an underscore.', obj=Model._meta.get_field('field_'), id='fields.E001', ), Error( 'Field names must not end with an underscore.', obj=Model._meta.get_field('m2m_'), id='fields.E001', ), ]) max_column_name_length, column_limit_db_alias = get_max_column_name_length() @unittest.skipIf(max_column_name_length is None, "The database doesn't have a column name length limit.") def test_M2M_long_column_name(self): """ #13711 -- Model check for long M2M column names when database has column name length limits. """ # A model with very long name which will be used to set relations to. class VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz(models.Model): title = models.CharField(max_length=11) # Main model for which checks will be performed. class ModelWithLongField(models.Model): m2m_field = models.ManyToManyField( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, related_name='rn1', ) m2m_field2 = models.ManyToManyField( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, related_name='rn2', through='m2msimple', ) m2m_field3 = models.ManyToManyField( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, related_name='rn3', through='m2mcomplex', ) fk = models.ForeignKey( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, models.CASCADE, related_name='rn4', ) # Models used for setting `through` in M2M field. 
class m2msimple(models.Model): id2 = models.ForeignKey(ModelWithLongField, models.CASCADE) class m2mcomplex(models.Model): id2 = models.ForeignKey(ModelWithLongField, models.CASCADE) long_field_name = 'a' * (self.max_column_name_length + 1) models.ForeignKey( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, models.CASCADE, ).contribute_to_class(m2msimple, long_field_name) models.ForeignKey( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, models.CASCADE, db_column=long_field_name ).contribute_to_class(m2mcomplex, long_field_name) errors = ModelWithLongField.check(databases=('default', 'other')) # First error because of M2M field set on the model with long name. m2m_long_name = "verylongmodelnamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz_id" if self.max_column_name_length > len(m2m_long_name): # Some databases support names longer than the test name. expected = [] else: expected = [ Error( 'Autogenerated column name too long for M2M field "%s". ' 'Maximum length is "%s" for database "%s".' % (m2m_long_name, self.max_column_name_length, self.column_limit_db_alias), hint="Use 'through' to create a separate model for " "M2M and then set column_name using 'db_column'.", obj=ModelWithLongField, id='models.E019', ) ] # Second error because the FK specified in the `through` model # `m2msimple` has auto-generated name longer than allowed. # There will be no check errors in the other M2M because it # specifies db_column for the FK in `through` model even if the actual # name is longer than the limits of the database. expected.append( Error( 'Autogenerated column name too long for M2M field "%s_id". ' 'Maximum length is "%s" for database "%s".' % (long_field_name, self.max_column_name_length, self.column_limit_db_alias), hint="Use 'through' to create a separate model for " "M2M and then set column_name using 'db_column'.", obj=ModelWithLongField, id='models.E019', ) ) self.assertEqual(errors, expected) # Check for long column names is called only for specified database # aliases. self.assertEqual(ModelWithLongField.check(databases=None), []) @unittest.skipIf(max_column_name_length is None, "The database doesn't have a column name length limit.") def test_local_field_long_column_name(self): """ #13711 -- Model check for long column names when database does not support long names. """ class ModelWithLongField(models.Model): title = models.CharField(max_length=11) long_field_name = 'a' * (self.max_column_name_length + 1) long_field_name2 = 'b' * (self.max_column_name_length + 1) models.CharField(max_length=11).contribute_to_class(ModelWithLongField, long_field_name) models.CharField(max_length=11, db_column='vlmn').contribute_to_class(ModelWithLongField, long_field_name2) self.assertEqual(ModelWithLongField.check(databases=('default', 'other')), [ Error( 'Autogenerated column name too long for field "%s". ' 'Maximum length is "%s" for database "%s".' % (long_field_name, self.max_column_name_length, self.column_limit_db_alias), hint="Set the column name manually using 'db_column'.", obj=ModelWithLongField, id='models.E018', ) ]) # Check for long column names is called only for specified database # aliases. 
self.assertEqual(ModelWithLongField.check(databases=None), []) def test_including_separator(self): class Model(models.Model): some__field = models.IntegerField() self.assertEqual(Model.check(), [ Error( 'Field names must not contain "__".', obj=Model._meta.get_field('some__field'), id='fields.E002', ) ]) def test_pk(self): class Model(models.Model): pk = models.IntegerField() self.assertEqual(Model.check(), [ Error( "'pk' is a reserved word that cannot be used as a field name.", obj=Model._meta.get_field('pk'), id='fields.E003', ) ]) def test_db_column_clash(self): class Model(models.Model): foo = models.IntegerField() bar = models.IntegerField(db_column='foo') self.assertEqual(Model.check(), [ Error( "Field 'bar' has column name 'foo' that is used by " "another field.", hint="Specify a 'db_column' for the field.", obj=Model, id='models.E007', ) ]) @isolate_apps('invalid_models_tests') class ShadowingFieldsTests(SimpleTestCase): def test_field_name_clash_with_child_accessor(self): class Parent(models.Model): pass class Child(Parent): child = models.CharField(max_length=100) self.assertEqual(Child.check(), [ Error( "The field 'child' clashes with the field " "'child' from model 'invalid_models_tests.parent'.", obj=Child._meta.get_field('child'), id='models.E006', ) ]) def test_multiinheritance_clash(self): class Mother(models.Model): clash = models.IntegerField() class Father(models.Model): clash = models.IntegerField() class Child(Mother, Father): # Here we have two clashes: id (automatic field) and clash, because # both parents define these fields. pass self.assertEqual(Child.check(), [ Error( "The field 'id' from parent model " "'invalid_models_tests.mother' clashes with the field 'id' " "from parent model 'invalid_models_tests.father'.", obj=Child, id='models.E005', ), Error( "The field 'clash' from parent model " "'invalid_models_tests.mother' clashes with the field 'clash' " "from parent model 'invalid_models_tests.father'.", obj=Child, id='models.E005', ) ]) def test_inheritance_clash(self): class Parent(models.Model): f_id = models.IntegerField() class Target(models.Model): # This field doesn't result in a clash. f_id = models.IntegerField() class Child(Parent): # This field clashes with parent "f_id" field.
f = models.ForeignKey(Target, models.CASCADE) self.assertEqual(Child.check(), [ Error( "The field 'f' clashes with the field 'f_id' " "from model 'invalid_models_tests.parent'.", obj=Child._meta.get_field('f'), id='models.E006', ) ]) def test_multigeneration_inheritance(self): class GrandParent(models.Model): clash = models.IntegerField() class Parent(GrandParent): pass class Child(Parent): pass class GrandChild(Child): clash = models.IntegerField() self.assertEqual(GrandChild.check(), [ Error( "The field 'clash' clashes with the field 'clash' " "from model 'invalid_models_tests.grandparent'.", obj=GrandChild._meta.get_field('clash'), id='models.E006', ) ]) def test_id_clash(self): class Target(models.Model): pass class Model(models.Model): fk = models.ForeignKey(Target, models.CASCADE) fk_id = models.IntegerField() self.assertEqual(Model.check(), [ Error( "The field 'fk_id' clashes with the field 'fk' from model " "'invalid_models_tests.model'.", obj=Model._meta.get_field('fk_id'), id='models.E006', ) ]) @isolate_apps('invalid_models_tests') class OtherModelTests(SimpleTestCase): def test_unique_primary_key(self): invalid_id = models.IntegerField(primary_key=False) class Model(models.Model): id = invalid_id self.assertEqual(Model.check(), [ Error( "'id' can only be used as a field name if the field also sets " "'primary_key=True'.", obj=Model, id='models.E004', ), ]) def test_ordering_non_iterable(self): class Model(models.Model): class Meta: ordering = 'missing_field' self.assertEqual(Model.check(), [ Error( "'ordering' must be a tuple or list " "(even if you want to order by only one field).", obj=Model, id='models.E014', ), ]) def test_just_ordering_no_errors(self): class Model(models.Model): order = models.PositiveIntegerField() class Meta: ordering = ['order'] self.assertEqual(Model.check(), []) def test_just_order_with_respect_to_no_errors(self): class Question(models.Model): pass class Answer(models.Model): question = models.ForeignKey(Question, models.CASCADE) class Meta: order_with_respect_to = 'question' self.assertEqual(Answer.check(), []) def test_ordering_with_order_with_respect_to(self): class Question(models.Model): pass class Answer(models.Model): question = models.ForeignKey(Question, models.CASCADE) order = models.IntegerField() class Meta: order_with_respect_to = 'question' ordering = ['order'] self.assertEqual(Answer.check(), [ Error( "'ordering' and 'order_with_respect_to' cannot be used together.", obj=Answer, id='models.E021', ), ]) def test_non_valid(self): class RelationModel(models.Model): pass class Model(models.Model): relation = models.ManyToManyField(RelationModel) class Meta: ordering = ['relation'] self.assertEqual(Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'relation'.", obj=Model, id='models.E015', ), ]) def test_ordering_pointing_to_missing_field(self): class Model(models.Model): class Meta: ordering = ('missing_field',) self.assertEqual(Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'missing_field'.", obj=Model, id='models.E015', ) ]) def test_ordering_pointing_to_missing_foreignkey_field(self): class Model(models.Model): missing_fk_field = models.IntegerField() class Meta: ordering = ('missing_fk_field_id',) self.assertEqual(Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'missing_fk_field_id'.", obj=Model, id='models.E015', ) ]) def test_ordering_pointing_to_missing_related_field(self): class 
Model(models.Model): test = models.IntegerField() class Meta: ordering = ('missing_related__id',) self.assertEqual(Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'missing_related__id'.", obj=Model, id='models.E015', ) ]) def test_ordering_pointing_to_missing_related_model_field(self): class Parent(models.Model): pass class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) class Meta: ordering = ('parent__missing_field',) self.assertEqual(Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent__missing_field'.", obj=Child, id='models.E015', ) ]) def test_ordering_pointing_to_non_related_field(self): class Child(models.Model): parent = models.IntegerField() class Meta: ordering = ('parent__missing_field',) self.assertEqual(Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent__missing_field'.", obj=Child, id='models.E015', ) ]) def test_ordering_pointing_to_two_related_model_field(self): class Parent2(models.Model): pass class Parent1(models.Model): parent2 = models.ForeignKey(Parent2, models.CASCADE) class Child(models.Model): parent1 = models.ForeignKey(Parent1, models.CASCADE) class Meta: ordering = ('parent1__parent2__missing_field',) self.assertEqual(Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent1__parent2__missing_field'.", obj=Child, id='models.E015', ) ]) def test_ordering_pointing_multiple_times_to_model_fields(self): class Parent(models.Model): field1 = models.CharField(max_length=100) field2 = models.CharField(max_length=100) class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) class Meta: ordering = ('parent__field1__field2',) self.assertEqual(Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent__field1__field2'.", obj=Child, id='models.E015', ) ]) def test_ordering_allows_registered_lookups(self): class Model(models.Model): test = models.CharField(max_length=100) class Meta: ordering = ('test__lower',) with register_lookup(models.CharField, Lower): self.assertEqual(Model.check(), []) def test_ordering_pointing_to_related_model_pk(self): class Parent(models.Model): pass class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) class Meta: ordering = ('parent__pk',) self.assertEqual(Child.check(), []) def test_ordering_pointing_to_foreignkey_field(self): class Parent(models.Model): pass class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) class Meta: ordering = ('parent_id',) self.assertFalse(Child.check()) def test_name_beginning_with_underscore(self): class _Model(models.Model): pass self.assertEqual(_Model.check(), [ Error( "The model name '_Model' cannot start or end with an underscore " "as it collides with the query lookup syntax.", obj=_Model, id='models.E023', ) ]) def test_name_ending_with_underscore(self): class Model_(models.Model): pass self.assertEqual(Model_.check(), [ Error( "The model name 'Model_' cannot start or end with an underscore " "as it collides with the query lookup syntax.", obj=Model_, id='models.E023', ) ]) def test_name_contains_double_underscores(self): class Test__Model(models.Model): pass self.assertEqual(Test__Model.check(), [ Error( "The model name 'Test__Model' cannot contain double underscores " "as it collides with the query lookup syntax.", obj=Test__Model, id='models.E024', ) ]) def 
test_property_and_related_field_accessor_clash(self): class Model(models.Model): fk = models.ForeignKey('self', models.CASCADE) @property def fk_id(self): pass self.assertEqual(Model.check(), [ Error( "The property 'fk_id' clashes with a related field accessor.", obj=Model, id='models.E025', ) ]) def test_single_primary_key(self): class Model(models.Model): foo = models.IntegerField(primary_key=True) bar = models.IntegerField(primary_key=True) self.assertEqual(Model.check(), [ Error( "The model cannot have more than one field with 'primary_key=True'.", obj=Model, id='models.E026', ) ]) @override_settings(TEST_SWAPPED_MODEL_BAD_VALUE='not-a-model') def test_swappable_missing_app_name(self): class Model(models.Model): class Meta: swappable = 'TEST_SWAPPED_MODEL_BAD_VALUE' self.assertEqual(Model.check(), [ Error( "'TEST_SWAPPED_MODEL_BAD_VALUE' is not of the form 'app_label.app_name'.", id='models.E001', ), ]) @override_settings(TEST_SWAPPED_MODEL_BAD_MODEL='not_an_app.Target') def test_swappable_missing_app(self): class Model(models.Model): class Meta: swappable = 'TEST_SWAPPED_MODEL_BAD_MODEL' self.assertEqual(Model.check(), [ Error( "'TEST_SWAPPED_MODEL_BAD_MODEL' references 'not_an_app.Target', " 'which has not been installed, or is abstract.', id='models.E002', ), ]) def test_two_m2m_through_same_relationship(self): class Person(models.Model): pass class Group(models.Model): primary = models.ManyToManyField(Person, through='Membership', related_name='primary') secondary = models.ManyToManyField(Person, through='Membership', related_name='secondary') class Membership(models.Model): person = models.ForeignKey(Person, models.CASCADE) group = models.ForeignKey(Group, models.CASCADE) self.assertEqual(Group.check(), [ Error( "The model has two identical many-to-many relations through " "the intermediate model 'invalid_models_tests.Membership'.", obj=Group, id='models.E003', ) ]) def test_two_m2m_through_same_model_with_different_through_fields(self): class Country(models.Model): pass class ShippingMethod(models.Model): to_countries = models.ManyToManyField( Country, through='ShippingMethodPrice', through_fields=('method', 'to_country'), ) from_countries = models.ManyToManyField( Country, through='ShippingMethodPrice', through_fields=('method', 'from_country'), related_name='+', ) class ShippingMethodPrice(models.Model): method = models.ForeignKey(ShippingMethod, models.CASCADE) to_country = models.ForeignKey(Country, models.CASCADE) from_country = models.ForeignKey(Country, models.CASCADE) self.assertEqual(ShippingMethod.check(), []) def test_onetoone_with_parent_model(self): class Place(models.Model): pass class ParkingLot(Place): other_place = models.OneToOneField(Place, models.CASCADE, related_name='other_parking') self.assertEqual(ParkingLot.check(), []) def test_onetoone_with_explicit_parent_link_parent_model(self): class Place(models.Model): pass class ParkingLot(Place): place = models.OneToOneField(Place, models.CASCADE, parent_link=True, primary_key=True) other_place = models.OneToOneField(Place, models.CASCADE, related_name='other_parking') self.assertEqual(ParkingLot.check(), []) def test_m2m_table_name_clash(self): class Foo(models.Model): bar = models.ManyToManyField('Bar', db_table='myapp_bar') class Meta: db_table = 'myapp_foo' class Bar(models.Model): class Meta: db_table = 'myapp_bar' self.assertEqual(Foo.check(), [ Error( "The field's intermediary table 'myapp_bar' clashes with the " "table name of 'invalid_models_tests.Bar'.", obj=Foo._meta.get_field('bar'), id='fields.E340', 
) ]) @override_settings(DATABASE_ROUTERS=['invalid_models_tests.test_models.EmptyRouter']) def test_m2m_table_name_clash_database_routers_installed(self): class Foo(models.Model): bar = models.ManyToManyField('Bar', db_table='myapp_bar') class Meta: db_table = 'myapp_foo' class Bar(models.Model): class Meta: db_table = 'myapp_bar' self.assertEqual(Foo.check(), [ Warning( "The field's intermediary table 'myapp_bar' clashes with the " "table name of 'invalid_models_tests.Bar'.", obj=Foo._meta.get_field('bar'), hint=( "You have configured settings.DATABASE_ROUTERS. Verify " "that the table of 'invalid_models_tests.Bar' is " "correctly routed to a separate database." ), id='fields.W344', ), ]) def test_m2m_field_table_name_clash(self): class Foo(models.Model): pass class Bar(models.Model): foos = models.ManyToManyField(Foo, db_table='clash') class Baz(models.Model): foos = models.ManyToManyField(Foo, db_table='clash') self.assertEqual(Bar.check() + Baz.check(), [ Error( "The field's intermediary table 'clash' clashes with the " "table name of 'invalid_models_tests.Baz.foos'.", obj=Bar._meta.get_field('foos'), id='fields.E340', ), Error( "The field's intermediary table 'clash' clashes with the " "table name of 'invalid_models_tests.Bar.foos'.", obj=Baz._meta.get_field('foos'), id='fields.E340', ) ]) @override_settings(DATABASE_ROUTERS=['invalid_models_tests.test_models.EmptyRouter']) def test_m2m_field_table_name_clash_database_routers_installed(self): class Foo(models.Model): pass class Bar(models.Model): foos = models.ManyToManyField(Foo, db_table='clash') class Baz(models.Model): foos = models.ManyToManyField(Foo, db_table='clash') self.assertEqual(Bar.check() + Baz.check(), [ Warning( "The field's intermediary table 'clash' clashes with the " "table name of 'invalid_models_tests.%s.foos'." % clashing_model, obj=model_cls._meta.get_field('foos'), hint=( "You have configured settings.DATABASE_ROUTERS. Verify " "that the table of 'invalid_models_tests.%s.foos' is " "correctly routed to a separate database." % clashing_model ), id='fields.W344', ) for model_cls, clashing_model in [(Bar, 'Baz'), (Baz, 'Bar')] ]) def test_m2m_autogenerated_table_name_clash(self): class Foo(models.Model): class Meta: db_table = 'bar_foos' class Bar(models.Model): # The autogenerated `db_table` will be bar_foos. foos = models.ManyToManyField(Foo) class Meta: db_table = 'bar' self.assertEqual(Bar.check(), [ Error( "The field's intermediary table 'bar_foos' clashes with the " "table name of 'invalid_models_tests.Foo'.", obj=Bar._meta.get_field('foos'), id='fields.E340', ) ]) @override_settings(DATABASE_ROUTERS=['invalid_models_tests.test_models.EmptyRouter']) def test_m2m_autogenerated_table_name_clash_database_routers_installed(self): class Foo(models.Model): class Meta: db_table = 'bar_foos' class Bar(models.Model): # The autogenerated db_table is bar_foos. foos = models.ManyToManyField(Foo) class Meta: db_table = 'bar' self.assertEqual(Bar.check(), [ Warning( "The field's intermediary table 'bar_foos' clashes with the " "table name of 'invalid_models_tests.Foo'.", obj=Bar._meta.get_field('foos'), hint=( "You have configured settings.DATABASE_ROUTERS. Verify " "that the table of 'invalid_models_tests.Foo' is " "correctly routed to a separate database." ), id='fields.W344', ), ]) def test_m2m_unmanaged_shadow_models_not_checked(self): class A1(models.Model): pass class C1(models.Model): mm_a = models.ManyToManyField(A1, db_table='d1') # Unmanaged models that shadow the above models. 
# Reused table names shouldn't be flagged by any checks. class A2(models.Model): class Meta: managed = False class C2(models.Model): mm_a = models.ManyToManyField(A2, through='Intermediate') class Meta: managed = False class Intermediate(models.Model): a2 = models.ForeignKey(A2, models.CASCADE, db_column='a1_id') c2 = models.ForeignKey(C2, models.CASCADE, db_column='c1_id') class Meta: db_table = 'd1' managed = False self.assertEqual(C1.check(), []) self.assertEqual(C2.check(), []) def test_m2m_to_concrete_and_proxy_allowed(self): class A(models.Model): pass class Through(models.Model): a = models.ForeignKey('A', models.CASCADE) c = models.ForeignKey('C', models.CASCADE) class ThroughProxy(Through): class Meta: proxy = True class C(models.Model): mm_a = models.ManyToManyField(A, through=Through) mm_aproxy = models.ManyToManyField(A, through=ThroughProxy, related_name='proxied_m2m') self.assertEqual(C.check(), []) @isolate_apps('django.contrib.auth', kwarg_name='apps') def test_lazy_reference_checks(self, apps): class DummyModel(models.Model): author = models.ForeignKey('Author', models.CASCADE) class Meta: app_label = 'invalid_models_tests' class DummyClass: def __call__(self, **kwargs): pass def dummy_method(self): pass def dummy_function(*args, **kwargs): pass apps.lazy_model_operation(dummy_function, ('auth', 'imaginarymodel')) apps.lazy_model_operation(dummy_function, ('fanciful_app', 'imaginarymodel')) post_init.connect(dummy_function, sender='missing-app.Model', apps=apps) post_init.connect(DummyClass(), sender='missing-app.Model', apps=apps) post_init.connect(DummyClass().dummy_method, sender='missing-app.Model', apps=apps) self.assertEqual(_check_lazy_references(apps), [ Error( "%r contains a lazy reference to auth.imaginarymodel, " "but app 'auth' doesn't provide model 'imaginarymodel'." % dummy_function, obj=dummy_function, id='models.E022', ), Error( "%r contains a lazy reference to fanciful_app.imaginarymodel, " "but app 'fanciful_app' isn't installed." % dummy_function, obj=dummy_function, id='models.E022', ), Error( "An instance of class 'DummyClass' was connected to " "the 'post_init' signal with a lazy reference to the sender " "'missing-app.model', but app 'missing-app' isn't installed.", hint=None, obj='invalid_models_tests.test_models', id='signals.E001', ), Error( "Bound method 'DummyClass.dummy_method' was connected to the " "'post_init' signal with a lazy reference to the sender " "'missing-app.model', but app 'missing-app' isn't installed.", hint=None, obj='invalid_models_tests.test_models', id='signals.E001', ), Error( "The field invalid_models_tests.DummyModel.author was declared " "with a lazy reference to 'invalid_models_tests.author', but app " "'invalid_models_tests' isn't installed.", hint=None, obj=DummyModel.author.field, id='fields.E307', ), Error( "The function 'dummy_function' was connected to the 'post_init' " "signal with a lazy reference to the sender " "'missing-app.model', but app 'missing-app' isn't installed.", hint=None, obj='invalid_models_tests.test_models', id='signals.E001', ), ]) @isolate_apps('invalid_models_tests') class ConstraintsTests(TestCase): def test_check_constraints(self): class Model(models.Model): age = models.IntegerField() class Meta: constraints = [models.CheckConstraint(check=models.Q(age__gte=18), name='is_adult')] errors = Model.check(databases=self.databases) warn = Warning( '%s does not support check constraints.' % connection.display_name, hint=( "A constraint won't be created.
Silence this warning if you " "don't care about it." ), obj=Model, id='models.W027', ) expected = [] if connection.features.supports_table_check_constraints else [warn] self.assertCountEqual(errors, expected) def test_check_constraints_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {'supports_table_check_constraints'} constraints = [models.CheckConstraint(check=models.Q(age__gte=18), name='is_adult')] self.assertEqual(Model.check(databases=self.databases), []) def test_unique_constraint_with_condition(self): class Model(models.Model): age = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_gte_100', condition=models.Q(age__gte=100), ), ] errors = Model.check(databases=self.databases) expected = [] if connection.features.supports_partial_indexes else [ Warning( '%s does not support unique constraints with conditions.' % connection.display_name, hint=( "A constraint won't be created. Silence this warning if " "you don't care about it." ), obj=Model, id='models.W036', ), ] self.assertEqual(errors, expected) def test_unique_constraint_with_condition_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {'supports_partial_indexes'} constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_gte_100', condition=models.Q(age__gte=100), ), ] self.assertEqual(Model.check(databases=self.databases), []) def test_deferrable_unique_constraint(self): class Model(models.Model): age = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_deferrable', deferrable=models.Deferrable.DEFERRED, ), ] errors = Model.check(databases=self.databases) expected = [] if connection.features.supports_deferrable_unique_constraints else [ Warning( '%s does not support deferrable unique constraints.' % connection.display_name, hint=( "A constraint won't be created. Silence this warning if " "you don't care about it." ), obj=Model, id='models.W038', ), ] self.assertEqual(errors, expected) def test_deferrable_unique_constraint_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {'supports_deferrable_unique_constraints'} constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_deferrable', deferrable=models.Deferrable.IMMEDIATE, ), ] self.assertEqual(Model.check(databases=self.databases), [])
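# A minimal sketch (an addition, not from the original module), assuming the
# Model.check(databases=...) signature used throughout ConstraintsTests: a
# hypothetical helper that flattens backend-dependent messages (such as
# models.W027/W036/W038 above) into plain strings, useful when comparing
# what different database aliases report for the same model.
def describe_check_messages(model_cls, databases=('default',)):
    """Return 'id: msg' strings for each system check message (illustrative)."""
    return [
        '%s: %s' % (message.id, message.msg)
        for message in model_cls.check(databases=databases)
    ]
# Usage sketch: describe_check_messages(SomeModel, databases=('default', 'other')),
# where SomeModel is any model class defined under @isolate_apps.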
3b2de3f204d8cd1c33f8986f8837d9c594c134fb673ac3e816ce87649444cf06
import gettext import os import re from datetime import datetime, timedelta from importlib import import_module import pytz from django import forms from django.conf import settings from django.contrib import admin from django.contrib.admin import widgets from django.contrib.admin.tests import AdminSeleniumTestCase from django.contrib.auth.models import User from django.core.files.storage import default_storage from django.core.files.uploadedfile import SimpleUploadedFile from django.db.models import ( CharField, DateField, DateTimeField, ManyToManyField, UUIDField, ) from django.test import SimpleTestCase, TestCase, override_settings from django.urls import reverse from django.utils import translation from .models import ( Advisor, Album, Band, Bee, Car, Company, Event, Honeycomb, Individual, Inventory, Member, MyFileField, Profile, School, Student, ) from .widgetadmin import site as widget_admin_site class TestDataMixin: @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email=None) cls.u2 = User.objects.create_user(username='testser', password='secret') Car.objects.create(owner=cls.superuser, make='Volkswagen', model='Passat') Car.objects.create(owner=cls.u2, make='BMW', model='M3') class AdminFormfieldForDBFieldTests(SimpleTestCase): """ Tests for correct behavior of ModelAdmin.formfield_for_dbfield """ def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides): """ Helper to call formfield_for_dbfield for a given model and field name and verify that the returned formfield is appropriate. """ # Override any settings on the model admin class MyModelAdmin(admin.ModelAdmin): pass for k in admin_overrides: setattr(MyModelAdmin, k, admin_overrides[k]) # Construct the admin, and ask it for a formfield ma = MyModelAdmin(model, admin.site) ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None) # "unwrap" the widget wrapper, if needed if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper): widget = ff.widget.widget else: widget = ff.widget self.assertIsInstance(widget, widgetclass) # Return the formfield so that other tests can continue return ff def test_DateField(self): self.assertFormfield(Event, 'start_date', widgets.AdminDateWidget) def test_DateTimeField(self): self.assertFormfield(Member, 'birthdate', widgets.AdminSplitDateTime) def test_TimeField(self): self.assertFormfield(Event, 'start_time', widgets.AdminTimeWidget) def test_TextField(self): self.assertFormfield(Event, 'description', widgets.AdminTextareaWidget) def test_URLField(self): self.assertFormfield(Event, 'link', widgets.AdminURLFieldWidget) def test_IntegerField(self): self.assertFormfield(Event, 'min_age', widgets.AdminIntegerFieldWidget) def test_CharField(self): self.assertFormfield(Member, 'name', widgets.AdminTextInputWidget) def test_EmailField(self): self.assertFormfield(Member, 'email', widgets.AdminEmailInputWidget) def test_FileField(self): self.assertFormfield(Album, 'cover_art', widgets.AdminFileWidget) def test_ForeignKey(self): self.assertFormfield(Event, 'main_band', forms.Select) def test_raw_id_ForeignKey(self): self.assertFormfield(Event, 'main_band', widgets.ForeignKeyRawIdWidget, raw_id_fields=['main_band']) def test_radio_fields_ForeignKey(self): ff = self.assertFormfield(Event, 'main_band', widgets.AdminRadioSelect, radio_fields={'main_band': admin.VERTICAL}) self.assertIsNone(ff.empty_label) def test_many_to_many(self): self.assertFormfield(Band, 'members', forms.SelectMultiple) def 
test_raw_id_many_to_many(self): self.assertFormfield(Band, 'members', widgets.ManyToManyRawIdWidget, raw_id_fields=['members']) def test_filtered_many_to_many(self): self.assertFormfield(Band, 'members', widgets.FilteredSelectMultiple, filter_vertical=['members']) def test_formfield_overrides(self): self.assertFormfield(Event, 'start_date', forms.TextInput, formfield_overrides={DateField: {'widget': forms.TextInput}}) def test_formfield_overrides_widget_instances(self): """ Widget instances in formfield_overrides are not shared between different fields. (#19423) """ class BandAdmin(admin.ModelAdmin): formfield_overrides = { CharField: {'widget': forms.TextInput(attrs={'size': '10'})} } ma = BandAdmin(Band, admin.site) f1 = ma.formfield_for_dbfield(Band._meta.get_field('name'), request=None) f2 = ma.formfield_for_dbfield(Band._meta.get_field('style'), request=None) self.assertNotEqual(f1.widget, f2.widget) self.assertEqual(f1.widget.attrs['maxlength'], '100') self.assertEqual(f2.widget.attrs['maxlength'], '20') self.assertEqual(f2.widget.attrs['size'], '10') def test_formfield_overrides_m2m_filter_widget(self): """ The autocomplete_fields, raw_id_fields, filter_vertical, and filter_horizontal widgets for ManyToManyFields may be overridden by specifying a widget in formfield_overrides. """ class BandAdmin(admin.ModelAdmin): filter_vertical = ['members'] formfield_overrides = { ManyToManyField: {'widget': forms.CheckboxSelectMultiple}, } ma = BandAdmin(Band, admin.site) field = ma.formfield_for_dbfield(Band._meta.get_field('members'), request=None) self.assertIsInstance(field.widget.widget, forms.CheckboxSelectMultiple) def test_formfield_overrides_for_datetime_field(self): """ Overriding the widget for DateTimeField doesn't override the default form_class for that field (#26449). """ class MemberAdmin(admin.ModelAdmin): formfield_overrides = {DateTimeField: {'widget': widgets.AdminSplitDateTime}} ma = MemberAdmin(Member, admin.site) f1 = ma.formfield_for_dbfield(Member._meta.get_field('birthdate'), request=None) self.assertIsInstance(f1.widget, widgets.AdminSplitDateTime) self.assertIsInstance(f1, forms.SplitDateTimeField) def test_formfield_overrides_for_custom_field(self): """ formfield_overrides works for a custom field class. """ class AlbumAdmin(admin.ModelAdmin): formfield_overrides = {MyFileField: {'widget': forms.TextInput()}} ma = AlbumAdmin(Album, admin.site) f1 = ma.formfield_for_dbfield(Album._meta.get_field('backside_art'), request=None) self.assertIsInstance(f1.widget, forms.TextInput) def test_field_with_choices(self): self.assertFormfield(Member, 'gender', forms.Select) def test_choices_with_radio_fields(self): self.assertFormfield(Member, 'gender', widgets.AdminRadioSelect, radio_fields={'gender': admin.VERTICAL}) def test_inheritance(self): self.assertFormfield(Album, 'backside_art', widgets.AdminFileWidget) def test_m2m_widgets(self): """M2M fields' help text as it applies to the admin app (#9321).""" class AdvisorAdmin(admin.ModelAdmin): filter_vertical = ['companies'] self.assertFormfield(Advisor, 'companies', widgets.FilteredSelectMultiple, filter_vertical=['companies']) ma = AdvisorAdmin(Advisor, admin.site) f = ma.formfield_for_dbfield(Advisor._meta.get_field('companies'), request=None) self.assertEqual( f.help_text, 'Hold down “Control”, or “Command” on a Mac, to select more than one.'
) @override_settings(ROOT_URLCONF='admin_widgets.urls') class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, TestCase): def test_filter_choices_by_request_user(self): """ Ensure the user can only see their own cars in the foreign key dropdown. """ self.client.force_login(self.superuser) response = self.client.get(reverse('admin:admin_widgets_cartire_add')) self.assertNotContains(response, "BMW M3") self.assertContains(response, "Volkswagen Passat") @override_settings(ROOT_URLCONF='admin_widgets.urls') class AdminForeignKeyWidgetChangeList(TestDataMixin, TestCase): def setUp(self): self.client.force_login(self.superuser) def test_changelist_ForeignKey(self): response = self.client.get(reverse('admin:admin_widgets_car_changelist')) self.assertContains(response, '/auth/user/add/') @override_settings(ROOT_URLCONF='admin_widgets.urls') class AdminForeignKeyRawIdWidget(TestDataMixin, TestCase): def setUp(self): self.client.force_login(self.superuser) def test_nonexistent_target_id(self): band = Band.objects.create(name='Bogey Blues') pk = band.pk band.delete() post_data = { "main_band": str(pk), } # Try posting with a nonexistent pk in a raw id field: this # should result in an error message, not a server exception. response = self.client.post(reverse('admin:admin_widgets_event_add'), post_data) self.assertContains(response, 'Select a valid choice. That choice is not one of the available choices.') def test_invalid_target_id(self): for test_str in ('Iñtërnâtiônàlizætiøn', "1234'", -1234): # This should result in an error message, not a server exception. response = self.client.post(reverse('admin:admin_widgets_event_add'), {"main_band": test_str}) self.assertContains(response, 'Select a valid choice. That choice is not one of the available choices.') def test_url_params_from_lookup_dict_any_iterable(self): lookup1 = widgets.url_params_from_lookup_dict({'color__in': ('red', 'blue')}) lookup2 = widgets.url_params_from_lookup_dict({'color__in': ['red', 'blue']}) self.assertEqual(lookup1, {'color__in': 'red,blue'}) self.assertEqual(lookup1, lookup2) def test_url_params_from_lookup_dict_callable(self): def my_callable(): return 'works' lookup1 = widgets.url_params_from_lookup_dict({'myfield': my_callable}) lookup2 = widgets.url_params_from_lookup_dict({'myfield': my_callable()}) self.assertEqual(lookup1, lookup2) def test_label_and_url_for_value_invalid_uuid(self): field = Bee._meta.get_field('honeycomb') self.assertIsInstance(field.target_field, UUIDField) widget = widgets.ForeignKeyRawIdWidget(field.remote_field, admin.site) self.assertEqual(widget.label_and_url_for_value('invalid-uuid'), ('', '')) class FilteredSelectMultipleWidgetTest(SimpleTestCase): def test_render(self): # Backslash in verbose_name to ensure it is JavaScript escaped. w = widgets.FilteredSelectMultiple('test\\', False) self.assertHTMLEqual( w.render('test', 'test'), '<select multiple name="test" class="selectfilter" ' 'data-field-name="test\\" data-is-stacked="0">\n</select>' ) def test_stacked_render(self): # Backslash in verbose_name to ensure it is JavaScript escaped. 
w = widgets.FilteredSelectMultiple('test\\', True) self.assertHTMLEqual( w.render('test', 'test'), '<select multiple name="test" class="selectfilterstacked" ' 'data-field-name="test\\" data-is-stacked="1">\n</select>' ) class AdminDateWidgetTest(SimpleTestCase): def test_attrs(self): w = widgets.AdminDateWidget() self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="2007-12-01" type="text" class="vDateField" name="test" size="10">', ) # pass attrs to widget w = widgets.AdminDateWidget(attrs={'size': 20, 'class': 'myDateField'}) self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="2007-12-01" type="text" class="myDateField" name="test" size="20">', ) class AdminTimeWidgetTest(SimpleTestCase): def test_attrs(self): w = widgets.AdminTimeWidget() self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="09:30:00" type="text" class="vTimeField" name="test" size="8">', ) # pass attrs to widget w = widgets.AdminTimeWidget(attrs={'size': 20, 'class': 'myTimeField'}) self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="09:30:00" type="text" class="myTimeField" name="test" size="20">', ) class AdminSplitDateTimeWidgetTest(SimpleTestCase): def test_render(self): w = widgets.AdminSplitDateTime() self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<p class="datetime">' 'Date: <input value="2007-12-01" type="text" class="vDateField" ' 'name="test_0" size="10"><br>' 'Time: <input value="09:30:00" type="text" class="vTimeField" ' 'name="test_1" size="8"></p>' ) def test_localization(self): w = widgets.AdminSplitDateTime() with self.settings(USE_L10N=True), translation.override('de-at'): w.is_localized = True self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<p class="datetime">' 'Datum: <input value="01.12.2007" type="text" ' 'class="vDateField" name="test_0" size="10"><br>' 'Zeit: <input value="09:30:00" type="text" class="vTimeField" ' 'name="test_1" size="8"></p>' ) class AdminURLWidgetTest(SimpleTestCase): def test_get_context_validates_url(self): w = widgets.AdminURLFieldWidget() for invalid in ['', '/not/a/full/url/', 'javascript:alert("Danger XSS!")']: with self.subTest(url=invalid): self.assertFalse(w.get_context('name', invalid, {})['url_valid']) self.assertTrue(w.get_context('name', 'http://example.com', {})['url_valid']) def test_render(self): w = widgets.AdminURLFieldWidget() self.assertHTMLEqual( w.render('test', ''), '<input class="vURLField" name="test" type="url">' ) self.assertHTMLEqual( w.render('test', 'http://example.com'), '<p class="url">Currently:<a href="http://example.com">' 'http://example.com</a><br>' 'Change:<input class="vURLField" name="test" type="url" ' 'value="http://example.com"></p>' ) def test_render_idn(self): w = widgets.AdminURLFieldWidget() self.assertHTMLEqual( w.render('test', 'http://example-äüö.com'), '<p class="url">Currently: <a href="http://xn--example--7za4pnc.com">' 'http://example-äüö.com</a><br>' 'Change:<input class="vURLField" name="test" type="url" ' 'value="http://example-äüö.com"></p>' ) def test_render_quoting(self): """ WARNING: This test doesn't use assertHTMLEqual since it will get rid of some escapes which are tested here!
""" HREF_RE = re.compile('href="([^"]+)"') VALUE_RE = re.compile('value="([^"]+)"') TEXT_RE = re.compile('<a[^>]+>([^>]+)</a>') w = widgets.AdminURLFieldWidget() output = w.render('test', 'http://example.com/<sometag>some-text</sometag>') self.assertEqual( HREF_RE.search(output).groups()[0], 'http://example.com/%3Csometag%3Esome-text%3C/sometag%3E', ) self.assertEqual( TEXT_RE.search(output).groups()[0], 'http://example.com/&lt;sometag&gt;some-text&lt;/sometag&gt;', ) self.assertEqual( VALUE_RE.search(output).groups()[0], 'http://example.com/&lt;sometag&gt;some-text&lt;/sometag&gt;', ) output = w.render('test', 'http://example-äüö.com/<sometag>some-text</sometag>') self.assertEqual( HREF_RE.search(output).groups()[0], 'http://xn--example--7za4pnc.com/%3Csometag%3Esome-text%3C/sometag%3E', ) self.assertEqual( TEXT_RE.search(output).groups()[0], 'http://example-äüö.com/&lt;sometag&gt;some-text&lt;/sometag&gt;', ) self.assertEqual( VALUE_RE.search(output).groups()[0], 'http://example-äüö.com/&lt;sometag&gt;some-text&lt;/sometag&gt;', ) output = w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"') self.assertEqual( HREF_RE.search(output).groups()[0], 'http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22', ) self.assertEqual( TEXT_RE.search(output).groups()[0], 'http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;' 'alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;' ) self.assertEqual( VALUE_RE.search(output).groups()[0], 'http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;', ) class AdminUUIDWidgetTests(SimpleTestCase): def test_attrs(self): w = widgets.AdminUUIDInputWidget() self.assertHTMLEqual( w.render('test', '550e8400-e29b-41d4-a716-446655440000'), '<input value="550e8400-e29b-41d4-a716-446655440000" type="text" class="vUUIDField" name="test">', ) w = widgets.AdminUUIDInputWidget(attrs={'class': 'myUUIDInput'}) self.assertHTMLEqual( w.render('test', '550e8400-e29b-41d4-a716-446655440000'), '<input value="550e8400-e29b-41d4-a716-446655440000" type="text" class="myUUIDInput" name="test">', ) @override_settings(ROOT_URLCONF='admin_widgets.urls') class AdminFileWidgetTests(TestDataMixin, TestCase): @classmethod def setUpTestData(cls): super().setUpTestData() band = Band.objects.create(name='Linkin Park') cls.album = band.album_set.create( name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg' ) def test_render(self): w = widgets.AdminFileWidget() self.assertHTMLEqual( w.render('test', self.album.cover_art), '<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/' r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> ' '<span class="clearable-file-input">' '<input type="checkbox" name="test-clear" id="test-clear_id"> ' '<label for="test-clear_id">Clear</label></span><br>' 'Change: <input type="file" name="test"></p>' % { 'STORAGE_URL': default_storage.url(''), }, ) self.assertHTMLEqual( w.render('test', SimpleUploadedFile('test', b'content')), '<input type="file" name="test">', ) def test_render_required(self): widget = widgets.AdminFileWidget() widget.is_required = True self.assertHTMLEqual( widget.render('test', self.album.cover_art), '<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/' r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a><br>' 'Change: <input type="file" name="test"></p>' % { 'STORAGE_URL': default_storage.url(''), }, ) def test_readonly_fields(self): """ File widgets should render as a link when they're marked "read only." 
""" self.client.force_login(self.superuser) response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,))) self.assertContains( response, '<div class="readonly"><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">' r'albums\hybrid_theory.jpg</a></div>' % {'STORAGE_URL': default_storage.url('')}, html=True, ) self.assertNotContains( response, '<input type="file" name="cover_art" id="id_cover_art">', html=True, ) response = self.client.get(reverse('admin:admin_widgets_album_add')) self.assertContains( response, '<div class="readonly"></div>', html=True, ) @override_settings(ROOT_URLCONF='admin_widgets.urls') class ForeignKeyRawIdWidgetTest(TestCase): def test_render(self): band = Band.objects.create(name='Linkin Park') band.album_set.create( name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg' ) rel = Album._meta.get_field('band').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('test', band.pk, attrs={}), '<input type="text" name="test" value="%(bandpk)s" ' 'class="vForeignKeyRawIdAdminField">' '<a href="/admin_widgets/band/?_to_field=id" class="related-lookup" ' 'id="lookup_id_test" title="Lookup"></a>&nbsp;<strong>' '<a href="/admin_widgets/band/%(bandpk)s/change/">Linkin Park</a>' '</strong>' % {'bandpk': band.pk} ) def test_relations_to_non_primary_key(self): # ForeignKeyRawIdWidget works with fields which aren't related to # the model's primary key. apple = Inventory.objects.create(barcode=86, name='Apple') Inventory.objects.create(barcode=22, name='Pear') core = Inventory.objects.create( barcode=87, name='Core', parent=apple ) rel = Inventory._meta.get_field('parent').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('test', core.parent_id, attrs={}), '<input type="text" name="test" value="86" ' 'class="vForeignKeyRawIdAdminField">' '<a href="/admin_widgets/inventory/?_to_field=barcode" ' 'class="related-lookup" id="lookup_id_test" title="Lookup"></a>' '&nbsp;<strong><a href="/admin_widgets/inventory/%(pk)s/change/">' 'Apple</a></strong>' % {'pk': apple.pk} ) def test_fk_related_model_not_in_admin(self): # FK to a model not registered with admin site. Raw ID widget should # have no magnifying glass link. See #16542 big_honeycomb = Honeycomb.objects.create(location='Old tree') big_honeycomb.bee_set.create() rel = Bee._meta.get_field('honeycomb').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('honeycomb_widget', big_honeycomb.pk, attrs={}), '<input type="text" name="honeycomb_widget" value="%(hcombpk)s">' '&nbsp;<strong>%(hcomb)s</strong>' % {'hcombpk': big_honeycomb.pk, 'hcomb': big_honeycomb} ) def test_fk_to_self_model_not_in_admin(self): # FK to self, not registered with admin site. Raw ID widget should have # no magnifying glass link. 
# See #16542. subject1 = Individual.objects.create(name='Subject #1') Individual.objects.create(name='Child', parent=subject1) rel = Individual._meta.get_field('parent').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('individual_widget', subject1.pk, attrs={}), '<input type="text" name="individual_widget" value="%(subj1pk)s">' '&nbsp;<strong>%(subj1)s</strong>' % {'subj1pk': subject1.pk, 'subj1': subject1} ) def test_proper_manager_for_label_lookup(self): # see #9258 rel = Inventory._meta.get_field('parent').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) hidden = Inventory.objects.create( barcode=93, name='Hidden', hidden=True ) child_of_hidden = Inventory.objects.create( barcode=94, name='Child of hidden', parent=hidden ) self.assertHTMLEqual( w.render('test', child_of_hidden.parent_id, attrs={}), '<input type="text" name="test" value="93" class="vForeignKeyRawIdAdminField">' '<a href="/admin_widgets/inventory/?_to_field=barcode" ' 'class="related-lookup" id="lookup_id_test" title="Lookup"></a>' '&nbsp;<strong><a href="/admin_widgets/inventory/%(pk)s/change/">' 'Hidden</a></strong>' % {'pk': hidden.pk} ) @override_settings(ROOT_URLCONF='admin_widgets.urls') class ManyToManyRawIdWidgetTest(TestCase): def test_render(self): band = Band.objects.create(name='Linkin Park') m1 = Member.objects.create(name='Chester') m2 = Member.objects.create(name='Mike') band.members.add(m1, m2) rel = Band._meta.get_field('members').remote_field w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('test', [m1.pk, m2.pk], attrs={}), ( '<input type="text" name="test" value="%(m1pk)s,%(m2pk)s" class="vManyToManyRawIdAdminField">' '<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>' ) % {'m1pk': m1.pk, 'm2pk': m2.pk} ) self.assertHTMLEqual( w.render('test', [m1.pk]), ( '<input type="text" name="test" value="%(m1pk)s" class="vManyToManyRawIdAdminField">' '<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>' ) % {'m1pk': m1.pk} ) def test_m2m_related_model_not_in_admin(self): # M2M relationship with model not registered with admin site. Raw ID # widget should have no magnifying glass link. See #16542 consultor1 = Advisor.objects.create(name='Rockstar Techie') c1 = Company.objects.create(name='Doodle') c2 = Company.objects.create(name='Pear') consultor1.companies.add(c1, c2) rel = Advisor._meta.get_field('companies').remote_field w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('company_widget1', [c1.pk, c2.pk], attrs={}), '<input type="text" name="company_widget1" value="%(c1pk)s,%(c2pk)s">' % {'c1pk': c1.pk, 'c2pk': c2.pk} ) self.assertHTMLEqual( w.render('company_widget2', [c1.pk]), '<input type="text" name="company_widget2" value="%(c1pk)s">' % {'c1pk': c1.pk} ) @override_settings(ROOT_URLCONF='admin_widgets.urls') class RelatedFieldWidgetWrapperTests(SimpleTestCase): def test_no_can_add_related(self): rel = Individual._meta.get_field('parent').remote_field w = widgets.AdminRadioSelect() # Used to fail with a name error.
w = widgets.RelatedFieldWidgetWrapper(w, rel, widget_admin_site) self.assertFalse(w.can_add_related) def test_select_multiple_widget_cant_change_delete_related(self): rel = Individual._meta.get_field('parent').remote_field widget = forms.SelectMultiple() wrapper = widgets.RelatedFieldWidgetWrapper( widget, rel, widget_admin_site, can_add_related=True, can_change_related=True, can_delete_related=True, ) self.assertTrue(wrapper.can_add_related) self.assertFalse(wrapper.can_change_related) self.assertFalse(wrapper.can_delete_related) def test_on_delete_cascade_rel_cant_delete_related(self): rel = Individual._meta.get_field('soulmate').remote_field widget = forms.Select() wrapper = widgets.RelatedFieldWidgetWrapper( widget, rel, widget_admin_site, can_add_related=True, can_change_related=True, can_delete_related=True, ) self.assertTrue(wrapper.can_add_related) self.assertTrue(wrapper.can_change_related) self.assertFalse(wrapper.can_delete_related) def test_custom_widget_render(self): class CustomWidget(forms.Select): def render(self, *args, **kwargs): return 'custom render output' rel = Album._meta.get_field('band').remote_field widget = CustomWidget() wrapper = widgets.RelatedFieldWidgetWrapper( widget, rel, widget_admin_site, can_add_related=True, can_change_related=True, can_delete_related=True, ) output = wrapper.render('name', 'value') self.assertIn('custom render output', output) def test_widget_delegates_value_omitted_from_data(self): class CustomWidget(forms.Select): def value_omitted_from_data(self, data, files, name): return False rel = Album._meta.get_field('band').remote_field widget = CustomWidget() wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site) self.assertIs(wrapper.value_omitted_from_data({}, {}, 'band'), False) def test_widget_is_hidden(self): rel = Album._meta.get_field('band').remote_field widget = forms.HiddenInput() widget.choices = () wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site) self.assertIs(wrapper.is_hidden, True) context = wrapper.get_context('band', None, {}) self.assertIs(context['is_hidden'], True) output = wrapper.render('name', 'value') # Related item links are hidden. self.assertNotIn('<a ', output) def test_widget_is_not_hidden(self): rel = Album._meta.get_field('band').remote_field widget = forms.Select() wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site) self.assertIs(wrapper.is_hidden, False) context = wrapper.get_context('band', None, {}) self.assertIs(context['is_hidden'], False) output = wrapper.render('name', 'value') # Related item links are present. self.assertIn('<a ', output) @override_settings(ROOT_URLCONF='admin_widgets.urls') class AdminWidgetSeleniumTestCase(AdminSeleniumTestCase): available_apps = ['admin_widgets'] + AdminSeleniumTestCase.available_apps def setUp(self): self.u1 = User.objects.create_superuser(username='super', password='secret', email='[email protected]') class DateTimePickerSeleniumTests(AdminWidgetSeleniumTestCase): def test_show_hide_date_time_picker_widgets(self): """ Pressing the ESC key or clicking on a widget value closes the date and time picker widgets. 
""" from selenium.webdriver.common.keys import Keys self.admin_login(username='super', password='secret', login_url='/') # Open a page that has a date and time picker widgets self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add')) # First, with the date picker widget --------------------------------- cal_icon = self.selenium.find_element_by_id('calendarlink0') # The date picker is hidden self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed()) # Click the calendar icon cal_icon.click() # The date picker is visible self.assertTrue(self.selenium.find_element_by_id('calendarbox0').is_displayed()) # Press the ESC key self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE]) # The date picker is hidden again self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed()) # Click the calendar icon, then on the 15th of current month cal_icon.click() self.selenium.find_element_by_xpath("//a[contains(text(), '15')]").click() self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed()) self.assertEqual( self.selenium.find_element_by_id('id_birthdate_0').get_attribute('value'), datetime.today().strftime('%Y-%m-') + '15', ) # Then, with the time picker widget ---------------------------------- time_icon = self.selenium.find_element_by_id('clocklink0') # The time picker is hidden self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed()) # Click the time icon time_icon.click() # The time picker is visible self.assertTrue(self.selenium.find_element_by_id('clockbox0').is_displayed()) self.assertEqual( [ x.text for x in self.selenium.find_elements_by_xpath("//ul[@class='timelist']/li/a") ], ['Now', 'Midnight', '6 a.m.', 'Noon', '6 p.m.'] ) # Press the ESC key self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE]) # The time picker is hidden again self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed()) # Click the time icon, then select the 'Noon' value time_icon.click() self.selenium.find_element_by_xpath("//a[contains(text(), 'Noon')]").click() self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed()) self.assertEqual( self.selenium.find_element_by_id('id_birthdate_1').get_attribute('value'), '12:00:00', ) def test_calendar_nonday_class(self): """ Ensure cells that are not days of the month have the `nonday` CSS class. Refs #4574. """ self.admin_login(username='super', password='secret', login_url='/') # Open a page that has a date and time picker widgets self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add')) # fill in the birth date. self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01') # Click the calendar icon self.selenium.find_element_by_id('calendarlink0').click() # get all the tds within the calendar calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') # make sure the first and last 6 cells have class nonday for td in tds[:6] + tds[-6:]: self.assertEqual(td.get_attribute('class'), 'nonday') def test_calendar_selected_class(self): """ Ensure cell for the day in the input has the `selected` CSS class. Refs #4574. """ self.admin_login(username='super', password='secret', login_url='/') # Open a page that has a date and time picker widgets self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add')) # fill in the birth date. 
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01') # Click the calendar icon self.selenium.find_element_by_id('calendarlink0').click() # get all the tds within the calendar calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') # verify the selected cell selected = tds[6] self.assertEqual(selected.get_attribute('class'), 'selected') self.assertEqual(selected.text, '1') def test_calendar_no_selected_class(self): """ Ensure no cells are given the selected class when the field is empty. Refs #4574. """ self.admin_login(username='super', password='secret', login_url='/') # Open a page that has a date and time picker widgets self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add')) # Click the calendar icon self.selenium.find_element_by_id('calendarlink0').click() # get all the tds within the calendar calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') # verify there are no cells with the selected class selected = [td for td in tds if td.get_attribute('class') == 'selected'] self.assertEqual(len(selected), 0) def test_calendar_show_date_from_input(self): """ The calendar shows the date from the input field for every locale supported by Django. """ self.admin_login(username='super', password='secret', login_url='/') # Enter test data member = Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M') # Get month name translations for every locale month_string = 'May' path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale') for language_code, language_name in settings.LANGUAGES: try: catalog = gettext.translation('djangojs', path, [language_code]) except OSError: continue if month_string in catalog._catalog: month_name = catalog._catalog[month_string] else: month_name = month_string # Get the expected caption may_translation = month_name expected_caption = '{:s} {:d}'.format(may_translation.upper(), 1984) # Test with every locale with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True): # Open a page that has a date picker widget url = reverse('admin:admin_widgets_member_change', args=(member.pk,)) self.selenium.get(self.live_server_url + url) # Click on the calendar icon self.selenium.find_element_by_id('calendarlink0').click() # Make sure that the right month and year are displayed self.wait_for_text('#calendarin0 caption', expected_caption) @override_settings(TIME_ZONE='Asia/Singapore') class DateTimePickerShortcutsSeleniumTests(AdminWidgetSeleniumTestCase): def test_date_time_picker_shortcuts(self): """ date/time/datetime picker shortcuts work in the current time zone. Refs #20663. This test case is fairly tricky, it relies on selenium still running the browser in the default time zone "America/Chicago" despite `override_settings` changing the time zone to "Asia/Singapore". """ self.admin_login(username='super', password='secret', login_url='/') error_margin = timedelta(seconds=10) # If we are neighbouring a DST, we add an hour of error margin. 
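        # (Added note: for America/Chicago, tzname() flips between 'CST' and
        # 'CDT' across a DST transition, so differing names for yesterday and
        # tomorrow signal that the shortcut values may land an hour off.)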
tz = pytz.timezone('America/Chicago') utc_now = datetime.now(pytz.utc) tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname() tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname() if tz_yesterday != tz_tomorrow: error_margin += timedelta(hours=1) now = datetime.now() self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add')) self.selenium.find_element_by_id('id_name').send_keys('test') # Click on the "today" and "now" shortcuts. shortcuts = self.selenium.find_elements_by_css_selector('.field-birthdate .datetimeshortcuts') for shortcut in shortcuts: shortcut.find_element_by_tag_name('a').click() # There is a time zone mismatch warning. # Warning: This would effectively fail if the TIME_ZONE defined in the # settings has the same UTC offset as "Asia/Singapore" because the # mismatch warning would be rightfully missing from the page. self.selenium.find_elements_by_css_selector('.field-birthdate .timezonewarning') # Submit the form. with self.wait_page_loaded(): self.selenium.find_element_by_name('_save').click() # Make sure that "now" in javascript is within 10 seconds # from "now" on the server side. member = Member.objects.get(name='test') self.assertGreater(member.birthdate, now - error_margin) self.assertLess(member.birthdate, now + error_margin) # The above tests run with Asia/Singapore which are on the positive side of # UTC. Here we test with a timezone on the negative side. @override_settings(TIME_ZONE='US/Eastern') class DateTimePickerAltTimezoneSeleniumTests(DateTimePickerShortcutsSeleniumTests): pass class HorizontalVerticalFilterSeleniumTests(AdminWidgetSeleniumTestCase): def setUp(self): super().setUp() self.lisa = Student.objects.create(name='Lisa') self.john = Student.objects.create(name='John') self.bob = Student.objects.create(name='Bob') self.peter = Student.objects.create(name='Peter') self.jenny = Student.objects.create(name='Jenny') self.jason = Student.objects.create(name='Jason') self.cliff = Student.objects.create(name='Cliff') self.arthur = Student.objects.create(name='Arthur') self.school = School.objects.create(name='School of Awesome') def assertActiveButtons(self, mode, field_name, choose, remove, choose_all=None, remove_all=None): choose_link = '#id_%s_add_link' % field_name choose_all_link = '#id_%s_add_all_link' % field_name remove_link = '#id_%s_remove_link' % field_name remove_all_link = '#id_%s_remove_all_link' % field_name self.assertEqual(self.has_css_class(choose_link, 'active'), choose) self.assertEqual(self.has_css_class(remove_link, 'active'), remove) if mode == 'horizontal': self.assertEqual(self.has_css_class(choose_all_link, 'active'), choose_all) self.assertEqual(self.has_css_class(remove_all_link, 'active'), remove_all) def execute_basic_operations(self, mode, field_name): original_url = self.selenium.current_url from_box = '#id_%s_from' % field_name to_box = '#id_%s_to' % field_name choose_link = 'id_%s_add_link' % field_name choose_all_link = 'id_%s_add_all_link' % field_name remove_link = 'id_%s_remove_link' % field_name remove_all_link = 'id_%s_remove_all_link' % field_name # Initial positions --------------------------------------------------- self.assertSelectOptions(from_box, [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ]) self.assertSelectOptions(to_box, [str(self.lisa.id), str(self.peter.id)]) self.assertActiveButtons(mode, field_name, False, False, True, True) # Click 'Choose all' 
--------------------------------------------------
        if mode == 'horizontal':
            self.selenium.find_element_by_id(choose_all_link).click()
        elif mode == 'vertical':
            # There's no 'Choose all' button in vertical mode, so individually
            # select all options and click 'Choose'.
            for option in self.selenium.find_elements_by_css_selector(from_box + ' > option'):
                option.click()
            self.selenium.find_element_by_id(choose_link).click()
        self.assertSelectOptions(from_box, [])
        self.assertSelectOptions(to_box, [
            str(self.lisa.id), str(self.peter.id),
            str(self.arthur.id), str(self.bob.id),
            str(self.cliff.id), str(self.jason.id),
            str(self.jenny.id), str(self.john.id),
        ])
        self.assertActiveButtons(mode, field_name, False, False, False, True)

        # Click 'Remove all' --------------------------------------------------
        if mode == 'horizontal':
            self.selenium.find_element_by_id(remove_all_link).click()
        elif mode == 'vertical':
            # There's no 'Remove all' button in vertical mode, so individually
            # select all options and click 'Remove'.
            for option in self.selenium.find_elements_by_css_selector(to_box + ' > option'):
                option.click()
            self.selenium.find_element_by_id(remove_link).click()
        self.assertSelectOptions(from_box, [
            str(self.lisa.id), str(self.peter.id),
            str(self.arthur.id), str(self.bob.id),
            str(self.cliff.id), str(self.jason.id),
            str(self.jenny.id), str(self.john.id),
        ])
        self.assertSelectOptions(to_box, [])
        self.assertActiveButtons(mode, field_name, False, False, True, False)

        # Choose some options ------------------------------------------------
        from_lisa_select_option = self.selenium.find_element_by_css_selector(
            '{} > option[value="{}"]'.format(from_box, self.lisa.id)
        )
        # Check the title attribute is there for tool tips: ticket #20821
        self.assertEqual(from_lisa_select_option.get_attribute('title'), from_lisa_select_option.get_attribute('text'))
        self.select_option(from_box, str(self.lisa.id))
        self.select_option(from_box, str(self.jason.id))
        self.select_option(from_box, str(self.bob.id))
        self.select_option(from_box, str(self.john.id))
        self.assertActiveButtons(mode, field_name, True, False, True, False)
        self.selenium.find_element_by_id(choose_link).click()
        self.assertActiveButtons(mode, field_name, False, False, True, True)
        self.assertSelectOptions(from_box, [
            str(self.peter.id), str(self.arthur.id),
            str(self.cliff.id), str(self.jenny.id),
        ])
        self.assertSelectOptions(to_box, [
            str(self.lisa.id), str(self.bob.id),
            str(self.jason.id), str(self.john.id),
        ])
        # Check the tooltip is still there after moving: ticket #20821
        to_lisa_select_option = self.selenium.find_element_by_css_selector(
            '{} > option[value="{}"]'.format(to_box, self.lisa.id)
        )
        self.assertEqual(to_lisa_select_option.get_attribute('title'), to_lisa_select_option.get_attribute('text'))

        # Remove some options -------------------------------------------------
        self.select_option(to_box, str(self.lisa.id))
        self.select_option(to_box, str(self.bob.id))
        self.assertActiveButtons(mode, field_name, False, True, True, True)
        self.selenium.find_element_by_id(remove_link).click()
        self.assertActiveButtons(mode, field_name, False, False, True, True)
        self.assertSelectOptions(from_box, [
            str(self.peter.id), str(self.arthur.id),
            str(self.cliff.id), str(self.jenny.id),
            str(self.lisa.id), str(self.bob.id)
        ])
        self.assertSelectOptions(to_box, [str(self.jason.id), str(self.john.id)])

        # Choose some more options --------------------------------------------
        self.select_option(from_box, str(self.arthur.id))
        self.select_option(from_box, str(self.cliff.id))
self.selenium.find_element_by_id(choose_link).click() self.assertSelectOptions(from_box, [ str(self.peter.id), str(self.jenny.id), str(self.lisa.id), str(self.bob.id), ]) self.assertSelectOptions(to_box, [ str(self.jason.id), str(self.john.id), str(self.arthur.id), str(self.cliff.id), ]) # Choose some more options -------------------------------------------- self.select_option(from_box, str(self.peter.id)) self.select_option(from_box, str(self.lisa.id)) # Confirm they're selected after clicking inactive buttons: ticket #26575 self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)]) self.selenium.find_element_by_id(remove_link).click() self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)]) # Unselect the options ------------------------------------------------ self.deselect_option(from_box, str(self.peter.id)) self.deselect_option(from_box, str(self.lisa.id)) # Choose some more options -------------------------------------------- self.select_option(to_box, str(self.jason.id)) self.select_option(to_box, str(self.john.id)) # Confirm they're selected after clicking inactive buttons: ticket #26575 self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)]) self.selenium.find_element_by_id(choose_link).click() self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)]) # Unselect the options ------------------------------------------------ self.deselect_option(to_box, str(self.jason.id)) self.deselect_option(to_box, str(self.john.id)) # Pressing buttons shouldn't change the URL. self.assertEqual(self.selenium.current_url, original_url) def test_basic(self): self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username='super', password='secret', login_url='/') self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,))) self.wait_page_ready() self.execute_basic_operations('vertical', 'students') self.execute_basic_operations('horizontal', 'alumni') # Save and check that everything is properly stored in the database --- self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.wait_page_ready() self.school = School.objects.get(id=self.school.id) # Reload from database self.assertEqual(list(self.school.students.all()), [self.arthur, self.cliff, self.jason, self.john]) self.assertEqual(list(self.school.alumni.all()), [self.arthur, self.cliff, self.jason, self.john]) def test_filter(self): """ Typing in the search box filters out options displayed in the 'from' box. 
""" from selenium.webdriver.common.keys import Keys self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username='super', password='secret', login_url='/') self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,))) for field_name in ['students', 'alumni']: from_box = '#id_%s_from' % field_name to_box = '#id_%s_to' % field_name choose_link = 'id_%s_add_link' % field_name remove_link = 'id_%s_remove_link' % field_name input = self.selenium.find_element_by_id('id_%s_input' % field_name) # Initial values self.assertSelectOptions(from_box, [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ]) # Typing in some characters filters out non-matching options input.send_keys('a') self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) input.send_keys('R') self.assertSelectOptions(from_box, [str(self.arthur.id)]) # Clearing the text box makes the other options reappear input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ]) # ----------------------------------------------------------------- # Choosing a filtered option sends it properly to the 'to' box. input.send_keys('a') self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) self.select_option(from_box, str(self.jason.id)) self.selenium.find_element_by_id(choose_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id)]) self.assertSelectOptions(to_box, [ str(self.lisa.id), str(self.peter.id), str(self.jason.id), ]) self.select_option(to_box, str(self.lisa.id)) self.selenium.find_element_by_id(remove_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.lisa.id)]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE]) # Clear text box self.assertSelectOptions(from_box, [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jenny.id), str(self.john.id), str(self.lisa.id), ]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) # ----------------------------------------------------------------- # Pressing enter on a filtered option sends it properly to # the 'to' box. self.select_option(to_box, str(self.jason.id)) self.selenium.find_element_by_id(remove_link).click() input.send_keys('ja') self.assertSelectOptions(from_box, [str(self.jason.id)]) input.send_keys([Keys.ENTER]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE]) # Save and check that everything is properly stored in the database --- with self.wait_page_loaded(): self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.school = School.objects.get(id=self.school.id) # Reload from database self.assertEqual(list(self.school.students.all()), [self.jason, self.peter]) self.assertEqual(list(self.school.alumni.all()), [self.jason, self.peter]) def test_back_button_bug(self): """ Some browsers had a bug where navigating away from the change page and then clicking the browser's back button would clear the filter_horizontal/filter_vertical widgets (#13614). 
""" self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username='super', password='secret', login_url='/') change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,)) self.selenium.get(self.live_server_url + change_url) # Navigate away and go back to the change form page. self.selenium.find_element_by_link_text('Home').click() self.selenium.back() expected_unselected_values = [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ] expected_selected_values = [str(self.lisa.id), str(self.peter.id)] # Everything is still in place self.assertSelectOptions('#id_students_from', expected_unselected_values) self.assertSelectOptions('#id_students_to', expected_selected_values) self.assertSelectOptions('#id_alumni_from', expected_unselected_values) self.assertSelectOptions('#id_alumni_to', expected_selected_values) def test_refresh_page(self): """ Horizontal and vertical filter widgets keep selected options on page reload (#22955). """ self.school.students.add(self.arthur, self.jason) self.school.alumni.add(self.arthur, self.jason) self.admin_login(username='super', password='secret', login_url='/') change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,)) self.selenium.get(self.live_server_url + change_url) options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option')) self.assertEqual(options_len, 2) # self.selenium.refresh() or send_keys(Keys.F5) does hard reload and # doesn't replicate what happens when a user clicks the browser's # 'Refresh' button. with self.wait_page_loaded(): self.selenium.execute_script("location.reload()") options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option')) self.assertEqual(options_len, 2) class AdminRawIdWidgetSeleniumTests(AdminWidgetSeleniumTestCase): def setUp(self): super().setUp() Band.objects.create(id=42, name='Bogey Blues') Band.objects.create(id=98, name='Green Potatoes') def test_ForeignKey(self): self.admin_login(username='super', password='secret', login_url='/') self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_event_add')) main_window = self.selenium.current_window_handle # No value has been selected yet self.assertEqual(self.selenium.find_element_by_id('id_main_band').get_attribute('value'), '') # Open the popup window and click on a band self.selenium.find_element_by_id('lookup_id_main_band').click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element_by_link_text('Bogey Blues') self.assertIn('/band/42/', link.get_attribute('href')) link.click() # The field now contains the selected band's id self.selenium.switch_to.window(main_window) self.wait_for_value('#id_main_band', '42') # Reopen the popup window and click on another band self.selenium.find_element_by_id('lookup_id_main_band').click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element_by_link_text('Green Potatoes') self.assertIn('/band/98/', link.get_attribute('href')) link.click() # The field now contains the other selected band's id self.selenium.switch_to.window(main_window) self.wait_for_value('#id_main_band', '98') def test_many_to_many(self): self.admin_login(username='super', password='secret', login_url='/') self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_event_add')) main_window = self.selenium.current_window_handle # No value has been selected yet 
self.assertEqual(self.selenium.find_element_by_id('id_supporting_bands').get_attribute('value'), '') # Help text for the field is displayed self.assertEqual( self.selenium.find_element_by_css_selector('.field-supporting_bands div.help').text, 'Supporting Bands.' ) # Open the popup window and click on a band self.selenium.find_element_by_id('lookup_id_supporting_bands').click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element_by_link_text('Bogey Blues') self.assertIn('/band/42/', link.get_attribute('href')) link.click() # The field now contains the selected band's id self.selenium.switch_to.window(main_window) self.wait_for_value('#id_supporting_bands', '42') # Reopen the popup window and click on another band self.selenium.find_element_by_id('lookup_id_supporting_bands').click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element_by_link_text('Green Potatoes') self.assertIn('/band/98/', link.get_attribute('href')) link.click() # The field now contains the two selected bands' ids self.selenium.switch_to.window(main_window) self.wait_for_value('#id_supporting_bands', '42,98') class RelatedFieldWidgetSeleniumTests(AdminWidgetSeleniumTestCase): def test_ForeignKey_using_to_field(self): self.admin_login(username='super', password='secret', login_url='/') self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_profile_add')) main_window = self.selenium.current_window_handle # Click the Add User button to add new self.selenium.find_element_by_id('add_id_user').click() self.wait_for_and_switch_to_popup() password_field = self.selenium.find_element_by_id('id_password') password_field.send_keys('password') username_field = self.selenium.find_element_by_id('id_username') username_value = 'newuser' username_field.send_keys(username_value) save_button_css_selector = '.submit-row > input[type=submit]' self.selenium.find_element_by_css_selector(save_button_css_selector).click() self.selenium.switch_to.window(main_window) # The field now contains the new user self.selenium.find_element_by_css_selector('#id_user option[value=newuser]') # Click the Change User button to change it self.selenium.find_element_by_id('change_id_user').click() self.wait_for_and_switch_to_popup() username_field = self.selenium.find_element_by_id('id_username') username_value = 'changednewuser' username_field.clear() username_field.send_keys(username_value) save_button_css_selector = '.submit-row > input[type=submit]' self.selenium.find_element_by_css_selector(save_button_css_selector).click() self.selenium.switch_to.window(main_window) self.selenium.find_element_by_css_selector('#id_user option[value=changednewuser]') # Go ahead and submit the form to make sure it works self.selenium.find_element_by_css_selector(save_button_css_selector).click() self.wait_for_text('li.success', 'The profile “changednewuser” was added successfully.') profiles = Profile.objects.all() self.assertEqual(len(profiles), 1) self.assertEqual(profiles[0].user.username, username_value)
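

# A minimal sketch (not part of the original suite) of how the wrapped widget
# exercised above can be driven directly; it reuses `Album`, `widgets`, and
# `widget_admin_site` already imported/defined in this module.
def _example_render_wrapped_widget():
    from django import forms
    rel = Album._meta.get_field('band').remote_field
    wrapper = widgets.RelatedFieldWidgetWrapper(
        forms.Select(), rel, widget_admin_site, can_add_related=True,
    )
    # Returns the <select> markup plus the related-object "add" link that
    # RelatedFieldWidgetWrapper wraps around the widget.
    return wrapper.render('band', None)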
import collections import re from functools import partial from itertools import chain from django.core.exceptions import EmptyResultSet, FieldError from django.db import DatabaseError, NotSupportedError from django.db.models.constants import LOOKUP_SEP from django.db.models.expressions import OrderBy, Random, RawSQL, Ref, Value from django.db.models.functions import Cast from django.db.models.query_utils import Q, select_related_descend from django.db.models.sql.constants import ( CURSOR, GET_ITERATOR_CHUNK_SIZE, MULTI, NO_RESULTS, ORDER_DIR, SINGLE, ) from django.db.models.sql.query import Query, get_order_dir from django.db.transaction import TransactionManagementError from django.utils.functional import cached_property from django.utils.hashable import make_hashable from django.utils.regex_helper import _lazy_re_compile class SQLCompiler: # Multiline ordering SQL clause may appear from RawSQL. ordering_parts = _lazy_re_compile( r'^(.*)\s(?:ASC|DESC).*', re.MULTILINE | re.DOTALL, ) def __init__(self, query, connection, using): self.query = query self.connection = connection self.using = using self.quote_cache = {'*': '*'} # The select, klass_info, and annotations are needed by QuerySet.iterator() # these are set as a side-effect of executing the query. Note that we calculate # separately a list of extra select columns needed for grammatical correctness # of the query, but these columns are not included in self.select. self.select = None self.annotation_col_map = None self.klass_info = None self._meta_ordering = None def setup_query(self): if all(self.query.alias_refcount[a] == 0 for a in self.query.alias_map): self.query.get_initial_alias() self.select, self.klass_info, self.annotation_col_map = self.get_select() self.col_count = len(self.select) def pre_sql_setup(self): """ Do any necessary class setup immediately prior to producing SQL. This is for things that can't necessarily be done in __init__ because we might not have all the pieces in place at that time. """ self.setup_query() order_by = self.get_order_by() self.where, self.having = self.query.where.split_having() extra_select = self.get_extra_select(order_by, self.select) self.has_extra_select = bool(extra_select) group_by = self.get_group_by(self.select + extra_select, order_by) return extra_select, order_by, group_by def get_group_by(self, select, order_by): """ Return a list of 2-tuples of form (sql, params). The logic of what exactly the GROUP BY clause contains is hard to describe in other words than "if it passes the test suite, then it is correct". """ # Some examples: # SomeModel.objects.annotate(Count('somecol')) # GROUP BY: all fields of the model # # SomeModel.objects.values('name').annotate(Count('somecol')) # GROUP BY: name # # SomeModel.objects.annotate(Count('somecol')).values('name') # GROUP BY: all cols of the model # # SomeModel.objects.values('name', 'pk').annotate(Count('somecol')).values('pk') # GROUP BY: name, pk # # SomeModel.objects.values('name').annotate(Count('somecol')).values('pk') # GROUP BY: name, pk # # In fact, the self.query.group_by is the minimal set to GROUP BY. It # can't be ever restricted to a smaller set, but additional columns in # HAVING, ORDER BY, and SELECT clauses are added to it. 
Unfortunately # the end result is that it is impossible to force the query to have # a chosen GROUP BY clause - you can almost do this by using the form: # .values(*wanted_cols).annotate(AnAggregate()) # but any later annotations, extra selects, values calls that # refer some column outside of the wanted_cols, order_by, or even # filter calls can alter the GROUP BY clause. # The query.group_by is either None (no GROUP BY at all), True # (group by select fields), or a list of expressions to be added # to the group by. if self.query.group_by is None: return [] expressions = [] if self.query.group_by is not True: # If the group by is set to a list (by .values() call most likely), # then we need to add everything in it to the GROUP BY clause. # Backwards compatibility hack for setting query.group_by. Remove # when we have public API way of forcing the GROUP BY clause. # Converts string references to expressions. for expr in self.query.group_by: if not hasattr(expr, 'as_sql'): expressions.append(self.query.resolve_ref(expr)) else: expressions.append(expr) # Note that even if the group_by is set, it is only the minimal # set to group by. So, we need to add cols in select, order_by, and # having into the select in any case. ref_sources = { expr.source for expr in expressions if isinstance(expr, Ref) } for expr, _, _ in select: # Skip members of the select clause that are already included # by reference. if expr in ref_sources: continue cols = expr.get_group_by_cols() for col in cols: expressions.append(col) for expr, (sql, params, is_ref) in order_by: # Skip References to the select clause, as all expressions in the # select clause are already part of the group by. if not is_ref: expressions.extend(expr.get_group_by_cols()) having_group_by = self.having.get_group_by_cols() if self.having else () for expr in having_group_by: expressions.append(expr) result = [] seen = set() expressions = self.collapse_group_by(expressions, having_group_by) for expr in expressions: sql, params = self.compile(expr) sql, params = expr.select_format(self, sql, params) params_hash = make_hashable(params) if (sql, params_hash) not in seen: result.append((sql, params)) seen.add((sql, params_hash)) return result def collapse_group_by(self, expressions, having): # If the DB can group by primary key, then group by the primary key of # query's main model. Note that for PostgreSQL the GROUP BY clause must # include the primary key of every table, but for MySQL it is enough to # have the main table's primary key. if self.connection.features.allows_group_by_pk: # Determine if the main model's primary key is in the query. pk = None for expr in expressions: # Is this a reference to query's base table primary key? If the # expression isn't a Col-like, then skip the expression. if (getattr(expr, 'target', None) == self.query.model._meta.pk and getattr(expr, 'alias', None) == self.query.base_table): pk = expr break # If the main model's primary key is in the query, group by that # field, HAVING expressions, and expressions associated with tables # that don't have a primary key included in the grouped columns. if pk: pk_aliases = { expr.alias for expr in expressions if hasattr(expr, 'target') and expr.target.primary_key } expressions = [pk] + [ expr for expr in expressions if expr in having or ( getattr(expr, 'alias', None) is not None and expr.alias not in pk_aliases ) ] elif self.connection.features.allows_group_by_selected_pks: # Filter out all expressions associated with a table's primary key # present in the grouped columns. 
This is done by identifying all # tables that have their primary key included in the grouped # columns and removing non-primary key columns referring to them. # Unmanaged models are excluded because they could be representing # database views on which the optimization might not be allowed. pks = { expr for expr in expressions if ( hasattr(expr, 'target') and expr.target.primary_key and self.connection.features.allows_group_by_selected_pks_on_model(expr.target.model) ) } aliases = {expr.alias for expr in pks} expressions = [ expr for expr in expressions if expr in pks or getattr(expr, 'alias', None) not in aliases ] return expressions def get_select(self): """ Return three values: - a list of 3-tuples of (expression, (sql, params), alias) - a klass_info structure, - a dictionary of annotations The (sql, params) is what the expression will produce, and alias is the "AS alias" for the column (possibly None). The klass_info structure contains the following information: - The base model of the query. - Which columns for that model are present in the query (by position of the select clause). - related_klass_infos: [f, klass_info] to descent into The annotations is a dictionary of {'attname': column position} values. """ select = [] klass_info = None annotations = {} select_idx = 0 for alias, (sql, params) in self.query.extra_select.items(): annotations[alias] = select_idx select.append((RawSQL(sql, params), alias)) select_idx += 1 assert not (self.query.select and self.query.default_cols) if self.query.default_cols: cols = self.get_default_columns() else: # self.query.select is a special case. These columns never go to # any model. cols = self.query.select if cols: select_list = [] for col in cols: select_list.append(select_idx) select.append((col, None)) select_idx += 1 klass_info = { 'model': self.query.model, 'select_fields': select_list, } for alias, annotation in self.query.annotation_select.items(): annotations[alias] = select_idx select.append((annotation, alias)) select_idx += 1 if self.query.select_related: related_klass_infos = self.get_related_selections(select) klass_info['related_klass_infos'] = related_klass_infos def get_select_from_parent(klass_info): for ki in klass_info['related_klass_infos']: if ki['from_parent']: ki['select_fields'] = (klass_info['select_fields'] + ki['select_fields']) get_select_from_parent(ki) get_select_from_parent(klass_info) ret = [] for col, alias in select: try: sql, params = self.compile(col) except EmptyResultSet: # Select a predicate that's always False. sql, params = '0', () else: sql, params = col.select_format(self, sql, params) ret.append((col, (sql, params), alias)) return ret, klass_info, annotations def get_order_by(self): """ Return a list of 2-tuples of form (expr, (sql, params, is_ref)) for the ORDER BY clause. The order_by clause can alter the select clause (for example it can add aliases to clauses that do not yet have one, or it can add totally new select clauses). """ if self.query.extra_order_by: ordering = self.query.extra_order_by elif not self.query.default_ordering: ordering = self.query.order_by elif self.query.order_by: ordering = self.query.order_by elif self.query.get_meta().ordering: ordering = self.query.get_meta().ordering self._meta_ordering = ordering else: ordering = [] if self.query.standard_ordering: asc, desc = ORDER_DIR['ASC'] else: asc, desc = ORDER_DIR['DESC'] order_by = [] for field in ordering: if hasattr(field, 'resolve_expression'): if isinstance(field, Value): # output_field must be resolved for constants. 
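                    # (Added note: e.g. ordering by Value(1); the Cast below
                    # gives the constant an explicit database type, so the
                    # backend sees a typed expression instead of a bare
                    # parameter placeholder in ORDER BY.)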
field = Cast(field, field.output_field) if not isinstance(field, OrderBy): field = field.asc() if not self.query.standard_ordering: field = field.copy() field.reverse_ordering() order_by.append((field, False)) continue if field == '?': # random order_by.append((OrderBy(Random()), False)) continue col, order = get_order_dir(field, asc) descending = order == 'DESC' if col in self.query.annotation_select: # Reference to expression in SELECT clause order_by.append(( OrderBy(Ref(col, self.query.annotation_select[col]), descending=descending), True)) continue if col in self.query.annotations: # References to an expression which is masked out of the SELECT # clause. expr = self.query.annotations[col] if isinstance(expr, Value): # output_field must be resolved for constants. expr = Cast(expr, expr.output_field) order_by.append((OrderBy(expr, descending=descending), False)) continue if '.' in field: # This came in through an extra(order_by=...) addition. Pass it # on verbatim. table, col = col.split('.', 1) order_by.append(( OrderBy( RawSQL('%s.%s' % (self.quote_name_unless_alias(table), col), []), descending=descending ), False)) continue if not self.query.extra or col not in self.query.extra: # 'col' is of the form 'field' or 'field1__field2' or # '-field1__field2__field', etc. order_by.extend(self.find_ordering_name( field, self.query.get_meta(), default_order=asc)) else: if col not in self.query.extra_select: order_by.append(( OrderBy(RawSQL(*self.query.extra[col]), descending=descending), False)) else: order_by.append(( OrderBy(Ref(col, RawSQL(*self.query.extra[col])), descending=descending), True)) result = [] seen = set() for expr, is_ref in order_by: resolved = expr.resolve_expression(self.query, allow_joins=True, reuse=None) if self.query.combinator: src = resolved.get_source_expressions()[0] # Relabel order by columns to raw numbers if this is a combined # query; necessary since the columns can't be referenced by the # fully qualified name and the simple column names may collide. for idx, (sel_expr, _, col_alias) in enumerate(self.select): if is_ref and col_alias == src.refs: src = src.source elif col_alias: continue if src == sel_expr: resolved.set_source_expressions([RawSQL('%d' % (idx + 1), ())]) break else: if col_alias: raise DatabaseError('ORDER BY term does not match any column in the result set.') # Add column used in ORDER BY clause without an alias to # the selected columns. self.query.add_select_col(src) resolved.set_source_expressions([RawSQL('%d' % len(self.query.select), ())]) sql, params = self.compile(resolved) # Don't add the same column twice, but the order direction is # not taken into account so we strip it. When this entire method # is refactored into expressions, then we can check each part as we # generate it. 
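            # (Added note: 'name ASC' and 'name DESC' both reduce to 'name'
            # below, so only the first occurrence of a column survives.)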
without_ordering = self.ordering_parts.search(sql)[1] params_hash = make_hashable(params) if (without_ordering, params_hash) in seen: continue seen.add((without_ordering, params_hash)) result.append((resolved, (sql, params, is_ref))) return result def get_extra_select(self, order_by, select): extra_select = [] if self.query.distinct and not self.query.distinct_fields: select_sql = [t[1] for t in select] for expr, (sql, params, is_ref) in order_by: without_ordering = self.ordering_parts.search(sql)[1] if not is_ref and (without_ordering, params) not in select_sql: extra_select.append((expr, (without_ordering, params), None)) return extra_select def quote_name_unless_alias(self, name): """ A wrapper around connection.ops.quote_name that doesn't quote aliases for table names. This avoids problems with some SQL dialects that treat quoted strings specially (e.g. PostgreSQL). """ if name in self.quote_cache: return self.quote_cache[name] if ((name in self.query.alias_map and name not in self.query.table_map) or name in self.query.extra_select or ( self.query.external_aliases.get(name) and name not in self.query.table_map)): self.quote_cache[name] = name return name r = self.connection.ops.quote_name(name) self.quote_cache[name] = r return r def compile(self, node): vendor_impl = getattr(node, 'as_' + self.connection.vendor, None) if vendor_impl: sql, params = vendor_impl(self, self.connection) else: sql, params = node.as_sql(self, self.connection) return sql, params def get_combinator_sql(self, combinator, all): features = self.connection.features compilers = [ query.get_compiler(self.using, self.connection) for query in self.query.combined_queries if not query.is_empty() ] if not features.supports_slicing_ordering_in_compound: for query, compiler in zip(self.query.combined_queries, compilers): if query.low_mark or query.high_mark: raise DatabaseError('LIMIT/OFFSET not allowed in subqueries of compound statements.') if compiler.get_order_by(): raise DatabaseError('ORDER BY not allowed in subqueries of compound statements.') parts = () for compiler in compilers: try: # If the columns list is limited, then all combined queries # must have the same columns list. Set the selects defined on # the query on all combined queries, if not already set. if not compiler.query.values_select and self.query.values_select: compiler.query = compiler.query.clone() compiler.query.set_values(( *self.query.extra_select, *self.query.values_select, *self.query.annotation_select, )) part_sql, part_args = compiler.as_sql() if compiler.query.combinator: # Wrap in a subquery if wrapping in parentheses isn't # supported. if not features.supports_parentheses_in_compound: part_sql = 'SELECT * FROM ({})'.format(part_sql) # Add parentheses when combining with compound query if not # already added for all compound queries. elif not features.supports_slicing_ordering_in_compound: part_sql = '({})'.format(part_sql) parts += ((part_sql, part_args),) except EmptyResultSet: # Omit the empty queryset with UNION and with DIFFERENCE if the # first queryset is nonempty. 
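                # (Added note: an empty operand is a no-op for UNION, and for
                # DIFFERENCE whenever a nonempty part precedes it; otherwise,
                # e.g. for INTERSECTION, the EmptyResultSet must propagate
                # because it empties the combined result.)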
if combinator == 'union' or (combinator == 'difference' and parts): continue raise if not parts: raise EmptyResultSet combinator_sql = self.connection.ops.set_operators[combinator] if all and combinator == 'union': combinator_sql += ' ALL' braces = '({})' if features.supports_slicing_ordering_in_compound else '{}' sql_parts, args_parts = zip(*((braces.format(sql), args) for sql, args in parts)) result = [' {} '.format(combinator_sql).join(sql_parts)] params = [] for part in args_parts: params.extend(part) return result, params def as_sql(self, with_limits=True, with_col_aliases=False): """ Create the SQL for this query. Return the SQL string and list of parameters. If 'with_limits' is False, any limit/offset information is not included in the query. """ refcounts_before = self.query.alias_refcount.copy() try: extra_select, order_by, group_by = self.pre_sql_setup() for_update_part = None # Is a LIMIT/OFFSET clause needed? with_limit_offset = with_limits and (self.query.high_mark is not None or self.query.low_mark) combinator = self.query.combinator features = self.connection.features if combinator: if not getattr(features, 'supports_select_{}'.format(combinator)): raise NotSupportedError('{} is not supported on this database backend.'.format(combinator)) result, params = self.get_combinator_sql(combinator, self.query.combinator_all) else: distinct_fields, distinct_params = self.get_distinct() # This must come after 'select', 'ordering', and 'distinct' # (see docstring of get_from_clause() for details). from_, f_params = self.get_from_clause() where, w_params = self.compile(self.where) if self.where is not None else ("", []) having, h_params = self.compile(self.having) if self.having is not None else ("", []) result = ['SELECT'] params = [] if self.query.distinct: distinct_result, distinct_params = self.connection.ops.distinct_sql( distinct_fields, distinct_params, ) result += distinct_result params += distinct_params out_cols = [] col_idx = 1 for _, (s_sql, s_params), alias in self.select + extra_select: if alias: s_sql = '%s AS %s' % (s_sql, self.connection.ops.quote_name(alias)) elif with_col_aliases: s_sql = '%s AS %s' % (s_sql, 'Col%d' % col_idx) col_idx += 1 params.extend(s_params) out_cols.append(s_sql) result += [', '.join(out_cols), 'FROM', *from_] params.extend(f_params) if self.query.select_for_update and self.connection.features.has_select_for_update: if self.connection.get_autocommit(): raise TransactionManagementError('select_for_update cannot be used outside of a transaction.') if with_limit_offset and not self.connection.features.supports_select_for_update_with_limit: raise NotSupportedError( 'LIMIT/OFFSET is not supported with ' 'select_for_update on this database backend.' ) nowait = self.query.select_for_update_nowait skip_locked = self.query.select_for_update_skip_locked of = self.query.select_for_update_of no_key = self.query.select_for_no_key_update # If it's a NOWAIT/SKIP LOCKED/OF/NO KEY query but the # backend doesn't support it, raise NotSupportedError to # prevent a possible deadlock. 
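                    # (Added note: silently issuing a plain FOR UPDATE where
                    # NOWAIT/SKIP LOCKED was requested could leave transactions
                    # blocking on each other, hence the early checks below.)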
if nowait and not self.connection.features.has_select_for_update_nowait: raise NotSupportedError('NOWAIT is not supported on this database backend.') elif skip_locked and not self.connection.features.has_select_for_update_skip_locked: raise NotSupportedError('SKIP LOCKED is not supported on this database backend.') elif of and not self.connection.features.has_select_for_update_of: raise NotSupportedError('FOR UPDATE OF is not supported on this database backend.') elif no_key and not self.connection.features.has_select_for_no_key_update: raise NotSupportedError( 'FOR NO KEY UPDATE is not supported on this ' 'database backend.' ) for_update_part = self.connection.ops.for_update_sql( nowait=nowait, skip_locked=skip_locked, of=self.get_select_for_update_of_arguments(), no_key=no_key, ) if for_update_part and self.connection.features.for_update_after_from: result.append(for_update_part) if where: result.append('WHERE %s' % where) params.extend(w_params) grouping = [] for g_sql, g_params in group_by: grouping.append(g_sql) params.extend(g_params) if grouping: if distinct_fields: raise NotImplementedError('annotate() + distinct(fields) is not implemented.') order_by = order_by or self.connection.ops.force_no_ordering() result.append('GROUP BY %s' % ', '.join(grouping)) if self._meta_ordering: order_by = None if having: result.append('HAVING %s' % having) params.extend(h_params) if self.query.explain_query: result.insert(0, self.connection.ops.explain_query_prefix( self.query.explain_format, **self.query.explain_options )) if order_by: ordering = [] for _, (o_sql, o_params, _) in order_by: ordering.append(o_sql) params.extend(o_params) result.append('ORDER BY %s' % ', '.join(ordering)) if with_limit_offset: result.append(self.connection.ops.limit_offset_sql(self.query.low_mark, self.query.high_mark)) if for_update_part and not self.connection.features.for_update_after_from: result.append(for_update_part) if self.query.subquery and extra_select: # If the query is used as a subquery, the extra selects would # result in more columns than the left-hand side expression is # expecting. This can happen when a subquery uses a combination # of order_by() and distinct(), forcing the ordering expressions # to be selected as well. Wrap the query in another subquery # to exclude extraneous selects. sub_selects = [] sub_params = [] for index, (select, _, alias) in enumerate(self.select, start=1): if not alias and with_col_aliases: alias = 'col%d' % index if alias: sub_selects.append("%s.%s" % ( self.connection.ops.quote_name('subquery'), self.connection.ops.quote_name(alias), )) else: select_clone = select.relabeled_clone({select.alias: 'subquery'}) subselect, subparams = select_clone.as_sql(self, self.connection) sub_selects.append(subselect) sub_params.extend(subparams) return 'SELECT %s FROM (%s) subquery' % ( ', '.join(sub_selects), ' '.join(result), ), tuple(sub_params + params) return ' '.join(result), tuple(params) finally: # Finally do cleanup - get rid of the joins we created above. self.query.reset_refcounts(refcounts_before) def get_default_columns(self, start_alias=None, opts=None, from_parent=None): """ Compute the default columns for selecting every field in the base model. Will sometimes be called to pull in related models (e.g. via select_related), in which case "opts" and "start_alias" will be given to provide a starting point for the traversal. 
Return a list of strings, quoted appropriately for use in SQL directly, as well as a set of aliases used in the select statement (if 'as_pairs' is True, return a list of (alias, col_name) pairs instead of strings as the first component and None as the second component). """ result = [] if opts is None: opts = self.query.get_meta() only_load = self.deferred_to_columns() start_alias = start_alias or self.query.get_initial_alias() # The 'seen_models' is used to optimize checking the needed parent # alias for a given field. This also includes None -> start_alias to # be used by local fields. seen_models = {None: start_alias} for field in opts.concrete_fields: model = field.model._meta.concrete_model # A proxy model will have a different model and concrete_model. We # will assign None if the field belongs to this model. if model == opts.model: model = None if from_parent and model is not None and issubclass( from_parent._meta.concrete_model, model._meta.concrete_model): # Avoid loading data for already loaded parents. # We end up here in the case select_related() resolution # proceeds from parent model to child model. In that case the # parent model data is already present in the SELECT clause, # and we want to avoid reloading the same data again. continue if field.model in only_load and field.attname not in only_load[field.model]: continue alias = self.query.join_parent_model(opts, model, start_alias, seen_models) column = field.get_col(alias) result.append(column) return result def get_distinct(self): """ Return a quoted list of fields to use in DISTINCT ON part of the query. This method can alter the tables in the query, and thus it must be called before get_from_clause(). """ result = [] params = [] opts = self.query.get_meta() for name in self.query.distinct_fields: parts = name.split(LOOKUP_SEP) _, targets, alias, joins, path, _, transform_function = self._setup_joins(parts, opts, None) targets, alias, _ = self.query.trim_joins(targets, joins, path) for target in targets: if name in self.query.annotation_select: result.append(name) else: r, p = self.compile(transform_function(target, alias)) result.append(r) params.append(p) return result, params def find_ordering_name(self, name, opts, alias=None, default_order='ASC', already_seen=None): """ Return the table alias (the name might be ambiguous, the alias will not be) and column name for ordering by the given 'name' parameter. The 'name' is of the form 'field1__field2__...__fieldN'. """ name, order = get_order_dir(name, default_order) descending = order == 'DESC' pieces = name.split(LOOKUP_SEP) field, targets, alias, joins, path, opts, transform_function = self._setup_joins(pieces, opts, alias) # If we get to this point and the field is a relation to another model, # append the default ordering for that model unless it is the pk # shortcut or the attribute name of the field that is specified. if field.is_relation and opts.ordering and getattr(field, 'attname', None) != name and name != 'pk': # Firstly, avoid infinite loops. 
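            # (Added note: e.g. a hypothetical Category model whose
            # Meta.ordering is ['parent'], with `parent` a self-referential
            # FK, would otherwise recurse through its own ordering forever.)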
already_seen = already_seen or set() join_tuple = tuple(getattr(self.query.alias_map[j], 'join_cols', None) for j in joins) if join_tuple in already_seen: raise FieldError('Infinite loop caused by ordering.') already_seen.add(join_tuple) results = [] for item in opts.ordering: if hasattr(item, 'resolve_expression') and not isinstance(item, OrderBy): item = item.desc() if descending else item.asc() if isinstance(item, OrderBy): results.append((item, False)) continue results.extend(self.find_ordering_name(item, opts, alias, order, already_seen)) return results targets, alias, _ = self.query.trim_joins(targets, joins, path) return [(OrderBy(transform_function(t, alias), descending=descending), False) for t in targets] def _setup_joins(self, pieces, opts, alias): """ Helper method for get_order_by() and get_distinct(). get_ordering() and get_distinct() must produce same target columns on same input, as the prefixes of get_ordering() and get_distinct() must match. Executing SQL where this is not true is an error. """ alias = alias or self.query.get_initial_alias() field, targets, opts, joins, path, transform_function = self.query.setup_joins(pieces, opts, alias) alias = joins[-1] return field, targets, alias, joins, path, opts, transform_function def get_from_clause(self): """ Return a list of strings that are joined together to go after the "FROM" part of the query, as well as a list any extra parameters that need to be included. Subclasses, can override this to create a from-clause via a "select". This should only be called after any SQL construction methods that might change the tables that are needed. This means the select columns, ordering, and distinct must be done first. """ result = [] params = [] for alias in tuple(self.query.alias_map): if not self.query.alias_refcount[alias]: continue try: from_clause = self.query.alias_map[alias] except KeyError: # Extra tables can end up in self.tables, but not in the # alias_map if they aren't in a join. That's OK. We skip them. continue clause_sql, clause_params = self.compile(from_clause) result.append(clause_sql) params.extend(clause_params) for t in self.query.extra_tables: alias, _ = self.query.table_alias(t) # Only add the alias if it's not already present (the table_alias() # call increments the refcount, so an alias refcount of one means # this is the only reference). if alias not in self.query.alias_map or self.query.alias_refcount[alias] == 1: result.append(', %s' % self.quote_name_unless_alias(alias)) return result, params def get_related_selections(self, select, opts=None, root_alias=None, cur_depth=1, requested=None, restricted=None): """ Fill in the information needed for a select_related query. The current depth is measured as the number of connections away from the root model (for example, cur_depth=1 means we are looking at models with direct connections to the root model). """ def _get_field_choices(): direct_choices = (f.name for f in opts.fields if f.is_relation) reverse_choices = ( f.field.related_query_name() for f in opts.related_objects if f.field.unique ) return chain(direct_choices, reverse_choices, self.query._filtered_relations) related_klass_infos = [] if not restricted and cur_depth > self.query.max_depth: # We've recursed far enough; bail out. return related_klass_infos if not opts: opts = self.query.get_meta() root_alias = self.query.get_initial_alias() only_load = self.query.get_loaded_field_names() # Setup for the case when only particular related fields should be # included in the related selection. 
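        # (Added note: with select_related('author__publisher'), `requested`
        # is the nested dict {'author': {'publisher': {}}}; plain
        # select_related() leaves the traversal unrestricted. The field names
        # are illustrative only.)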
fields_found = set() if requested is None: restricted = isinstance(self.query.select_related, dict) if restricted: requested = self.query.select_related def get_related_klass_infos(klass_info, related_klass_infos): klass_info['related_klass_infos'] = related_klass_infos for f in opts.fields: field_model = f.model._meta.concrete_model fields_found.add(f.name) if restricted: next = requested.get(f.name, {}) if not f.is_relation: # If a non-related field is used like a relation, # or if a single non-relational field is given. if next or f.name in requested: raise FieldError( "Non-relational field given in select_related: '%s'. " "Choices are: %s" % ( f.name, ", ".join(_get_field_choices()) or '(none)', ) ) else: next = False if not select_related_descend(f, restricted, requested, only_load.get(field_model)): continue klass_info = { 'model': f.remote_field.model, 'field': f, 'reverse': False, 'local_setter': f.set_cached_value, 'remote_setter': f.remote_field.set_cached_value if f.unique else lambda x, y: None, 'from_parent': False, } related_klass_infos.append(klass_info) select_fields = [] _, _, _, joins, _, _ = self.query.setup_joins( [f.name], opts, root_alias) alias = joins[-1] columns = self.get_default_columns(start_alias=alias, opts=f.remote_field.model._meta) for col in columns: select_fields.append(len(select)) select.append((col, None)) klass_info['select_fields'] = select_fields next_klass_infos = self.get_related_selections( select, f.remote_field.model._meta, alias, cur_depth + 1, next, restricted) get_related_klass_infos(klass_info, next_klass_infos) if restricted: related_fields = [ (o.field, o.related_model) for o in opts.related_objects if o.field.unique and not o.many_to_many ] for f, model in related_fields: if not select_related_descend(f, restricted, requested, only_load.get(model), reverse=True): continue related_field_name = f.related_query_name() fields_found.add(related_field_name) join_info = self.query.setup_joins([related_field_name], opts, root_alias) alias = join_info.joins[-1] from_parent = issubclass(model, opts.model) and model is not opts.model klass_info = { 'model': model, 'field': f, 'reverse': True, 'local_setter': f.remote_field.set_cached_value, 'remote_setter': f.set_cached_value, 'from_parent': from_parent, } related_klass_infos.append(klass_info) select_fields = [] columns = self.get_default_columns( start_alias=alias, opts=model._meta, from_parent=opts.model) for col in columns: select_fields.append(len(select)) select.append((col, None)) klass_info['select_fields'] = select_fields next = requested.get(f.related_query_name(), {}) next_klass_infos = self.get_related_selections( select, model._meta, alias, cur_depth + 1, next, restricted) get_related_klass_infos(klass_info, next_klass_infos) def local_setter(obj, from_obj): # Set a reverse fk object when relation is non-empty. if from_obj: f.remote_field.set_cached_value(from_obj, obj) def remote_setter(name, obj, from_obj): setattr(from_obj, name, obj) for name in list(requested): # Filtered relations work only on the topmost level. 
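            # (Added note: an annotated FilteredRelation, e.g.
            # FilteredRelation('pizzas', condition=Q(pizzas__vegetarian=True)),
            # can only be followed directly from the root model, hence the
            # depth guard below; the names are illustrative.)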
if cur_depth > 1: break if name in self.query._filtered_relations: fields_found.add(name) f, _, join_opts, joins, _, _ = self.query.setup_joins([name], opts, root_alias) model = join_opts.model alias = joins[-1] from_parent = issubclass(model, opts.model) and model is not opts.model klass_info = { 'model': model, 'field': f, 'reverse': True, 'local_setter': local_setter, 'remote_setter': partial(remote_setter, name), 'from_parent': from_parent, } related_klass_infos.append(klass_info) select_fields = [] columns = self.get_default_columns( start_alias=alias, opts=model._meta, from_parent=opts.model, ) for col in columns: select_fields.append(len(select)) select.append((col, None)) klass_info['select_fields'] = select_fields next_requested = requested.get(name, {}) next_klass_infos = self.get_related_selections( select, opts=model._meta, root_alias=alias, cur_depth=cur_depth + 1, requested=next_requested, restricted=restricted, ) get_related_klass_infos(klass_info, next_klass_infos) fields_not_found = set(requested).difference(fields_found) if fields_not_found: invalid_fields = ("'%s'" % s for s in fields_not_found) raise FieldError( 'Invalid field name(s) given in select_related: %s. ' 'Choices are: %s' % ( ', '.join(invalid_fields), ', '.join(_get_field_choices()) or '(none)', ) ) return related_klass_infos def get_select_for_update_of_arguments(self): """ Return a quoted list of arguments for the SELECT FOR UPDATE OF part of the query. """ def _get_parent_klass_info(klass_info): for parent_model, parent_link in klass_info['model']._meta.parents.items(): parent_list = parent_model._meta.get_parent_list() yield { 'model': parent_model, 'field': parent_link, 'reverse': False, 'select_fields': [ select_index for select_index in klass_info['select_fields'] # Selected columns from a model or its parents. if ( self.select[select_index][0].target.model == parent_model or self.select[select_index][0].target.model in parent_list ) ], } def _get_first_selected_col_from_model(klass_info): """ Find the first selected column from a model. If it doesn't exist, don't lock a model. select_fields is filled recursively, so it also contains fields from the parent models. 
""" for select_index in klass_info['select_fields']: if self.select[select_index][0].target.model == klass_info['model']: return self.select[select_index][0] def _get_field_choices(): """Yield all allowed field paths in breadth-first search order.""" queue = collections.deque([(None, self.klass_info)]) while queue: parent_path, klass_info = queue.popleft() if parent_path is None: path = [] yield 'self' else: field = klass_info['field'] if klass_info['reverse']: field = field.remote_field path = parent_path + [field.name] yield LOOKUP_SEP.join(path) queue.extend( (path, klass_info) for klass_info in _get_parent_klass_info(klass_info) ) queue.extend( (path, klass_info) for klass_info in klass_info.get('related_klass_infos', []) ) result = [] invalid_names = [] for name in self.query.select_for_update_of: klass_info = self.klass_info if name == 'self': col = _get_first_selected_col_from_model(klass_info) else: for part in name.split(LOOKUP_SEP): klass_infos = ( *klass_info.get('related_klass_infos', []), *_get_parent_klass_info(klass_info), ) for related_klass_info in klass_infos: field = related_klass_info['field'] if related_klass_info['reverse']: field = field.remote_field if field.name == part: klass_info = related_klass_info break else: klass_info = None break if klass_info is None: invalid_names.append(name) continue col = _get_first_selected_col_from_model(klass_info) if col is not None: if self.connection.features.select_for_update_of_column: result.append(self.compile(col)[0]) else: result.append(self.quote_name_unless_alias(col.alias)) if invalid_names: raise FieldError( 'Invalid field name(s) given in select_for_update(of=(...)): %s. ' 'Only relational fields followed in the query are allowed. ' 'Choices are: %s.' % ( ', '.join(invalid_names), ', '.join(_get_field_choices()), ) ) return result def deferred_to_columns(self): """ Convert the self.deferred_loading data structure to mapping of table names to sets of column names which are to be loaded. Return the dictionary. """ columns = {} self.query.deferred_to_data(columns, self.query.get_loaded_field_names_cb) return columns def get_converters(self, expressions): converters = {} for i, expression in enumerate(expressions): if expression: backend_converters = self.connection.ops.get_db_converters(expression) field_converters = expression.get_db_converters(self.connection) if backend_converters or field_converters: converters[i] = (backend_converters + field_converters, expression) return converters def apply_converters(self, rows, converters): connection = self.connection converters = list(converters.items()) for row in map(list, rows): for pos, (convs, expression) in converters: value = row[pos] for converter in convs: value = converter(value, expression, connection) row[pos] = value yield row def results_iter(self, results=None, tuple_expected=False, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE): """Return an iterator over the results from executing this query.""" if results is None: results = self.execute_sql(MULTI, chunked_fetch=chunked_fetch, chunk_size=chunk_size) fields = [s[0] for s in self.select[0:self.col_count]] converters = self.get_converters(fields) rows = chain.from_iterable(results) if converters: rows = self.apply_converters(rows, converters) if tuple_expected: rows = map(tuple, rows) return rows def has_results(self): """ Backends (e.g. NoSQL) can override this in order to use optimized versions of "query has any results." 
""" # This is always executed on a query clone, so we can modify self.query self.query.add_extra({'a': 1}, None, None, None, None, None) self.query.set_extra_mask(['a']) return bool(self.execute_sql(SINGLE)) def execute_sql(self, result_type=MULTI, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE): """ Run the query against the database and return the result(s). The return value is a single data item if result_type is SINGLE, or an iterator over the results if the result_type is MULTI. result_type is either MULTI (use fetchmany() to retrieve all rows), SINGLE (only retrieve a single row), or None. In this last case, the cursor is returned if any query is executed, since it's used by subclasses such as InsertQuery). It's possible, however, that no query is needed, as the filters describe an empty set. In that case, None is returned, to avoid any unnecessary database interaction. """ result_type = result_type or NO_RESULTS try: sql, params = self.as_sql() if not sql: raise EmptyResultSet except EmptyResultSet: if result_type == MULTI: return iter([]) else: return if chunked_fetch: cursor = self.connection.chunked_cursor() else: cursor = self.connection.cursor() try: cursor.execute(sql, params) except Exception: # Might fail for server-side cursors (e.g. connection closed) cursor.close() raise if result_type == CURSOR: # Give the caller the cursor to process and close. return cursor if result_type == SINGLE: try: val = cursor.fetchone() if val: return val[0:self.col_count] return val finally: # done with the cursor cursor.close() if result_type == NO_RESULTS: cursor.close() return result = cursor_iter( cursor, self.connection.features.empty_fetchmany_value, self.col_count if self.has_extra_select else None, chunk_size, ) if not chunked_fetch or not self.connection.features.can_use_chunked_reads: try: # If we are using non-chunked reads, we return the same data # structure as normally, but ensure it is all read into memory # before going any further. Use chunked_fetch if requested, # unless the database doesn't support it. return list(result) finally: # done with the cursor cursor.close() return result def as_subquery_condition(self, alias, columns, compiler): qn = compiler.quote_name_unless_alias qn2 = self.connection.ops.quote_name for index, select_col in enumerate(self.query.select): lhs_sql, lhs_params = self.compile(select_col) rhs = '%s.%s' % (qn(alias), qn2(columns[index])) self.query.where.add( RawSQL('%s = %s' % (lhs_sql, rhs), lhs_params), 'AND') sql, params = self.as_sql() return 'EXISTS (%s)' % sql, params def explain_query(self): result = list(self.execute_sql()) # Some backends return 1 item tuples with strings, and others return # tuples with integers and strings. Flatten them out into strings. for row in result[0]: if not isinstance(row, str): yield ' '.join(str(c) for c in row) else: yield row class SQLInsertCompiler(SQLCompiler): returning_fields = None returning_params = tuple() def field_as_sql(self, field, val): """ Take a field and a value intended to be saved on that field, and return placeholder SQL and accompanying params. Check for raw values, expressions, and fields with get_placeholder() defined in that order. When field is None, consider the value raw and use it as the placeholder, with no corresponding parameters returned. """ if field is None: # A field value of None means the value is raw. sql, params = val, [] elif hasattr(val, 'as_sql'): # This is an expression, let's compile it. 
sql, params = self.compile(val) elif hasattr(field, 'get_placeholder'): # Some fields (e.g. geo fields) need special munging before # they can be inserted. sql, params = field.get_placeholder(val, self, self.connection), [val] else: # Return the common case for the placeholder sql, params = '%s', [val] # The following hook is only used by Oracle Spatial, which sometimes # needs to yield 'NULL' and [] as its placeholder and params instead # of '%s' and [None]. The 'NULL' placeholder is produced earlier by # OracleOperations.get_geom_placeholder(). The following line removes # the corresponding None parameter. See ticket #10888. params = self.connection.ops.modify_insert_params(sql, params) return sql, params def prepare_value(self, field, value): """ Prepare a value to be used in a query by resolving it if it is an expression and otherwise calling the field's get_db_prep_save(). """ if hasattr(value, 'resolve_expression'): value = value.resolve_expression(self.query, allow_joins=False, for_save=True) # Don't allow values containing Col expressions. They refer to # existing columns on a row, but in the case of insert the row # doesn't exist yet. if value.contains_column_references: raise ValueError( 'Failed to insert expression "%s" on %s. F() expressions ' 'can only be used to update, not to insert.' % (value, field) ) if value.contains_aggregate: raise FieldError( 'Aggregate functions are not allowed in this query ' '(%s=%r).' % (field.name, value) ) if value.contains_over_clause: raise FieldError( 'Window expressions are not allowed in this query (%s=%r).' % (field.name, value) ) else: value = field.get_db_prep_save(value, connection=self.connection) return value def pre_save_val(self, field, obj): """ Get the given field's value off the given obj. pre_save() is used for things like auto_now on DateTimeField. Skip it if this is a raw query. """ if self.query.raw: return getattr(obj, field.attname) return field.pre_save(obj, add=True) def assemble_as_sql(self, fields, value_rows): """ Take a sequence of N fields and a sequence of M rows of values, and generate placeholder SQL and parameters for each field and value. Return a pair containing: * a sequence of M rows of N SQL placeholder strings, and * a sequence of M rows of corresponding parameter values. Each placeholder string may contain any number of '%s' interpolation strings, and each parameter row will contain exactly as many params as the total number of '%s's in the corresponding placeholder row. """ if not value_rows: return [], [] # list of (sql, [params]) tuples for each object to be saved # Shape: [n_objs][n_fields][2] rows_of_fields_as_sql = ( (self.field_as_sql(field, v) for field, v in zip(fields, row)) for row in value_rows ) # tuple like ([sqls], [[params]s]) for each object to be saved # Shape: [n_objs][2][n_fields] sql_and_param_pair_rows = (zip(*row) for row in rows_of_fields_as_sql) # Extract separate lists for placeholders and params. # Each of these has shape [n_objs][n_fields] placeholder_rows, param_rows = zip(*sql_and_param_pair_rows) # Params for each field are still lists, and need to be flattened. param_rows = [[p for ps in row for p in ps] for row in param_rows] return placeholder_rows, param_rows def as_sql(self): # We don't need quote_name_unless_alias() here, since these are all # going to be column names (so we can avoid the extra overhead). 
qn = self.connection.ops.quote_name opts = self.query.get_meta() insert_statement = self.connection.ops.insert_statement(ignore_conflicts=self.query.ignore_conflicts) result = ['%s %s' % (insert_statement, qn(opts.db_table))] fields = self.query.fields or [opts.pk] result.append('(%s)' % ', '.join(qn(f.column) for f in fields)) if self.query.fields: value_rows = [ [self.prepare_value(field, self.pre_save_val(field, obj)) for field in fields] for obj in self.query.objs ] else: # An empty object. value_rows = [[self.connection.ops.pk_default_value()] for _ in self.query.objs] fields = [None] # Currently the backends just accept values when generating bulk # queries and generate their own placeholders. Doing that isn't # necessary and it should be possible to use placeholders and # expressions in bulk inserts too. can_bulk = (not self.returning_fields and self.connection.features.has_bulk_insert) placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows) ignore_conflicts_suffix_sql = self.connection.ops.ignore_conflicts_suffix_sql( ignore_conflicts=self.query.ignore_conflicts ) if self.returning_fields and self.connection.features.can_return_columns_from_insert: if self.connection.features.can_return_rows_from_bulk_insert: result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows)) params = param_rows else: result.append("VALUES (%s)" % ", ".join(placeholder_rows[0])) params = [param_rows[0]] if ignore_conflicts_suffix_sql: result.append(ignore_conflicts_suffix_sql) # Skip empty r_sql to allow subclasses to customize behavior for # 3rd party backends. Refs #19096. r_sql, self.returning_params = self.connection.ops.return_insert_columns(self.returning_fields) if r_sql: result.append(r_sql) params += [self.returning_params] return [(" ".join(result), tuple(chain.from_iterable(params)))] if can_bulk: result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows)) if ignore_conflicts_suffix_sql: result.append(ignore_conflicts_suffix_sql) return [(" ".join(result), tuple(p for ps in param_rows for p in ps))] else: if ignore_conflicts_suffix_sql: result.append(ignore_conflicts_suffix_sql) return [ (" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals) for p, vals in zip(placeholder_rows, param_rows) ] def execute_sql(self, returning_fields=None): assert not ( returning_fields and len(self.query.objs) != 1 and not self.connection.features.can_return_rows_from_bulk_insert ) self.returning_fields = returning_fields with self.connection.cursor() as cursor: for sql, params in self.as_sql(): cursor.execute(sql, params) if not self.returning_fields: return [] if self.connection.features.can_return_rows_from_bulk_insert and len(self.query.objs) > 1: return self.connection.ops.fetch_returned_insert_rows(cursor) if self.connection.features.can_return_columns_from_insert: assert len(self.query.objs) == 1 return [self.connection.ops.fetch_returned_insert_columns(cursor, self.returning_params)] return [(self.connection.ops.last_insert_id( cursor, self.query.get_meta().db_table, self.query.get_meta().pk.column ),)] class SQLDeleteCompiler(SQLCompiler): @cached_property def single_alias(self): return sum(self.query.alias_refcount[t] > 0 for t in self.query.alias_map) == 1 def _as_sql(self, query): result = [ 'DELETE FROM %s' % self.quote_name_unless_alias(query.base_table) ] where, params = self.compile(query.where) if where: result.append('WHERE %s' % where) return ' '.join(result), tuple(params) def as_sql(self): """ Create the SQL for this query. 
Return the SQL string and list of parameters. """ if self.single_alias: return self._as_sql(self.query) innerq = self.query.clone() innerq.__class__ = Query innerq.clear_select_clause() pk = self.query.model._meta.pk innerq.select = [ pk.get_col(self.query.get_initial_alias()) ] outerq = Query(self.query.model) outerq.where = self.query.where_class() outerq.add_q(Q(pk__in=innerq)) return self._as_sql(outerq) class SQLUpdateCompiler(SQLCompiler): def as_sql(self): """ Create the SQL for this query. Return the SQL string and list of parameters. """ self.pre_sql_setup() if not self.query.values: return '', () qn = self.quote_name_unless_alias values, update_params = [], [] for field, model, val in self.query.values: if hasattr(val, 'resolve_expression'): val = val.resolve_expression(self.query, allow_joins=False, for_save=True) if val.contains_aggregate: raise FieldError( 'Aggregate functions are not allowed in this query ' '(%s=%r).' % (field.name, val) ) if val.contains_over_clause: raise FieldError( 'Window expressions are not allowed in this query ' '(%s=%r).' % (field.name, val) ) elif hasattr(val, 'prepare_database_save'): if field.remote_field: val = field.get_db_prep_save( val.prepare_database_save(field), connection=self.connection, ) else: raise TypeError( "Tried to update field %s with a model instance, %r. " "Use a value compatible with %s." % (field, val, field.__class__.__name__) ) else: val = field.get_db_prep_save(val, connection=self.connection) # Getting the placeholder for the field. if hasattr(field, 'get_placeholder'): placeholder = field.get_placeholder(val, self, self.connection) else: placeholder = '%s' name = field.column if hasattr(val, 'as_sql'): sql, params = self.compile(val) values.append('%s = %s' % (qn(name), placeholder % sql)) update_params.extend(params) elif val is not None: values.append('%s = %s' % (qn(name), placeholder)) update_params.append(val) else: values.append('%s = NULL' % qn(name)) table = self.query.base_table result = [ 'UPDATE %s SET' % qn(table), ', '.join(values), ] where, params = self.compile(self.query.where) if where: result.append('WHERE %s' % where) return ' '.join(result), tuple(update_params + params) def execute_sql(self, result_type): """ Execute the specified update. Return the number of rows affected by the primary update query. The "primary update query" is the first non-empty query that is executed. Row counts for any subsequent, related queries are not available. """ cursor = super().execute_sql(result_type) try: rows = cursor.rowcount if cursor else 0 is_empty = cursor is None finally: if cursor: cursor.close() for query in self.query.get_related_updates(): aux_rows = query.get_compiler(self.using).execute_sql(result_type) if is_empty and aux_rows: rows = aux_rows is_empty = False return rows def pre_sql_setup(self): """ If the update depends on results from other tables, munge the "where" conditions to match the format required for (portable) SQL updates. If multiple updates are required, pull out the id values to update at this point so that they don't change as a result of the progressive updates. 
""" refcounts_before = self.query.alias_refcount.copy() # Ensure base table is in the query self.query.get_initial_alias() count = self.query.count_active_tables() if not self.query.related_updates and count == 1: return query = self.query.chain(klass=Query) query.select_related = False query.clear_ordering(True) query.extra = {} query.select = [] query.add_fields([query.get_meta().pk.name]) super().pre_sql_setup() must_pre_select = count > 1 and not self.connection.features.update_can_self_select # Now we adjust the current query: reset the where clause and get rid # of all the tables we don't need (since they're in the sub-select). self.query.where = self.query.where_class() if self.query.related_updates or must_pre_select: # Either we're using the idents in multiple update queries (so # don't want them to change), or the db backend doesn't support # selecting from the updating table (e.g. MySQL). idents = [] for rows in query.get_compiler(self.using).execute_sql(MULTI): idents.extend(r[0] for r in rows) self.query.add_filter(('pk__in', idents)) self.query.related_ids = idents else: # The fast path. Filters and updates in one query. self.query.add_filter(('pk__in', query)) self.query.reset_refcounts(refcounts_before) class SQLAggregateCompiler(SQLCompiler): def as_sql(self): """ Create the SQL for this query. Return the SQL string and list of parameters. """ sql, params = [], [] for annotation in self.query.annotation_select.values(): ann_sql, ann_params = self.compile(annotation) ann_sql, ann_params = annotation.select_format(self, ann_sql, ann_params) sql.append(ann_sql) params.extend(ann_params) self.col_count = len(self.query.annotation_select) sql = ', '.join(sql) params = tuple(params) sql = 'SELECT %s FROM (%s) subquery' % (sql, self.query.subquery) params = params + self.query.sub_params return sql, params def cursor_iter(cursor, sentinel, col_count, itersize): """ Yield blocks of rows from a cursor and ensure the cursor is closed when done. """ try: for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel): yield rows if col_count is None else [r[:col_count] for r in rows] finally: cursor.close()
import collections from itertools import chain from django.apps import apps from django.conf import settings from django.contrib.admin.utils import ( NotRelationField, flatten, get_fields_from_path, ) from django.core import checks from django.core.exceptions import FieldDoesNotExist from django.db import models from django.db.models.constants import LOOKUP_SEP from django.db.models.expressions import Combinable from django.forms.models import ( BaseModelForm, BaseModelFormSet, _get_foreign_key, ) from django.template import engines from django.template.backends.django import DjangoTemplates from django.utils.module_loading import import_string def _issubclass(cls, classinfo): """ issubclass() variant that doesn't raise an exception if cls isn't a class. """ try: return issubclass(cls, classinfo) except TypeError: return False def _contains_subclass(class_path, candidate_paths): """ Return whether or not a dotted class path (or a subclass of that class) is found in a list of candidate paths. """ cls = import_string(class_path) for path in candidate_paths: try: candidate_cls = import_string(path) except ImportError: # ImportErrors are raised elsewhere. continue if _issubclass(candidate_cls, cls): return True return False def check_admin_app(app_configs, **kwargs): from django.contrib.admin.sites import all_sites errors = [] for site in all_sites: errors.extend(site.check(app_configs)) return errors def check_dependencies(**kwargs): """ Check that the admin's dependencies are correctly installed. """ from django.contrib.admin.sites import all_sites if not apps.is_installed('django.contrib.admin'): return [] errors = [] app_dependencies = ( ('django.contrib.contenttypes', 401), ('django.contrib.auth', 405), ('django.contrib.messages', 406), ) for app_name, error_code in app_dependencies: if not apps.is_installed(app_name): errors.append(checks.Error( "'%s' must be in INSTALLED_APPS in order to use the admin " "application." 
% app_name, id='admin.E%d' % error_code, )) for engine in engines.all(): if isinstance(engine, DjangoTemplates): django_templates_instance = engine.engine break else: django_templates_instance = None if not django_templates_instance: errors.append(checks.Error( "A 'django.template.backends.django.DjangoTemplates' instance " "must be configured in TEMPLATES in order to use the admin " "application.", id='admin.E403', )) else: if ('django.contrib.auth.context_processors.auth' not in django_templates_instance.context_processors and _contains_subclass('django.contrib.auth.backends.ModelBackend', settings.AUTHENTICATION_BACKENDS)): errors.append(checks.Error( "'django.contrib.auth.context_processors.auth' must be " "enabled in DjangoTemplates (TEMPLATES) if using the default " "auth backend in order to use the admin application.", id='admin.E402', )) if ('django.contrib.messages.context_processors.messages' not in django_templates_instance.context_processors): errors.append(checks.Error( "'django.contrib.messages.context_processors.messages' must " "be enabled in DjangoTemplates (TEMPLATES) in order to use " "the admin application.", id='admin.E404', )) sidebar_enabled = any(site.enable_nav_sidebar for site in all_sites) if (sidebar_enabled and 'django.template.context_processors.request' not in django_templates_instance.context_processors): errors.append(checks.Warning( "'django.template.context_processors.request' must be enabled " "in DjangoTemplates (TEMPLATES) in order to use the admin " "navigation sidebar.", id='admin.W411', )) if not _contains_subclass('django.contrib.auth.middleware.AuthenticationMiddleware', settings.MIDDLEWARE): errors.append(checks.Error( "'django.contrib.auth.middleware.AuthenticationMiddleware' must " "be in MIDDLEWARE in order to use the admin application.", id='admin.E408', )) if not _contains_subclass('django.contrib.messages.middleware.MessageMiddleware', settings.MIDDLEWARE): errors.append(checks.Error( "'django.contrib.messages.middleware.MessageMiddleware' must " "be in MIDDLEWARE in order to use the admin application.", id='admin.E409', )) if not _contains_subclass('django.contrib.sessions.middleware.SessionMiddleware', settings.MIDDLEWARE): errors.append(checks.Error( "'django.contrib.sessions.middleware.SessionMiddleware' must " "be in MIDDLEWARE in order to use the admin application.", id='admin.E410', )) return errors class BaseModelAdminChecks: def check(self, admin_obj, **kwargs): return [ *self._check_autocomplete_fields(admin_obj), *self._check_raw_id_fields(admin_obj), *self._check_fields(admin_obj), *self._check_fieldsets(admin_obj), *self._check_exclude(admin_obj), *self._check_form(admin_obj), *self._check_filter_vertical(admin_obj), *self._check_filter_horizontal(admin_obj), *self._check_radio_fields(admin_obj), *self._check_prepopulated_fields(admin_obj), *self._check_view_on_site_url(admin_obj), *self._check_ordering(admin_obj), *self._check_readonly_fields(admin_obj), ] def _check_autocomplete_fields(self, obj): """ Check that `autocomplete_fields` is a list or tuple of model fields. 
""" if not isinstance(obj.autocomplete_fields, (list, tuple)): return must_be('a list or tuple', option='autocomplete_fields', obj=obj, id='admin.E036') else: return list(chain.from_iterable([ self._check_autocomplete_fields_item(obj, field_name, 'autocomplete_fields[%d]' % index) for index, field_name in enumerate(obj.autocomplete_fields) ])) def _check_autocomplete_fields_item(self, obj, field_name, label): """ Check that an item in `autocomplete_fields` is a ForeignKey or a ManyToManyField and that the item has a related ModelAdmin with search_fields defined. """ try: field = obj.model._meta.get_field(field_name) except FieldDoesNotExist: return refer_to_missing_field(field=field_name, option=label, obj=obj, id='admin.E037') else: if not field.many_to_many and not isinstance(field, models.ForeignKey): return must_be( 'a foreign key or a many-to-many field', option=label, obj=obj, id='admin.E038' ) related_admin = obj.admin_site._registry.get(field.remote_field.model) if related_admin is None: return [ checks.Error( 'An admin for model "%s" has to be registered ' 'to be referenced by %s.autocomplete_fields.' % ( field.remote_field.model.__name__, type(obj).__name__, ), obj=obj.__class__, id='admin.E039', ) ] elif not related_admin.search_fields: return [ checks.Error( '%s must define "search_fields", because it\'s ' 'referenced by %s.autocomplete_fields.' % ( related_admin.__class__.__name__, type(obj).__name__, ), obj=obj.__class__, id='admin.E040', ) ] return [] def _check_raw_id_fields(self, obj): """ Check that `raw_id_fields` only contains field names that are listed on the model. """ if not isinstance(obj.raw_id_fields, (list, tuple)): return must_be('a list or tuple', option='raw_id_fields', obj=obj, id='admin.E001') else: return list(chain.from_iterable( self._check_raw_id_fields_item(obj, field_name, 'raw_id_fields[%d]' % index) for index, field_name in enumerate(obj.raw_id_fields) )) def _check_raw_id_fields_item(self, obj, field_name, label): """ Check an item of `raw_id_fields`, i.e. check that field named `field_name` exists in model `model` and is a ForeignKey or a ManyToManyField. """ try: field = obj.model._meta.get_field(field_name) except FieldDoesNotExist: return refer_to_missing_field(field=field_name, option=label, obj=obj, id='admin.E002') else: if not field.many_to_many and not isinstance(field, models.ForeignKey): return must_be('a foreign key or a many-to-many field', option=label, obj=obj, id='admin.E003') else: return [] def _check_fields(self, obj): """ Check that `fields` only refer to existing fields, doesn't contain duplicates. Check if at most one of `fields` and `fieldsets` is defined. """ if obj.fields is None: return [] elif not isinstance(obj.fields, (list, tuple)): return must_be('a list or tuple', option='fields', obj=obj, id='admin.E004') elif obj.fieldsets: return [ checks.Error( "Both 'fieldsets' and 'fields' are specified.", obj=obj.__class__, id='admin.E005', ) ] fields = flatten(obj.fields) if len(fields) != len(set(fields)): return [ checks.Error( "The value of 'fields' contains duplicate field(s).", obj=obj.__class__, id='admin.E006', ) ] return list(chain.from_iterable( self._check_field_spec(obj, field_name, 'fields') for field_name in obj.fields )) def _check_fieldsets(self, obj): """ Check that fieldsets is properly formatted and doesn't contain duplicates. 
""" if obj.fieldsets is None: return [] elif not isinstance(obj.fieldsets, (list, tuple)): return must_be('a list or tuple', option='fieldsets', obj=obj, id='admin.E007') else: seen_fields = [] return list(chain.from_iterable( self._check_fieldsets_item(obj, fieldset, 'fieldsets[%d]' % index, seen_fields) for index, fieldset in enumerate(obj.fieldsets) )) def _check_fieldsets_item(self, obj, fieldset, label, seen_fields): """ Check an item of `fieldsets`, i.e. check that this is a pair of a set name and a dictionary containing "fields" key. """ if not isinstance(fieldset, (list, tuple)): return must_be('a list or tuple', option=label, obj=obj, id='admin.E008') elif len(fieldset) != 2: return must_be('of length 2', option=label, obj=obj, id='admin.E009') elif not isinstance(fieldset[1], dict): return must_be('a dictionary', option='%s[1]' % label, obj=obj, id='admin.E010') elif 'fields' not in fieldset[1]: return [ checks.Error( "The value of '%s[1]' must contain the key 'fields'." % label, obj=obj.__class__, id='admin.E011', ) ] elif not isinstance(fieldset[1]['fields'], (list, tuple)): return must_be('a list or tuple', option="%s[1]['fields']" % label, obj=obj, id='admin.E008') seen_fields.extend(flatten(fieldset[1]['fields'])) if len(seen_fields) != len(set(seen_fields)): return [ checks.Error( "There are duplicate field(s) in '%s[1]'." % label, obj=obj.__class__, id='admin.E012', ) ] return list(chain.from_iterable( self._check_field_spec(obj, fieldset_fields, '%s[1]["fields"]' % label) for fieldset_fields in fieldset[1]['fields'] )) def _check_field_spec(self, obj, fields, label): """ `fields` should be an item of `fields` or an item of fieldset[1]['fields'] for any `fieldset` in `fieldsets`. It should be a field name or a tuple of field names. """ if isinstance(fields, tuple): return list(chain.from_iterable( self._check_field_spec_item(obj, field_name, "%s[%d]" % (label, index)) for index, field_name in enumerate(fields) )) else: return self._check_field_spec_item(obj, fields, label) def _check_field_spec_item(self, obj, field_name, label): if field_name in obj.readonly_fields: # Stuff can be put in fields that isn't actually a model field if # it's in readonly_fields, readonly_fields will handle the # validation of such things. return [] else: try: field = obj.model._meta.get_field(field_name) except FieldDoesNotExist: # If we can't find a field on the model that matches, it could # be an extra field on the form. return [] else: if (isinstance(field, models.ManyToManyField) and not field.remote_field.through._meta.auto_created): return [ checks.Error( "The value of '%s' cannot include the ManyToManyField '%s', " "because that field manually specifies a relationship model." % (label, field_name), obj=obj.__class__, id='admin.E013', ) ] else: return [] def _check_exclude(self, obj): """ Check that exclude is a sequence without duplicates. """ if obj.exclude is None: # default value is None return [] elif not isinstance(obj.exclude, (list, tuple)): return must_be('a list or tuple', option='exclude', obj=obj, id='admin.E014') elif len(obj.exclude) > len(set(obj.exclude)): return [ checks.Error( "The value of 'exclude' contains duplicate field(s).", obj=obj.__class__, id='admin.E015', ) ] else: return [] def _check_form(self, obj): """ Check that form subclasses BaseModelForm. 
""" if not _issubclass(obj.form, BaseModelForm): return must_inherit_from(parent='BaseModelForm', option='form', obj=obj, id='admin.E016') else: return [] def _check_filter_vertical(self, obj): """ Check that filter_vertical is a sequence of field names. """ if not isinstance(obj.filter_vertical, (list, tuple)): return must_be('a list or tuple', option='filter_vertical', obj=obj, id='admin.E017') else: return list(chain.from_iterable( self._check_filter_item(obj, field_name, "filter_vertical[%d]" % index) for index, field_name in enumerate(obj.filter_vertical) )) def _check_filter_horizontal(self, obj): """ Check that filter_horizontal is a sequence of field names. """ if not isinstance(obj.filter_horizontal, (list, tuple)): return must_be('a list or tuple', option='filter_horizontal', obj=obj, id='admin.E018') else: return list(chain.from_iterable( self._check_filter_item(obj, field_name, "filter_horizontal[%d]" % index) for index, field_name in enumerate(obj.filter_horizontal) )) def _check_filter_item(self, obj, field_name, label): """ Check one item of `filter_vertical` or `filter_horizontal`, i.e. check that given field exists and is a ManyToManyField. """ try: field = obj.model._meta.get_field(field_name) except FieldDoesNotExist: return refer_to_missing_field(field=field_name, option=label, obj=obj, id='admin.E019') else: if not field.many_to_many: return must_be('a many-to-many field', option=label, obj=obj, id='admin.E020') else: return [] def _check_radio_fields(self, obj): """ Check that `radio_fields` is a dictionary. """ if not isinstance(obj.radio_fields, dict): return must_be('a dictionary', option='radio_fields', obj=obj, id='admin.E021') else: return list(chain.from_iterable( self._check_radio_fields_key(obj, field_name, 'radio_fields') + self._check_radio_fields_value(obj, val, 'radio_fields["%s"]' % field_name) for field_name, val in obj.radio_fields.items() )) def _check_radio_fields_key(self, obj, field_name, label): """ Check that a key of `radio_fields` dictionary is name of existing field and that the field is a ForeignKey or has `choices` defined. """ try: field = obj.model._meta.get_field(field_name) except FieldDoesNotExist: return refer_to_missing_field(field=field_name, option=label, obj=obj, id='admin.E022') else: if not (isinstance(field, models.ForeignKey) or field.choices): return [ checks.Error( "The value of '%s' refers to '%s', which is not an " "instance of ForeignKey, and does not have a 'choices' definition." % ( label, field_name ), obj=obj.__class__, id='admin.E023', ) ] else: return [] def _check_radio_fields_value(self, obj, val, label): """ Check type of a value of `radio_fields` dictionary. """ from django.contrib.admin.options import HORIZONTAL, VERTICAL if val not in (HORIZONTAL, VERTICAL): return [ checks.Error( "The value of '%s' must be either admin.HORIZONTAL or admin.VERTICAL." % label, obj=obj.__class__, id='admin.E024', ) ] else: return [] def _check_view_on_site_url(self, obj): if not callable(obj.view_on_site) and not isinstance(obj.view_on_site, bool): return [ checks.Error( "The value of 'view_on_site' must be a callable or a boolean value.", obj=obj.__class__, id='admin.E025', ) ] else: return [] def _check_prepopulated_fields(self, obj): """ Check that `prepopulated_fields` is a dictionary containing allowed field types. 
""" if not isinstance(obj.prepopulated_fields, dict): return must_be('a dictionary', option='prepopulated_fields', obj=obj, id='admin.E026') else: return list(chain.from_iterable( self._check_prepopulated_fields_key(obj, field_name, 'prepopulated_fields') + self._check_prepopulated_fields_value(obj, val, 'prepopulated_fields["%s"]' % field_name) for field_name, val in obj.prepopulated_fields.items() )) def _check_prepopulated_fields_key(self, obj, field_name, label): """ Check a key of `prepopulated_fields` dictionary, i.e. check that it is a name of existing field and the field is one of the allowed types. """ try: field = obj.model._meta.get_field(field_name) except FieldDoesNotExist: return refer_to_missing_field(field=field_name, option=label, obj=obj, id='admin.E027') else: if isinstance(field, (models.DateTimeField, models.ForeignKey, models.ManyToManyField)): return [ checks.Error( "The value of '%s' refers to '%s', which must not be a DateTimeField, " "a ForeignKey, a OneToOneField, or a ManyToManyField." % (label, field_name), obj=obj.__class__, id='admin.E028', ) ] else: return [] def _check_prepopulated_fields_value(self, obj, val, label): """ Check a value of `prepopulated_fields` dictionary, i.e. it's an iterable of existing fields. """ if not isinstance(val, (list, tuple)): return must_be('a list or tuple', option=label, obj=obj, id='admin.E029') else: return list(chain.from_iterable( self._check_prepopulated_fields_value_item(obj, subfield_name, "%s[%r]" % (label, index)) for index, subfield_name in enumerate(val) )) def _check_prepopulated_fields_value_item(self, obj, field_name, label): """ For `prepopulated_fields` equal to {"slug": ("title",)}, `field_name` is "title". """ try: obj.model._meta.get_field(field_name) except FieldDoesNotExist: return refer_to_missing_field(field=field_name, option=label, obj=obj, id='admin.E030') else: return [] def _check_ordering(self, obj): """ Check that ordering refers to existing fields or is random. """ # ordering = None if obj.ordering is None: # The default value is None return [] elif not isinstance(obj.ordering, (list, tuple)): return must_be('a list or tuple', option='ordering', obj=obj, id='admin.E031') else: return list(chain.from_iterable( self._check_ordering_item(obj, field_name, 'ordering[%d]' % index) for index, field_name in enumerate(obj.ordering) )) def _check_ordering_item(self, obj, field_name, label): """ Check that `ordering` refers to existing fields. """ if isinstance(field_name, (Combinable, models.OrderBy)): if not isinstance(field_name, models.OrderBy): field_name = field_name.asc() if isinstance(field_name.expression, models.F): field_name = field_name.expression.name else: return [] if field_name == '?' and len(obj.ordering) != 1: return [ checks.Error( "The value of 'ordering' has the random ordering marker '?', " "but contains other fields as well.", hint='Either remove the "?", or remove the other fields.', obj=obj.__class__, id='admin.E032', ) ] elif field_name == '?': return [] elif LOOKUP_SEP in field_name: # Skip ordering in the format field1__field2 (FIXME: checking # this format would be nice, but it's a little fiddly). 
return [] else: if field_name.startswith('-'): field_name = field_name[1:] if field_name == 'pk': return [] try: obj.model._meta.get_field(field_name) except FieldDoesNotExist: return refer_to_missing_field(field=field_name, option=label, obj=obj, id='admin.E033') else: return [] def _check_readonly_fields(self, obj): """ Check that readonly_fields refers to proper attribute or field. """ if obj.readonly_fields == (): return [] elif not isinstance(obj.readonly_fields, (list, tuple)): return must_be('a list or tuple', option='readonly_fields', obj=obj, id='admin.E034') else: return list(chain.from_iterable( self._check_readonly_fields_item(obj, field_name, "readonly_fields[%d]" % index) for index, field_name in enumerate(obj.readonly_fields) )) def _check_readonly_fields_item(self, obj, field_name, label): if callable(field_name): return [] elif hasattr(obj, field_name): return [] elif hasattr(obj.model, field_name): return [] else: try: obj.model._meta.get_field(field_name) except FieldDoesNotExist: return [ checks.Error( "The value of '%s' is not a callable, an attribute of " "'%s', or an attribute of '%s'." % ( label, obj.__class__.__name__, obj.model._meta.label, ), obj=obj.__class__, id='admin.E035', ) ] else: return [] class ModelAdminChecks(BaseModelAdminChecks): def check(self, admin_obj, **kwargs): return [ *super().check(admin_obj), *self._check_save_as(admin_obj), *self._check_save_on_top(admin_obj), *self._check_inlines(admin_obj), *self._check_list_display(admin_obj), *self._check_list_display_links(admin_obj), *self._check_list_filter(admin_obj), *self._check_list_select_related(admin_obj), *self._check_list_per_page(admin_obj), *self._check_list_max_show_all(admin_obj), *self._check_list_editable(admin_obj), *self._check_search_fields(admin_obj), *self._check_date_hierarchy(admin_obj), *self._check_action_permission_methods(admin_obj), *self._check_actions_uniqueness(admin_obj), ] def _check_save_as(self, obj): """ Check save_as is a boolean. """ if not isinstance(obj.save_as, bool): return must_be('a boolean', option='save_as', obj=obj, id='admin.E101') else: return [] def _check_save_on_top(self, obj): """ Check save_on_top is a boolean. """ if not isinstance(obj.save_on_top, bool): return must_be('a boolean', option='save_on_top', obj=obj, id='admin.E102') else: return [] def _check_inlines(self, obj): """ Check all inline model admin classes. """ if not isinstance(obj.inlines, (list, tuple)): return must_be('a list or tuple', option='inlines', obj=obj, id='admin.E103') else: return list(chain.from_iterable( self._check_inlines_item(obj, item, "inlines[%d]" % index) for index, item in enumerate(obj.inlines) )) def _check_inlines_item(self, obj, inline, label): """ Check one inline model admin. """ try: inline_label = inline.__module__ + '.' + inline.__name__ except AttributeError: return [ checks.Error( "'%s' must inherit from 'InlineModelAdmin'." % obj, obj=obj.__class__, id='admin.E104', ) ] from django.contrib.admin.options import InlineModelAdmin if not _issubclass(inline, InlineModelAdmin): return [ checks.Error( "'%s' must inherit from 'InlineModelAdmin'." % inline_label, obj=obj.__class__, id='admin.E104', ) ] elif not inline.model: return [ checks.Error( "'%s' must have a 'model' attribute." 
% inline_label, obj=obj.__class__, id='admin.E105', ) ] elif not _issubclass(inline.model, models.Model): return must_be('a Model', option='%s.model' % inline_label, obj=obj, id='admin.E106') else: return inline(obj.model, obj.admin_site).check() def _check_list_display(self, obj): """ Check that list_display only contains fields or usable attributes. """ if not isinstance(obj.list_display, (list, tuple)): return must_be('a list or tuple', option='list_display', obj=obj, id='admin.E107') else: return list(chain.from_iterable( self._check_list_display_item(obj, item, "list_display[%d]" % index) for index, item in enumerate(obj.list_display) )) def _check_list_display_item(self, obj, item, label): if callable(item): return [] elif hasattr(obj, item): return [] try: field = obj.model._meta.get_field(item) except FieldDoesNotExist: try: field = getattr(obj.model, item) except AttributeError: return [ checks.Error( "The value of '%s' refers to '%s', which is not a " "callable, an attribute of '%s', or an attribute or " "method on '%s'." % ( label, item, obj.__class__.__name__, obj.model._meta.label, ), obj=obj.__class__, id='admin.E108', ) ] if isinstance(field, models.ManyToManyField): return [ checks.Error( "The value of '%s' must not be a ManyToManyField." % label, obj=obj.__class__, id='admin.E109', ) ] return [] def _check_list_display_links(self, obj): """ Check that list_display_links is a unique subset of list_display. """ from django.contrib.admin.options import ModelAdmin if obj.list_display_links is None: return [] elif not isinstance(obj.list_display_links, (list, tuple)): return must_be('a list, a tuple, or None', option='list_display_links', obj=obj, id='admin.E110') # Check only if ModelAdmin.get_list_display() isn't overridden. elif obj.get_list_display.__func__ is ModelAdmin.get_list_display: return list(chain.from_iterable( self._check_list_display_links_item(obj, field_name, "list_display_links[%d]" % index) for index, field_name in enumerate(obj.list_display_links) )) return [] def _check_list_display_links_item(self, obj, field_name, label): if field_name not in obj.list_display: return [ checks.Error( "The value of '%s' refers to '%s', which is not defined in 'list_display'." % ( label, field_name ), obj=obj.__class__, id='admin.E111', ) ] else: return [] def _check_list_filter(self, obj): if not isinstance(obj.list_filter, (list, tuple)): return must_be('a list or tuple', option='list_filter', obj=obj, id='admin.E112') else: return list(chain.from_iterable( self._check_list_filter_item(obj, item, "list_filter[%d]" % index) for index, item in enumerate(obj.list_filter) )) def _check_list_filter_item(self, obj, item, label): """ Check one item of `list_filter`, i.e. check if it is one of three options: 1. 'field' -- a basic field filter, possibly w/ relationships (e.g. 'field__rel') 2. ('field', SomeFieldListFilter) - a field-based list filter class 3. SomeListFilter - a non-field list filter class """ from django.contrib.admin import ListFilter, FieldListFilter if callable(item) and not isinstance(item, models.Field): # If item is option 3, it should be a ListFilter... if not _issubclass(item, ListFilter): return must_inherit_from(parent='ListFilter', option=label, obj=obj, id='admin.E113') # ... but not a FieldListFilter. elif issubclass(item, FieldListFilter): return [ checks.Error( "The value of '%s' must not inherit from 'FieldListFilter'." 
% label, obj=obj.__class__, id='admin.E114', ) ] else: return [] elif isinstance(item, (tuple, list)): # item is option #2 field, list_filter_class = item if not _issubclass(list_filter_class, FieldListFilter): return must_inherit_from(parent='FieldListFilter', option='%s[1]' % label, obj=obj, id='admin.E115') else: return [] else: # item is option #1 field = item # Validate the field string try: get_fields_from_path(obj.model, field) except (NotRelationField, FieldDoesNotExist): return [ checks.Error( "The value of '%s' refers to '%s', which does not refer to a Field." % (label, field), obj=obj.__class__, id='admin.E116', ) ] else: return [] def _check_list_select_related(self, obj): """ Check that list_select_related is a boolean, a list or a tuple. """ if not isinstance(obj.list_select_related, (bool, list, tuple)): return must_be('a boolean, tuple or list', option='list_select_related', obj=obj, id='admin.E117') else: return [] def _check_list_per_page(self, obj): """ Check that list_per_page is an integer. """ if not isinstance(obj.list_per_page, int): return must_be('an integer', option='list_per_page', obj=obj, id='admin.E118') else: return [] def _check_list_max_show_all(self, obj): """ Check that list_max_show_all is an integer. """ if not isinstance(obj.list_max_show_all, int): return must_be('an integer', option='list_max_show_all', obj=obj, id='admin.E119') else: return [] def _check_list_editable(self, obj): """ Check that list_editable is a sequence of editable fields from list_display without first element. """ if not isinstance(obj.list_editable, (list, tuple)): return must_be('a list or tuple', option='list_editable', obj=obj, id='admin.E120') else: return list(chain.from_iterable( self._check_list_editable_item(obj, item, "list_editable[%d]" % index) for index, item in enumerate(obj.list_editable) )) def _check_list_editable_item(self, obj, field_name, label): try: field = obj.model._meta.get_field(field_name) except FieldDoesNotExist: return refer_to_missing_field(field=field_name, option=label, obj=obj, id='admin.E121') else: if field_name not in obj.list_display: return [ checks.Error( "The value of '%s' refers to '%s', which is not " "contained in 'list_display'." % (label, field_name), obj=obj.__class__, id='admin.E122', ) ] elif obj.list_display_links and field_name in obj.list_display_links: return [ checks.Error( "The value of '%s' cannot be in both 'list_editable' and 'list_display_links'." % field_name, obj=obj.__class__, id='admin.E123', ) ] # If list_display[0] is in list_editable, check that # list_display_links is set. See #22792 and #26229 for use cases. elif (obj.list_display[0] == field_name and not obj.list_display_links and obj.list_display_links is not None): return [ checks.Error( "The value of '%s' refers to the first field in 'list_display' ('%s'), " "which cannot be used unless 'list_display_links' is set." % ( label, obj.list_display[0] ), obj=obj.__class__, id='admin.E124', ) ] elif not field.editable: return [ checks.Error( "The value of '%s' refers to '%s', which is not editable through the admin." % ( label, field_name ), obj=obj.__class__, id='admin.E125', ) ] else: return [] def _check_search_fields(self, obj): """ Check search_fields is a sequence. """ if not isinstance(obj.search_fields, (list, tuple)): return must_be('a list or tuple', option='search_fields', obj=obj, id='admin.E126') else: return [] def _check_date_hierarchy(self, obj): """ Check that date_hierarchy refers to DateField or DateTimeField. 
""" if obj.date_hierarchy is None: return [] else: try: field = get_fields_from_path(obj.model, obj.date_hierarchy)[-1] except (NotRelationField, FieldDoesNotExist): return [ checks.Error( "The value of 'date_hierarchy' refers to '%s', which " "does not refer to a Field." % obj.date_hierarchy, obj=obj.__class__, id='admin.E127', ) ] else: if not isinstance(field, (models.DateField, models.DateTimeField)): return must_be('a DateField or DateTimeField', option='date_hierarchy', obj=obj, id='admin.E128') else: return [] def _check_action_permission_methods(self, obj): """ Actions with an allowed_permission attribute require the ModelAdmin to implement a has_<perm>_permission() method for each permission. """ actions = obj._get_base_actions() errors = [] for func, name, _ in actions: if not hasattr(func, 'allowed_permissions'): continue for permission in func.allowed_permissions: method_name = 'has_%s_permission' % permission if not hasattr(obj, method_name): errors.append( checks.Error( '%s must define a %s() method for the %s action.' % ( obj.__class__.__name__, method_name, func.__name__, ), obj=obj.__class__, id='admin.E129', ) ) return errors def _check_actions_uniqueness(self, obj): """Check that every action has a unique __name__.""" errors = [] names = collections.Counter(name for _, name, _ in obj._get_base_actions()) for name, count in names.items(): if count > 1: errors.append(checks.Error( '__name__ attributes of actions defined in %s must be ' 'unique. Name %r is not unique.' % ( obj.__class__.__name__, name, ), obj=obj.__class__, id='admin.E130', )) return errors class InlineModelAdminChecks(BaseModelAdminChecks): def check(self, inline_obj, **kwargs): parent_model = inline_obj.parent_model return [ *super().check(inline_obj), *self._check_relation(inline_obj, parent_model), *self._check_exclude_of_parent_model(inline_obj, parent_model), *self._check_extra(inline_obj), *self._check_max_num(inline_obj), *self._check_min_num(inline_obj), *self._check_formset(inline_obj), ] def _check_exclude_of_parent_model(self, obj, parent_model): # Do not perform more specific checks if the base checks result in an # error. errors = super()._check_exclude(obj) if errors: return [] # Skip if `fk_name` is invalid. if self._check_relation(obj, parent_model): return [] if obj.exclude is None: return [] fk = _get_foreign_key(parent_model, obj.model, fk_name=obj.fk_name) if fk.name in obj.exclude: return [ checks.Error( "Cannot exclude the field '%s', because it is the foreign key " "to the parent model '%s'." % ( fk.name, parent_model._meta.label, ), obj=obj.__class__, id='admin.E201', ) ] else: return [] def _check_relation(self, obj, parent_model): try: _get_foreign_key(parent_model, obj.model, fk_name=obj.fk_name) except ValueError as e: return [checks.Error(e.args[0], obj=obj.__class__, id='admin.E202')] else: return [] def _check_extra(self, obj): """ Check that extra is an integer. """ if not isinstance(obj.extra, int): return must_be('an integer', option='extra', obj=obj, id='admin.E203') else: return [] def _check_max_num(self, obj): """ Check that max_num is an integer. """ if obj.max_num is None: return [] elif not isinstance(obj.max_num, int): return must_be('an integer', option='max_num', obj=obj, id='admin.E204') else: return [] def _check_min_num(self, obj): """ Check that min_num is an integer. 
""" if obj.min_num is None: return [] elif not isinstance(obj.min_num, int): return must_be('an integer', option='min_num', obj=obj, id='admin.E205') else: return [] def _check_formset(self, obj): """ Check formset is a subclass of BaseModelFormSet. """ if not _issubclass(obj.formset, BaseModelFormSet): return must_inherit_from(parent='BaseModelFormSet', option='formset', obj=obj, id='admin.E206') else: return [] def must_be(type, option, obj, id): return [ checks.Error( "The value of '%s' must be %s." % (option, type), obj=obj.__class__, id=id, ), ] def must_inherit_from(parent, option, obj, id): return [ checks.Error( "The value of '%s' must inherit from '%s'." % (option, parent), obj=obj.__class__, id=id, ), ] def refer_to_missing_field(field, option, obj, id): return [ checks.Error( "The value of '%s' refers to '%s', which is not an attribute of " "'%s'." % (option, field, obj.model._meta.label), obj=obj.__class__, id=id, ), ]
from datetime import datetime from functools import partialmethod from io import StringIO from unittest import mock, skipIf from django.core import serializers from django.core.serializers import SerializerDoesNotExist from django.core.serializers.base import ProgressBar from django.db import connection, transaction from django.http import HttpResponse from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature from django.test.utils import Approximate from .models import ( Actor, Article, Author, AuthorProfile, BaseModel, Category, Child, ComplexModel, Movie, Player, ProxyBaseModel, ProxyProxyBaseModel, Score, Team, ) @override_settings( SERIALIZATION_MODULES={ "json2": "django.core.serializers.json", } ) class SerializerRegistrationTests(SimpleTestCase): def setUp(self): self.old_serializers = serializers._serializers serializers._serializers = {} def tearDown(self): serializers._serializers = self.old_serializers def test_register(self): "Registering a new serializer populates the full registry. Refs #14823" serializers.register_serializer('json3', 'django.core.serializers.json') public_formats = serializers.get_public_serializer_formats() self.assertIn('json3', public_formats) self.assertIn('json2', public_formats) self.assertIn('xml', public_formats) def test_unregister(self): "Unregistering a serializer doesn't cause the registry to be repopulated. Refs #14823" serializers.unregister_serializer('xml') serializers.register_serializer('json3', 'django.core.serializers.json') public_formats = serializers.get_public_serializer_formats() self.assertNotIn('xml', public_formats) self.assertIn('json3', public_formats) def test_unregister_unknown_serializer(self): with self.assertRaises(SerializerDoesNotExist): serializers.unregister_serializer("nonsense") def test_builtin_serializers(self): "Requesting a list of serializer formats populates the registry" all_formats = set(serializers.get_serializer_formats()) public_formats = set(serializers.get_public_serializer_formats()) self.assertIn('xml', all_formats), self.assertIn('xml', public_formats) self.assertIn('json2', all_formats) self.assertIn('json2', public_formats) self.assertIn('python', all_formats) self.assertNotIn('python', public_formats) def test_get_unknown_serializer(self): """ #15889: get_serializer('nonsense') raises a SerializerDoesNotExist """ with self.assertRaises(SerializerDoesNotExist): serializers.get_serializer("nonsense") with self.assertRaises(KeyError): serializers.get_serializer("nonsense") # SerializerDoesNotExist is instantiated with the nonexistent format with self.assertRaisesMessage(SerializerDoesNotExist, 'nonsense'): serializers.get_serializer("nonsense") def test_get_unknown_deserializer(self): with self.assertRaises(SerializerDoesNotExist): serializers.get_deserializer("nonsense") class SerializersTestBase: serializer_name = None # Set by subclasses to the serialization format name @classmethod def setUpTestData(cls): sports = Category.objects.create(name="Sports") music = Category.objects.create(name="Music") op_ed = Category.objects.create(name="Op-Ed") cls.joe = Author.objects.create(name='Joe') cls.jane = Author.objects.create(name='Jane') cls.a1 = Article( author=cls.jane, headline="Poker has no place on ESPN", pub_date=datetime(2006, 6, 16, 11, 00) ) cls.a1.save() cls.a1.categories.set([sports, op_ed]) cls.a2 = Article( author=cls.joe, headline="Time to reform copyright", pub_date=datetime(2006, 6, 16, 13, 00, 11, 345) ) cls.a2.save() cls.a2.categories.set([music, op_ed]) def 
test_serialize(self): """Basic serialization works.""" serial_str = serializers.serialize(self.serializer_name, Article.objects.all()) self.assertTrue(self._validate_output(serial_str)) def test_serializer_roundtrip(self): """Serialized content can be deserialized.""" serial_str = serializers.serialize(self.serializer_name, Article.objects.all()) models = list(serializers.deserialize(self.serializer_name, serial_str)) self.assertEqual(len(models), 2) def test_serialize_to_stream(self): obj = ComplexModel(field1='first', field2='second', field3='third') obj.save_base(raw=True) # Serialize the test database to a stream for stream in (StringIO(), HttpResponse()): serializers.serialize(self.serializer_name, [obj], indent=2, stream=stream) # Serialize normally for a comparison string_data = serializers.serialize(self.serializer_name, [obj], indent=2) # The two are the same if isinstance(stream, StringIO): self.assertEqual(string_data, stream.getvalue()) else: self.assertEqual(string_data, stream.content.decode()) def test_serialize_specific_fields(self): obj = ComplexModel(field1='first', field2='second', field3='third') obj.save_base(raw=True) # Serialize then deserialize the test database serialized_data = serializers.serialize( self.serializer_name, [obj], indent=2, fields=('field1', 'field3') ) result = next(serializers.deserialize(self.serializer_name, serialized_data)) # The deserialized object contains data in only the serialized fields. self.assertEqual(result.object.field1, 'first') self.assertEqual(result.object.field2, '') self.assertEqual(result.object.field3, 'third') def test_altering_serialized_output(self): """ The ability to create new objects by modifying serialized content. """ old_headline = "Poker has no place on ESPN" new_headline = "Poker has no place on television" serial_str = serializers.serialize(self.serializer_name, Article.objects.all()) serial_str = serial_str.replace(old_headline, new_headline) models = list(serializers.deserialize(self.serializer_name, serial_str)) # Prior to saving, old headline is in place self.assertTrue(Article.objects.filter(headline=old_headline)) self.assertFalse(Article.objects.filter(headline=new_headline)) for model in models: model.save() # After saving, new headline is in place self.assertTrue(Article.objects.filter(headline=new_headline)) self.assertFalse(Article.objects.filter(headline=old_headline)) def test_one_to_one_as_pk(self): """ If you use your own primary key field (such as a OneToOneField), it doesn't appear in the serialized field list - it replaces the pk identifier. 
""" AuthorProfile.objects.create(author=self.joe, date_of_birth=datetime(1970, 1, 1)) serial_str = serializers.serialize(self.serializer_name, AuthorProfile.objects.all()) self.assertFalse(self._get_field_values(serial_str, 'author')) for obj in serializers.deserialize(self.serializer_name, serial_str): self.assertEqual(obj.object.pk, self.joe.pk) def test_serialize_field_subset(self): """Output can be restricted to a subset of fields""" valid_fields = ('headline', 'pub_date') invalid_fields = ("author", "categories") serial_str = serializers.serialize(self.serializer_name, Article.objects.all(), fields=valid_fields) for field_name in invalid_fields: self.assertFalse(self._get_field_values(serial_str, field_name)) for field_name in valid_fields: self.assertTrue(self._get_field_values(serial_str, field_name)) def test_serialize_unicode_roundtrip(self): """Unicode makes the roundtrip intact""" actor_name = "Za\u017c\u00f3\u0142\u0107" movie_title = 'G\u0119\u015bl\u0105 ja\u017a\u0144' ac = Actor(name=actor_name) mv = Movie(title=movie_title, actor=ac) ac.save() mv.save() serial_str = serializers.serialize(self.serializer_name, [mv]) self.assertEqual(self._get_field_values(serial_str, "title")[0], movie_title) self.assertEqual(self._get_field_values(serial_str, "actor")[0], actor_name) obj_list = list(serializers.deserialize(self.serializer_name, serial_str)) mv_obj = obj_list[0].object self.assertEqual(mv_obj.title, movie_title) def test_unicode_serialization(self): unicode_name = 'יוניקוד' data = serializers.serialize(self.serializer_name, [Author(name=unicode_name)]) self.assertIn(unicode_name, data) objs = list(serializers.deserialize(self.serializer_name, data)) self.assertEqual(objs[0].object.name, unicode_name) def test_serialize_progressbar(self): fake_stdout = StringIO() serializers.serialize( self.serializer_name, Article.objects.all(), progress_output=fake_stdout, object_count=Article.objects.count() ) self.assertTrue( fake_stdout.getvalue().endswith('[' + '.' 
* ProgressBar.progress_width + ']\n') ) def test_serialize_superfluous_queries(self): """Ensure no superfluous queries are made when serializing ForeignKeys #17602 """ ac = Actor(name='Actor name') ac.save() mv = Movie(title='Movie title', actor_id=ac.pk) mv.save() with self.assertNumQueries(0): serializers.serialize(self.serializer_name, [mv]) def test_serialize_with_null_pk(self): """ Serialized data with no primary key results in a model instance with no id """ category = Category(name="Reference") serial_str = serializers.serialize(self.serializer_name, [category]) pk_value = self._get_pk_values(serial_str)[0] self.assertFalse(pk_value) cat_obj = list(serializers.deserialize(self.serializer_name, serial_str))[0].object self.assertIsNone(cat_obj.id) def test_float_serialization(self): """Float values serialize and deserialize intact""" sc = Score(score=3.4) sc.save() serial_str = serializers.serialize(self.serializer_name, [sc]) deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str)) self.assertEqual(deserial_objs[0].object.score, Approximate(3.4, places=1)) def test_deferred_field_serialization(self): author = Author.objects.create(name='Victor Hugo') author = Author.objects.defer('name').get(pk=author.pk) serial_str = serializers.serialize(self.serializer_name, [author]) deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str)) self.assertIsInstance(deserial_objs[0].object, Author) def test_custom_field_serialization(self): """Custom fields serialize and deserialize intact""" team_str = "Spartak Moskva" player = Player() player.name = "Soslan Djanaev" player.rank = 1 player.team = Team(team_str) player.save() serial_str = serializers.serialize(self.serializer_name, Player.objects.all()) team = self._get_field_values(serial_str, "team") self.assertTrue(team) self.assertEqual(team[0], team_str) deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str)) self.assertEqual(deserial_objs[0].object.team.to_string(), player.team.to_string()) def test_pre_1000ad_date(self): """Year values before 1000AD are properly formatted""" # Regression for #12524 -- dates before 1000AD get prefixed # 0's on the year a = Article.objects.create( author=self.jane, headline="Nobody remembers the early years", pub_date=datetime(1, 2, 3, 4, 5, 6)) serial_str = serializers.serialize(self.serializer_name, [a]) date_values = self._get_field_values(serial_str, "pub_date") self.assertEqual(date_values[0].replace('T', ' '), "0001-02-03 04:05:06") def test_pkless_serialized_strings(self): """ Serialized strings without PKs can be turned into models """ deserial_objs = list(serializers.deserialize(self.serializer_name, self.pkless_str)) for obj in deserial_objs: self.assertFalse(obj.object.id) obj.save() self.assertEqual(Category.objects.all().count(), 5) def test_deterministic_mapping_ordering(self): """Mapping such as fields should be deterministically ordered. 
(#24558)""" output = serializers.serialize(self.serializer_name, [self.a1], indent=2) categories = self.a1.categories.values_list('pk', flat=True) self.assertEqual(output, self.mapping_ordering_str % { 'article_pk': self.a1.pk, 'author_pk': self.a1.author_id, 'first_category_pk': categories[0], 'second_category_pk': categories[1], }) def test_deserialize_force_insert(self): """Deserialized content can be saved with force_insert as a parameter.""" serial_str = serializers.serialize(self.serializer_name, [self.a1]) deserial_obj = list(serializers.deserialize(self.serializer_name, serial_str))[0] with mock.patch('django.db.models.Model') as mock_model: deserial_obj.save(force_insert=False) mock_model.save_base.assert_called_with(deserial_obj.object, raw=True, using=None, force_insert=False) @skipUnlessDBFeature('can_defer_constraint_checks') def test_serialize_proxy_model(self): BaseModel.objects.create(parent_data=1) base_objects = BaseModel.objects.all() proxy_objects = ProxyBaseModel.objects.all() proxy_proxy_objects = ProxyProxyBaseModel.objects.all() base_data = serializers.serialize("json", base_objects) proxy_data = serializers.serialize("json", proxy_objects) proxy_proxy_data = serializers.serialize("json", proxy_proxy_objects) self.assertEqual(base_data, proxy_data.replace('proxy', '')) self.assertEqual(base_data, proxy_proxy_data.replace('proxy', '')) def test_serialize_inherited_fields(self): child_1 = Child.objects.create(parent_data='a', child_data='b') child_2 = Child.objects.create(parent_data='c', child_data='d') child_1.parent_m2m.add(child_2) child_data = serializers.serialize(self.serializer_name, [child_1, child_2]) self.assertEqual(self._get_field_values(child_data, 'parent_m2m'), []) self.assertEqual(self._get_field_values(child_data, 'parent_data'), []) class SerializerAPITests(SimpleTestCase): def test_stream_class(self): class File: def __init__(self): self.lines = [] def write(self, line): self.lines.append(line) def getvalue(self): return ''.join(self.lines) class Serializer(serializers.json.Serializer): stream_class = File serializer = Serializer() data = serializer.serialize([Score(id=1, score=3.4)]) self.assertIs(serializer.stream_class, File) self.assertIsInstance(serializer.stream, File) self.assertEqual(data, '[{"model": "serializers.score", "pk": 1, "fields": {"score": 3.4}}]') class SerializersTransactionTestBase: available_apps = ['serializers'] @skipUnlessDBFeature('supports_forward_references') def test_forward_refs(self): """ Objects ids can be referenced before they are defined in the serialization data. """ # The deserialization process needs to run in a transaction in order # to test forward reference handling. with transaction.atomic(): objs = serializers.deserialize(self.serializer_name, self.fwd_ref_str) with connection.constraint_checks_disabled(): for obj in objs: obj.save() for model_cls in (Category, Author, Article): self.assertEqual(model_cls.objects.all().count(), 1) art_obj = Article.objects.all()[0] self.assertEqual(art_obj.categories.all().count(), 1) self.assertEqual(art_obj.author.name, "Agnes") def register_tests(test_class, method_name, test_func, exclude=()): """ Dynamically create serializer tests to ensure that all registered serializers are automatically tested. 
""" for format_ in serializers.get_serializer_formats(): if format_ == 'geojson' or format_ in exclude: continue decorated_func = skipIf( isinstance(serializers.get_serializer(format_), serializers.BadSerializer), 'The Python library for the %s serializer is not installed.' % format_, )(test_func) setattr(test_class, method_name % format_, partialmethod(decorated_func, format_))
0b02be014b03a628884f681e3cf4fe822ee53f643c106f4dbd7a723af2fc316d
from django import forms from django.contrib import admin from django.contrib.admin import AdminSite from django.contrib.auth.backends import ModelBackend from django.contrib.auth.middleware import AuthenticationMiddleware from django.contrib.contenttypes.admin import GenericStackedInline from django.contrib.messages.middleware import MessageMiddleware from django.contrib.sessions.middleware import SessionMiddleware from django.core import checks from django.test import SimpleTestCase, override_settings from .models import ( Album, Author, Book, City, Influence, Song, State, TwoAlbumFKAndAnE, ) class SongForm(forms.ModelForm): pass class ValidFields(admin.ModelAdmin): form = SongForm fields = ['title'] class ValidFormFieldsets(admin.ModelAdmin): def get_form(self, request, obj=None, **kwargs): class ExtraFieldForm(SongForm): name = forms.CharField(max_length=50) return ExtraFieldForm fieldsets = ( (None, { 'fields': ('name',), }), ) class MyAdmin(admin.ModelAdmin): def check(self, **kwargs): return ['error!'] class AuthenticationMiddlewareSubclass(AuthenticationMiddleware): pass class MessageMiddlewareSubclass(MessageMiddleware): pass class ModelBackendSubclass(ModelBackend): pass class SessionMiddlewareSubclass(SessionMiddleware): pass @override_settings( SILENCED_SYSTEM_CHECKS=['fields.W342'], # ForeignKey(unique=True) INSTALLED_APPS=[ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.messages', 'admin_checks', ], ) class SystemChecksTestCase(SimpleTestCase): def test_checks_are_performed(self): admin.site.register(Song, MyAdmin) try: errors = checks.run_checks() expected = ['error!'] self.assertEqual(errors, expected) finally: admin.site.unregister(Song) @override_settings(INSTALLED_APPS=['django.contrib.admin']) def test_apps_dependencies(self): errors = admin.checks.check_dependencies() expected = [ checks.Error( "'django.contrib.contenttypes' must be in " "INSTALLED_APPS in order to use the admin application.", id="admin.E401", ), checks.Error( "'django.contrib.auth' must be in INSTALLED_APPS in order " "to use the admin application.", id='admin.E405', ), checks.Error( "'django.contrib.messages' must be in INSTALLED_APPS in order " "to use the admin application.", id='admin.E406', ), ] self.assertEqual(errors, expected) @override_settings(TEMPLATES=[]) def test_no_template_engines(self): self.assertEqual(admin.checks.check_dependencies(), [ checks.Error( "A 'django.template.backends.django.DjangoTemplates' " "instance must be configured in TEMPLATES in order to use " "the admin application.", id='admin.E403', ) ]) @override_settings( TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [], }, }], ) def test_context_processor_dependencies(self): expected = [ checks.Error( "'django.contrib.auth.context_processors.auth' must be " "enabled in DjangoTemplates (TEMPLATES) if using the default " "auth backend in order to use the admin application.", id='admin.E402', ), checks.Error( "'django.contrib.messages.context_processors.messages' must " "be enabled in DjangoTemplates (TEMPLATES) in order to use " "the admin application.", id='admin.E404', ), checks.Warning( "'django.template.context_processors.request' must be enabled " "in DjangoTemplates (TEMPLATES) in order to use the admin " "navigation sidebar.", id='admin.W411', ) ] self.assertEqual(admin.checks.check_dependencies(), expected) # The first error doesn't happen if # 
'django.contrib.auth.backends.ModelBackend' isn't in # AUTHENTICATION_BACKENDS. with self.settings(AUTHENTICATION_BACKENDS=[]): self.assertEqual(admin.checks.check_dependencies(), expected[1:]) @override_settings( AUTHENTICATION_BACKENDS=['admin_checks.tests.ModelBackendSubclass'], TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.request', 'django.contrib.messages.context_processors.messages', ], }, }], ) def test_context_processor_dependencies_model_backend_subclass(self): self.assertEqual(admin.checks.check_dependencies(), [ checks.Error( "'django.contrib.auth.context_processors.auth' must be " "enabled in DjangoTemplates (TEMPLATES) if using the default " "auth backend in order to use the admin application.", id='admin.E402', ), ]) @override_settings( TEMPLATES=[ { 'BACKEND': 'django.template.backends.dummy.TemplateStrings', 'DIRS': [], 'APP_DIRS': True, }, { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ], ) def test_several_templates_backends(self): self.assertEqual(admin.checks.check_dependencies(), []) @override_settings(MIDDLEWARE=[]) def test_middleware_dependencies(self): errors = admin.checks.check_dependencies() expected = [ checks.Error( "'django.contrib.auth.middleware.AuthenticationMiddleware' " "must be in MIDDLEWARE in order to use the admin application.", id='admin.E408', ), checks.Error( "'django.contrib.messages.middleware.MessageMiddleware' " "must be in MIDDLEWARE in order to use the admin application.", id='admin.E409', ), checks.Error( "'django.contrib.sessions.middleware.SessionMiddleware' " "must be in MIDDLEWARE in order to use the admin application.", id='admin.E410', ), ] self.assertEqual(errors, expected) @override_settings(MIDDLEWARE=[ 'admin_checks.tests.AuthenticationMiddlewareSubclass', 'admin_checks.tests.MessageMiddlewareSubclass', 'admin_checks.tests.SessionMiddlewareSubclass', ]) def test_middleware_subclasses(self): self.assertEqual(admin.checks.check_dependencies(), []) @override_settings(MIDDLEWARE=[ 'django.contrib.does.not.Exist', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', ]) def test_admin_check_ignores_import_error_in_middleware(self): self.assertEqual(admin.checks.check_dependencies(), []) def test_custom_adminsite(self): class CustomAdminSite(admin.AdminSite): pass custom_site = CustomAdminSite() custom_site.register(Song, MyAdmin) try: errors = checks.run_checks() expected = ['error!'] self.assertEqual(errors, expected) finally: custom_site.unregister(Song) def test_allows_checks_relying_on_other_modeladmins(self): class MyBookAdmin(admin.ModelAdmin): def check(self, **kwargs): errors = super().check(**kwargs) author_admin = self.admin_site._registry.get(Author) if author_admin is None: errors.append('AuthorAdmin missing!') return errors class MyAuthorAdmin(admin.ModelAdmin): pass admin.site.register(Book, MyBookAdmin) admin.site.register(Author, MyAuthorAdmin) try: self.assertEqual(admin.site.check(None), []) finally: admin.site.unregister(Book) admin.site.unregister(Author) def test_field_name_not_in_list_display(self): class 
SongAdmin(admin.ModelAdmin): list_editable = ["original_release"] errors = SongAdmin(Song, AdminSite()).check() expected = [ checks.Error( "The value of 'list_editable[0]' refers to 'original_release', " "which is not contained in 'list_display'.", obj=SongAdmin, id='admin.E122', ) ] self.assertEqual(errors, expected) def test_list_editable_not_a_list_or_tuple(self): class SongAdmin(admin.ModelAdmin): list_editable = 'test' self.assertEqual(SongAdmin(Song, AdminSite()).check(), [ checks.Error( "The value of 'list_editable' must be a list or tuple.", obj=SongAdmin, id='admin.E120', ) ]) def test_list_editable_missing_field(self): class SongAdmin(admin.ModelAdmin): list_editable = ('test',) self.assertEqual(SongAdmin(Song, AdminSite()).check(), [ checks.Error( "The value of 'list_editable[0]' refers to 'test', which is " "not an attribute of 'admin_checks.Song'.", obj=SongAdmin, id='admin.E121', ) ]) def test_readonly_and_editable(self): class SongAdmin(admin.ModelAdmin): readonly_fields = ["original_release"] list_display = ["pk", "original_release"] list_editable = ["original_release"] fieldsets = [ (None, { "fields": ["title", "original_release"], }), ] errors = SongAdmin(Song, AdminSite()).check() expected = [ checks.Error( "The value of 'list_editable[0]' refers to 'original_release', " "which is not editable through the admin.", obj=SongAdmin, id='admin.E125', ) ] self.assertEqual(errors, expected) def test_editable(self): class SongAdmin(admin.ModelAdmin): list_display = ["pk", "title"] list_editable = ["title"] fieldsets = [ (None, { "fields": ["title", "original_release"], }), ] errors = SongAdmin(Song, AdminSite()).check() self.assertEqual(errors, []) def test_custom_modelforms_with_fields_fieldsets(self): """ # Regression test for #8027: custom ModelForms with fields/fieldsets """ errors = ValidFields(Song, AdminSite()).check() self.assertEqual(errors, []) def test_custom_get_form_with_fieldsets(self): """ The fieldsets checks are skipped when the ModelAdmin.get_form() method is overridden. """ errors = ValidFormFieldsets(Song, AdminSite()).check() self.assertEqual(errors, []) def test_fieldsets_fields_non_tuple(self): """ The first fieldset's fields must be a list/tuple. """ class NotATupleAdmin(admin.ModelAdmin): list_display = ["pk", "title"] list_editable = ["title"] fieldsets = [ (None, { "fields": "title" # not a tuple }), ] errors = NotATupleAdmin(Song, AdminSite()).check() expected = [ checks.Error( "The value of 'fieldsets[0][1]['fields']' must be a list or tuple.", obj=NotATupleAdmin, id='admin.E008', ) ] self.assertEqual(errors, expected) def test_nonfirst_fieldset(self): """ The second fieldset's fields must be a list/tuple. 
""" class NotATupleAdmin(admin.ModelAdmin): fieldsets = [ (None, { "fields": ("title",) }), ('foo', { "fields": "author" # not a tuple }), ] errors = NotATupleAdmin(Song, AdminSite()).check() expected = [ checks.Error( "The value of 'fieldsets[1][1]['fields']' must be a list or tuple.", obj=NotATupleAdmin, id='admin.E008', ) ] self.assertEqual(errors, expected) def test_exclude_values(self): """ Tests for basic system checks of 'exclude' option values (#12689) """ class ExcludedFields1(admin.ModelAdmin): exclude = 'foo' errors = ExcludedFields1(Book, AdminSite()).check() expected = [ checks.Error( "The value of 'exclude' must be a list or tuple.", obj=ExcludedFields1, id='admin.E014', ) ] self.assertEqual(errors, expected) def test_exclude_duplicate_values(self): class ExcludedFields2(admin.ModelAdmin): exclude = ('name', 'name') errors = ExcludedFields2(Book, AdminSite()).check() expected = [ checks.Error( "The value of 'exclude' contains duplicate field(s).", obj=ExcludedFields2, id='admin.E015', ) ] self.assertEqual(errors, expected) def test_exclude_in_inline(self): class ExcludedFieldsInline(admin.TabularInline): model = Song exclude = 'foo' class ExcludedFieldsAlbumAdmin(admin.ModelAdmin): model = Album inlines = [ExcludedFieldsInline] errors = ExcludedFieldsAlbumAdmin(Album, AdminSite()).check() expected = [ checks.Error( "The value of 'exclude' must be a list or tuple.", obj=ExcludedFieldsInline, id='admin.E014', ) ] self.assertEqual(errors, expected) def test_exclude_inline_model_admin(self): """ Regression test for #9932 - exclude in InlineModelAdmin should not contain the ForeignKey field used in ModelAdmin.model """ class SongInline(admin.StackedInline): model = Song exclude = ['album'] class AlbumAdmin(admin.ModelAdmin): model = Album inlines = [SongInline] errors = AlbumAdmin(Album, AdminSite()).check() expected = [ checks.Error( "Cannot exclude the field 'album', because it is the foreign key " "to the parent model 'admin_checks.Album'.", obj=SongInline, id='admin.E201', ) ] self.assertEqual(errors, expected) def test_valid_generic_inline_model_admin(self): """ Regression test for #22034 - check that generic inlines don't look for normal ForeignKey relations. """ class InfluenceInline(GenericStackedInline): model = Influence class SongAdmin(admin.ModelAdmin): inlines = [InfluenceInline] errors = SongAdmin(Song, AdminSite()).check() self.assertEqual(errors, []) def test_generic_inline_model_admin_non_generic_model(self): """ A model without a GenericForeignKey raises problems if it's included in a GenericInlineModelAdmin definition. """ class BookInline(GenericStackedInline): model = Book class SongAdmin(admin.ModelAdmin): inlines = [BookInline] errors = SongAdmin(Song, AdminSite()).check() expected = [ checks.Error( "'admin_checks.Book' has no GenericForeignKey.", obj=BookInline, id='admin.E301', ) ] self.assertEqual(errors, expected) def test_generic_inline_model_admin_bad_ct_field(self): """ A GenericInlineModelAdmin errors if the ct_field points to a nonexistent field. 
""" class InfluenceInline(GenericStackedInline): model = Influence ct_field = 'nonexistent' class SongAdmin(admin.ModelAdmin): inlines = [InfluenceInline] errors = SongAdmin(Song, AdminSite()).check() expected = [ checks.Error( "'ct_field' references 'nonexistent', which is not a field on 'admin_checks.Influence'.", obj=InfluenceInline, id='admin.E302', ) ] self.assertEqual(errors, expected) def test_generic_inline_model_admin_bad_fk_field(self): """ A GenericInlineModelAdmin errors if the ct_fk_field points to a nonexistent field. """ class InfluenceInline(GenericStackedInline): model = Influence ct_fk_field = 'nonexistent' class SongAdmin(admin.ModelAdmin): inlines = [InfluenceInline] errors = SongAdmin(Song, AdminSite()).check() expected = [ checks.Error( "'ct_fk_field' references 'nonexistent', which is not a field on 'admin_checks.Influence'.", obj=InfluenceInline, id='admin.E303', ) ] self.assertEqual(errors, expected) def test_generic_inline_model_admin_non_gfk_ct_field(self): """ A GenericInlineModelAdmin raises problems if the ct_field points to a field that isn't part of a GenericForeignKey. """ class InfluenceInline(GenericStackedInline): model = Influence ct_field = 'name' class SongAdmin(admin.ModelAdmin): inlines = [InfluenceInline] errors = SongAdmin(Song, AdminSite()).check() expected = [ checks.Error( "'admin_checks.Influence' has no GenericForeignKey using " "content type field 'name' and object ID field 'object_id'.", obj=InfluenceInline, id='admin.E304', ) ] self.assertEqual(errors, expected) def test_generic_inline_model_admin_non_gfk_fk_field(self): """ A GenericInlineModelAdmin raises problems if the ct_fk_field points to a field that isn't part of a GenericForeignKey. """ class InfluenceInline(GenericStackedInline): model = Influence ct_fk_field = 'name' class SongAdmin(admin.ModelAdmin): inlines = [InfluenceInline] errors = SongAdmin(Song, AdminSite()).check() expected = [ checks.Error( "'admin_checks.Influence' has no GenericForeignKey using " "content type field 'content_type' and object ID field 'name'.", obj=InfluenceInline, id='admin.E304', ) ] self.assertEqual(errors, expected) def test_app_label_in_admin_checks(self): class RawIdNonexistentAdmin(admin.ModelAdmin): raw_id_fields = ('nonexistent',) errors = RawIdNonexistentAdmin(Album, AdminSite()).check() expected = [ checks.Error( "The value of 'raw_id_fields[0]' refers to 'nonexistent', " "which is not an attribute of 'admin_checks.Album'.", obj=RawIdNonexistentAdmin, id='admin.E002', ) ] self.assertEqual(errors, expected) def test_fk_exclusion(self): """ Regression test for #11709 - when testing for fk excluding (when exclude is given) make sure fk_name is honored or things blow up when there is more than one fk to the parent model. """ class TwoAlbumFKAndAnEInline(admin.TabularInline): model = TwoAlbumFKAndAnE exclude = ("e",) fk_name = "album1" class MyAdmin(admin.ModelAdmin): inlines = [TwoAlbumFKAndAnEInline] errors = MyAdmin(Album, AdminSite()).check() self.assertEqual(errors, []) def test_inline_self_check(self): class TwoAlbumFKAndAnEInline(admin.TabularInline): model = TwoAlbumFKAndAnE class MyAdmin(admin.ModelAdmin): inlines = [TwoAlbumFKAndAnEInline] errors = MyAdmin(Album, AdminSite()).check() expected = [ checks.Error( "'admin_checks.TwoAlbumFKAndAnE' has more than one ForeignKey " "to 'admin_checks.Album'. 
You must specify a 'fk_name' " "attribute.", obj=TwoAlbumFKAndAnEInline, id='admin.E202', ) ] self.assertEqual(errors, expected) def test_inline_with_specified(self): class TwoAlbumFKAndAnEInline(admin.TabularInline): model = TwoAlbumFKAndAnE fk_name = "album1" class MyAdmin(admin.ModelAdmin): inlines = [TwoAlbumFKAndAnEInline] errors = MyAdmin(Album, AdminSite()).check() self.assertEqual(errors, []) def test_inlines_property(self): class CitiesInline(admin.TabularInline): model = City class StateAdmin(admin.ModelAdmin): @property def inlines(self): return [CitiesInline] errors = StateAdmin(State, AdminSite()).check() self.assertEqual(errors, []) def test_readonly(self): class SongAdmin(admin.ModelAdmin): readonly_fields = ("title",) errors = SongAdmin(Song, AdminSite()).check() self.assertEqual(errors, []) def test_readonly_on_method(self): def my_function(obj): pass class SongAdmin(admin.ModelAdmin): readonly_fields = (my_function,) errors = SongAdmin(Song, AdminSite()).check() self.assertEqual(errors, []) def test_readonly_on_modeladmin(self): class SongAdmin(admin.ModelAdmin): readonly_fields = ("readonly_method_on_modeladmin",) def readonly_method_on_modeladmin(self, obj): pass errors = SongAdmin(Song, AdminSite()).check() self.assertEqual(errors, []) def test_readonly_dynamic_attribute_on_modeladmin(self): class SongAdmin(admin.ModelAdmin): readonly_fields = ("dynamic_method",) def __getattr__(self, item): if item == "dynamic_method": def method(obj): pass return method raise AttributeError errors = SongAdmin(Song, AdminSite()).check() self.assertEqual(errors, []) def test_readonly_method_on_model(self): class SongAdmin(admin.ModelAdmin): readonly_fields = ("readonly_method_on_model",) errors = SongAdmin(Song, AdminSite()).check() self.assertEqual(errors, []) def test_nonexistent_field(self): class SongAdmin(admin.ModelAdmin): readonly_fields = ("title", "nonexistent") errors = SongAdmin(Song, AdminSite()).check() expected = [ checks.Error( "The value of 'readonly_fields[1]' is not a callable, an attribute " "of 'SongAdmin', or an attribute of 'admin_checks.Song'.", obj=SongAdmin, id='admin.E035', ) ] self.assertEqual(errors, expected) def test_nonexistent_field_on_inline(self): class CityInline(admin.TabularInline): model = City readonly_fields = ['i_dont_exist'] # Missing attribute errors = CityInline(State, AdminSite()).check() expected = [ checks.Error( "The value of 'readonly_fields[0]' is not a callable, an attribute " "of 'CityInline', or an attribute of 'admin_checks.City'.", obj=CityInline, id='admin.E035', ) ] self.assertEqual(errors, expected) def test_readonly_fields_not_list_or_tuple(self): class SongAdmin(admin.ModelAdmin): readonly_fields = 'test' self.assertEqual(SongAdmin(Song, AdminSite()).check(), [ checks.Error( "The value of 'readonly_fields' must be a list or tuple.", obj=SongAdmin, id='admin.E034', ) ]) def test_extra(self): class SongAdmin(admin.ModelAdmin): def awesome_song(self, instance): if instance.title == "Born to Run": return "Best Ever!" return "Status unknown." errors = SongAdmin(Song, AdminSite()).check() self.assertEqual(errors, []) def test_readonly_lambda(self): class SongAdmin(admin.ModelAdmin): readonly_fields = (lambda obj: "test",) errors = SongAdmin(Song, AdminSite()).check() self.assertEqual(errors, []) def test_graceful_m2m_fail(self): """ Regression test for #12203/#12237 - Fail more gracefully when a M2M field that specifies the 'through' option is included in the 'fields' or the 'fieldsets' ModelAdmin options. 
""" class BookAdmin(admin.ModelAdmin): fields = ['authors'] errors = BookAdmin(Book, AdminSite()).check() expected = [ checks.Error( "The value of 'fields' cannot include the ManyToManyField 'authors', " "because that field manually specifies a relationship model.", obj=BookAdmin, id='admin.E013', ) ] self.assertEqual(errors, expected) def test_cannot_include_through(self): class FieldsetBookAdmin(admin.ModelAdmin): fieldsets = ( ('Header 1', {'fields': ('name',)}), ('Header 2', {'fields': ('authors',)}), ) errors = FieldsetBookAdmin(Book, AdminSite()).check() expected = [ checks.Error( "The value of 'fieldsets[1][1][\"fields\"]' cannot include the ManyToManyField " "'authors', because that field manually specifies a relationship model.", obj=FieldsetBookAdmin, id='admin.E013', ) ] self.assertEqual(errors, expected) def test_nested_fields(self): class NestedFieldsAdmin(admin.ModelAdmin): fields = ('price', ('name', 'subtitle')) errors = NestedFieldsAdmin(Book, AdminSite()).check() self.assertEqual(errors, []) def test_nested_fieldsets(self): class NestedFieldsetAdmin(admin.ModelAdmin): fieldsets = ( ('Main', {'fields': ('price', ('name', 'subtitle'))}), ) errors = NestedFieldsetAdmin(Book, AdminSite()).check() self.assertEqual(errors, []) def test_explicit_through_override(self): """ Regression test for #12209 -- If the explicitly provided through model is specified as a string, the admin should still be able use Model.m2m_field.through """ class AuthorsInline(admin.TabularInline): model = Book.authors.through class BookAdmin(admin.ModelAdmin): inlines = [AuthorsInline] errors = BookAdmin(Book, AdminSite()).check() self.assertEqual(errors, []) def test_non_model_fields(self): """ Regression for ensuring ModelAdmin.fields can contain non-model fields that broke with r11737 """ class SongForm(forms.ModelForm): extra_data = forms.CharField() class FieldsOnFormOnlyAdmin(admin.ModelAdmin): form = SongForm fields = ['title', 'extra_data'] errors = FieldsOnFormOnlyAdmin(Song, AdminSite()).check() self.assertEqual(errors, []) def test_non_model_first_field(self): """ Regression for ensuring ModelAdmin.field can handle first elem being a non-model field (test fix for UnboundLocalError introduced with r16225). """ class SongForm(forms.ModelForm): extra_data = forms.CharField() class Meta: model = Song fields = '__all__' class FieldsOnFormOnlyAdmin(admin.ModelAdmin): form = SongForm fields = ['extra_data', 'title'] errors = FieldsOnFormOnlyAdmin(Song, AdminSite()).check() self.assertEqual(errors, []) def test_check_sublists_for_duplicates(self): class MyModelAdmin(admin.ModelAdmin): fields = ['state', ['state']] errors = MyModelAdmin(Song, AdminSite()).check() expected = [ checks.Error( "The value of 'fields' contains duplicate field(s).", obj=MyModelAdmin, id='admin.E006' ) ] self.assertEqual(errors, expected) def test_check_fieldset_sublists_for_duplicates(self): class MyModelAdmin(admin.ModelAdmin): fieldsets = [ (None, { 'fields': ['title', 'album', ('title', 'album')] }), ] errors = MyModelAdmin(Song, AdminSite()).check() expected = [ checks.Error( "There are duplicate field(s) in 'fieldsets[0][1]'.", obj=MyModelAdmin, id='admin.E012' ) ] self.assertEqual(errors, expected) def test_list_filter_works_on_through_field_even_when_apps_not_ready(self): """ Ensure list_filter can access reverse fields even when the app registry is not ready; refs #24146. """ class BookAdminWithListFilter(admin.ModelAdmin): list_filter = ['authorsbooks__featured'] # Temporarily pretending apps are not ready yet. 
This issue can happen # if the value of 'list_filter' refers to a 'through__field'. Book._meta.apps.ready = False try: errors = BookAdminWithListFilter(Book, AdminSite()).check() self.assertEqual(errors, []) finally: Book._meta.apps.ready = True
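# Illustrative sketch (not part of the original module): the core pattern the
# tests above exercise. A ModelAdmin is instantiated directly with its model
# and an AdminSite, and check() returns a list of checks.Error objects that
# is empty when the configuration is valid. ExampleSongAdmin and the helper
# below are hypothetical names, never registered with any site.
class ExampleSongAdmin(admin.ModelAdmin):
    list_display = ['pk', 'title']
    readonly_fields = ['original_release']

def _example_run_admin_checks():
    """Hypothetical helper: run the system checks for the class above."""
    errors = ExampleSongAdmin(Song, AdminSite()).check()
    assert errors == [], errors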
eadd429da97b2fcb31478fb3cef20e99307287d98237c56276d4a954565a34c5
import json import mimetypes import os import sys from copy import copy from functools import partial from http import HTTPStatus from importlib import import_module from io import BytesIO from urllib.parse import unquote_to_bytes, urljoin, urlparse, urlsplit from asgiref.sync import sync_to_async from django.conf import settings from django.core.handlers.asgi import ASGIRequest from django.core.handlers.base import BaseHandler from django.core.handlers.wsgi import WSGIRequest from django.core.serializers.json import DjangoJSONEncoder from django.core.signals import ( got_request_exception, request_finished, request_started, ) from django.db import close_old_connections from django.http import HttpRequest, QueryDict, SimpleCookie from django.test import signals from django.test.utils import ContextList from django.urls import resolve from django.utils.encoding import force_bytes from django.utils.functional import SimpleLazyObject from django.utils.http import urlencode from django.utils.itercompat import is_iterable from django.utils.regex_helper import _lazy_re_compile __all__ = ('Client', 'RedirectCycleError', 'RequestFactory', 'encode_file', 'encode_multipart') BOUNDARY = 'BoUnDaRyStRiNg' MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY CONTENT_TYPE_RE = _lazy_re_compile(r'.*; charset=([\w\d-]+);?') # Structured suffix spec: https://tools.ietf.org/html/rfc6838#section-4.2.8 JSON_CONTENT_TYPE_RE = _lazy_re_compile(r'^application\/(.+\+)?json') class RedirectCycleError(Exception): """The test client has been asked to follow a redirect loop.""" def __init__(self, message, last_response): super().__init__(message) self.last_response = last_response self.redirect_chain = last_response.redirect_chain class FakePayload: """ A wrapper around BytesIO that restricts what can be read since data from the network can't be sought and cannot be read outside of its content length. This makes sure that views can't do anything under the test client that wouldn't work in real life. """ def __init__(self, content=None): self.__content = BytesIO() self.__len = 0 self.read_started = False if content is not None: self.write(content) def __len__(self): return self.__len def read(self, num_bytes=None): if not self.read_started: self.__content.seek(0) self.read_started = True if num_bytes is None: num_bytes = self.__len or 0 assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data." content = self.__content.read(num_bytes) self.__len -= num_bytes return content def write(self, content): if self.read_started: raise ValueError("Unable to write a payload after it's been read") content = force_bytes(content) self.__content.write(content) self.__len += len(content) def closing_iterator_wrapper(iterable, close): try: yield from iterable finally: request_finished.disconnect(close_old_connections) close() # will fire request_finished request_finished.connect(close_old_connections) def conditional_content_removal(request, response): """ Simulate the behavior of most Web servers by removing the content of responses for HEAD requests, 1xx, 204, and 304 responses. Ensure compliance with RFC 7230, section 3.3.3. 
""" if 100 <= response.status_code < 200 or response.status_code in (204, 304): if response.streaming: response.streaming_content = [] else: response.content = b'' if request.method == 'HEAD': if response.streaming: response.streaming_content = [] else: response.content = b'' return response class ClientHandler(BaseHandler): """ A HTTP Handler that can be used for testing purposes. Use the WSGI interface to compose requests, but return the raw HttpResponse object with the originating WSGIRequest attached to its ``wsgi_request`` attribute. """ def __init__(self, enforce_csrf_checks=True, *args, **kwargs): self.enforce_csrf_checks = enforce_csrf_checks super().__init__(*args, **kwargs) def __call__(self, environ): # Set up middleware if needed. We couldn't do this earlier, because # settings weren't available. if self._middleware_chain is None: self.load_middleware() request_started.disconnect(close_old_connections) request_started.send(sender=self.__class__, environ=environ) request_started.connect(close_old_connections) request = WSGIRequest(environ) # sneaky little hack so that we can easily get round # CsrfViewMiddleware. This makes life easier, and is probably # required for backwards compatibility with external tests against # admin views. request._dont_enforce_csrf_checks = not self.enforce_csrf_checks # Request goes through middleware. response = self.get_response(request) # Simulate behaviors of most Web servers. conditional_content_removal(request, response) # Attach the originating request to the response so that it could be # later retrieved. response.wsgi_request = request # Emulate a WSGI server by calling the close method on completion. if response.streaming: response.streaming_content = closing_iterator_wrapper( response.streaming_content, response.close) else: request_finished.disconnect(close_old_connections) response.close() # will fire request_finished request_finished.connect(close_old_connections) return response class AsyncClientHandler(BaseHandler): """An async version of ClientHandler.""" def __init__(self, enforce_csrf_checks=True, *args, **kwargs): self.enforce_csrf_checks = enforce_csrf_checks super().__init__(*args, **kwargs) async def __call__(self, scope): # Set up middleware if needed. We couldn't do this earlier, because # settings weren't available. if self._middleware_chain is None: self.load_middleware(is_async=True) # Extract body file from the scope, if provided. if '_body_file' in scope: body_file = scope.pop('_body_file') else: body_file = FakePayload('') request_started.disconnect(close_old_connections) await sync_to_async(request_started.send)(sender=self.__class__, scope=scope) request_started.connect(close_old_connections) request = ASGIRequest(scope, body_file) # Sneaky little hack so that we can easily get round # CsrfViewMiddleware. This makes life easier, and is probably required # for backwards compatibility with external tests against admin views. request._dont_enforce_csrf_checks = not self.enforce_csrf_checks # Request goes through middleware. response = await self.get_response_async(request) # Simulate behaviors of most Web servers. conditional_content_removal(request, response) # Attach the originating ASGI request to the response so that it could # be later retrieved. response.asgi_request = request # Emulate a server by calling the close method on completion. 
if response.streaming: response.streaming_content = await sync_to_async(closing_iterator_wrapper)( response.streaming_content, response.close, ) else: request_finished.disconnect(close_old_connections) # Will fire request_finished. await sync_to_async(response.close)() request_finished.connect(close_old_connections) return response def store_rendered_templates(store, signal, sender, template, context, **kwargs): """ Store templates and contexts that are rendered. The context is copied so that it is an accurate representation at the time of rendering. """ store.setdefault('templates', []).append(template) if 'context' not in store: store['context'] = ContextList() store['context'].append(copy(context)) def encode_multipart(boundary, data): """ Encode multipart POST data from a dictionary of form values. The key will be used as the form data name; the value will be transmitted as content. If the value is a file, the contents of the file will be sent as an application/octet-stream; otherwise, str(value) will be sent. """ lines = [] def to_bytes(s): return force_bytes(s, settings.DEFAULT_CHARSET) # Not by any means perfect, but good enough for our purposes. def is_file(thing): return hasattr(thing, "read") and callable(thing.read) # Each bit of the multipart form data could be either a form value or a # file, or a *list* of form values and/or files. Remember that HTTP field # names can be duplicated! for (key, value) in data.items(): if value is None: raise TypeError( "Cannot encode None for key '%s' as POST data. Did you mean " "to pass an empty string or omit the value?" % key ) elif is_file(value): lines.extend(encode_file(boundary, key, value)) elif not isinstance(value, str) and is_iterable(value): for item in value: if is_file(item): lines.extend(encode_file(boundary, key, item)) else: lines.extend(to_bytes(val) for val in [ '--%s' % boundary, 'Content-Disposition: form-data; name="%s"' % key, '', item ]) else: lines.extend(to_bytes(val) for val in [ '--%s' % boundary, 'Content-Disposition: form-data; name="%s"' % key, '', value ]) lines.extend([ to_bytes('--%s--' % boundary), b'', ]) return b'\r\n'.join(lines) def encode_file(boundary, key, file): def to_bytes(s): return force_bytes(s, settings.DEFAULT_CHARSET) # file.name might not be a string. For example, it's an int for # tempfile.TemporaryFile(). file_has_string_name = hasattr(file, 'name') and isinstance(file.name, str) filename = os.path.basename(file.name) if file_has_string_name else '' if hasattr(file, 'content_type'): content_type = file.content_type elif filename: content_type = mimetypes.guess_type(filename)[0] else: content_type = None if content_type is None: content_type = 'application/octet-stream' filename = filename or key return [ to_bytes('--%s' % boundary), to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename)), to_bytes('Content-Type: %s' % content_type), b'', to_bytes(file.read()) ] class RequestFactory: """ Class that lets you create mock Request objects for use in testing. Usage: rf = RequestFactory() get_request = rf.get('/hello/') post_request = rf.post('/submit/', {'foo': 'bar'}) Once you have a request object you can pass it to any view function, just as if that view had been hooked up using a URLconf. """ def __init__(self, *, json_encoder=DjangoJSONEncoder, **defaults): self.json_encoder = json_encoder self.defaults = defaults self.cookies = SimpleCookie() self.errors = BytesIO() def _base_environ(self, **request): """ The base environment for a request. 
""" # This is a minimal valid WSGI environ dictionary, plus: # - HTTP_COOKIE: for cookie support, # - REMOTE_ADDR: often useful, see #8551. # See https://www.python.org/dev/peps/pep-3333/#environ-variables return { 'HTTP_COOKIE': '; '.join(sorted( '%s=%s' % (morsel.key, morsel.coded_value) for morsel in self.cookies.values() )), 'PATH_INFO': '/', 'REMOTE_ADDR': '127.0.0.1', 'REQUEST_METHOD': 'GET', 'SCRIPT_NAME': '', 'SERVER_NAME': 'testserver', 'SERVER_PORT': '80', 'SERVER_PROTOCOL': 'HTTP/1.1', 'wsgi.version': (1, 0), 'wsgi.url_scheme': 'http', 'wsgi.input': FakePayload(b''), 'wsgi.errors': self.errors, 'wsgi.multiprocess': True, 'wsgi.multithread': False, 'wsgi.run_once': False, **self.defaults, **request, } def request(self, **request): "Construct a generic request object." return WSGIRequest(self._base_environ(**request)) def _encode_data(self, data, content_type): if content_type is MULTIPART_CONTENT: return encode_multipart(BOUNDARY, data) else: # Encode the content so that the byte representation is correct. match = CONTENT_TYPE_RE.match(content_type) if match: charset = match[1] else: charset = settings.DEFAULT_CHARSET return force_bytes(data, encoding=charset) def _encode_json(self, data, content_type): """ Return encoded JSON if data is a dict, list, or tuple and content_type is application/json. """ should_encode = JSON_CONTENT_TYPE_RE.match(content_type) and isinstance(data, (dict, list, tuple)) return json.dumps(data, cls=self.json_encoder) if should_encode else data def _get_path(self, parsed): path = parsed.path # If there are parameters, add them if parsed.params: path += ";" + parsed.params path = unquote_to_bytes(path) # Replace the behavior where non-ASCII values in the WSGI environ are # arbitrarily decoded with ISO-8859-1. # Refs comment in `get_bytes_from_wsgi()`. return path.decode('iso-8859-1') def get(self, path, data=None, secure=False, **extra): """Construct a GET request.""" data = {} if data is None else data return self.generic('GET', path, secure=secure, **{ 'QUERY_STRING': urlencode(data, doseq=True), **extra, }) def post(self, path, data=None, content_type=MULTIPART_CONTENT, secure=False, **extra): """Construct a POST request.""" data = self._encode_json({} if data is None else data, content_type) post_data = self._encode_data(data, content_type) return self.generic('POST', path, post_data, content_type, secure=secure, **extra) def head(self, path, data=None, secure=False, **extra): """Construct a HEAD request.""" data = {} if data is None else data return self.generic('HEAD', path, secure=secure, **{ 'QUERY_STRING': urlencode(data, doseq=True), **extra, }) def trace(self, path, secure=False, **extra): """Construct a TRACE request.""" return self.generic('TRACE', path, secure=secure, **extra) def options(self, path, data='', content_type='application/octet-stream', secure=False, **extra): "Construct an OPTIONS request." 
return self.generic('OPTIONS', path, data, content_type, secure=secure, **extra) def put(self, path, data='', content_type='application/octet-stream', secure=False, **extra): """Construct a PUT request.""" data = self._encode_json(data, content_type) return self.generic('PUT', path, data, content_type, secure=secure, **extra) def patch(self, path, data='', content_type='application/octet-stream', secure=False, **extra): """Construct a PATCH request.""" data = self._encode_json(data, content_type) return self.generic('PATCH', path, data, content_type, secure=secure, **extra) def delete(self, path, data='', content_type='application/octet-stream', secure=False, **extra): """Construct a DELETE request.""" data = self._encode_json(data, content_type) return self.generic('DELETE', path, data, content_type, secure=secure, **extra) def generic(self, method, path, data='', content_type='application/octet-stream', secure=False, **extra): """Construct an arbitrary HTTP request.""" parsed = urlparse(str(path)) # path can be lazy data = force_bytes(data, settings.DEFAULT_CHARSET) r = { 'PATH_INFO': self._get_path(parsed), 'REQUEST_METHOD': method, 'SERVER_PORT': '443' if secure else '80', 'wsgi.url_scheme': 'https' if secure else 'http', } if data: r.update({ 'CONTENT_LENGTH': str(len(data)), 'CONTENT_TYPE': content_type, 'wsgi.input': FakePayload(data), }) r.update(extra) # If QUERY_STRING is absent or empty, we want to extract it from the URL. if not r.get('QUERY_STRING'): # WSGI requires latin-1 encoded strings. See get_path_info(). query_string = parsed[4].encode().decode('iso-8859-1') r['QUERY_STRING'] = query_string return self.request(**r) class AsyncRequestFactory(RequestFactory): """ Class that lets you create mock ASGI-like Request objects for use in testing. Usage: rf = AsyncRequestFactory() get_request = await rf.get('/hello/') post_request = await rf.post('/submit/', {'foo': 'bar'}) Once you have a request object you can pass it to any view function, including synchronous ones. The reason we have a separate class here is: a) this makes ASGIRequest subclasses, and b) AsyncTestClient can subclass it. """ def _base_scope(self, **request): """The base scope for a request.""" # This is a minimal valid ASGI scope, plus: # - headers['cookie'] for cookie support, # - 'client' often useful, see #8551. scope = { 'asgi': {'version': '3.0'}, 'type': 'http', 'http_version': '1.1', 'client': ['127.0.0.1', 0], 'server': ('testserver', '80'), 'scheme': 'http', 'method': 'GET', 'headers': [], **self.defaults, **request, } scope['headers'].append(( b'cookie', b'; '.join(sorted( ('%s=%s' % (morsel.key, morsel.coded_value)).encode('ascii') for morsel in self.cookies.values() )), )) return scope def request(self, **request): """Construct a generic request object.""" # This is synchronous, which means all methods on this class are. # AsyncClient, however, has an async request function, which makes all # its methods async. if '_body_file' in request: body_file = request.pop('_body_file') else: body_file = FakePayload('') return ASGIRequest(self._base_scope(**request), body_file) def generic( self, method, path, data='', content_type='application/octet-stream', secure=False, **extra, ): """Construct an arbitrary HTTP request.""" parsed = urlparse(str(path)) # path can be lazy. 
data = force_bytes(data, settings.DEFAULT_CHARSET) s = { 'method': method, 'path': self._get_path(parsed), 'server': ('127.0.0.1', '443' if secure else '80'), 'scheme': 'https' if secure else 'http', 'headers': [(b'host', b'testserver')], } if data: s['headers'].extend([ (b'content-length', str(len(data)).encode('ascii')), (b'content-type', content_type.encode('ascii')), ]) s['_body_file'] = FakePayload(data) s.update(extra) # If QUERY_STRING is absent or empty, we want to extract it from the # URL. if not s.get('query_string'): s['query_string'] = parsed[4] return self.request(**s) class ClientMixin: """ Mixin with common methods between Client and AsyncClient. """ def store_exc_info(self, **kwargs): """Store exceptions when they are generated by a view.""" self.exc_info = sys.exc_info() def check_exception(self, response): """ Look for a signaled exception, clear the current context exception data, re-raise the signaled exception, and clear the signaled exception from the local cache. """ response.exc_info = self.exc_info if self.exc_info: _, exc_value, _ = self.exc_info self.exc_info = None if self.raise_request_exception: raise exc_value @property def session(self): """Return the current session variables.""" engine = import_module(settings.SESSION_ENGINE) cookie = self.cookies.get(settings.SESSION_COOKIE_NAME) if cookie: return engine.SessionStore(cookie.value) session = engine.SessionStore() session.save() self.cookies[settings.SESSION_COOKIE_NAME] = session.session_key return session def login(self, **credentials): """ Set the Factory to appear as if it has successfully logged into a site. Return True if login is possible or False if the provided credentials are incorrect. """ from django.contrib.auth import authenticate user = authenticate(**credentials) if user: self._login(user) return True return False def force_login(self, user, backend=None): def get_backend(): from django.contrib.auth import load_backend for backend_path in settings.AUTHENTICATION_BACKENDS: backend = load_backend(backend_path) if hasattr(backend, 'get_user'): return backend_path if backend is None: backend = get_backend() user.backend = backend self._login(user, backend) def _login(self, user, backend=None): from django.contrib.auth import login # Create a fake request to store login details. request = HttpRequest() if self.session: request.session = self.session else: engine = import_module(settings.SESSION_ENGINE) request.session = engine.SessionStore() login(request, user, backend) # Save the session values. request.session.save() # Set the cookie to represent the session. 
session_cookie = settings.SESSION_COOKIE_NAME self.cookies[session_cookie] = request.session.session_key cookie_data = { 'max-age': None, 'path': '/', 'domain': settings.SESSION_COOKIE_DOMAIN, 'secure': settings.SESSION_COOKIE_SECURE or None, 'expires': None, } self.cookies[session_cookie].update(cookie_data) def logout(self): """Log out the user by removing the cookies and session object.""" from django.contrib.auth import get_user, logout request = HttpRequest() if self.session: request.session = self.session request.user = get_user(request) else: engine = import_module(settings.SESSION_ENGINE) request.session = engine.SessionStore() logout(request) self.cookies = SimpleCookie() def _parse_json(self, response, **extra): if not hasattr(response, '_json'): if not JSON_CONTENT_TYPE_RE.match(response.get('Content-Type')): raise ValueError( 'Content-Type header is "%s", not "application/json"' % response.get('Content-Type') ) response._json = json.loads(response.content.decode(response.charset), **extra) return response._json class Client(ClientMixin, RequestFactory): """ A class that can act as a client for testing purposes. It allows the user to compose GET and POST requests, and obtain the response that the server gave to those requests. The server Response objects are annotated with the details of the contexts and templates that were rendered during the process of serving the request. Client objects are stateful - they will retain cookie (and thus session) details for the lifetime of the Client instance. This is not intended as a replacement for Twill/Selenium or the like - it is here to allow testing against the contexts and templates produced by a view, rather than the HTML rendered to the end-user. """ def __init__(self, enforce_csrf_checks=False, raise_request_exception=True, **defaults): super().__init__(**defaults) self.handler = ClientHandler(enforce_csrf_checks) self.raise_request_exception = raise_request_exception self.exc_info = None self.extra = None def request(self, **request): """ The master request method. Compose the environment dictionary and pass to the handler, return the result of the handler. Assume defaults for the query environment, which can be overridden using the arguments to the request. """ environ = self._base_environ(**request) # Curry a data dictionary into an instance of the template renderer # callback function. data = {} on_template_render = partial(store_rendered_templates, data) signal_uid = "template-render-%s" % id(request) signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid) # Capture exceptions created by the handler. exception_uid = "request-exception-%s" % id(request) got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid) try: response = self.handler(environ) finally: signals.template_rendered.disconnect(dispatch_uid=signal_uid) got_request_exception.disconnect(dispatch_uid=exception_uid) # Check for signaled exceptions. self.check_exception(response) # Save the client and request that stimulated the response. response.client = self response.request = request # Add any rendered template detail to the response. response.templates = data.get('templates', []) response.context = data.get('context') response.json = partial(self._parse_json, response) # Attach the ResolverMatch instance to the response. response.resolver_match = SimpleLazyObject(lambda: resolve(request['PATH_INFO'])) # Flatten a single context. 
Not really necessary anymore thanks to the # __getattr__ flattening in ContextList, but has some edge case # backwards compatibility implications. if response.context and len(response.context) == 1: response.context = response.context[0] # Update persistent cookie data. if response.cookies: self.cookies.update(response.cookies) return response def get(self, path, data=None, follow=False, secure=False, **extra): """Request a response from the server using GET.""" self.extra = extra response = super().get(path, data=data, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, **extra) return response def post(self, path, data=None, content_type=MULTIPART_CONTENT, follow=False, secure=False, **extra): """Request a response from the server using POST.""" self.extra = extra response = super().post(path, data=data, content_type=content_type, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, content_type=content_type, **extra) return response def head(self, path, data=None, follow=False, secure=False, **extra): """Request a response from the server using HEAD.""" self.extra = extra response = super().head(path, data=data, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, **extra) return response def options(self, path, data='', content_type='application/octet-stream', follow=False, secure=False, **extra): """Request a response from the server using OPTIONS.""" self.extra = extra response = super().options(path, data=data, content_type=content_type, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, content_type=content_type, **extra) return response def put(self, path, data='', content_type='application/octet-stream', follow=False, secure=False, **extra): """Send a resource to the server using PUT.""" self.extra = extra response = super().put(path, data=data, content_type=content_type, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, content_type=content_type, **extra) return response def patch(self, path, data='', content_type='application/octet-stream', follow=False, secure=False, **extra): """Send a resource to the server using PATCH.""" self.extra = extra response = super().patch(path, data=data, content_type=content_type, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, content_type=content_type, **extra) return response def delete(self, path, data='', content_type='application/octet-stream', follow=False, secure=False, **extra): """Send a DELETE request to the server.""" self.extra = extra response = super().delete(path, data=data, content_type=content_type, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, content_type=content_type, **extra) return response def trace(self, path, data='', follow=False, secure=False, **extra): """Send a TRACE request to the server.""" self.extra = extra response = super().trace(path, data=data, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, **extra) return response def _handle_redirects(self, response, data='', content_type='', **extra): """ Follow any redirects by requesting responses from the server using GET. 
""" response.redirect_chain = [] redirect_status_codes = ( HTTPStatus.MOVED_PERMANENTLY, HTTPStatus.FOUND, HTTPStatus.SEE_OTHER, HTTPStatus.TEMPORARY_REDIRECT, HTTPStatus.PERMANENT_REDIRECT, ) while response.status_code in redirect_status_codes: response_url = response.url redirect_chain = response.redirect_chain redirect_chain.append((response_url, response.status_code)) url = urlsplit(response_url) if url.scheme: extra['wsgi.url_scheme'] = url.scheme if url.hostname: extra['SERVER_NAME'] = url.hostname if url.port: extra['SERVER_PORT'] = str(url.port) # Prepend the request path to handle relative path redirects path = url.path if not path.startswith('/'): path = urljoin(response.request['PATH_INFO'], path) if response.status_code in (HTTPStatus.TEMPORARY_REDIRECT, HTTPStatus.PERMANENT_REDIRECT): # Preserve request method and query string (if needed) # post-redirect for 307/308 responses. request_method = response.request['REQUEST_METHOD'].lower() if request_method not in ('get', 'head'): extra['QUERY_STRING'] = url.query request_method = getattr(self, request_method) else: request_method = self.get data = QueryDict(url.query) content_type = None response = request_method(path, data=data, content_type=content_type, follow=False, **extra) response.redirect_chain = redirect_chain if redirect_chain[-1] in redirect_chain[:-1]: # Check that we're not redirecting to somewhere we've already # been to, to prevent loops. raise RedirectCycleError("Redirect loop detected.", last_response=response) if len(redirect_chain) > 20: # Such a lengthy chain likely also means a loop, but one with # a growing path, changing view, or changing query argument; # 20 is the value of "network.http.redirection-limit" from Firefox. raise RedirectCycleError("Too many redirects.", last_response=response) return response class AsyncClient(ClientMixin, AsyncRequestFactory): """ An async version of Client that creates ASGIRequests and calls through an async request path. Does not currently support "follow" on its methods. """ def __init__(self, enforce_csrf_checks=False, raise_request_exception=True, **defaults): super().__init__(**defaults) self.handler = AsyncClientHandler(enforce_csrf_checks) self.raise_request_exception = raise_request_exception self.exc_info = None self.extra = None async def request(self, **request): """ The master request method. Compose the scope dictionary and pass to the handler, return the result of the handler. Assume defaults for the query environment, which can be overridden using the arguments to the request. """ if 'follow' in request: raise NotImplementedError( 'AsyncClient request methods do not accept the follow ' 'parameter.' ) scope = self._base_scope(**request) # Curry a data dictionary into an instance of the template renderer # callback function. data = {} on_template_render = partial(store_rendered_templates, data) signal_uid = 'template-render-%s' % id(request) signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid) # Capture exceptions created by the handler. exception_uid = 'request-exception-%s' % id(request) got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid) try: response = await self.handler(scope) finally: signals.template_rendered.disconnect(dispatch_uid=signal_uid) got_request_exception.disconnect(dispatch_uid=exception_uid) # Check for signaled exceptions. self.check_exception(response) # Save the client and request that stimulated the response. 
response.client = self response.request = request # Add any rendered template detail to the response. response.templates = data.get('templates', []) response.context = data.get('context') response.json = partial(self._parse_json, response) # Attach the ResolverMatch instance to the response. response.resolver_match = SimpleLazyObject(lambda: resolve(request['path'])) # Flatten a single context. Not really necessary anymore thanks to the # __getattr__ flattening in ContextList, but has some edge case # backwards compatibility implications. if response.context and len(response.context) == 1: response.context = response.context[0] # Update persistent cookie data. if response.cookies: self.cookies.update(response.cookies) return response
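

# A minimal usage sketch (illustrative only; not part of this module). It
# assumes a configured Django project, an existing user, and hypothetical
# URLs '/accounts/profile/' and '/api/ping/'. Client keeps cookies between
# calls, so a login is visible to later requests, and follow=True records
# every redirect hop on the response.
def _example_client_session():
    from django.test import Client

    client = Client()
    client.login(username='jane', password='secret')  # hypothetical credentials
    response = client.get('/accounts/profile/', follow=True)
    # Each hop is recorded as (url, status_code) when follow=True.
    print(response.redirect_chain, response.status_code)
    # response.json() parses and caches the body if the Content-Type is JSON.
    ping = client.get('/api/ping/')
    if ping['Content-Type'] == 'application/json':
        print(ping.json())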
import html.entities import re import unicodedata import warnings from gzip import GzipFile from io import BytesIO from django.utils.deprecation import RemovedInDjango40Warning from django.utils.functional import SimpleLazyObject, keep_lazy_text, lazy from django.utils.regex_helper import _lazy_re_compile from django.utils.translation import gettext as _, gettext_lazy, pgettext @keep_lazy_text def capfirst(x): """Capitalize the first letter of a string.""" return x and str(x)[0].upper() + str(x)[1:] # Set up regular expressions re_words = _lazy_re_compile(r'<[^>]+?>|([^<>\s]+)', re.S) re_chars = _lazy_re_compile(r'<[^>]+?>|(.)', re.S) re_tag = _lazy_re_compile(r'<(/)?(\S+?)(?:(\s*/)|\s.*?)?>', re.S) re_newlines = _lazy_re_compile(r'\r\n|\r') # Used in normalize_newlines re_camel_case = _lazy_re_compile(r'(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))') @keep_lazy_text def wrap(text, width): """ A word-wrap function that preserves existing line breaks. Expects that existing line breaks are posix newlines. Preserve all white space except added line breaks consume the space on which they break the line. Don't wrap long words, thus the output text may have lines longer than ``width``. """ def _generator(): for line in text.splitlines(True): # True keeps trailing linebreaks max_width = min((line.endswith('\n') and width + 1 or width), width) while len(line) > max_width: space = line[:max_width + 1].rfind(' ') + 1 if space == 0: space = line.find(' ') + 1 if space == 0: yield line line = '' break yield '%s\n' % line[:space - 1] line = line[space:] max_width = min((line.endswith('\n') and width + 1 or width), width) if line: yield line return ''.join(_generator()) class Truncator(SimpleLazyObject): """ An object used to truncate text, either by characters or words. """ def __init__(self, text): super().__init__(lambda: str(text)) def add_truncation_text(self, text, truncate=None): if truncate is None: truncate = pgettext( 'String to return when truncating text', '%(truncated_text)s…') if '%(truncated_text)s' in truncate: return truncate % {'truncated_text': text} # The truncation text didn't contain the %(truncated_text)s string # replacement argument so just append it to the text. if text.endswith(truncate): # But don't append the truncation text if the current text already # ends in this. return text return '%s%s' % (text, truncate) def chars(self, num, truncate=None, html=False): """ Return the text truncated to be no longer than the specified number of characters. `truncate` specifies what should be used to notify that the string has been truncated, defaulting to a translatable string of an ellipsis. 
""" self._setup() length = int(num) text = unicodedata.normalize('NFC', self._wrapped) # Calculate the length to truncate to (max length - end_text length) truncate_len = length for char in self.add_truncation_text('', truncate): if not unicodedata.combining(char): truncate_len -= 1 if truncate_len == 0: break if html: return self._truncate_html(length, truncate, text, truncate_len, False) return self._text_chars(length, truncate, text, truncate_len) def _text_chars(self, length, truncate, text, truncate_len): """Truncate a string after a certain number of chars.""" s_len = 0 end_index = None for i, char in enumerate(text): if unicodedata.combining(char): # Don't consider combining characters # as adding to the string length continue s_len += 1 if end_index is None and s_len > truncate_len: end_index = i if s_len > length: # Return the truncated string return self.add_truncation_text(text[:end_index or 0], truncate) # Return the original string since no truncation was necessary return text def words(self, num, truncate=None, html=False): """ Truncate a string after a certain number of words. `truncate` specifies what should be used to notify that the string has been truncated, defaulting to ellipsis. """ self._setup() length = int(num) if html: return self._truncate_html(length, truncate, self._wrapped, length, True) return self._text_words(length, truncate) def _text_words(self, length, truncate): """ Truncate a string after a certain number of words. Strip newlines in the string. """ words = self._wrapped.split() if len(words) > length: words = words[:length] return self.add_truncation_text(' '.join(words), truncate) return ' '.join(words) def _truncate_html(self, length, truncate, text, truncate_len, words): """ Truncate HTML to a certain number of chars (not counting tags and comments), or, if words is True, then to a certain number of words. Close opened tags if they were correctly closed in the given HTML. Preserve newlines in the HTML. 
""" if words and length <= 0: return '' html4_singlets = ( 'br', 'col', 'link', 'base', 'img', 'param', 'area', 'hr', 'input' ) # Count non-HTML chars/words and keep note of open tags pos = 0 end_text_pos = 0 current_len = 0 open_tags = [] regex = re_words if words else re_chars while current_len <= length: m = regex.search(text, pos) if not m: # Checked through whole string break pos = m.end(0) if m[1]: # It's an actual non-HTML word or char current_len += 1 if current_len == truncate_len: end_text_pos = pos continue # Check for tag tag = re_tag.match(m[0]) if not tag or current_len >= truncate_len: # Don't worry about non tags or tags after our truncate point continue closing_tag, tagname, self_closing = tag.groups() # Element names are always case-insensitive tagname = tagname.lower() if self_closing or tagname in html4_singlets: pass elif closing_tag: # Check for match in open tags list try: i = open_tags.index(tagname) except ValueError: pass else: # SGML: An end tag closes, back to the matching start tag, # all unclosed intervening start tags with omitted end tags open_tags = open_tags[i + 1:] else: # Add it to the start of the open tags list open_tags.insert(0, tagname) if current_len <= length: return text out = text[:end_text_pos] truncate_text = self.add_truncation_text('', truncate) if truncate_text: out += truncate_text # Close any tags still open for tag in open_tags: out += '</%s>' % tag # Return string return out @keep_lazy_text def get_valid_filename(s): """ Return the given string converted to a string that can be used for a clean filename. Remove leading and trailing spaces; convert other spaces to underscores; and remove anything that is not an alphanumeric, dash, underscore, or dot. >>> get_valid_filename("john's portrait in 2004.jpg") 'johns_portrait_in_2004.jpg' """ s = str(s).strip().replace(' ', '_') return re.sub(r'(?u)[^-\w.]', '', s) @keep_lazy_text def get_text_list(list_, last_word=gettext_lazy('or')): """ >>> get_text_list(['a', 'b', 'c', 'd']) 'a, b, c or d' >>> get_text_list(['a', 'b', 'c'], 'and') 'a, b and c' >>> get_text_list(['a', 'b'], 'and') 'a and b' >>> get_text_list(['a']) 'a' >>> get_text_list([]) '' """ if not list_: return '' if len(list_) == 1: return str(list_[0]) return '%s %s %s' % ( # Translators: This string is used as a separator between list elements _(', ').join(str(i) for i in list_[:-1]), str(last_word), str(list_[-1]) ) @keep_lazy_text def normalize_newlines(text): """Normalize CRLF and CR newlines to just LF.""" return re_newlines.sub('\n', str(text)) @keep_lazy_text def phone2numeric(phone): """Convert a phone number with letters into its numeric equivalent.""" char2number = { 'a': '2', 'b': '2', 'c': '2', 'd': '3', 'e': '3', 'f': '3', 'g': '4', 'h': '4', 'i': '4', 'j': '5', 'k': '5', 'l': '5', 'm': '6', 'n': '6', 'o': '6', 'p': '7', 'q': '7', 'r': '7', 's': '7', 't': '8', 'u': '8', 'v': '8', 'w': '9', 'x': '9', 'y': '9', 'z': '9', } return ''.join(char2number.get(c, c) for c in phone.lower()) # From http://www.xhaus.com/alan/python/httpcomp.html#gzip # Used with permission. def compress_string(s): zbuf = BytesIO() with GzipFile(mode='wb', compresslevel=6, fileobj=zbuf, mtime=0) as zfile: zfile.write(s) return zbuf.getvalue() class StreamingBuffer(BytesIO): def read(self): ret = self.getvalue() self.seek(0) self.truncate() return ret # Like compress_string, but for iterators of strings. 
def compress_sequence(sequence): buf = StreamingBuffer() with GzipFile(mode='wb', compresslevel=6, fileobj=buf, mtime=0) as zfile: # Output headers... yield buf.read() for item in sequence: zfile.write(item) data = buf.read() if data: yield data yield buf.read() # Expression to match some_token and some_token="with spaces" (and similarly # for single-quoted strings). smart_split_re = _lazy_re_compile(r""" ((?: [^\s'"]* (?: (?:"(?:[^"\\]|\\.)*" | '(?:[^'\\]|\\.)*') [^\s'"]* )+ ) | \S+) """, re.VERBOSE) def smart_split(text): r""" Generator that splits a string by spaces, leaving quoted phrases together. Supports both single and double quotes, and supports escaping quotes with backslashes. In the output, strings will keep their initial and trailing quote marks and escaped quotes will remain escaped (the results can then be further processed with unescape_string_literal()). >>> list(smart_split(r'This is "a person\'s" test.')) ['This', 'is', '"a person\\\'s"', 'test.'] >>> list(smart_split(r"Another 'person\'s' test.")) ['Another', "'person\\'s'", 'test.'] >>> list(smart_split(r'A "\"funky\" style" test.')) ['A', '"\\"funky\\" style"', 'test.'] """ for bit in smart_split_re.finditer(str(text)): yield bit[0] def _replace_entity(match): text = match[1] if text[0] == '#': text = text[1:] try: if text[0] in 'xX': c = int(text[1:], 16) else: c = int(text) return chr(c) except ValueError: return match[0] else: try: return chr(html.entities.name2codepoint[text]) except KeyError: return match[0] _entity_re = _lazy_re_compile(r"&(#?[xX]?(?:[0-9a-fA-F]+|\w{1,8}));") @keep_lazy_text def unescape_entities(text): warnings.warn( 'django.utils.text.unescape_entities() is deprecated in favor of ' 'html.unescape().', RemovedInDjango40Warning, stacklevel=2, ) return _entity_re.sub(_replace_entity, str(text)) @keep_lazy_text def unescape_string_literal(s): r""" Convert quoted string literals to unquoted strings with escaped quotes and backslashes unquoted:: >>> unescape_string_literal('"abc"') 'abc' >>> unescape_string_literal("'abc'") 'abc' >>> unescape_string_literal('"a \"bc\""') 'a "bc"' >>> unescape_string_literal("'\'ab\' c'") "'ab' c" """ if s[0] not in "\"'" or s[-1] != s[0]: raise ValueError("Not a string literal: %r" % s) quote = s[0] return s[1:-1].replace(r'\%s' % quote, quote).replace(r'\\', '\\') @keep_lazy_text def slugify(value, allow_unicode=False): """ Convert to ASCII if 'allow_unicode' is False. Convert spaces or repeated dashes to single dashes. Remove characters that aren't alphanumerics, underscores, or hyphens. Convert to lowercase. Also strip leading and trailing whitespace, dashes, and underscores. """ value = str(value) if allow_unicode: value = unicodedata.normalize('NFKC', value) else: value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii') value = re.sub(r'[^\w\s-]', '', value.lower()) return re.sub(r'[-\s]+', '-', value).strip('-_') def camel_case_to_spaces(value): """ Split CamelCase and convert to lowercase. Strip surrounding whitespace. """ return re_camel_case.sub(r' \1', value).strip().lower() def _format_lazy(format_string, *args, **kwargs): """ Apply str.format() on 'format_string' where format_string, args, and/or kwargs might be lazy. """ return format_string.format(*args, **kwargs) format_lazy = lazy(_format_lazy, str)
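

# A short demonstration sketch (illustrative only; not part of this module).
# All inputs are literals, so nothing here depends on project configuration.
def _example_text_helpers():
    print(slugify('Héllo Wörld!'))                     # 'hello-world'
    print(Truncator('The quick brown fox').words(2))   # 'The quick…'
    print(Truncator('<p>one two three</p>').words(2, html=True))
    # smart_split() keeps quoted phrases together.
    print(list(smart_split('a "b c" d')))              # ['a', '"b c"', 'd']
    print(camel_case_to_spaces('CamelCaseValue'))      # 'camel case value'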
"""Translation helper functions.""" import functools import gettext as gettext_module import os import re import sys import warnings from asgiref.local import Local from django.apps import apps from django.conf import settings from django.conf.locale import LANG_INFO from django.core.exceptions import AppRegistryNotReady from django.core.signals import setting_changed from django.dispatch import receiver from django.utils.regex_helper import _lazy_re_compile from django.utils.safestring import SafeData, mark_safe from . import to_language, to_locale # Translations are cached in a dictionary for every language. # The active translations are stored by threadid to make them thread local. _translations = {} _active = Local() # The default translation is based on the settings file. _default = None # magic gettext number to separate context from message CONTEXT_SEPARATOR = "\x04" # Format of Accept-Language header values. From RFC 2616, section 14.4 and 3.9 # and RFC 3066, section 2.1 accept_language_re = _lazy_re_compile(r''' ([A-Za-z]{1,8}(?:-[A-Za-z0-9]{1,8})*|\*) # "en", "en-au", "x-y-z", "es-419", "*" (?:\s*;\s*q=(0(?:\.\d{,3})?|1(?:\.0{,3})?))? # Optional "q=1.00", "q=0.8" (?:\s*,\s*|$) # Multiple accepts per header. ''', re.VERBOSE) language_code_re = _lazy_re_compile( r'^[a-z]{1,8}(?:-[a-z0-9]{1,8})*(?:@[a-z0-9]{1,20})?$', re.IGNORECASE ) language_code_prefix_re = _lazy_re_compile(r'^/(\w+([@-]\w+)?)(/|$)') @receiver(setting_changed) def reset_cache(**kwargs): """ Reset global state when LANGUAGES setting has been changed, as some languages should no longer be accepted. """ if kwargs['setting'] in ('LANGUAGES', 'LANGUAGE_CODE'): check_for_language.cache_clear() get_languages.cache_clear() get_supported_language_variant.cache_clear() class TranslationCatalog: """ Simulate a dict for DjangoTranslation._catalog so as multiple catalogs with different plural equations are kept separate. """ def __init__(self, trans=None): self._catalogs = [trans._catalog.copy()] if trans else [{}] self._plurals = [trans.plural] if trans else [lambda n: int(n != 1)] def __getitem__(self, key): for cat in self._catalogs: try: return cat[key] except KeyError: pass raise KeyError(key) def __setitem__(self, key, value): self._catalogs[0][key] = value def __contains__(self, key): return any(key in cat for cat in self._catalogs) def items(self): for cat in self._catalogs: yield from cat.items() def keys(self): for cat in self._catalogs: yield from cat.keys() def update(self, trans): # Merge if plural function is the same, else prepend. for cat, plural in zip(self._catalogs, self._plurals): if trans.plural.__code__ == plural.__code__: cat.update(trans._catalog) break else: self._catalogs.insert(0, trans._catalog.copy()) self._plurals.insert(0, trans.plural) def get(self, key, default=None): missing = object() for cat in self._catalogs: result = cat.get(key, missing) if result is not missing: return result return default def plural(self, msgid, num): for cat, plural in zip(self._catalogs, self._plurals): tmsg = cat.get((msgid, plural(num))) if tmsg is not None: return tmsg raise KeyError class DjangoTranslation(gettext_module.GNUTranslations): """ Set up the GNUTranslations context with regard to output charset. This translation object will be constructed out of multiple GNUTranslations objects by merging their catalogs. It will construct an object for the requested language and add a fallback to the default language, if it's different from the requested language. 
""" domain = 'django' def __init__(self, language, domain=None, localedirs=None): """Create a GNUTranslations() using many locale directories""" gettext_module.GNUTranslations.__init__(self) if domain is not None: self.domain = domain self.__language = language self.__to_language = to_language(language) self.__locale = to_locale(language) self._catalog = None # If a language doesn't have a catalog, use the Germanic default for # pluralization: anything except one is pluralized. self.plural = lambda n: int(n != 1) if self.domain == 'django': if localedirs is not None: # A module-level cache is used for caching 'django' translations warnings.warn("localedirs is ignored when domain is 'django'.", RuntimeWarning) localedirs = None self._init_translation_catalog() if localedirs: for localedir in localedirs: translation = self._new_gnu_trans(localedir) self.merge(translation) else: self._add_installed_apps_translations() self._add_local_translations() if self.__language == settings.LANGUAGE_CODE and self.domain == 'django' and self._catalog is None: # default lang should have at least one translation file available. raise OSError('No translation files found for default language %s.' % settings.LANGUAGE_CODE) self._add_fallback(localedirs) if self._catalog is None: # No catalogs found for this language, set an empty catalog. self._catalog = TranslationCatalog() def __repr__(self): return "<DjangoTranslation lang:%s>" % self.__language def _new_gnu_trans(self, localedir, use_null_fallback=True): """ Return a mergeable gettext.GNUTranslations instance. A convenience wrapper. By default gettext uses 'fallback=False'. Using param `use_null_fallback` to avoid confusion with any other references to 'fallback'. """ return gettext_module.translation( domain=self.domain, localedir=localedir, languages=[self.__locale], fallback=use_null_fallback, ) def _init_translation_catalog(self): """Create a base catalog using global django translations.""" settingsfile = sys.modules[settings.__module__].__file__ localedir = os.path.join(os.path.dirname(settingsfile), 'locale') translation = self._new_gnu_trans(localedir) self.merge(translation) def _add_installed_apps_translations(self): """Merge translations from each installed app.""" try: app_configs = reversed(list(apps.get_app_configs())) except AppRegistryNotReady: raise AppRegistryNotReady( "The translation infrastructure cannot be initialized before the " "apps registry is ready. 
Check that you don't make non-lazy " "gettext calls at import time.") for app_config in app_configs: localedir = os.path.join(app_config.path, 'locale') if os.path.exists(localedir): translation = self._new_gnu_trans(localedir) self.merge(translation) def _add_local_translations(self): """Merge translations defined in LOCALE_PATHS.""" for localedir in reversed(settings.LOCALE_PATHS): translation = self._new_gnu_trans(localedir) self.merge(translation) def _add_fallback(self, localedirs=None): """Set the GNUTranslations() fallback with the default language.""" # Don't set a fallback for the default language or any English variant # (as it's empty, so it'll ALWAYS fall back to the default language) if self.__language == settings.LANGUAGE_CODE or self.__language.startswith('en'): return if self.domain == 'django': # Get from cache default_translation = translation(settings.LANGUAGE_CODE) else: default_translation = DjangoTranslation( settings.LANGUAGE_CODE, domain=self.domain, localedirs=localedirs ) self.add_fallback(default_translation) def merge(self, other): """Merge another translation into this catalog.""" if not getattr(other, '_catalog', None): return # NullTranslations() has no _catalog if self._catalog is None: # Take plural and _info from first catalog found (generally Django's). self.plural = other.plural self._info = other._info.copy() self._catalog = TranslationCatalog(other) else: self._catalog.update(other) if other._fallback: self.add_fallback(other._fallback) def language(self): """Return the translation language.""" return self.__language def to_language(self): """Return the translation language name.""" return self.__to_language def ngettext(self, msgid1, msgid2, n): try: tmsg = self._catalog.plural(msgid1, n) except KeyError: if self._fallback: return self._fallback.ngettext(msgid1, msgid2, n) if n == 1: tmsg = msgid1 else: tmsg = msgid2 return tmsg def translation(language): """ Return a translation object in the default 'django' domain. """ global _translations if language not in _translations: _translations[language] = DjangoTranslation(language) return _translations[language] def activate(language): """ Fetch the translation object for a given language and install it as the current translation object for the current thread. """ if not language: return _active.value = translation(language) def deactivate(): """ Uninstall the active translation object so that further _() calls resolve to the default translation object. """ if hasattr(_active, "value"): del _active.value def deactivate_all(): """ Make the active translation object a NullTranslations() instance. This is useful when we want delayed translations to appear as the original string for some reason. """ _active.value = gettext_module.NullTranslations() _active.value.to_language = lambda *args: None def get_language(): """Return the currently selected language.""" t = getattr(_active, "value", None) if t is not None: try: return t.to_language() except AttributeError: pass # If we don't have a real translation object, assume it's the default language. return settings.LANGUAGE_CODE def get_language_bidi(): """ Return selected language's BiDi layout. * False = left-to-right layout * True = right-to-left layout """ lang = get_language() if lang is None: return False else: base_lang = get_language().split('-')[0] return base_lang in settings.LANGUAGES_BIDI def catalog(): """ Return the current active catalog for further processing. 
    This can be used if you need to modify the catalog or want to access the
    whole message catalog instead of just translating one string.
    """
    global _default

    t = getattr(_active, "value", None)
    if t is not None:
        return t
    if _default is None:
        _default = translation(settings.LANGUAGE_CODE)
    return _default


def gettext(message):
    """
    Translate the 'message' string. It uses the current thread to find the
    translation object to use. If no current translation is activated, the
    message will be run through the default translation object.
    """
    global _default

    eol_message = message.replace('\r\n', '\n').replace('\r', '\n')

    if eol_message:
        _default = _default or translation(settings.LANGUAGE_CODE)
        translation_object = getattr(_active, "value", _default)

        result = translation_object.gettext(eol_message)
    else:
        # Return an empty value of the corresponding type if an empty message
        # is given, instead of metadata, which is the default gettext behavior.
        result = type(message)('')

    if isinstance(message, SafeData):
        return mark_safe(result)

    return result


def pgettext(context, message):
    msg_with_ctxt = "%s%s%s" % (context, CONTEXT_SEPARATOR, message)
    result = gettext(msg_with_ctxt)
    if CONTEXT_SEPARATOR in result:
        # Translation not found
        result = message
    elif isinstance(message, SafeData):
        result = mark_safe(result)
    return result


def gettext_noop(message):
    """
    Mark strings for translation but don't translate them now. This can be
    used to store strings in global variables that should stay in the base
    language (because they might be used externally) and will be translated
    later.
    """
    return message


def do_ntranslate(singular, plural, number, translation_function):
    global _default

    t = getattr(_active, "value", None)
    if t is not None:
        return getattr(t, translation_function)(singular, plural, number)
    if _default is None:
        _default = translation(settings.LANGUAGE_CODE)
    return getattr(_default, translation_function)(singular, plural, number)


def ngettext(singular, plural, number):
    """
    Return a string of the translation of either the singular or plural,
    based on the number.
    """
    return do_ntranslate(singular, plural, number, 'ngettext')


def npgettext(context, singular, plural, number):
    msgs_with_ctxt = ("%s%s%s" % (context, CONTEXT_SEPARATOR, singular),
                      "%s%s%s" % (context, CONTEXT_SEPARATOR, plural),
                      number)
    result = ngettext(*msgs_with_ctxt)
    if CONTEXT_SEPARATOR in result:
        # Translation not found
        result = ngettext(singular, plural, number)
    return result


def all_locale_paths():
    """
    Return a list of paths to user-provided language files.
    """
    globalpath = os.path.join(
        os.path.dirname(sys.modules[settings.__module__].__file__), 'locale')
    app_paths = []
    for app_config in apps.get_app_configs():
        locale_path = os.path.join(app_config.path, 'locale')
        if os.path.exists(locale_path):
            app_paths.append(locale_path)
    return [globalpath, *settings.LOCALE_PATHS, *app_paths]


@functools.lru_cache(maxsize=1000)
def check_for_language(lang_code):
    """
    Check whether there is a global language file for the given language
    code. This is used to decide whether a user-provided language is
    available.

    lru_cache should have a maxsize to prevent from memory exhaustion attacks,
    as the provided language codes are taken from the HTTP request. See also
    <https://www.djangoproject.com/weblog/2007/oct/26/security-fix/>.
""" # First, a quick check to make sure lang_code is well-formed (#21458) if lang_code is None or not language_code_re.search(lang_code): return False return any( gettext_module.find('django', path, [to_locale(lang_code)]) is not None for path in all_locale_paths() ) @functools.lru_cache() def get_languages(): """ Cache of settings.LANGUAGES in a dictionary for easy lookups by key. """ return dict(settings.LANGUAGES) @functools.lru_cache(maxsize=1000) def get_supported_language_variant(lang_code, strict=False): """ Return the language code that's listed in supported languages, possibly selecting a more generic variant. Raise LookupError if nothing is found. If `strict` is False (the default), look for a country-specific variant when neither the language code nor its generic variant is found. lru_cache should have a maxsize to prevent from memory exhaustion attacks, as the provided language codes are taken from the HTTP request. See also <https://www.djangoproject.com/weblog/2007/oct/26/security-fix/>. """ if lang_code: # If 'fr-ca' is not supported, try special fallback or language-only 'fr'. possible_lang_codes = [lang_code] try: possible_lang_codes.extend(LANG_INFO[lang_code]['fallback']) except KeyError: pass generic_lang_code = lang_code.split('-')[0] possible_lang_codes.append(generic_lang_code) supported_lang_codes = get_languages() for code in possible_lang_codes: if code in supported_lang_codes and check_for_language(code): return code if not strict: # if fr-fr is not supported, try fr-ca. for supported_code in supported_lang_codes: if supported_code.startswith(generic_lang_code + '-'): return supported_code raise LookupError(lang_code) def get_language_from_path(path, strict=False): """ Return the language code if there's a valid language code found in `path`. If `strict` is False (the default), look for a country-specific variant when neither the language code nor its generic variant is found. """ regex_match = language_code_prefix_re.match(path) if not regex_match: return None lang_code = regex_match[1] try: return get_supported_language_variant(lang_code, strict=strict) except LookupError: return None def get_language_from_request(request, check_path=False): """ Analyze the request to find what language the user wants the system to show. Only languages listed in settings.LANGUAGES are taken into account. If the user requests a sublanguage where we have a main language, we send out the main language. If check_path is True, the URL path prefix will be checked for a language code, otherwise this is skipped for backwards compatibility. 
""" if check_path: lang_code = get_language_from_path(request.path_info) if lang_code is not None: return lang_code lang_code = request.COOKIES.get(settings.LANGUAGE_COOKIE_NAME) if lang_code is not None and lang_code in get_languages() and check_for_language(lang_code): return lang_code try: return get_supported_language_variant(lang_code) except LookupError: pass accept = request.META.get('HTTP_ACCEPT_LANGUAGE', '') for accept_lang, unused in parse_accept_lang_header(accept): if accept_lang == '*': break if not language_code_re.search(accept_lang): continue try: return get_supported_language_variant(accept_lang) except LookupError: continue try: return get_supported_language_variant(settings.LANGUAGE_CODE) except LookupError: return settings.LANGUAGE_CODE @functools.lru_cache(maxsize=1000) def parse_accept_lang_header(lang_string): """ Parse the lang_string, which is the body of an HTTP Accept-Language header, and return a tuple of (lang, q-value), ordered by 'q' values. Return an empty tuple if there are any format errors in lang_string. """ result = [] pieces = accept_language_re.split(lang_string.lower()) if pieces[-1]: return () for i in range(0, len(pieces) - 1, 3): first, lang, priority = pieces[i:i + 3] if first: return () if priority: priority = float(priority) else: priority = 1.0 result.append((lang, priority)) result.sort(key=lambda k: k[1], reverse=True) return tuple(result)
import functools import re from itertools import chain from django.conf import settings from django.db import models from django.db.migrations import operations from django.db.migrations.migration import Migration from django.db.migrations.operations.models import AlterModelOptions from django.db.migrations.optimizer import MigrationOptimizer from django.db.migrations.questioner import MigrationQuestioner from django.db.migrations.utils import ( COMPILED_REGEX_TYPE, RegexObject, get_migration_name_timestamp, ) from django.utils.topological_sort import stable_topological_sort class MigrationAutodetector: """ Take a pair of ProjectStates and compare them to see what the first would need doing to make it match the second (the second usually being the project's current state). Note that this naturally operates on entire projects at a time, as it's likely that changes interact (for example, you can't add a ForeignKey without having a migration to add the table it depends on first). A user interface may offer single-app usage if it wishes, with the caveat that it may not always be possible. """ def __init__(self, from_state, to_state, questioner=None): self.from_state = from_state self.to_state = to_state self.questioner = questioner or MigrationQuestioner() self.existing_apps = {app for app, model in from_state.models} def changes(self, graph, trim_to_apps=None, convert_apps=None, migration_name=None): """ Main entry point to produce a list of applicable changes. Take a graph to base names on and an optional set of apps to try and restrict to (restriction is not guaranteed) """ changes = self._detect_changes(convert_apps, graph) changes = self.arrange_for_graph(changes, graph, migration_name) if trim_to_apps: changes = self._trim_to_apps(changes, trim_to_apps) return changes def deep_deconstruct(self, obj): """ Recursive deconstruction for a field and its arguments. Used for full comparison for rename/alter; sometimes a single-level deconstruction will not compare correctly. """ if isinstance(obj, list): return [self.deep_deconstruct(value) for value in obj] elif isinstance(obj, tuple): return tuple(self.deep_deconstruct(value) for value in obj) elif isinstance(obj, dict): return { key: self.deep_deconstruct(value) for key, value in obj.items() } elif isinstance(obj, functools.partial): return (obj.func, self.deep_deconstruct(obj.args), self.deep_deconstruct(obj.keywords)) elif isinstance(obj, COMPILED_REGEX_TYPE): return RegexObject(obj) elif isinstance(obj, type): # If this is a type that implements 'deconstruct' as an instance method, # avoid treating this as being deconstructible itself - see #22951 return obj elif hasattr(obj, 'deconstruct'): deconstructed = obj.deconstruct() if isinstance(obj, models.Field): # we have a field which also returns a name deconstructed = deconstructed[1:] path, args, kwargs = deconstructed return ( path, [self.deep_deconstruct(value) for value in args], { key: self.deep_deconstruct(value) for key, value in kwargs.items() }, ) else: return obj def only_relation_agnostic_fields(self, fields): """ Return a definition of the fields that ignores field names and what related fields actually relate to. Used for detecting renames (as the related fields change during renames). 
""" fields_def = [] for name, field in sorted(fields.items()): deconstruction = self.deep_deconstruct(field) if field.remote_field and field.remote_field.model: del deconstruction[2]['to'] fields_def.append(deconstruction) return fields_def def _detect_changes(self, convert_apps=None, graph=None): """ Return a dict of migration plans which will achieve the change from from_state to to_state. The dict has app labels as keys and a list of migrations as values. The resulting migrations aren't specially named, but the names do matter for dependencies inside the set. convert_apps is the list of apps to convert to use migrations (i.e. to make initial migrations for, in the usual case) graph is an optional argument that, if provided, can help improve dependency generation and avoid potential circular dependencies. """ # The first phase is generating all the operations for each app # and gathering them into a big per-app list. # Then go through that list, order it, and split into migrations to # resolve dependencies caused by M2Ms and FKs. self.generated_operations = {} self.altered_indexes = {} self.altered_constraints = {} # Prepare some old/new state and model lists, separating # proxy models and ignoring unmigrated apps. self.old_apps = self.from_state.concrete_apps self.new_apps = self.to_state.apps self.old_model_keys = set() self.old_proxy_keys = set() self.old_unmanaged_keys = set() self.new_model_keys = set() self.new_proxy_keys = set() self.new_unmanaged_keys = set() for al, mn in self.from_state.models: model = self.old_apps.get_model(al, mn) if not model._meta.managed: self.old_unmanaged_keys.add((al, mn)) elif al not in self.from_state.real_apps: if model._meta.proxy: self.old_proxy_keys.add((al, mn)) else: self.old_model_keys.add((al, mn)) for al, mn in self.to_state.models: model = self.new_apps.get_model(al, mn) if not model._meta.managed: self.new_unmanaged_keys.add((al, mn)) elif ( al not in self.from_state.real_apps or (convert_apps and al in convert_apps) ): if model._meta.proxy: self.new_proxy_keys.add((al, mn)) else: self.new_model_keys.add((al, mn)) # Renames have to come first self.generate_renamed_models() # Prepare lists of fields and generate through model map self._prepare_field_lists() self._generate_through_model_map() # Generate non-rename model operations self.generate_deleted_models() self.generate_created_models() self.generate_deleted_proxies() self.generate_created_proxies() self.generate_altered_options() self.generate_altered_managers() # Create the altered indexes and store them in self.altered_indexes. # This avoids the same computation in generate_removed_indexes() # and generate_added_indexes(). 
self.create_altered_indexes() self.create_altered_constraints() # Generate index removal operations before field is removed self.generate_removed_constraints() self.generate_removed_indexes() # Generate field operations self.generate_renamed_fields() self.generate_removed_fields() self.generate_added_fields() self.generate_altered_fields() self.generate_altered_unique_together() self.generate_altered_index_together() self.generate_added_indexes() self.generate_added_constraints() self.generate_altered_db_table() self.generate_altered_order_with_respect_to() self._sort_migrations() self._build_migration_list(graph) self._optimize_migrations() return self.migrations def _prepare_field_lists(self): """ Prepare field lists and a list of the fields that used through models in the old state so dependencies can be made from the through model deletion to the field that uses it. """ self.kept_model_keys = self.old_model_keys & self.new_model_keys self.kept_proxy_keys = self.old_proxy_keys & self.new_proxy_keys self.kept_unmanaged_keys = self.old_unmanaged_keys & self.new_unmanaged_keys self.through_users = {} self.old_field_keys = { (app_label, model_name, field_name) for app_label, model_name in self.kept_model_keys for field_name in self.from_state.models[ app_label, self.renamed_models.get((app_label, model_name), model_name) ].fields } self.new_field_keys = { (app_label, model_name, field_name) for app_label, model_name in self.kept_model_keys for field_name in self.to_state.models[app_label, model_name].fields } def _generate_through_model_map(self): """Through model map generation.""" for app_label, model_name in sorted(self.old_model_keys): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] for field_name in old_model_state.fields: old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field(field_name) if (hasattr(old_field, "remote_field") and getattr(old_field.remote_field, "through", None) and not old_field.remote_field.through._meta.auto_created): through_key = ( old_field.remote_field.through._meta.app_label, old_field.remote_field.through._meta.model_name, ) self.through_users[through_key] = (app_label, old_model_name, field_name) @staticmethod def _resolve_dependency(dependency): """ Return the resolved dependency and a boolean denoting whether or not it was swappable. """ if dependency[0] != '__setting__': return dependency, False resolved_app_label, resolved_object_name = getattr(settings, dependency[1]).split('.') return (resolved_app_label, resolved_object_name.lower()) + dependency[2:], True def _build_migration_list(self, graph=None): """ Chop the lists of operations up into migrations with dependencies on each other. Do this by going through an app's list of operations until one is found that has an outgoing dependency that isn't in another app's migration yet (hasn't been chopped off its list). Then chop off the operations before it into a migration and move onto the next app. If the loops completes without doing anything, there's a circular dependency (which _should_ be impossible as the operations are all split at this point so they can't depend and be depended on). """ self.migrations = {} num_ops = sum(len(x) for x in self.generated_operations.values()) chop_mode = False while num_ops: # On every iteration, we step through all the apps and see if there # is a completed set of operations. 
# If we find that a subset of the operations are complete we can # try to chop it off from the rest and continue, but we only # do this if we've already been through the list once before # without any chopping and nothing has changed. for app_label in sorted(self.generated_operations): chopped = [] dependencies = set() for operation in list(self.generated_operations[app_label]): deps_satisfied = True operation_dependencies = set() for dep in operation._auto_deps: # Temporarily resolve the swappable dependency to # prevent circular references. While keeping the # dependency checks on the resolved model, add the # swappable dependencies. original_dep = dep dep, is_swappable_dep = self._resolve_dependency(dep) if dep[0] != app_label: # External app dependency. See if it's not yet # satisfied. for other_operation in self.generated_operations.get(dep[0], []): if self.check_dependency(other_operation, dep): deps_satisfied = False break if not deps_satisfied: break else: if is_swappable_dep: operation_dependencies.add((original_dep[0], original_dep[1])) elif dep[0] in self.migrations: operation_dependencies.add((dep[0], self.migrations[dep[0]][-1].name)) else: # If we can't find the other app, we add a first/last dependency, # but only if we've already been through once and checked everything if chop_mode: # If the app already exists, we add a dependency on the last migration, # as we don't know which migration contains the target field. # If it's not yet migrated or has no migrations, we use __first__ if graph and graph.leaf_nodes(dep[0]): operation_dependencies.add(graph.leaf_nodes(dep[0])[0]) else: operation_dependencies.add((dep[0], "__first__")) else: deps_satisfied = False if deps_satisfied: chopped.append(operation) dependencies.update(operation_dependencies) del self.generated_operations[app_label][0] else: break # Make a migration! Well, only if there's stuff to put in it if dependencies or chopped: if not self.generated_operations[app_label] or chop_mode: subclass = type("Migration", (Migration,), {"operations": [], "dependencies": []}) instance = subclass("auto_%i" % (len(self.migrations.get(app_label, [])) + 1), app_label) instance.dependencies = list(dependencies) instance.operations = chopped instance.initial = app_label not in self.existing_apps self.migrations.setdefault(app_label, []).append(instance) chop_mode = False else: self.generated_operations[app_label] = chopped + self.generated_operations[app_label] new_num_ops = sum(len(x) for x in self.generated_operations.values()) if new_num_ops == num_ops: if not chop_mode: chop_mode = True else: raise ValueError("Cannot resolve operation dependencies: %r" % self.generated_operations) num_ops = new_num_ops def _sort_migrations(self): """ Reorder to make things possible. Reordering may be needed so FKs work nicely inside the same app. """ for app_label, ops in sorted(self.generated_operations.items()): # construct a dependency graph for intra-app dependencies dependency_graph = {op: set() for op in ops} for op in ops: for dep in op._auto_deps: # Resolve intra-app dependencies to handle circular # references involving a swappable model. 
dep = self._resolve_dependency(dep)[0] if dep[0] == app_label: for op2 in ops: if self.check_dependency(op2, dep): dependency_graph[op].add(op2) # we use a stable sort for deterministic tests & general behavior self.generated_operations[app_label] = stable_topological_sort(ops, dependency_graph) def _optimize_migrations(self): # Add in internal dependencies among the migrations for app_label, migrations in self.migrations.items(): for m1, m2 in zip(migrations, migrations[1:]): m2.dependencies.append((app_label, m1.name)) # De-dupe dependencies for migrations in self.migrations.values(): for migration in migrations: migration.dependencies = list(set(migration.dependencies)) # Optimize migrations for app_label, migrations in self.migrations.items(): for migration in migrations: migration.operations = MigrationOptimizer().optimize(migration.operations, app_label) def check_dependency(self, operation, dependency): """ Return True if the given operation depends on the given dependency, False otherwise. """ # Created model if dependency[2] is None and dependency[3] is True: return ( isinstance(operation, operations.CreateModel) and operation.name_lower == dependency[1].lower() ) # Created field elif dependency[2] is not None and dependency[3] is True: return ( ( isinstance(operation, operations.CreateModel) and operation.name_lower == dependency[1].lower() and any(dependency[2] == x for x, y in operation.fields) ) or ( isinstance(operation, operations.AddField) and operation.model_name_lower == dependency[1].lower() and operation.name_lower == dependency[2].lower() ) ) # Removed field elif dependency[2] is not None and dependency[3] is False: return ( isinstance(operation, operations.RemoveField) and operation.model_name_lower == dependency[1].lower() and operation.name_lower == dependency[2].lower() ) # Removed model elif dependency[2] is None and dependency[3] is False: return ( isinstance(operation, operations.DeleteModel) and operation.name_lower == dependency[1].lower() ) # Field being altered elif dependency[2] is not None and dependency[3] == "alter": return ( isinstance(operation, operations.AlterField) and operation.model_name_lower == dependency[1].lower() and operation.name_lower == dependency[2].lower() ) # order_with_respect_to being unset for a field elif dependency[2] is not None and dependency[3] == "order_wrt_unset": return ( isinstance(operation, operations.AlterOrderWithRespectTo) and operation.name_lower == dependency[1].lower() and (operation.order_with_respect_to or "").lower() != dependency[2].lower() ) # Field is removed and part of an index/unique_together elif dependency[2] is not None and dependency[3] == "foo_together_change": return ( isinstance(operation, (operations.AlterUniqueTogether, operations.AlterIndexTogether)) and operation.name_lower == dependency[1].lower() ) # Unknown dependency. Raise an error. else: raise ValueError("Can't handle dependency %r" % (dependency,)) def add_operation(self, app_label, operation, dependencies=None, beginning=False): # Dependencies are (app_label, model_name, field_name, create/delete as True/False) operation._auto_deps = dependencies or [] if beginning: self.generated_operations.setdefault(app_label, []).insert(0, operation) else: self.generated_operations.setdefault(app_label, []).append(operation) def swappable_first_key(self, item): """ Place potential swappable models first in lists of created models (only real way to solve #22783). 
""" try: model = self.new_apps.get_model(item[0], item[1]) base_names = [base.__name__ for base in model.__bases__] string_version = "%s.%s" % (item[0], item[1]) if ( model._meta.swappable or "AbstractUser" in base_names or "AbstractBaseUser" in base_names or settings.AUTH_USER_MODEL.lower() == string_version.lower() ): return ("___" + item[0], "___" + item[1]) except LookupError: pass return item def generate_renamed_models(self): """ Find any renamed models, generate the operations for them, and remove the old entry from the model lists. Must be run before other model-level generation. """ self.renamed_models = {} self.renamed_models_rel = {} added_models = self.new_model_keys - self.old_model_keys for app_label, model_name in sorted(added_models): model_state = self.to_state.models[app_label, model_name] model_fields_def = self.only_relation_agnostic_fields(model_state.fields) removed_models = self.old_model_keys - self.new_model_keys for rem_app_label, rem_model_name in removed_models: if rem_app_label == app_label: rem_model_state = self.from_state.models[rem_app_label, rem_model_name] rem_model_fields_def = self.only_relation_agnostic_fields(rem_model_state.fields) if model_fields_def == rem_model_fields_def: if self.questioner.ask_rename_model(rem_model_state, model_state): model_opts = self.new_apps.get_model(app_label, model_name)._meta dependencies = [] for field in model_opts.get_fields(): if field.is_relation: dependencies.extend(self._get_dependencies_for_foreign_key(field)) self.add_operation( app_label, operations.RenameModel( old_name=rem_model_state.name, new_name=model_state.name, ), dependencies=dependencies, ) self.renamed_models[app_label, model_name] = rem_model_name renamed_models_rel_key = '%s.%s' % ( rem_model_state.app_label, rem_model_state.name_lower, ) self.renamed_models_rel[renamed_models_rel_key] = '%s.%s' % ( model_state.app_label, model_state.name_lower, ) self.old_model_keys.remove((rem_app_label, rem_model_name)) self.old_model_keys.add((app_label, model_name)) break def generate_created_models(self): """ Find all new models (both managed and unmanaged) and make create operations for them as well as separate operations to create any foreign key or M2M relationships (these are optimized later, if possible). Defer any model options that refer to collections of fields that might be deferred (e.g. unique_together, index_together). """ old_keys = self.old_model_keys | self.old_unmanaged_keys added_models = self.new_model_keys - old_keys added_unmanaged_models = self.new_unmanaged_keys - old_keys all_added_models = chain( sorted(added_models, key=self.swappable_first_key, reverse=True), sorted(added_unmanaged_models, key=self.swappable_first_key, reverse=True) ) for app_label, model_name in all_added_models: model_state = self.to_state.models[app_label, model_name] model_opts = self.new_apps.get_model(app_label, model_name)._meta # Gather related fields related_fields = {} primary_key_rel = None for field in model_opts.local_fields: if field.remote_field: if field.remote_field.model: if field.primary_key: primary_key_rel = field.remote_field.model elif not field.remote_field.parent_link: related_fields[field.name] = field # through will be none on M2Ms on swapped-out models; # we can treat lack of through as auto_created=True, though. 
if (getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created): related_fields[field.name] = field for field in model_opts.local_many_to_many: if field.remote_field.model: related_fields[field.name] = field if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created: related_fields[field.name] = field # Are there indexes/unique|index_together to defer? indexes = model_state.options.pop('indexes') constraints = model_state.options.pop('constraints') unique_together = model_state.options.pop('unique_together', None) index_together = model_state.options.pop('index_together', None) order_with_respect_to = model_state.options.pop('order_with_respect_to', None) # Depend on the deletion of any possible proxy version of us dependencies = [ (app_label, model_name, None, False), ] # Depend on all bases for base in model_state.bases: if isinstance(base, str) and "." in base: base_app_label, base_name = base.split(".", 1) dependencies.append((base_app_label, base_name, None, True)) # Depend on the removal of base fields if the new model has # a field with the same name. old_base_model_state = self.from_state.models.get((base_app_label, base_name)) new_base_model_state = self.to_state.models.get((base_app_label, base_name)) if old_base_model_state and new_base_model_state: removed_base_fields = set(old_base_model_state.fields).difference( new_base_model_state.fields, ).intersection(model_state.fields) for removed_base_field in removed_base_fields: dependencies.append((base_app_label, base_name, removed_base_field, False)) # Depend on the other end of the primary key if it's a relation if primary_key_rel: dependencies.append(( primary_key_rel._meta.app_label, primary_key_rel._meta.object_name, None, True )) # Generate creation operation self.add_operation( app_label, operations.CreateModel( name=model_state.name, fields=[d for d in model_state.fields.items() if d[0] not in related_fields], options=model_state.options, bases=model_state.bases, managers=model_state.managers, ), dependencies=dependencies, beginning=True, ) # Don't add operations which modify the database for unmanaged models if not model_opts.managed: continue # Generate operations for each related field for name, field in sorted(related_fields.items()): dependencies = self._get_dependencies_for_foreign_key(field) # Depend on our own model being created dependencies.append((app_label, model_name, None, True)) # Make operation self.add_operation( app_label, operations.AddField( model_name=model_name, name=name, field=field, ), dependencies=list(set(dependencies)), ) # Generate other opns related_dependencies = [ (app_label, model_name, name, True) for name in sorted(related_fields) ] related_dependencies.append((app_label, model_name, None, True)) for index in indexes: self.add_operation( app_label, operations.AddIndex( model_name=model_name, index=index, ), dependencies=related_dependencies, ) for constraint in constraints: self.add_operation( app_label, operations.AddConstraint( model_name=model_name, constraint=constraint, ), dependencies=related_dependencies, ) if unique_together: self.add_operation( app_label, operations.AlterUniqueTogether( name=model_name, unique_together=unique_together, ), dependencies=related_dependencies ) if index_together: self.add_operation( app_label, operations.AlterIndexTogether( name=model_name, index_together=index_together, ), dependencies=related_dependencies ) if order_with_respect_to: self.add_operation( app_label, 
operations.AlterOrderWithRespectTo( name=model_name, order_with_respect_to=order_with_respect_to, ), dependencies=[ (app_label, model_name, order_with_respect_to, True), (app_label, model_name, None, True), ] ) # Fix relationships if the model changed from a proxy model to a # concrete model. if (app_label, model_name) in self.old_proxy_keys: for related_object in model_opts.related_objects: self.add_operation( related_object.related_model._meta.app_label, operations.AlterField( model_name=related_object.related_model._meta.object_name, name=related_object.field.name, field=related_object.field, ), dependencies=[(app_label, model_name, None, True)], ) def generate_created_proxies(self): """ Make CreateModel statements for proxy models. Use the same statements as that way there's less code duplication, but for proxy models it's safe to skip all the pointless field stuff and chuck out an operation. """ added = self.new_proxy_keys - self.old_proxy_keys for app_label, model_name in sorted(added): model_state = self.to_state.models[app_label, model_name] assert model_state.options.get("proxy") # Depend on the deletion of any possible non-proxy version of us dependencies = [ (app_label, model_name, None, False), ] # Depend on all bases for base in model_state.bases: if isinstance(base, str) and "." in base: base_app_label, base_name = base.split(".", 1) dependencies.append((base_app_label, base_name, None, True)) # Generate creation operation self.add_operation( app_label, operations.CreateModel( name=model_state.name, fields=[], options=model_state.options, bases=model_state.bases, managers=model_state.managers, ), # Depend on the deletion of any possible non-proxy version of us dependencies=dependencies, ) def generate_deleted_models(self): """ Find all deleted models (managed and unmanaged) and make delete operations for them as well as separate operations to delete any foreign key or M2M relationships (these are optimized later, if possible). Also bring forward removal of any model options that refer to collections of fields - the inverse of generate_created_models(). """ new_keys = self.new_model_keys | self.new_unmanaged_keys deleted_models = self.old_model_keys - new_keys deleted_unmanaged_models = self.old_unmanaged_keys - new_keys all_deleted_models = chain(sorted(deleted_models), sorted(deleted_unmanaged_models)) for app_label, model_name in all_deleted_models: model_state = self.from_state.models[app_label, model_name] model = self.old_apps.get_model(app_label, model_name) # Gather related fields related_fields = {} for field in model._meta.local_fields: if field.remote_field: if field.remote_field.model: related_fields[field.name] = field # through will be none on M2Ms on swapped-out models; # we can treat lack of through as auto_created=True, though. 
if (getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created): related_fields[field.name] = field for field in model._meta.local_many_to_many: if field.remote_field.model: related_fields[field.name] = field if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created: related_fields[field.name] = field # Generate option removal first unique_together = model_state.options.pop('unique_together', None) index_together = model_state.options.pop('index_together', None) if unique_together: self.add_operation( app_label, operations.AlterUniqueTogether( name=model_name, unique_together=None, ) ) if index_together: self.add_operation( app_label, operations.AlterIndexTogether( name=model_name, index_together=None, ) ) # Then remove each related field for name in sorted(related_fields): self.add_operation( app_label, operations.RemoveField( model_name=model_name, name=name, ) ) # Finally, remove the model. # This depends on both the removal/alteration of all incoming fields # and the removal of all its own related fields, and if it's # a through model the field that references it. dependencies = [] for related_object in model._meta.related_objects: related_object_app_label = related_object.related_model._meta.app_label object_name = related_object.related_model._meta.object_name field_name = related_object.field.name dependencies.append((related_object_app_label, object_name, field_name, False)) if not related_object.many_to_many: dependencies.append((related_object_app_label, object_name, field_name, "alter")) for name in sorted(related_fields): dependencies.append((app_label, model_name, name, False)) # We're referenced in another field's through= through_user = self.through_users.get((app_label, model_state.name_lower)) if through_user: dependencies.append((through_user[0], through_user[1], through_user[2], False)) # Finally, make the operation, deduping any dependencies self.add_operation( app_label, operations.DeleteModel( name=model_state.name, ), dependencies=list(set(dependencies)), ) def generate_deleted_proxies(self): """Make DeleteModel options for proxy models.""" deleted = self.old_proxy_keys - self.new_proxy_keys for app_label, model_name in sorted(deleted): model_state = self.from_state.models[app_label, model_name] assert model_state.options.get("proxy") self.add_operation( app_label, operations.DeleteModel( name=model_state.name, ), ) def generate_renamed_fields(self): """Work out renamed fields.""" self.renamed_fields = {} for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name) # Scan to see if this is actually a rename! 
field_dec = self.deep_deconstruct(field) for rem_app_label, rem_model_name, rem_field_name in sorted(self.old_field_keys - self.new_field_keys): if rem_app_label == app_label and rem_model_name == model_name: old_field = old_model_state.fields[rem_field_name] old_field_dec = self.deep_deconstruct(old_field) if field.remote_field and field.remote_field.model and 'to' in old_field_dec[2]: old_rel_to = old_field_dec[2]['to'] if old_rel_to in self.renamed_models_rel: old_field_dec[2]['to'] = self.renamed_models_rel[old_rel_to] old_field.set_attributes_from_name(rem_field_name) old_db_column = old_field.get_attname_column()[1] if (old_field_dec == field_dec or ( # Was the field renamed and db_column equal to the # old field's column added? old_field_dec[0:2] == field_dec[0:2] and dict(old_field_dec[2], db_column=old_db_column) == field_dec[2])): if self.questioner.ask_rename(model_name, rem_field_name, field_name, field): self.add_operation( app_label, operations.RenameField( model_name=model_name, old_name=rem_field_name, new_name=field_name, ) ) self.old_field_keys.remove((rem_app_label, rem_model_name, rem_field_name)) self.old_field_keys.add((app_label, model_name, field_name)) self.renamed_fields[app_label, model_name, field_name] = rem_field_name break def generate_added_fields(self): """Make AddField operations.""" for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys): self._generate_added_field(app_label, model_name, field_name) def _generate_added_field(self, app_label, model_name, field_name): field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name) # Foreign key and M2M fields depend on the models they reference. dependencies = [] if field.remote_field and field.remote_field.model: dependencies.extend(self._get_dependencies_for_foreign_key(field)) # You can't just add NOT NULL fields with no default or fields # which don't allow empty strings as default. time_fields = (models.DateField, models.DateTimeField, models.TimeField) preserve_default = ( field.null or field.has_default() or field.many_to_many or (field.blank and field.empty_strings_allowed) or (isinstance(field, time_fields) and field.auto_now) ) if not preserve_default: field = field.clone() if isinstance(field, time_fields) and field.auto_now_add: field.default = self.questioner.ask_auto_now_add_addition(field_name, model_name) else: field.default = self.questioner.ask_not_null_addition(field_name, model_name) self.add_operation( app_label, operations.AddField( model_name=model_name, name=field_name, field=field, preserve_default=preserve_default, ), dependencies=dependencies, ) def generate_removed_fields(self): """Make RemoveField operations.""" for app_label, model_name, field_name in sorted(self.old_field_keys - self.new_field_keys): self._generate_removed_field(app_label, model_name, field_name) def _generate_removed_field(self, app_label, model_name, field_name): self.add_operation( app_label, operations.RemoveField( model_name=model_name, name=field_name, ), # We might need to depend on the removal of an # order_with_respect_to or index/unique_together operation; # this is safely ignored if there isn't one dependencies=[ (app_label, model_name, field_name, "order_wrt_unset"), (app_label, model_name, field_name, "foo_together_change"), ], ) def generate_altered_fields(self): """ Make AlterField operations, or possibly RemoveField/AddField if alter isn't possible.
""" for app_label, model_name, field_name in sorted(self.old_field_keys & self.new_field_keys): # Did the field change? old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_field_name = self.renamed_fields.get((app_label, model_name, field_name), field_name) old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field(old_field_name) new_field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name) dependencies = [] # Implement any model renames on relations; these are handled by RenameModel # so we need to exclude them from the comparison if hasattr(new_field, "remote_field") and getattr(new_field.remote_field, "model", None): rename_key = ( new_field.remote_field.model._meta.app_label, new_field.remote_field.model._meta.model_name, ) if rename_key in self.renamed_models: new_field.remote_field.model = old_field.remote_field.model # Handle ForeignKey which can only have a single to_field. remote_field_name = getattr(new_field.remote_field, 'field_name', None) if remote_field_name: to_field_rename_key = rename_key + (remote_field_name,) if to_field_rename_key in self.renamed_fields: # Repoint both model and field name because to_field # inclusion in ForeignKey.deconstruct() is based on # both. new_field.remote_field.model = old_field.remote_field.model new_field.remote_field.field_name = old_field.remote_field.field_name # Handle ForeignObjects which can have multiple from_fields/to_fields. from_fields = getattr(new_field, 'from_fields', None) if from_fields: from_rename_key = (app_label, model_name) new_field.from_fields = tuple([ self.renamed_fields.get(from_rename_key + (from_field,), from_field) for from_field in from_fields ]) new_field.to_fields = tuple([ self.renamed_fields.get(rename_key + (to_field,), to_field) for to_field in new_field.to_fields ]) dependencies.extend(self._get_dependencies_for_foreign_key(new_field)) if hasattr(new_field, "remote_field") and getattr(new_field.remote_field, "through", None): rename_key = ( new_field.remote_field.through._meta.app_label, new_field.remote_field.through._meta.model_name, ) if rename_key in self.renamed_models: new_field.remote_field.through = old_field.remote_field.through old_field_dec = self.deep_deconstruct(old_field) new_field_dec = self.deep_deconstruct(new_field) if old_field_dec != new_field_dec: both_m2m = old_field.many_to_many and new_field.many_to_many neither_m2m = not old_field.many_to_many and not new_field.many_to_many if both_m2m or neither_m2m: # Either both fields are m2m or neither is preserve_default = True if (old_field.null and not new_field.null and not new_field.has_default() and not new_field.many_to_many): field = new_field.clone() new_default = self.questioner.ask_not_null_alteration(field_name, model_name) if new_default is not models.NOT_PROVIDED: field.default = new_default preserve_default = False else: field = new_field self.add_operation( app_label, operations.AlterField( model_name=model_name, name=field_name, field=field, preserve_default=preserve_default, ), dependencies=dependencies, ) else: # We cannot alter between m2m and concrete fields self._generate_removed_field(app_label, model_name, field_name) self._generate_added_field(app_label, model_name, field_name) def create_altered_indexes(self): option_name = operations.AddIndex.option_name for app_label, model_name in sorted(self.kept_model_keys): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, 
old_model_name] new_model_state = self.to_state.models[app_label, model_name] old_indexes = old_model_state.options[option_name] new_indexes = new_model_state.options[option_name] add_idx = [idx for idx in new_indexes if idx not in old_indexes] rem_idx = [idx for idx in old_indexes if idx not in new_indexes] self.altered_indexes.update({ (app_label, model_name): { 'added_indexes': add_idx, 'removed_indexes': rem_idx, } }) def generate_added_indexes(self): for (app_label, model_name), alt_indexes in self.altered_indexes.items(): for index in alt_indexes['added_indexes']: self.add_operation( app_label, operations.AddIndex( model_name=model_name, index=index, ) ) def generate_removed_indexes(self): for (app_label, model_name), alt_indexes in self.altered_indexes.items(): for index in alt_indexes['removed_indexes']: self.add_operation( app_label, operations.RemoveIndex( model_name=model_name, name=index.name, ) ) def create_altered_constraints(self): option_name = operations.AddConstraint.option_name for app_label, model_name in sorted(self.kept_model_keys): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] new_model_state = self.to_state.models[app_label, model_name] old_constraints = old_model_state.options[option_name] new_constraints = new_model_state.options[option_name] add_constraints = [c for c in new_constraints if c not in old_constraints] rem_constraints = [c for c in old_constraints if c not in new_constraints] self.altered_constraints.update({ (app_label, model_name): { 'added_constraints': add_constraints, 'removed_constraints': rem_constraints, } }) def generate_added_constraints(self): for (app_label, model_name), alt_constraints in self.altered_constraints.items(): for constraint in alt_constraints['added_constraints']: self.add_operation( app_label, operations.AddConstraint( model_name=model_name, constraint=constraint, ) ) def generate_removed_constraints(self): for (app_label, model_name), alt_constraints in self.altered_constraints.items(): for constraint in alt_constraints['removed_constraints']: self.add_operation( app_label, operations.RemoveConstraint( model_name=model_name, name=constraint.name, ) ) def _get_dependencies_for_foreign_key(self, field): # Account for FKs to swappable models swappable_setting = getattr(field, 'swappable_setting', None) if swappable_setting is not None: dep_app_label = "__setting__" dep_object_name = swappable_setting else: dep_app_label = field.remote_field.model._meta.app_label dep_object_name = field.remote_field.model._meta.object_name dependencies = [(dep_app_label, dep_object_name, None, True)] if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created: dependencies.append(( field.remote_field.through._meta.app_label, field.remote_field.through._meta.object_name, None, True, )) return dependencies def _generate_altered_foo_together(self, operation): option_name = operation.option_name for app_label, model_name in sorted(self.kept_model_keys): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] new_model_state = self.to_state.models[app_label, model_name] # We run the old version through the field renames to account for those old_value = old_model_state.options.get(option_name) old_value = { tuple( self.renamed_fields.get((app_label, model_name, n), n) for n in unique ) for unique in old_value } if 
old_value else set() new_value = new_model_state.options.get(option_name) new_value = set(new_value) if new_value else set() if old_value != new_value: dependencies = [] for foo_togethers in new_value: for field_name in foo_togethers: field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name) if field.remote_field and field.remote_field.model: dependencies.extend(self._get_dependencies_for_foreign_key(field)) self.add_operation( app_label, operation( name=model_name, **{option_name: new_value} ), dependencies=dependencies, ) def generate_altered_unique_together(self): self._generate_altered_foo_together(operations.AlterUniqueTogether) def generate_altered_index_together(self): self._generate_altered_foo_together(operations.AlterIndexTogether) def generate_altered_db_table(self): models_to_check = self.kept_model_keys.union(self.kept_proxy_keys, self.kept_unmanaged_keys) for app_label, model_name in sorted(models_to_check): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] new_model_state = self.to_state.models[app_label, model_name] old_db_table_name = old_model_state.options.get('db_table') new_db_table_name = new_model_state.options.get('db_table') if old_db_table_name != new_db_table_name: self.add_operation( app_label, operations.AlterModelTable( name=model_name, table=new_db_table_name, ) ) def generate_altered_options(self): """ Work out if any non-schema-affecting options have changed and make an operation to represent them in state changes (in case Python code in migrations needs them). """ models_to_check = self.kept_model_keys.union( self.kept_proxy_keys, self.kept_unmanaged_keys, # unmanaged converted to managed self.old_unmanaged_keys & self.new_model_keys, # managed converted to unmanaged self.old_model_keys & self.new_unmanaged_keys, ) for app_label, model_name in sorted(models_to_check): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] new_model_state = self.to_state.models[app_label, model_name] old_options = { key: value for key, value in old_model_state.options.items() if key in AlterModelOptions.ALTER_OPTION_KEYS } new_options = { key: value for key, value in new_model_state.options.items() if key in AlterModelOptions.ALTER_OPTION_KEYS } if old_options != new_options: self.add_operation( app_label, operations.AlterModelOptions( name=model_name, options=new_options, ) ) def generate_altered_order_with_respect_to(self): for app_label, model_name in sorted(self.kept_model_keys): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] new_model_state = self.to_state.models[app_label, model_name] if (old_model_state.options.get("order_with_respect_to") != new_model_state.options.get("order_with_respect_to")): # Make sure it comes second if we're adding # (removal dependency is part of RemoveField) dependencies = [] if new_model_state.options.get("order_with_respect_to"): dependencies.append(( app_label, model_name, new_model_state.options["order_with_respect_to"], True, )) # Actually generate the operation self.add_operation( app_label, operations.AlterOrderWithRespectTo( name=model_name, order_with_respect_to=new_model_state.options.get('order_with_respect_to'), ), dependencies=dependencies, ) def generate_altered_managers(self): for app_label, model_name in 
sorted(self.kept_model_keys): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] new_model_state = self.to_state.models[app_label, model_name] if old_model_state.managers != new_model_state.managers: self.add_operation( app_label, operations.AlterModelManagers( name=model_name, managers=new_model_state.managers, ) ) def arrange_for_graph(self, changes, graph, migration_name=None): """ Take a result from changes() and a MigrationGraph, and fix the names and dependencies of the changes so they extend the graph from the leaf nodes for each app. """ leaves = graph.leaf_nodes() name_map = {} for app_label, migrations in list(changes.items()): if not migrations: continue # Find the app label's current leaf node app_leaf = None for leaf in leaves: if leaf[0] == app_label: app_leaf = leaf break # Do they want an initial migration for this app? if app_leaf is None and not self.questioner.ask_initial(app_label): # They don't. for migration in migrations: name_map[(app_label, migration.name)] = (app_label, "__first__") del changes[app_label] continue # Work out the next number in the sequence if app_leaf is None: next_number = 1 else: next_number = (self.parse_number(app_leaf[1]) or 0) + 1 # Name each migration for i, migration in enumerate(migrations): if i == 0 and app_leaf: migration.dependencies.append(app_leaf) if i == 0 and not app_leaf: new_name = "0001_%s" % migration_name if migration_name else "0001_initial" else: new_name = "%04i_%s" % ( next_number, migration_name or self.suggest_name(migration.operations)[:100], ) name_map[(app_label, migration.name)] = (app_label, new_name) next_number += 1 migration.name = new_name # Now fix dependencies for migrations in changes.values(): for migration in migrations: migration.dependencies = [name_map.get(d, d) for d in migration.dependencies] return changes def _trim_to_apps(self, changes, app_labels): """ Take changes from arrange_for_graph() and set of app labels, and return a modified set of changes which trims out as many migrations that are not in app_labels as possible. Note that some other migrations may still be present as they may be required dependencies. """ # Gather other app dependencies in a first pass app_dependencies = {} for app_label, migrations in changes.items(): for migration in migrations: for dep_app_label, name in migration.dependencies: app_dependencies.setdefault(app_label, set()).add(dep_app_label) required_apps = set(app_labels) # Keep resolving till there's no change old_required_apps = None while old_required_apps != required_apps: old_required_apps = set(required_apps) required_apps.update(*[app_dependencies.get(app_label, ()) for app_label in required_apps]) # Remove all migrations that aren't needed for app_label in list(changes): if app_label not in required_apps: del changes[app_label] return changes @classmethod def suggest_name(cls, ops): """ Given a set of operations, suggest a name for the migration they might represent. Names are not guaranteed to be unique, but put some effort into the fallback name to avoid VCS conflicts if possible. 
""" name = None if len(ops) == 1: name = ops[0].migration_name_fragment elif len(ops) > 1 and all(isinstance(o, operations.CreateModel) for o in ops): name = '_'.join(sorted(o.migration_name_fragment for o in ops)) if name is None: name = 'auto_%s' % get_migration_name_timestamp() return name @classmethod def parse_number(cls, name): """ Given a migration name, try to extract a number from the beginning of it. If no number is found, return None. """ match = re.match(r'^\d+', name) if match: return int(match[0]) return None
1f9e903ec3e772c2635afd3b18cbce966b4e5d971363d3c558006aa15f3b22e4
import copy import datetime import inspect from decimal import Decimal from django.core.exceptions import EmptyResultSet, FieldError from django.db import NotSupportedError, connection from django.db.models import fields from django.db.models.constants import LOOKUP_SEP from django.db.models.query_utils import Q from django.utils.deconstruct import deconstructible from django.utils.functional import cached_property from django.utils.hashable import make_hashable class SQLiteNumericMixin: """ Some expressions with output_field=DecimalField() must be cast to numeric to be properly filtered. """ def as_sqlite(self, compiler, connection, **extra_context): sql, params = self.as_sql(compiler, connection, **extra_context) try: if self.output_field.get_internal_type() == 'DecimalField': sql = 'CAST(%s AS NUMERIC)' % sql except FieldError: pass return sql, params class Combinable: """ Provide the ability to combine one or two objects with some connector. For example F('foo') + F('bar'). """ # Arithmetic connectors ADD = '+' SUB = '-' MUL = '*' DIV = '/' POW = '^' # The following is a quoted % operator - it is quoted because it can be # used in strings that also have parameter substitution. MOD = '%%' # Bitwise operators - note that these are generated by .bitand() # and .bitor(), the '&' and '|' are reserved for boolean operator # usage. BITAND = '&' BITOR = '|' BITLEFTSHIFT = '<<' BITRIGHTSHIFT = '>>' BITXOR = '#' def _combine(self, other, connector, reversed): if not hasattr(other, 'resolve_expression'): # everything must be resolvable to an expression if isinstance(other, datetime.timedelta): other = DurationValue(other, output_field=fields.DurationField()) else: other = Value(other) if reversed: return CombinedExpression(other, connector, self) return CombinedExpression(self, connector, other) ############# # OPERATORS # ############# def __neg__(self): return self._combine(-1, self.MUL, False) def __add__(self, other): return self._combine(other, self.ADD, False) def __sub__(self, other): return self._combine(other, self.SUB, False) def __mul__(self, other): return self._combine(other, self.MUL, False) def __truediv__(self, other): return self._combine(other, self.DIV, False) def __mod__(self, other): return self._combine(other, self.MOD, False) def __pow__(self, other): return self._combine(other, self.POW, False) def __and__(self, other): if getattr(self, 'conditional', False) and getattr(other, 'conditional', False): return Q(self) & Q(other) raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) def bitand(self, other): return self._combine(other, self.BITAND, False) def bitleftshift(self, other): return self._combine(other, self.BITLEFTSHIFT, False) def bitrightshift(self, other): return self._combine(other, self.BITRIGHTSHIFT, False) def bitxor(self, other): return self._combine(other, self.BITXOR, False) def __or__(self, other): if getattr(self, 'conditional', False) and getattr(other, 'conditional', False): return Q(self) | Q(other) raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." 
) def bitor(self, other): return self._combine(other, self.BITOR, False) def __radd__(self, other): return self._combine(other, self.ADD, True) def __rsub__(self, other): return self._combine(other, self.SUB, True) def __rmul__(self, other): return self._combine(other, self.MUL, True) def __rtruediv__(self, other): return self._combine(other, self.DIV, True) def __rmod__(self, other): return self._combine(other, self.MOD, True) def __rpow__(self, other): return self._combine(other, self.POW, True) def __rand__(self, other): raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) def __ror__(self, other): raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) @deconstructible class BaseExpression: """Base class for all query expressions.""" # aggregate specific fields is_summary = False _output_field_resolved_to_none = False # Can the expression be used in a WHERE clause? filterable = True # Can the expression be used as a source expression in Window? window_compatible = False def __init__(self, output_field=None): if output_field is not None: self.output_field = output_field def __getstate__(self): state = self.__dict__.copy() state.pop('convert_value', None) return state def get_db_converters(self, connection): return ( [] if self.convert_value is self._convert_value_noop else [self.convert_value] ) + self.output_field.get_db_converters(connection) def get_source_expressions(self): return [] def set_source_expressions(self, exprs): assert not exprs def _parse_expressions(self, *expressions): return [ arg if hasattr(arg, 'resolve_expression') else ( F(arg) if isinstance(arg, str) else Value(arg) ) for arg in expressions ] def as_sql(self, compiler, connection): """ Responsible for returning a (sql, [params]) tuple to be included in the current query. Different backends can provide their own implementation, by providing an `as_{vendor}` method and patching the Expression: ``` def override_as_sql(self, compiler, connection): # custom logic return super().as_sql(compiler, connection) setattr(Expression, 'as_' + connection.vendor, override_as_sql) ``` Arguments: * compiler: the query compiler responsible for generating the query. Must have a compile method, returning a (sql, [params]) tuple. Calling compiler(value) will return a quoted `value`. * connection: the database connection used for the current query. Return: (sql, params) Where `sql` is a string containing ordered sql parameters to be replaced with the elements of the list `params`. """ raise NotImplementedError("Subclasses must implement as_sql()") @cached_property def contains_aggregate(self): return any(expr and expr.contains_aggregate for expr in self.get_source_expressions()) @cached_property def contains_over_clause(self): return any(expr and expr.contains_over_clause for expr in self.get_source_expressions()) @cached_property def contains_column_references(self): return any(expr and expr.contains_column_references for expr in self.get_source_expressions()) def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): """ Provide the chance to do any preprocessing or validation before being added to the query.
Arguments: * query: the backend query implementation * allow_joins: boolean allowing or denying use of joins in this query * reuse: a set of reusable joins for multijoins * summarize: a terminal aggregate clause * for_save: whether this expression is about to be used in a save or update Return: an Expression to be added to the query. """ c = self.copy() c.is_summary = summarize c.set_source_expressions([ expr.resolve_expression(query, allow_joins, reuse, summarize) if expr else None for expr in c.get_source_expressions() ]) return c @property def conditional(self): return isinstance(self.output_field, fields.BooleanField) @property def field(self): return self.output_field @cached_property def output_field(self): """Return the output type of this expression.""" output_field = self._resolve_output_field() if output_field is None: self._output_field_resolved_to_none = True raise FieldError('Cannot resolve expression type, unknown output_field') return output_field @cached_property def _output_field_or_none(self): """ Return the output field of this expression, or None if _resolve_output_field() didn't return an output type. """ try: return self.output_field except FieldError: if not self._output_field_resolved_to_none: raise def _resolve_output_field(self): """ Attempt to infer the output type of the expression. If the output fields of all source fields match, then simply infer the same type here. This isn't always correct, but it makes sense most of the time. Consider the difference between `2 + 2` and `2 / 3`. Inferring the type here is a convenience for the common case. The user should supply their own output_field with more complex computations. If a source's output field resolves to None, exclude it from this check. If all sources are None, then an error is raised higher up the stack in the output_field property. """ sources_iter = (source for source in self.get_source_fields() if source is not None) for output_field in sources_iter: for source in sources_iter: if not isinstance(output_field, source.__class__): raise FieldError( 'Expression contains mixed types: %s, %s. You must ' 'set output_field.' % ( output_field.__class__.__name__, source.__class__.__name__, ) ) return output_field @staticmethod def _convert_value_noop(value, expression, connection): return value @cached_property def convert_value(self): """ Expressions provide their own converters because users have the option of manually specifying the output_field which may be a different type from the one the database returns.
""" field = self.output_field internal_type = field.get_internal_type() if internal_type == 'FloatField': return lambda value, expression, connection: None if value is None else float(value) elif internal_type.endswith('IntegerField'): return lambda value, expression, connection: None if value is None else int(value) elif internal_type == 'DecimalField': return lambda value, expression, connection: None if value is None else Decimal(value) return self._convert_value_noop def get_lookup(self, lookup): return self.output_field.get_lookup(lookup) def get_transform(self, name): return self.output_field.get_transform(name) def relabeled_clone(self, change_map): clone = self.copy() clone.set_source_expressions([ e.relabeled_clone(change_map) if e is not None else None for e in self.get_source_expressions() ]) return clone def copy(self): return copy.copy(self) def get_group_by_cols(self, alias=None): if not self.contains_aggregate: return [self] cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols def get_source_fields(self): """Return the underlying field types used by this aggregate.""" return [e._output_field_or_none for e in self.get_source_expressions()] def asc(self, **kwargs): return OrderBy(self, **kwargs) def desc(self, **kwargs): return OrderBy(self, descending=True, **kwargs) def reverse_ordering(self): return self def flatten(self): """ Recursively yield this expression and all subexpressions, in depth-first order. """ yield self for expr in self.get_source_expressions(): if expr: yield from expr.flatten() def select_format(self, compiler, sql, params): """ Custom format for select clauses. For example, EXISTS expressions need to be wrapped in CASE WHEN on Oracle. """ return self.output_field.select_format(compiler, sql, params) @cached_property def identity(self): constructor_signature = inspect.signature(self.__init__) args, kwargs = self._constructor_args signature = constructor_signature.bind_partial(*args, **kwargs) signature.apply_defaults() arguments = signature.arguments.items() identity = [self.__class__] for arg, value in arguments: if isinstance(value, fields.Field): if value.name and value.model: value = (value.model._meta.label, value.name) else: value = type(value) else: value = make_hashable(value) identity.append((arg, value)) return tuple(identity) def __eq__(self, other): if not isinstance(other, BaseExpression): return NotImplemented return other.identity == self.identity def __hash__(self): return hash(self.identity) class Expression(BaseExpression, Combinable): """An expression that can be combined with other expressions.""" pass class CombinedExpression(SQLiteNumericMixin, Expression): def __init__(self, lhs, connector, rhs, output_field=None): super().__init__(output_field=output_field) self.connector = connector self.lhs = lhs self.rhs = rhs def __repr__(self): return "<{}: {}>".format(self.__class__.__name__, self) def __str__(self): return "{} {} {}".format(self.lhs, self.connector, self.rhs) def get_source_expressions(self): return [self.lhs, self.rhs] def set_source_expressions(self, exprs): self.lhs, self.rhs = exprs def as_sql(self, compiler, connection): try: lhs_output = self.lhs.output_field except FieldError: lhs_output = None try: rhs_output = self.rhs.output_field except FieldError: rhs_output = None if (not connection.features.has_native_duration_field and ((lhs_output and lhs_output.get_internal_type() == 'DurationField') or (rhs_output and rhs_output.get_internal_type() == 'DurationField'))): 
return DurationExpression(self.lhs, self.connector, self.rhs).as_sql(compiler, connection) if (lhs_output and rhs_output and self.connector == self.SUB and lhs_output.get_internal_type() in {'DateField', 'DateTimeField', 'TimeField'} and lhs_output.get_internal_type() == rhs_output.get_internal_type()): return TemporalSubtraction(self.lhs, self.rhs).as_sql(compiler, connection) expressions = [] expression_params = [] sql, params = compiler.compile(self.lhs) expressions.append(sql) expression_params.extend(params) sql, params = compiler.compile(self.rhs) expressions.append(sql) expression_params.extend(params) # order of precedence expression_wrapper = '(%s)' sql = connection.ops.combine_expression(self.connector, expressions) return expression_wrapper % sql, expression_params def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize c.lhs = c.lhs.resolve_expression(query, allow_joins, reuse, summarize, for_save) c.rhs = c.rhs.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c class DurationExpression(CombinedExpression): def compile(self, side, compiler, connection): if not isinstance(side, DurationValue): try: output = side.output_field except FieldError: pass else: if output.get_internal_type() == 'DurationField': sql, params = compiler.compile(side) return connection.ops.format_for_duration_arithmetic(sql), params return compiler.compile(side) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) expressions = [] expression_params = [] sql, params = self.compile(self.lhs, compiler, connection) expressions.append(sql) expression_params.extend(params) sql, params = self.compile(self.rhs, compiler, connection) expressions.append(sql) expression_params.extend(params) # order of precedence expression_wrapper = '(%s)' sql = connection.ops.combine_duration_expression(self.connector, expressions) return expression_wrapper % sql, expression_params class TemporalSubtraction(CombinedExpression): output_field = fields.DurationField() def __init__(self, lhs, rhs): super().__init__(lhs, self.SUB, rhs) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) lhs = compiler.compile(self.lhs) rhs = compiler.compile(self.rhs) return connection.ops.subtract_temporals(self.lhs.output_field.get_internal_type(), lhs, rhs) @deconstructible class F(Combinable): """An object capable of resolving references to existing query objects.""" def __init__(self, name): """ Arguments: * name: the name of the field this expression references """ self.name = name def __repr__(self): return "{}({})".format(self.__class__.__name__, self.name) def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): return query.resolve_ref(self.name, allow_joins, reuse, summarize) def asc(self, **kwargs): return OrderBy(self, **kwargs) def desc(self, **kwargs): return OrderBy(self, descending=True, **kwargs) def __eq__(self, other): return self.__class__ == other.__class__ and self.name == other.name def __hash__(self): return hash(self.name) class ResolvedOuterRef(F): """ An object that contains a reference to an outer query. In this case, the reference to the outer query has been resolved because the inner query has been used as a subquery. """ contains_aggregate = False def as_sql(self, *args, **kwargs): raise ValueError( 'This queryset contains a reference to an outer query and may ' 'only be used in a subquery.' 
) def resolve_expression(self, *args, **kwargs): col = super().resolve_expression(*args, **kwargs) # FIXME: Rename possibly_multivalued to multivalued and fix detection # for non-multivalued JOINs (e.g. foreign key fields). This should take # into account only many-to-many and one-to-many relationships. col.possibly_multivalued = LOOKUP_SEP in self.name return col def relabeled_clone(self, relabels): return self def get_group_by_cols(self, alias=None): return [] class OuterRef(F): def resolve_expression(self, *args, **kwargs): if isinstance(self.name, self.__class__): return self.name return ResolvedOuterRef(self.name) def relabeled_clone(self, relabels): return self class Func(SQLiteNumericMixin, Expression): """An SQL function call.""" function = None template = '%(function)s(%(expressions)s)' arg_joiner = ', ' arity = None # The number of arguments the function accepts. def __init__(self, *expressions, output_field=None, **extra): if self.arity is not None and len(expressions) != self.arity: raise TypeError( "'%s' takes exactly %s %s (%s given)" % ( self.__class__.__name__, self.arity, "argument" if self.arity == 1 else "arguments", len(expressions), ) ) super().__init__(output_field=output_field) self.source_expressions = self._parse_expressions(*expressions) self.extra = extra def __repr__(self): args = self.arg_joiner.join(str(arg) for arg in self.source_expressions) extra = {**self.extra, **self._get_repr_options()} if extra: extra = ', '.join(str(key) + '=' + str(val) for key, val in sorted(extra.items())) return "{}({}, {})".format(self.__class__.__name__, args, extra) return "{}({})".format(self.__class__.__name__, args) def _get_repr_options(self): """Return a dict of extra __init__() options to include in the repr.""" return {} def get_source_expressions(self): return self.source_expressions def set_source_expressions(self, exprs): self.source_expressions = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize for pos, arg in enumerate(c.source_expressions): c.source_expressions[pos] = arg.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def as_sql(self, compiler, connection, function=None, template=None, arg_joiner=None, **extra_context): connection.ops.check_expression_support(self) sql_parts = [] params = [] for arg in self.source_expressions: arg_sql, arg_params = compiler.compile(arg) sql_parts.append(arg_sql) params.extend(arg_params) data = {**self.extra, **extra_context} # Use the first supplied value in this order: the parameter to this # method, a value supplied in __init__()'s **extra (the value in # `data`), or the value defined on the class. if function is not None: data['function'] = function else: data.setdefault('function', self.function) template = template or data.get('template', self.template) arg_joiner = arg_joiner or data.get('arg_joiner', self.arg_joiner) data['expressions'] = data['field'] = arg_joiner.join(sql_parts) return template % data, params def copy(self): copy = super().copy() copy.source_expressions = self.source_expressions[:] copy.extra = self.extra.copy() return copy class Value(Expression): """Represent a wrapped value as a node within an expression.""" def __init__(self, value, output_field=None): """ Arguments: * value: the value this expression represents. The value will be added into the sql parameter list and properly quoted. 
* output_field: an instance of the model field type that this expression will return, such as IntegerField() or CharField(). """ super().__init__(output_field=output_field) self.value = value def __repr__(self): return "{}({})".format(self.__class__.__name__, self.value) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) val = self.value output_field = self._output_field_or_none if output_field is not None: if self.for_save: val = output_field.get_db_prep_save(val, connection=connection) else: val = output_field.get_db_prep_value(val, connection=connection) if hasattr(output_field, 'get_placeholder'): return output_field.get_placeholder(val, compiler, connection), [val] if val is None: # cx_Oracle does not always convert None to the appropriate # NULL type (like in case expressions using numbers), so we # use a literal SQL NULL return 'NULL', [] return '%s', [val] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = super().resolve_expression(query, allow_joins, reuse, summarize, for_save) c.for_save = for_save return c def get_group_by_cols(self, alias=None): return [] class DurationValue(Value): def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) if connection.features.has_native_duration_field: return super().as_sql(compiler, connection) return connection.ops.date_interval_sql(self.value), [] class RawSQL(Expression): def __init__(self, sql, params, output_field=None): if output_field is None: output_field = fields.Field() self.sql, self.params = sql, params super().__init__(output_field=output_field) def __repr__(self): return "{}({}, {})".format(self.__class__.__name__, self.sql, self.params) def as_sql(self, compiler, connection): return '(%s)' % self.sql, self.params def get_group_by_cols(self, alias=None): return [self] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): # Resolve parent fields used in raw SQL.
for parent in query.model._meta.get_parent_list(): for parent_field in parent._meta.local_fields: _, column_name = parent_field.get_attname_column() if column_name.lower() in self.sql.lower(): query.resolve_ref(parent_field.name, allow_joins, reuse, summarize) break return super().resolve_expression(query, allow_joins, reuse, summarize, for_save) class Star(Expression): def __repr__(self): return "'*'" def as_sql(self, compiler, connection): return '*', [] class Random(Expression): output_field = fields.FloatField() def __repr__(self): return "Random()" def as_sql(self, compiler, connection): return connection.ops.random_function_sql(), [] class Col(Expression): contains_column_references = True possibly_multivalued = False def __init__(self, alias, target, output_field=None): if output_field is None: output_field = target super().__init__(output_field=output_field) self.alias, self.target = alias, target def __repr__(self): alias, target = self.alias, self.target identifiers = (alias, str(target)) if alias else (str(target),) return '{}({})'.format(self.__class__.__name__, ', '.join(identifiers)) def as_sql(self, compiler, connection): alias, column = self.alias, self.target.column identifiers = (alias, column) if alias else (column,) sql = '.'.join(map(compiler.quote_name_unless_alias, identifiers)) return sql, [] def relabeled_clone(self, relabels): if self.alias is None: return self return self.__class__(relabels.get(self.alias, self.alias), self.target, self.output_field) def get_group_by_cols(self, alias=None): return [self] def get_db_converters(self, connection): if self.target == self.output_field: return self.output_field.get_db_converters(connection) return (self.output_field.get_db_converters(connection) + self.target.get_db_converters(connection)) class Ref(Expression): """ Reference to column alias of the query. For example, Ref('sum_cost') in qs.annotate(sum_cost=Sum('cost')) query. """ def __init__(self, refs, source): super().__init__() self.refs, self.source = refs, source def __repr__(self): return "{}({}, {})".format(self.__class__.__name__, self.refs, self.source) def get_source_expressions(self): return [self.source] def set_source_expressions(self, exprs): self.source, = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): # The sub-expression `source` has already been resolved, as this is # just a reference to the name of `source`. return self def relabeled_clone(self, relabels): return self def as_sql(self, compiler, connection): return connection.ops.quote_name(self.refs), [] def get_group_by_cols(self, alias=None): return [self] class ExpressionList(Func): """ An expression containing multiple expressions. Can be used to provide a list of expressions as an argument to another expression, like an ordering clause. """ template = '%(expressions)s' def __init__(self, *expressions, **extra): if not expressions: raise ValueError('%s requires at least one expression.' % self.__class__.__name__) super().__init__(*expressions, **extra) def __str__(self): return self.arg_joiner.join(str(arg) for arg in self.source_expressions) class ExpressionWrapper(Expression): """ An expression that can wrap another expression so that it can provide extra context to the inner expression, such as the output_field. 
""" def __init__(self, expression, output_field): super().__init__(output_field=output_field) self.expression = expression def set_source_expressions(self, exprs): self.expression = exprs[0] def get_source_expressions(self): return [self.expression] def as_sql(self, compiler, connection): return self.expression.as_sql(compiler, connection) def __repr__(self): return "{}({})".format(self.__class__.__name__, self.expression) class When(Expression): template = 'WHEN %(condition)s THEN %(result)s' # This isn't a complete conditional expression, must be used in Case(). conditional = False def __init__(self, condition=None, then=None, **lookups): if lookups: if condition is None: condition, lookups = Q(**lookups), None elif getattr(condition, 'conditional', False): condition, lookups = Q(condition, **lookups), None if condition is None or not getattr(condition, 'conditional', False) or lookups: raise TypeError( 'When() supports a Q object, a boolean expression, or lookups ' 'as a condition.' ) if isinstance(condition, Q) and not condition: raise ValueError("An empty Q() can't be used as a When() condition.") super().__init__(output_field=None) self.condition = condition self.result = self._parse_expressions(then)[0] def __str__(self): return "WHEN %r THEN %r" % (self.condition, self.result) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) def get_source_expressions(self): return [self.condition, self.result] def set_source_expressions(self, exprs): self.condition, self.result = exprs def get_source_fields(self): # We're only interested in the fields of the result expressions. return [self.result._output_field_or_none] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize if hasattr(c.condition, 'resolve_expression'): c.condition = c.condition.resolve_expression(query, allow_joins, reuse, summarize, False) c.result = c.result.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def as_sql(self, compiler, connection, template=None, **extra_context): connection.ops.check_expression_support(self) template_params = extra_context sql_params = [] condition_sql, condition_params = compiler.compile(self.condition) template_params['condition'] = condition_sql sql_params.extend(condition_params) result_sql, result_params = compiler.compile(self.result) template_params['result'] = result_sql sql_params.extend(result_params) template = template or self.template return template % template_params, sql_params def get_group_by_cols(self, alias=None): # This is not a complete expression and cannot be used in GROUP BY. 
cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols class Case(Expression): """ An SQL searched CASE expression: CASE WHEN n > 0 THEN 'positive' WHEN n < 0 THEN 'negative' ELSE 'zero' END """ template = 'CASE %(cases)s ELSE %(default)s END' case_joiner = ' ' def __init__(self, *cases, default=None, output_field=None, **extra): if not all(isinstance(case, When) for case in cases): raise TypeError("Positional arguments must all be When objects.") super().__init__(output_field) self.cases = list(cases) self.default = self._parse_expressions(default)[0] self.extra = extra def __str__(self): return "CASE %s, ELSE %r" % (', '.join(str(c) for c in self.cases), self.default) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) def get_source_expressions(self): return self.cases + [self.default] def set_source_expressions(self, exprs): *self.cases, self.default = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize for pos, case in enumerate(c.cases): c.cases[pos] = case.resolve_expression(query, allow_joins, reuse, summarize, for_save) c.default = c.default.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def copy(self): c = super().copy() c.cases = c.cases[:] return c def as_sql(self, compiler, connection, template=None, case_joiner=None, **extra_context): connection.ops.check_expression_support(self) if not self.cases: return compiler.compile(self.default) template_params = {**self.extra, **extra_context} case_parts = [] sql_params = [] for case in self.cases: try: case_sql, case_params = compiler.compile(case) except EmptyResultSet: continue case_parts.append(case_sql) sql_params.extend(case_params) default_sql, default_params = compiler.compile(self.default) if not case_parts: return default_sql, default_params case_joiner = case_joiner or self.case_joiner template_params['cases'] = case_joiner.join(case_parts) template_params['default'] = default_sql sql_params.extend(default_params) template = template or template_params.get('template', self.template) sql = template % template_params if self._output_field_or_none is not None: sql = connection.ops.unification_cast_sql(self.output_field) % sql return sql, sql_params class Subquery(Expression): """ An explicit subquery. It may contain OuterRef() references to the outer query which will be resolved when it is applied to that query. """ template = '(%(subquery)s)' contains_aggregate = False def __init__(self, queryset, output_field=None, **extra): self.query = queryset.query self.extra = extra # Prevent the QuerySet from being evaluated. 
self.queryset = queryset._chain(_result_cache=[], prefetch_done=True) super().__init__(output_field) def __getstate__(self): state = super().__getstate__() args, kwargs = state['_constructor_args'] if args: args = (self.queryset, *args[1:]) else: kwargs['queryset'] = self.queryset state['_constructor_args'] = args, kwargs return state def get_source_expressions(self): return [self.query] def set_source_expressions(self, exprs): self.query = exprs[0] def _resolve_output_field(self): return self.query.output_field def copy(self): clone = super().copy() clone.query = clone.query.clone() return clone @property def external_aliases(self): return self.query.external_aliases def as_sql(self, compiler, connection, template=None, **extra_context): connection.ops.check_expression_support(self) template_params = {**self.extra, **extra_context} subquery_sql, sql_params = self.query.as_sql(compiler, connection) template_params['subquery'] = subquery_sql[1:-1] template = template or template_params.get('template', self.template) sql = template % template_params return sql, sql_params def get_group_by_cols(self, alias=None): if alias: return [Ref(alias, self)] external_cols = self.query.get_external_cols() if any(col.possibly_multivalued for col in external_cols): return [self] return external_cols class Exists(Subquery): template = 'EXISTS(%(subquery)s)' output_field = fields.BooleanField() def __init__(self, queryset, negated=False, **kwargs): # As a performance optimization, remove ordering since EXISTS doesn't # care about it, just whether or not a row matches. queryset = queryset.order_by() self.negated = negated super().__init__(queryset, **kwargs) def __invert__(self): clone = self.copy() clone.negated = not self.negated return clone def as_sql(self, compiler, connection, template=None, **extra_context): sql, params = super().as_sql(compiler, connection, template, **extra_context) if self.negated: sql = 'NOT {}'.format(sql) return sql, params def select_format(self, compiler, sql, params): # Wrap EXISTS() with a CASE WHEN expression if a database backend # (e.g. Oracle) doesn't support boolean expression in SELECT or GROUP # BY list. 
if not compiler.connection.features.supports_boolean_expr_in_select_clause: sql = 'CASE WHEN {} THEN 1 ELSE 0 END'.format(sql) return sql, params class OrderBy(BaseExpression): template = '%(expression)s %(ordering)s' conditional = False def __init__(self, expression, descending=False, nulls_first=False, nulls_last=False): if nulls_first and nulls_last: raise ValueError('nulls_first and nulls_last are mutually exclusive') self.nulls_first = nulls_first self.nulls_last = nulls_last self.descending = descending if not hasattr(expression, 'resolve_expression'): raise ValueError('expression must be an expression type') self.expression = expression def __repr__(self): return "{}({}, descending={})".format( self.__class__.__name__, self.expression, self.descending) def set_source_expressions(self, exprs): self.expression = exprs[0] def get_source_expressions(self): return [self.expression] def as_sql(self, compiler, connection, template=None, **extra_context): template = template or self.template if connection.features.supports_order_by_nulls_modifier: if self.nulls_last: template = '%s NULLS LAST' % template elif self.nulls_first: template = '%s NULLS FIRST' % template else: if self.nulls_last and not ( self.descending and connection.features.order_by_nulls_first ): template = '%%(expression)s IS NULL, %s' % template elif self.nulls_first and not ( not self.descending and connection.features.order_by_nulls_first ): template = '%%(expression)s IS NOT NULL, %s' % template connection.ops.check_expression_support(self) expression_sql, params = compiler.compile(self.expression) placeholders = { 'expression': expression_sql, 'ordering': 'DESC' if self.descending else 'ASC', **extra_context, } template = template or self.template params *= template.count('%(expression)s') return (template % placeholders).rstrip(), params def as_oracle(self, compiler, connection): # Oracle doesn't allow ORDER BY EXISTS() unless it's wrapped in # a CASE WHEN. if isinstance(self.expression, Exists): copy = self.copy() copy.expression = Case( When(self.expression, then=True), default=False, output_field=fields.BooleanField(), ) return copy.as_sql(compiler, connection) return self.as_sql(compiler, connection) def get_group_by_cols(self, alias=None): cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols def reverse_ordering(self): self.descending = not self.descending if self.nulls_first or self.nulls_last: self.nulls_first = not self.nulls_first self.nulls_last = not self.nulls_last return self def asc(self): self.descending = False def desc(self): self.descending = True class Window(Expression): template = '%(expression)s OVER (%(window)s)' # Although the main expression may either be an aggregate or an # expression with an aggregate function, the GROUP BY that will # be introduced in the query as a result is not desired. contains_aggregate = False contains_over_clause = True filterable = False def __init__(self, expression, partition_by=None, order_by=None, frame=None, output_field=None): self.partition_by = partition_by self.order_by = order_by self.frame = frame if not getattr(expression, 'window_compatible', False): raise ValueError( "Expression '%s' isn't compatible with OVER clauses." 
% expression.__class__.__name__ ) if self.partition_by is not None: if not isinstance(self.partition_by, (tuple, list)): self.partition_by = (self.partition_by,) self.partition_by = ExpressionList(*self.partition_by) if self.order_by is not None: if isinstance(self.order_by, (list, tuple)): self.order_by = ExpressionList(*self.order_by) elif not isinstance(self.order_by, BaseExpression): raise ValueError( 'order_by must be either an Expression or a sequence of ' 'expressions.' ) super().__init__(output_field=output_field) self.source_expression = self._parse_expressions(expression)[0] def _resolve_output_field(self): return self.source_expression.output_field def get_source_expressions(self): return [self.source_expression, self.partition_by, self.order_by, self.frame] def set_source_expressions(self, exprs): self.source_expression, self.partition_by, self.order_by, self.frame = exprs def as_sql(self, compiler, connection, template=None): connection.ops.check_expression_support(self) if not connection.features.supports_over_clause: raise NotSupportedError('This backend does not support window expressions.') expr_sql, params = compiler.compile(self.source_expression) window_sql, window_params = [], [] if self.partition_by is not None: sql_expr, sql_params = self.partition_by.as_sql( compiler=compiler, connection=connection, template='PARTITION BY %(expressions)s', ) window_sql.extend(sql_expr) window_params.extend(sql_params) if self.order_by is not None: window_sql.append(' ORDER BY ') order_sql, order_params = compiler.compile(self.order_by) window_sql.extend(order_sql) window_params.extend(order_params) if self.frame: frame_sql, frame_params = compiler.compile(self.frame) window_sql.append(' ' + frame_sql) window_params.extend(frame_params) params.extend(window_params) template = template or self.template return template % { 'expression': expr_sql, 'window': ''.join(window_sql).strip() }, params def __str__(self): return '{} OVER ({}{}{})'.format( str(self.source_expression), 'PARTITION BY ' + str(self.partition_by) if self.partition_by else '', 'ORDER BY ' + str(self.order_by) if self.order_by else '', str(self.frame or ''), ) def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def get_group_by_cols(self, alias=None): return [] class WindowFrame(Expression): """ Model the frame clause in window expressions. There are two types of frame clauses which are subclasses, however, all processing and validation (by no means intended to be complete) is done here. Thus, providing an end for a frame is optional (the default is UNBOUNDED FOLLOWING, which is the last row in the frame). 
""" template = '%(frame_type)s BETWEEN %(start)s AND %(end)s' def __init__(self, start=None, end=None): self.start = Value(start) self.end = Value(end) def set_source_expressions(self, exprs): self.start, self.end = exprs def get_source_expressions(self): return [self.start, self.end] def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) start, end = self.window_frame_start_end(connection, self.start.value, self.end.value) return self.template % { 'frame_type': self.frame_type, 'start': start, 'end': end, }, [] def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def get_group_by_cols(self, alias=None): return [] def __str__(self): if self.start.value is not None and self.start.value < 0: start = '%d %s' % (abs(self.start.value), connection.ops.PRECEDING) elif self.start.value is not None and self.start.value == 0: start = connection.ops.CURRENT_ROW else: start = connection.ops.UNBOUNDED_PRECEDING if self.end.value is not None and self.end.value > 0: end = '%d %s' % (self.end.value, connection.ops.FOLLOWING) elif self.end.value is not None and self.end.value == 0: end = connection.ops.CURRENT_ROW else: end = connection.ops.UNBOUNDED_FOLLOWING return self.template % { 'frame_type': self.frame_type, 'start': start, 'end': end, } def window_frame_start_end(self, connection, start, end): raise NotImplementedError('Subclasses must implement window_frame_start_end().') class RowRange(WindowFrame): frame_type = 'ROWS' def window_frame_start_end(self, connection, start, end): return connection.ops.window_frame_rows_start_end(start, end) class ValueRange(WindowFrame): frame_type = 'RANGE' def window_frame_start_end(self, connection, start, end): return connection.ops.window_frame_range_start_end(start, end)
e01ba15a49a50907a554ea28f60a37b4f87871b2b0e01af3bad128e883c76398
import operator from collections import Counter, defaultdict from functools import partial, reduce from itertools import chain from operator import attrgetter from django.db import IntegrityError, connections, transaction from django.db.models import query_utils, signals, sql class ProtectedError(IntegrityError): def __init__(self, msg, protected_objects): self.protected_objects = protected_objects super().__init__(msg, protected_objects) class RestrictedError(IntegrityError): def __init__(self, msg, restricted_objects): self.restricted_objects = restricted_objects super().__init__(msg, restricted_objects) def CASCADE(collector, field, sub_objs, using): collector.collect( sub_objs, source=field.remote_field.model, source_attr=field.name, nullable=field.null, fail_on_restricted=False, ) if field.null and not connections[using].features.can_defer_constraint_checks: collector.add_field_update(field, None, sub_objs) def PROTECT(collector, field, sub_objs, using): raise ProtectedError( "Cannot delete some instances of model '%s' because they are " "referenced through a protected foreign key: '%s.%s'" % ( field.remote_field.model.__name__, sub_objs[0].__class__.__name__, field.name ), sub_objs ) def RESTRICT(collector, field, sub_objs, using): collector.add_restricted_objects(field, sub_objs) collector.add_dependency(field.remote_field.model, field.model) def SET(value): if callable(value): def set_on_delete(collector, field, sub_objs, using): collector.add_field_update(field, value(), sub_objs) else: def set_on_delete(collector, field, sub_objs, using): collector.add_field_update(field, value, sub_objs) set_on_delete.deconstruct = lambda: ('django.db.models.SET', (value,), {}) return set_on_delete def SET_NULL(collector, field, sub_objs, using): collector.add_field_update(field, None, sub_objs) def SET_DEFAULT(collector, field, sub_objs, using): collector.add_field_update(field, field.get_default(), sub_objs) def DO_NOTHING(collector, field, sub_objs, using): pass def get_candidate_relations_to_delete(opts): # The candidate relations are the ones that come from N-1 and 1-1 relations. # N-N (i.e., many-to-many) relations aren't candidates for deletion. return ( f for f in opts.get_fields(include_hidden=True) if f.auto_created and not f.concrete and (f.one_to_one or f.one_to_many) ) class Collector: def __init__(self, using): self.using = using # Initially, {model: {instances}}, later values become lists. self.data = defaultdict(set) # {model: {(field, value): {instances}}} self.field_updates = defaultdict(partial(defaultdict, set)) # {model: {field: {instances}}} self.restricted_objects = defaultdict(partial(defaultdict, set)) # fast_deletes is a list of queryset-likes that can be deleted without # fetching the objects into memory. self.fast_deletes = [] # Tracks deletion-order dependency for databases without transactions # or ability to defer constraint checks. Only concrete model classes # should be included, as the dependencies exist only between actual # database tables; proxy models are represented here by their concrete # parent. self.dependencies = defaultdict(set) # {model: {models}} def add(self, objs, source=None, nullable=False, reverse_dependency=False): """ Add 'objs' to the collection of objects to be deleted. If the call is the result of a cascade, 'source' should be the model that caused it, and 'nullable' should be set to True if the relation can be null. Return a list of all objects that were not already collected. 
""" if not objs: return [] new_objs = [] model = objs[0].__class__ instances = self.data[model] for obj in objs: if obj not in instances: new_objs.append(obj) instances.update(new_objs) # Nullable relationships can be ignored -- they are nulled out before # deleting, and therefore do not affect the order in which objects have # to be deleted. if source is not None and not nullable: self.add_dependency(source, model, reverse_dependency=reverse_dependency) return new_objs def add_dependency(self, model, dependency, reverse_dependency=False): if reverse_dependency: model, dependency = dependency, model self.dependencies[model._meta.concrete_model].add(dependency._meta.concrete_model) self.data.setdefault(dependency, self.data.default_factory()) def add_field_update(self, field, value, objs): """ Schedule a field update. 'objs' must be a homogeneous iterable collection of model instances (e.g. a QuerySet). """ if not objs: return model = objs[0].__class__ self.field_updates[model][field, value].update(objs) def add_restricted_objects(self, field, objs): if objs: model = objs[0].__class__ self.restricted_objects[model][field].update(objs) def clear_restricted_objects_from_set(self, model, objs): if model in self.restricted_objects: self.restricted_objects[model] = { field: items - objs for field, items in self.restricted_objects[model].items() } def clear_restricted_objects_from_queryset(self, model, qs): if model in self.restricted_objects: objs = set(qs.filter(pk__in=[ obj.pk for objs in self.restricted_objects[model].values() for obj in objs ])) self.clear_restricted_objects_from_set(model, objs) def _has_signal_listeners(self, model): return ( signals.pre_delete.has_listeners(model) or signals.post_delete.has_listeners(model) ) def can_fast_delete(self, objs, from_field=None): """ Determine if the objects in the given queryset-like or single object can be fast-deleted. This can be done if there are no cascades, no parents and no signal listeners for the object class. The 'from_field' tells where we are coming from - we need this to determine if the objects are in fact to be deleted. Allow also skipping parent -> child -> parent chain preventing fast delete of the child. """ if from_field and from_field.remote_field.on_delete is not CASCADE: return False if hasattr(objs, '_meta'): model = objs._meta.model elif hasattr(objs, 'model') and hasattr(objs, '_raw_delete'): model = objs.model else: return False if self._has_signal_listeners(model): return False # The use of from_field comes from the need to avoid cascade back to # parent when parent delete is cascading to child. opts = model._meta return ( all(link == from_field for link in opts.concrete_model._meta.parents.values()) and # Foreign keys pointing to this model. all( related.field.remote_field.on_delete is DO_NOTHING for related in get_candidate_relations_to_delete(opts) ) and ( # Something like generic foreign key. not any(hasattr(field, 'bulk_related_objects') for field in opts.private_fields) ) ) def get_del_batches(self, objs, fields): """ Return the objs in suitably sized batches for the used connection. 
""" field_names = [field.name for field in fields] conn_batch_size = max( connections[self.using].ops.bulk_batch_size(field_names, objs), 1) if len(objs) > conn_batch_size: return [objs[i:i + conn_batch_size] for i in range(0, len(objs), conn_batch_size)] else: return [objs] def collect(self, objs, source=None, nullable=False, collect_related=True, source_attr=None, reverse_dependency=False, keep_parents=False, fail_on_restricted=True): """ Add 'objs' to the collection of objects to be deleted as well as all parent instances. 'objs' must be a homogeneous iterable collection of model instances (e.g. a QuerySet). If 'collect_related' is True, related objects will be handled by their respective on_delete handler. If the call is the result of a cascade, 'source' should be the model that caused it and 'nullable' should be set to True, if the relation can be null. If 'reverse_dependency' is True, 'source' will be deleted before the current model, rather than after. (Needed for cascading to parent models, the one case in which the cascade follows the forwards direction of an FK rather than the reverse direction.) If 'keep_parents' is True, data of parent model's will be not deleted. If 'fail_on_restricted' is False, error won't be raised even if it's prohibited to delete such objects due to RESTRICT, that defers restricted object checking in recursive calls where the top-level call may need to collect more objects to determine whether restricted ones can be deleted. """ if self.can_fast_delete(objs): self.fast_deletes.append(objs) return new_objs = self.add(objs, source, nullable, reverse_dependency=reverse_dependency) if not new_objs: return model = new_objs[0].__class__ if not keep_parents: # Recursively collect concrete model's parent models, but not their # related objects. These will be found by meta.get_fields() concrete_model = model._meta.concrete_model for ptr in concrete_model._meta.parents.values(): if ptr: parent_objs = [getattr(obj, ptr.name) for obj in new_objs] self.collect(parent_objs, source=model, source_attr=ptr.remote_field.related_name, collect_related=False, reverse_dependency=True, fail_on_restricted=False) if not collect_related: return if keep_parents: parents = set(model._meta.get_parent_list()) model_fast_deletes = defaultdict(list) protected_objects = defaultdict(list) for related in get_candidate_relations_to_delete(model._meta): # Preserve parent reverse relationships if keep_parents=True. if keep_parents and related.model in parents: continue field = related.field if field.remote_field.on_delete == DO_NOTHING: continue related_model = related.related_model if self.can_fast_delete(related_model, from_field=field): model_fast_deletes[related_model].append(field) continue batches = self.get_del_batches(new_objs, [field]) for batch in batches: sub_objs = self.related_objects(related_model, [field], batch) # Non-referenced fields can be deferred if no signal receivers # are connected for the related model as they'll never be # exposed to the user. Skip field deferring when some # relationships are select_related as interactions between both # features are hard to get right. This should only happen in # the rare cases where .related_objects is overridden anyway. 
if not (sub_objs.query.select_related or self._has_signal_listeners(related_model)): referenced_fields = set(chain.from_iterable( (rf.attname for rf in rel.field.foreign_related_fields) for rel in get_candidate_relations_to_delete(related_model._meta) )) sub_objs = sub_objs.only(*tuple(referenced_fields)) if sub_objs: try: field.remote_field.on_delete(self, field, sub_objs, self.using) except ProtectedError as error: key = "'%s.%s'" % (field.model.__name__, field.name) protected_objects[key] += error.protected_objects if protected_objects: raise ProtectedError( 'Cannot delete some instances of model %r because they are ' 'referenced through protected foreign keys: %s.' % ( model.__name__, ', '.join(protected_objects), ), chain.from_iterable(protected_objects.values()), ) for related_model, related_fields in model_fast_deletes.items(): batches = self.get_del_batches(new_objs, related_fields) for batch in batches: sub_objs = self.related_objects(related_model, related_fields, batch) self.fast_deletes.append(sub_objs) for field in model._meta.private_fields: if hasattr(field, 'bulk_related_objects'): # It's something like generic foreign key. sub_objs = field.bulk_related_objects(new_objs, self.using) self.collect(sub_objs, source=model, nullable=True, fail_on_restricted=False) if fail_on_restricted: # Raise an error if collected restricted objects (RESTRICT) aren't # candidates for deletion also collected via CASCADE. for related_model, instances in self.data.items(): self.clear_restricted_objects_from_set(related_model, instances) for qs in self.fast_deletes: self.clear_restricted_objects_from_queryset(qs.model, qs) if self.restricted_objects.values(): restricted_objects = defaultdict(list) for related_model, fields in self.restricted_objects.items(): for field, objs in fields.items(): if objs: key = "'%s.%s'" % (related_model.__name__, field.name) restricted_objects[key] += objs if restricted_objects: raise RestrictedError( 'Cannot delete some instances of model %r because ' 'they are referenced through restricted foreign keys: ' '%s.' % ( model.__name__, ', '.join(restricted_objects), ), chain.from_iterable(restricted_objects.values()), ) def related_objects(self, related_model, related_fields, objs): """ Get a QuerySet of the related model to objs via related fields. """ predicate = reduce(operator.or_, ( query_utils.Q(**{'%s__in' % related_field.name: objs}) for related_field in related_fields )) return related_model._base_manager.using(self.using).filter(predicate) def instances_with_model(self): for model, instances in self.data.items(): for obj in instances: yield model, obj def sort(self): sorted_models = [] concrete_models = set() models = list(self.data) while len(sorted_models) < len(models): found = False for model in models: if model in sorted_models: continue dependencies = self.dependencies.get(model._meta.concrete_model) if not (dependencies and dependencies.difference(concrete_models)): sorted_models.append(model) concrete_models.add(model._meta.concrete_model) found = True if not found: return self.data = {model: self.data[model] for model in sorted_models} def delete(self): # sort instance collections for model, instances in self.data.items(): self.data[model] = sorted(instances, key=attrgetter("pk")) # if possible, bring the models in an order suitable for databases that # don't support transactions or cannot defer constraint checks until the # end of a transaction. 
self.sort() # number of objects deleted for each model label deleted_counter = Counter() # Optimize for the case with a single obj and no dependencies if len(self.data) == 1 and len(instances) == 1: instance = list(instances)[0] if self.can_fast_delete(instance): with transaction.mark_for_rollback_on_error(self.using): count = sql.DeleteQuery(model).delete_batch([instance.pk], self.using) setattr(instance, model._meta.pk.attname, None) return count, {model._meta.label: count} with transaction.atomic(using=self.using, savepoint=False): # send pre_delete signals for model, obj in self.instances_with_model(): if not model._meta.auto_created: signals.pre_delete.send( sender=model, instance=obj, using=self.using ) # fast deletes for qs in self.fast_deletes: count = qs._raw_delete(using=self.using) if count: deleted_counter[qs.model._meta.label] += count # update fields for model, instances_for_fieldvalues in self.field_updates.items(): for (field, value), instances in instances_for_fieldvalues.items(): query = sql.UpdateQuery(model) query.update_batch([obj.pk for obj in instances], {field.name: value}, self.using) # reverse instance collections for instances in self.data.values(): instances.reverse() # delete instances for model, instances in self.data.items(): query = sql.DeleteQuery(model) pk_list = [obj.pk for obj in instances] count = query.delete_batch(pk_list, self.using) if count: deleted_counter[model._meta.label] += count if not model._meta.auto_created: for obj in instances: signals.post_delete.send( sender=model, instance=obj, using=self.using ) # update collected instances for instances_for_fieldvalues in self.field_updates.values(): for (field, value), instances in instances_for_fieldvalues.items(): for obj in instances: setattr(obj, field.attname, value) for model, instances in self.data.items(): for instance in instances: setattr(instance, model._meta.pk.attname, None) return sum(deleted_counter.values()), dict(deleted_counter)
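
# A minimal usage sketch (illustrative; not part of Django). It drives the
# Collector through the same steps Model.delete() uses: build a collector for
# a database alias, collect the instances (cascading through the on_delete
# handlers above), then delete. `Author` is a hypothetical model passed in so
# that importing this module has no side effects.
def _collector_usage_sketch(Author):
    collector = Collector(using='default')
    # collect() fills self.data, self.field_updates, and self.fast_deletes;
    # PROTECT and RESTRICT raise here, during collection, not at delete() time.
    collector.collect(list(Author.objects.filter(name='stub')))
    # delete() replays the plan inside a single atomic block and returns
    # (total_rows_deleted, {model_label: count, ...}).
    return collector.delete()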
6d3b5835fa6fa0b74687a5dd9f8d5849d3b55f37230c054d3a3dd4dd9b7d63a3
from django.db import models from django.db.migrations.operations.base import Operation from django.db.migrations.state import ModelState from django.db.models.options import normalize_together from django.utils.functional import cached_property from .fields import ( AddField, AlterField, FieldOperation, RemoveField, RenameField, ) from .utils import field_references, get_references, resolve_relation def _check_for_duplicates(arg_name, objs): used_vals = set() for val in objs: if val in used_vals: raise ValueError( "Found duplicate value %s in CreateModel %s argument." % (val, arg_name) ) used_vals.add(val) class ModelOperation(Operation): def __init__(self, name): self.name = name @cached_property def name_lower(self): return self.name.lower() def references_model(self, name, app_label): return name.lower() == self.name_lower def reduce(self, operation, app_label): return ( super().reduce(operation, app_label) or not operation.references_model(self.name, app_label) ) class CreateModel(ModelOperation): """Create a model's table.""" serialization_expand_args = ['fields', 'options', 'managers'] def __init__(self, name, fields, options=None, bases=None, managers=None): self.fields = fields self.options = options or {} self.bases = bases or (models.Model,) self.managers = managers or [] super().__init__(name) # Sanity-check that there are no duplicated field names, bases, or # manager names _check_for_duplicates('fields', (name for name, _ in self.fields)) _check_for_duplicates('bases', ( base._meta.label_lower if hasattr(base, '_meta') else base.lower() if isinstance(base, str) else base for base in self.bases )) _check_for_duplicates('managers', (name for name, _ in self.managers)) def deconstruct(self): kwargs = { 'name': self.name, 'fields': self.fields, } if self.options: kwargs['options'] = self.options if self.bases and self.bases != (models.Model,): kwargs['bases'] = self.bases if self.managers and self.managers != [('objects', models.Manager())]: kwargs['managers'] = self.managers return ( self.__class__.__qualname__, [], kwargs ) def state_forwards(self, app_label, state): state.add_model(ModelState( app_label, self.name, list(self.fields), dict(self.options), tuple(self.bases), list(self.managers), )) def database_forwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.create_model(model) def database_backwards(self, app_label, schema_editor, from_state, to_state): model = from_state.apps.get_model(app_label, self.name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.delete_model(model) def describe(self): return "Create %smodel %s" % ("proxy " if self.options.get("proxy", False) else "", self.name) @property def migration_name_fragment(self): return self.name_lower def references_model(self, name, app_label): name_lower = name.lower() if name_lower == self.name_lower: return True # Check we didn't inherit from the model reference_model_tuple = (app_label, name_lower) for base in self.bases: if (base is not models.Model and isinstance(base, (models.base.ModelBase, str)) and resolve_relation(base, app_label) == reference_model_tuple): return True # Check we have no FKs/M2Ms with it for _name, field in self.fields: if field_references((app_label, self.name_lower), field, reference_model_tuple): return True return False def reduce(self, operation, app_label): if (isinstance(operation, DeleteModel) and 
self.name_lower == operation.name_lower and not self.options.get("proxy", False)): return [] elif isinstance(operation, RenameModel) and self.name_lower == operation.old_name_lower: return [ CreateModel( operation.new_name, fields=self.fields, options=self.options, bases=self.bases, managers=self.managers, ), ] elif isinstance(operation, AlterModelOptions) and self.name_lower == operation.name_lower: return [ CreateModel( self.name, fields=self.fields, options={**self.options, **operation.options}, bases=self.bases, managers=self.managers, ), ] elif isinstance(operation, AlterTogetherOptionOperation) and self.name_lower == operation.name_lower: return [ CreateModel( self.name, fields=self.fields, options={**self.options, **{operation.option_name: operation.option_value}}, bases=self.bases, managers=self.managers, ), ] elif isinstance(operation, AlterOrderWithRespectTo) and self.name_lower == operation.name_lower: return [ CreateModel( self.name, fields=self.fields, options={**self.options, 'order_with_respect_to': operation.order_with_respect_to}, bases=self.bases, managers=self.managers, ), ] elif isinstance(operation, FieldOperation) and self.name_lower == operation.model_name_lower: if isinstance(operation, AddField): return [ CreateModel( self.name, fields=self.fields + [(operation.name, operation.field)], options=self.options, bases=self.bases, managers=self.managers, ), ] elif isinstance(operation, AlterField): return [ CreateModel( self.name, fields=[ (n, operation.field if n == operation.name else v) for n, v in self.fields ], options=self.options, bases=self.bases, managers=self.managers, ), ] elif isinstance(operation, RemoveField): options = self.options.copy() for option_name in ('unique_together', 'index_together'): option = options.pop(option_name, None) if option: option = set(filter(bool, ( tuple(f for f in fields if f != operation.name_lower) for fields in option ))) if option: options[option_name] = option order_with_respect_to = options.get('order_with_respect_to') if order_with_respect_to == operation.name_lower: del options['order_with_respect_to'] return [ CreateModel( self.name, fields=[ (n, v) for n, v in self.fields if n.lower() != operation.name_lower ], options=options, bases=self.bases, managers=self.managers, ), ] elif isinstance(operation, RenameField): options = self.options.copy() for option_name in ('unique_together', 'index_together'): option = options.get(option_name) if option: options[option_name] = { tuple(operation.new_name if f == operation.old_name else f for f in fields) for fields in option } order_with_respect_to = options.get('order_with_respect_to') if order_with_respect_to == operation.old_name: options['order_with_respect_to'] = operation.new_name return [ CreateModel( self.name, fields=[ (operation.new_name if n == operation.old_name else n, v) for n, v in self.fields ], options=options, bases=self.bases, managers=self.managers, ), ] return super().reduce(operation, app_label) class DeleteModel(ModelOperation): """Drop a model's table.""" def deconstruct(self): kwargs = { 'name': self.name, } return ( self.__class__.__qualname__, [], kwargs ) def state_forwards(self, app_label, state): state.remove_model(app_label, self.name_lower) def database_forwards(self, app_label, schema_editor, from_state, to_state): model = from_state.apps.get_model(app_label, self.name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.delete_model(model) def database_backwards(self, app_label, schema_editor, from_state, to_state): 
model = to_state.apps.get_model(app_label, self.name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.create_model(model) def references_model(self, name, app_label): # The deleted model could be referencing the specified model through # related fields. return True def describe(self): return "Delete model %s" % self.name @property def migration_name_fragment(self): return 'delete_%s' % self.name_lower class RenameModel(ModelOperation): """Rename a model.""" def __init__(self, old_name, new_name): self.old_name = old_name self.new_name = new_name super().__init__(old_name) @cached_property def old_name_lower(self): return self.old_name.lower() @cached_property def new_name_lower(self): return self.new_name.lower() def deconstruct(self): kwargs = { 'old_name': self.old_name, 'new_name': self.new_name, } return ( self.__class__.__qualname__, [], kwargs ) def state_forwards(self, app_label, state): # Add a new model. renamed_model = state.models[app_label, self.old_name_lower].clone() renamed_model.name = self.new_name state.models[app_label, self.new_name_lower] = renamed_model # Repoint all fields pointing to the old model to the new one. old_model_tuple = (app_label, self.old_name_lower) new_remote_model = '%s.%s' % (app_label, self.new_name) to_reload = set() for model_state, name, field, reference in get_references(state, old_model_tuple): changed_field = None if reference.to: changed_field = field.clone() changed_field.remote_field.model = new_remote_model if reference.through: if changed_field is None: changed_field = field.clone() changed_field.remote_field.through = new_remote_model if changed_field: model_state.fields[name] = changed_field to_reload.add((model_state.app_label, model_state.name_lower)) # Reload models related to old model before removing the old model. state.reload_models(to_reload, delay=True) # Remove the old model. state.remove_model(app_label, self.old_name_lower) state.reload_model(app_label, self.new_name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): new_model = to_state.apps.get_model(app_label, self.new_name) if self.allow_migrate_model(schema_editor.connection.alias, new_model): old_model = from_state.apps.get_model(app_label, self.old_name) # Move the main table schema_editor.alter_db_table( new_model, old_model._meta.db_table, new_model._meta.db_table, ) # Alter the fields pointing to us for related_object in old_model._meta.related_objects: if related_object.related_model == old_model: model = new_model related_key = (app_label, self.new_name_lower) else: model = related_object.related_model related_key = ( related_object.related_model._meta.app_label, related_object.related_model._meta.model_name, ) to_field = to_state.apps.get_model( *related_key )._meta.get_field(related_object.field.name) schema_editor.alter_field( model, related_object.field, to_field, ) # Rename M2M fields whose name is based on this model's name. fields = zip(old_model._meta.local_many_to_many, new_model._meta.local_many_to_many) for (old_field, new_field) in fields: # Skip self-referential fields as these are renamed above. if new_field.model == new_field.related_model or not new_field.remote_field.through._meta.auto_created: continue # Rename the M2M table that's based on this model's name. 
old_m2m_model = old_field.remote_field.through new_m2m_model = new_field.remote_field.through schema_editor.alter_db_table( new_m2m_model, old_m2m_model._meta.db_table, new_m2m_model._meta.db_table, ) # Rename the column in the M2M table that's based on this # model's name. schema_editor.alter_field( new_m2m_model, old_m2m_model._meta.get_field(old_model._meta.model_name), new_m2m_model._meta.get_field(new_model._meta.model_name), ) def database_backwards(self, app_label, schema_editor, from_state, to_state): self.new_name_lower, self.old_name_lower = self.old_name_lower, self.new_name_lower self.new_name, self.old_name = self.old_name, self.new_name self.database_forwards(app_label, schema_editor, from_state, to_state) self.new_name_lower, self.old_name_lower = self.old_name_lower, self.new_name_lower self.new_name, self.old_name = self.old_name, self.new_name def references_model(self, name, app_label): return ( name.lower() == self.old_name_lower or name.lower() == self.new_name_lower ) def describe(self): return "Rename model %s to %s" % (self.old_name, self.new_name) @property def migration_name_fragment(self): return 'rename_%s_%s' % (self.old_name_lower, self.new_name_lower) def reduce(self, operation, app_label): if (isinstance(operation, RenameModel) and self.new_name_lower == operation.old_name_lower): return [ RenameModel( self.old_name, operation.new_name, ), ] # Skip `ModelOperation.reduce` as we want to run `references_model` # against self.new_name. return ( super(ModelOperation, self).reduce(operation, app_label) or not operation.references_model(self.new_name, app_label) ) class ModelOptionOperation(ModelOperation): def reduce(self, operation, app_label): if isinstance(operation, (self.__class__, DeleteModel)) and self.name_lower == operation.name_lower: return [operation] return super().reduce(operation, app_label) class AlterModelTable(ModelOptionOperation): """Rename a model's table.""" def __init__(self, name, table): self.table = table super().__init__(name) def deconstruct(self): kwargs = { 'name': self.name, 'table': self.table, } return ( self.__class__.__qualname__, [], kwargs ) def state_forwards(self, app_label, state): state.models[app_label, self.name_lower].options["db_table"] = self.table state.reload_model(app_label, self.name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): new_model = to_state.apps.get_model(app_label, self.name) if self.allow_migrate_model(schema_editor.connection.alias, new_model): old_model = from_state.apps.get_model(app_label, self.name) schema_editor.alter_db_table( new_model, old_model._meta.db_table, new_model._meta.db_table, ) # Rename M2M fields whose name is based on this model's db_table for (old_field, new_field) in zip(old_model._meta.local_many_to_many, new_model._meta.local_many_to_many): if new_field.remote_field.through._meta.auto_created: schema_editor.alter_db_table( new_field.remote_field.through, old_field.remote_field.through._meta.db_table, new_field.remote_field.through._meta.db_table, ) def database_backwards(self, app_label, schema_editor, from_state, to_state): return self.database_forwards(app_label, schema_editor, from_state, to_state) def describe(self): return "Rename table for %s to %s" % ( self.name, self.table if self.table is not None else "(default)" ) @property def migration_name_fragment(self): return 'alter_%s_table' % self.name_lower class AlterTogetherOptionOperation(ModelOptionOperation): option_name = None def __init__(self, name, option_value): if 
option_value: option_value = set(normalize_together(option_value)) setattr(self, self.option_name, option_value) super().__init__(name) @cached_property def option_value(self): return getattr(self, self.option_name) def deconstruct(self): kwargs = { 'name': self.name, self.option_name: self.option_value, } return ( self.__class__.__qualname__, [], kwargs ) def state_forwards(self, app_label, state): model_state = state.models[app_label, self.name_lower] model_state.options[self.option_name] = self.option_value state.reload_model(app_label, self.name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): new_model = to_state.apps.get_model(app_label, self.name) if self.allow_migrate_model(schema_editor.connection.alias, new_model): old_model = from_state.apps.get_model(app_label, self.name) alter_together = getattr(schema_editor, 'alter_%s' % self.option_name) alter_together( new_model, getattr(old_model._meta, self.option_name, set()), getattr(new_model._meta, self.option_name, set()), ) def database_backwards(self, app_label, schema_editor, from_state, to_state): return self.database_forwards(app_label, schema_editor, from_state, to_state) def references_field(self, model_name, name, app_label): return ( self.references_model(model_name, app_label) and ( not self.option_value or any((name in fields) for fields in self.option_value) ) ) def describe(self): return "Alter %s for %s (%s constraint(s))" % (self.option_name, self.name, len(self.option_value or '')) @property def migration_name_fragment(self): return 'alter_%s_%s' % (self.name_lower, self.option_name) class AlterUniqueTogether(AlterTogetherOptionOperation): """ Change the value of unique_together to the target one. Input value of unique_together must be a set of tuples. """ option_name = 'unique_together' def __init__(self, name, unique_together): super().__init__(name, unique_together) class AlterIndexTogether(AlterTogetherOptionOperation): """ Change the value of index_together to the target one. Input value of index_together must be a set of tuples. 
""" option_name = "index_together" def __init__(self, name, index_together): super().__init__(name, index_together) class AlterOrderWithRespectTo(ModelOptionOperation): """Represent a change with the order_with_respect_to option.""" option_name = 'order_with_respect_to' def __init__(self, name, order_with_respect_to): self.order_with_respect_to = order_with_respect_to super().__init__(name) def deconstruct(self): kwargs = { 'name': self.name, 'order_with_respect_to': self.order_with_respect_to, } return ( self.__class__.__qualname__, [], kwargs ) def state_forwards(self, app_label, state): model_state = state.models[app_label, self.name_lower] model_state.options['order_with_respect_to'] = self.order_with_respect_to state.reload_model(app_label, self.name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): to_model = to_state.apps.get_model(app_label, self.name) if self.allow_migrate_model(schema_editor.connection.alias, to_model): from_model = from_state.apps.get_model(app_label, self.name) # Remove a field if we need to if from_model._meta.order_with_respect_to and not to_model._meta.order_with_respect_to: schema_editor.remove_field(from_model, from_model._meta.get_field("_order")) # Add a field if we need to (altering the column is untouched as # it's likely a rename) elif to_model._meta.order_with_respect_to and not from_model._meta.order_with_respect_to: field = to_model._meta.get_field("_order") if not field.has_default(): field.default = 0 schema_editor.add_field( from_model, field, ) def database_backwards(self, app_label, schema_editor, from_state, to_state): self.database_forwards(app_label, schema_editor, from_state, to_state) def references_field(self, model_name, name, app_label): return ( self.references_model(model_name, app_label) and ( self.order_with_respect_to is None or name == self.order_with_respect_to ) ) def describe(self): return "Set order_with_respect_to on %s to %s" % (self.name, self.order_with_respect_to) @property def migration_name_fragment(self): return 'alter_%s_order_with_respect_to' % self.name_lower class AlterModelOptions(ModelOptionOperation): """ Set new model options that don't directly affect the database schema (like verbose_name, permissions, ordering). Python code in migrations may still need them. 
""" # Model options we want to compare and preserve in an AlterModelOptions op ALTER_OPTION_KEYS = [ "base_manager_name", "default_manager_name", "default_related_name", "get_latest_by", "managed", "ordering", "permissions", "default_permissions", "select_on_save", "verbose_name", "verbose_name_plural", ] def __init__(self, name, options): self.options = options super().__init__(name) def deconstruct(self): kwargs = { 'name': self.name, 'options': self.options, } return ( self.__class__.__qualname__, [], kwargs ) def state_forwards(self, app_label, state): model_state = state.models[app_label, self.name_lower] model_state.options = {**model_state.options, **self.options} for key in self.ALTER_OPTION_KEYS: if key not in self.options: model_state.options.pop(key, False) state.reload_model(app_label, self.name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): pass def database_backwards(self, app_label, schema_editor, from_state, to_state): pass def describe(self): return "Change Meta options on %s" % self.name @property def migration_name_fragment(self): return 'alter_%s_options' % self.name_lower class AlterModelManagers(ModelOptionOperation): """Alter the model's managers.""" serialization_expand_args = ['managers'] def __init__(self, name, managers): self.managers = managers super().__init__(name) def deconstruct(self): return ( self.__class__.__qualname__, [self.name, self.managers], {} ) def state_forwards(self, app_label, state): model_state = state.models[app_label, self.name_lower] model_state.managers = list(self.managers) state.reload_model(app_label, self.name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): pass def database_backwards(self, app_label, schema_editor, from_state, to_state): pass def describe(self): return "Change managers on %s" % self.name @property def migration_name_fragment(self): return 'alter_%s_managers' % self.name_lower class IndexOperation(Operation): option_name = 'indexes' @cached_property def model_name_lower(self): return self.model_name.lower() class AddIndex(IndexOperation): """Add an index on a model.""" def __init__(self, model_name, index): self.model_name = model_name if not index.name: raise ValueError( "Indexes passed to AddIndex operations require a name " "argument. %r doesn't have one." 
% index ) self.index = index def state_forwards(self, app_label, state): model_state = state.models[app_label, self.model_name_lower] model_state.options[self.option_name] = [*model_state.options[self.option_name], self.index.clone()] state.reload_model(app_label, self.model_name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.add_index(model, self.index) def database_backwards(self, app_label, schema_editor, from_state, to_state): model = from_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.remove_index(model, self.index) def deconstruct(self): kwargs = { 'model_name': self.model_name, 'index': self.index, } return ( self.__class__.__qualname__, [], kwargs, ) def describe(self): return 'Create index %s on field(s) %s of model %s' % ( self.index.name, ', '.join(self.index.fields), self.model_name, ) @property def migration_name_fragment(self): return '%s_%s' % (self.model_name_lower, self.index.name.lower()) class RemoveIndex(IndexOperation): """Remove an index from a model.""" def __init__(self, model_name, name): self.model_name = model_name self.name = name def state_forwards(self, app_label, state): model_state = state.models[app_label, self.model_name_lower] indexes = model_state.options[self.option_name] model_state.options[self.option_name] = [idx for idx in indexes if idx.name != self.name] state.reload_model(app_label, self.model_name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): model = from_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): from_model_state = from_state.models[app_label, self.model_name_lower] index = from_model_state.get_index_by_name(self.name) schema_editor.remove_index(model, index) def database_backwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): to_model_state = to_state.models[app_label, self.model_name_lower] index = to_model_state.get_index_by_name(self.name) schema_editor.add_index(model, index) def deconstruct(self): kwargs = { 'model_name': self.model_name, 'name': self.name, } return ( self.__class__.__qualname__, [], kwargs, ) def describe(self): return 'Remove index %s from %s' % (self.name, self.model_name) @property def migration_name_fragment(self): return 'remove_%s_%s' % (self.model_name_lower, self.name.lower()) class AddConstraint(IndexOperation): option_name = 'constraints' def __init__(self, model_name, constraint): self.model_name = model_name self.constraint = constraint def state_forwards(self, app_label, state): model_state = state.models[app_label, self.model_name_lower] model_state.options[self.option_name] = [*model_state.options[self.option_name], self.constraint] state.reload_model(app_label, self.model_name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.add_constraint(model, self.constraint) def database_backwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, 
self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.remove_constraint(model, self.constraint) def deconstruct(self): return self.__class__.__name__, [], { 'model_name': self.model_name, 'constraint': self.constraint, } def describe(self): return 'Create constraint %s on model %s' % (self.constraint.name, self.model_name) @property def migration_name_fragment(self): return '%s_%s' % (self.model_name_lower, self.constraint.name.lower()) class RemoveConstraint(IndexOperation): option_name = 'constraints' def __init__(self, model_name, name): self.model_name = model_name self.name = name def state_forwards(self, app_label, state): model_state = state.models[app_label, self.model_name_lower] constraints = model_state.options[self.option_name] model_state.options[self.option_name] = [c for c in constraints if c.name != self.name] state.reload_model(app_label, self.model_name_lower, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): from_model_state = from_state.models[app_label, self.model_name_lower] constraint = from_model_state.get_constraint_by_name(self.name) schema_editor.remove_constraint(model, constraint) def database_backwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): to_model_state = to_state.models[app_label, self.model_name_lower] constraint = to_model_state.get_constraint_by_name(self.name) schema_editor.add_constraint(model, constraint) def deconstruct(self): return self.__class__.__name__, [], { 'model_name': self.model_name, 'name': self.name, } def describe(self): return 'Remove constraint %s from model %s' % (self.name, self.model_name) @property def migration_name_fragment(self): return 'remove_%s_%s' % (self.model_name_lower, self.name.lower())
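
# A minimal usage sketch (illustrative; not part of Django). The reduce()
# hooks above exist so the migration optimizer can fold adjacent operations;
# here a CreateModel absorbs a following AddField into a single CreateModel.
# The model and field names are made up.
def _create_model_reduce_sketch():
    from django.db.migrations.optimizer import MigrationOptimizer

    operations = [
        CreateModel('Pony', fields=[('id', models.AutoField(primary_key=True))]),
        AddField('Pony', 'height', models.IntegerField(default=0)),
    ]
    # CreateModel.reduce() returns a replacement list, so the optimizer emits
    # one CreateModel whose fields also contain 'height'.
    return MigrationOptimizer().optimize(operations, 'ponies')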
fabe9ce10ec2202053f79ef81fbddd479a6ff7812ab28cd2e4fd9a357f15c939
from django.db import router class Operation: """ Base class for migration operations. It's responsible for both mutating the in-memory model state (see db/migrations/state.py) to represent what it performs, as well as actually performing it against a live database. Note that some operations won't modify memory state at all (e.g. data copying operations), and some will need their modifications to be optionally specified by the user (e.g. custom Python code snippets) Due to the way this class deals with deconstruction, it should be considered immutable. """ # If this migration can be run in reverse. # Some operations are impossible to reverse, like deleting data. reversible = True # Can this migration be represented as SQL? (things like RunPython cannot) reduces_to_sql = True # Should this operation be forced as atomic even on backends with no # DDL transaction support (i.e., does it have no DDL, like RunPython) atomic = False # Should this operation be considered safe to elide and optimize across? elidable = False serialization_expand_args = [] def __new__(cls, *args, **kwargs): # We capture the arguments to make returning them trivial self = object.__new__(cls) self._constructor_args = (args, kwargs) return self def deconstruct(self): """ Return a 3-tuple of class import path (or just name if it lives under django.db.migrations), positional arguments, and keyword arguments. """ return ( self.__class__.__name__, self._constructor_args[0], self._constructor_args[1], ) def state_forwards(self, app_label, state): """ Take the state from the previous migration, and mutate it so that it matches what this migration would perform. """ raise NotImplementedError('subclasses of Operation must provide a state_forwards() method') def database_forwards(self, app_label, schema_editor, from_state, to_state): """ Perform the mutation on the database schema in the normal (forwards) direction. """ raise NotImplementedError('subclasses of Operation must provide a database_forwards() method') def database_backwards(self, app_label, schema_editor, from_state, to_state): """ Perform the mutation on the database schema in the reverse direction - e.g. if this were CreateModel, it would in fact drop the model's table. """ raise NotImplementedError('subclasses of Operation must provide a database_backwards() method') def describe(self): """ Output a brief summary of what the action does. """ return "%s: %s" % (self.__class__.__name__, self._constructor_args) @property def migration_name_fragment(self): """ A filename part suitable for automatically naming a migration containing this operation, or None if not applicable. """ return None def references_model(self, name, app_label): """ Return True if there is a chance this operation references the given model name (as a string), with an app label for accuracy. Used for optimization. If in doubt, return True; returning a false positive will merely make the optimizer a little less efficient, while returning a false negative may result in an unusable optimized migration. """ return True def references_field(self, model_name, name, app_label): """ Return True if there is a chance this operation references the given field name, with an app label for accuracy. Used for optimization. If in doubt, return True. """ return self.references_model(model_name, app_label) def allow_migrate_model(self, connection_alias, model): """ Return whether or not a model may be migrated. 
This is a thin wrapper around router.allow_migrate_model() that preemptively rejects any proxy, swapped out, or unmanaged model. """ if not model._meta.can_migrate(connection_alias): return False return router.allow_migrate_model(connection_alias, model) def reduce(self, operation, app_label): """ Return either a list of operations the actual operation should be replaced with or a boolean that indicates whether or not the specified operation can be optimized across. """ if self.elidable: return [operation] elif operation.elidable: return [self] return False def __repr__(self): return "<%s %s%s>" % ( self.__class__.__name__, ", ".join(map(repr, self._constructor_args[0])), ",".join(" %s=%r" % x for x in self._constructor_args[1].items()), )
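
# A minimal sketch of a custom operation (illustrative; not part of Django).
# It shows the smallest useful Operation subclass: state_forwards() is a
# no-op because the change is purely database-level, and the two schema
# editor hooks carry the forwards/backwards DDL. The extension handling is
# hypothetical.
class _CreateExtensionSketch(Operation):
    reversible = True

    def __init__(self, name):
        self.name = name

    def state_forwards(self, app_label, state):
        # Installing a database extension doesn't touch in-memory model state.
        pass

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        schema_editor.execute('CREATE EXTENSION IF NOT EXISTS %s' % schema_editor.quote_name(self.name))

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        schema_editor.execute('DROP EXTENSION IF EXISTS %s' % schema_editor.quote_name(self.name))

    def describe(self):
        return 'Creates extension %s' % self.name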
515352d3814761b8fcf9ed802bb46dfbe967999cebd4eb0784db16f321cd56c8
from django.core.exceptions import FieldDoesNotExist from django.db.models import NOT_PROVIDED from django.utils.functional import cached_property from .base import Operation from .utils import field_is_referenced, field_references, get_references class FieldOperation(Operation): def __init__(self, model_name, name, field=None): self.model_name = model_name self.name = name self.field = field @cached_property def model_name_lower(self): return self.model_name.lower() @cached_property def name_lower(self): return self.name.lower() def is_same_model_operation(self, operation): return self.model_name_lower == operation.model_name_lower def is_same_field_operation(self, operation): return self.is_same_model_operation(operation) and self.name_lower == operation.name_lower def references_model(self, name, app_label): name_lower = name.lower() if name_lower == self.model_name_lower: return True if self.field: return bool(field_references( (app_label, self.model_name_lower), self.field, (app_label, name_lower) )) return False def references_field(self, model_name, name, app_label): model_name_lower = model_name.lower() # Check if this operation locally references the field. if model_name_lower == self.model_name_lower: if name == self.name: return True elif self.field and hasattr(self.field, 'from_fields') and name in self.field.from_fields: return True # Check if this operation remotely references the field. if self.field is None: return False return bool(field_references( (app_label, self.model_name_lower), self.field, (app_label, model_name_lower), name, )) def reduce(self, operation, app_label): return ( super().reduce(operation, app_label) or not operation.references_field(self.model_name, self.name, app_label) ) class AddField(FieldOperation): """Add a field to a model.""" def __init__(self, model_name, name, field, preserve_default=True): self.preserve_default = preserve_default super().__init__(model_name, name, field) def deconstruct(self): kwargs = { 'model_name': self.model_name, 'name': self.name, 'field': self.field, } if self.preserve_default is not True: kwargs['preserve_default'] = self.preserve_default return ( self.__class__.__name__, [], kwargs ) def state_forwards(self, app_label, state): # If preserve default is off, don't use the default for future state if not self.preserve_default: field = self.field.clone() field.default = NOT_PROVIDED else: field = self.field state.models[app_label, self.model_name_lower].fields[self.name] = field # Delay rendering of relationships if it's not a relational field delay = not field.is_relation state.reload_model(app_label, self.model_name_lower, delay=delay) def database_forwards(self, app_label, schema_editor, from_state, to_state): to_model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, to_model): from_model = from_state.apps.get_model(app_label, self.model_name) field = to_model._meta.get_field(self.name) if not self.preserve_default: field.default = self.field.default schema_editor.add_field( from_model, field, ) if not self.preserve_default: field.default = NOT_PROVIDED def database_backwards(self, app_label, schema_editor, from_state, to_state): from_model = from_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, from_model): schema_editor.remove_field(from_model, from_model._meta.get_field(self.name)) def describe(self): return "Add field %s to %s" % (self.name, self.model_name) @property def 
migration_name_fragment(self): return '%s_%s' % (self.model_name_lower, self.name_lower) def reduce(self, operation, app_label): if isinstance(operation, FieldOperation) and self.is_same_field_operation(operation): if isinstance(operation, AlterField): return [ AddField( model_name=self.model_name, name=operation.name, field=operation.field, ), ] elif isinstance(operation, RemoveField): return [] elif isinstance(operation, RenameField): return [ AddField( model_name=self.model_name, name=operation.new_name, field=self.field, ), ] return super().reduce(operation, app_label) class RemoveField(FieldOperation): """Remove a field from a model.""" def deconstruct(self): kwargs = { 'model_name': self.model_name, 'name': self.name, } return ( self.__class__.__name__, [], kwargs ) def state_forwards(self, app_label, state): model_state = state.models[app_label, self.model_name_lower] old_field = model_state.fields.pop(self.name) # Delay rendering of relationships if it's not a relational field delay = not old_field.is_relation state.reload_model(app_label, self.model_name_lower, delay=delay) def database_forwards(self, app_label, schema_editor, from_state, to_state): from_model = from_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, from_model): schema_editor.remove_field(from_model, from_model._meta.get_field(self.name)) def database_backwards(self, app_label, schema_editor, from_state, to_state): to_model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, to_model): from_model = from_state.apps.get_model(app_label, self.model_name) schema_editor.add_field(from_model, to_model._meta.get_field(self.name)) def describe(self): return "Remove field %s from %s" % (self.name, self.model_name) @property def migration_name_fragment(self): return 'remove_%s_%s' % (self.model_name_lower, self.name_lower) def reduce(self, operation, app_label): from .models import DeleteModel if isinstance(operation, DeleteModel) and operation.name_lower == self.model_name_lower: return [operation] return super().reduce(operation, app_label) class AlterField(FieldOperation): """ Alter a field's database column (e.g. null, max_length) to the provided new field. """ def __init__(self, model_name, name, field, preserve_default=True): self.preserve_default = preserve_default super().__init__(model_name, name, field) def deconstruct(self): kwargs = { 'model_name': self.model_name, 'name': self.name, 'field': self.field, } if self.preserve_default is not True: kwargs['preserve_default'] = self.preserve_default return ( self.__class__.__name__, [], kwargs ) def state_forwards(self, app_label, state): if not self.preserve_default: field = self.field.clone() field.default = NOT_PROVIDED else: field = self.field model_state = state.models[app_label, self.model_name_lower] model_state.fields[self.name] = field # TODO: investigate if old relational fields must be reloaded or if it's # sufficient if the new field is (#27737). # Delay rendering of relationships if it's not a relational field and # not referenced by a foreign key. 
delay = ( not field.is_relation and not field_is_referenced( state, (app_label, self.model_name_lower), (self.name, field), ) ) state.reload_model(app_label, self.model_name_lower, delay=delay) def database_forwards(self, app_label, schema_editor, from_state, to_state): to_model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, to_model): from_model = from_state.apps.get_model(app_label, self.model_name) from_field = from_model._meta.get_field(self.name) to_field = to_model._meta.get_field(self.name) if not self.preserve_default: to_field.default = self.field.default schema_editor.alter_field(from_model, from_field, to_field) if not self.preserve_default: to_field.default = NOT_PROVIDED def database_backwards(self, app_label, schema_editor, from_state, to_state): self.database_forwards(app_label, schema_editor, from_state, to_state) def describe(self): return "Alter field %s on %s" % (self.name, self.model_name) @property def migration_name_fragment(self): return 'alter_%s_%s' % (self.model_name_lower, self.name_lower) def reduce(self, operation, app_label): if isinstance(operation, RemoveField) and self.is_same_field_operation(operation): return [operation] elif isinstance(operation, RenameField) and self.is_same_field_operation(operation): return [ operation, AlterField( model_name=self.model_name, name=operation.new_name, field=self.field, ), ] return super().reduce(operation, app_label) class RenameField(FieldOperation): """Rename a field on the model. Might affect db_column too.""" def __init__(self, model_name, old_name, new_name): self.old_name = old_name self.new_name = new_name super().__init__(model_name, old_name) @cached_property def old_name_lower(self): return self.old_name.lower() @cached_property def new_name_lower(self): return self.new_name.lower() def deconstruct(self): kwargs = { 'model_name': self.model_name, 'old_name': self.old_name, 'new_name': self.new_name, } return ( self.__class__.__name__, [], kwargs ) def state_forwards(self, app_label, state): model_state = state.models[app_label, self.model_name_lower] # Rename the field fields = model_state.fields try: found = fields.pop(self.old_name) except KeyError: raise FieldDoesNotExist( "%s.%s has no field named '%s'" % (app_label, self.model_name, self.old_name) ) fields[self.new_name] = found for field in fields.values(): # Fix from_fields to refer to the new field. from_fields = getattr(field, 'from_fields', None) if from_fields: field.from_fields = tuple([ self.new_name if from_field_name == self.old_name else from_field_name for from_field_name in from_fields ]) # Fix index/unique_together to refer to the new field options = model_state.options for option in ('index_together', 'unique_together'): if option in options: options[option] = [ [self.new_name if n == self.old_name else n for n in together] for together in options[option] ] # Fix to_fields to refer to the new field. 
delay = True references = get_references( state, (app_label, self.model_name_lower), (self.old_name, found), ) for *_, field, reference in references: delay = False if reference.to: remote_field, to_fields = reference.to if getattr(remote_field, 'field_name', None) == self.old_name: remote_field.field_name = self.new_name if to_fields: field.to_fields = tuple([ self.new_name if to_field_name == self.old_name else to_field_name for to_field_name in to_fields ]) state.reload_model(app_label, self.model_name_lower, delay=delay) def database_forwards(self, app_label, schema_editor, from_state, to_state): to_model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, to_model): from_model = from_state.apps.get_model(app_label, self.model_name) schema_editor.alter_field( from_model, from_model._meta.get_field(self.old_name), to_model._meta.get_field(self.new_name), ) def database_backwards(self, app_label, schema_editor, from_state, to_state): to_model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, to_model): from_model = from_state.apps.get_model(app_label, self.model_name) schema_editor.alter_field( from_model, from_model._meta.get_field(self.new_name), to_model._meta.get_field(self.old_name), ) def describe(self): return "Rename field %s on %s to %s" % (self.old_name, self.model_name, self.new_name) @property def migration_name_fragment(self): return 'rename_%s_%s_%s' % ( self.old_name_lower, self.model_name_lower, self.new_name_lower, ) def references_field(self, model_name, name, app_label): return self.references_model(model_name, app_label) and ( name.lower() == self.old_name_lower or name.lower() == self.new_name_lower ) def reduce(self, operation, app_label): if (isinstance(operation, RenameField) and self.is_same_model_operation(operation) and self.new_name_lower == operation.old_name_lower): return [ RenameField( self.model_name, self.old_name, operation.new_name, ), ] # Skip `FieldOperation.reduce` as we want to run `references_field` # against self.new_name. return ( super(FieldOperation, self).reduce(operation, app_label) or not operation.references_field(self.model_name, self.new_name, app_label) )
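
# A minimal sketch (not part of Django itself) of how the reduce() hooks
# above let the migration optimizer squash consecutive operations. The
# app, model, and field names are hypothetical:
#
#     from django.db import models
#
#     ops = [
#         AddField('book', 'title', models.CharField(max_length=100)),
#         AlterField('book', 'title', models.CharField(max_length=200)),
#     ]
#     # AddField.reduce() folds a following AlterField on the same field
#     # into a single AddField carrying the final definition:
#     ops[0].reduce(ops[1], app_label='library')
#     # -> [AddField(model_name='book', name='title',
#     #              field=models.CharField(max_length=200))]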
b65ed53afd6ba0ba11380ba994b0c128877ff10bb50cfce701aa5fdd1209fcec
import collections import re from functools import partial from itertools import chain from django.core.exceptions import EmptyResultSet, FieldError from django.db import DatabaseError, NotSupportedError from django.db.models.constants import LOOKUP_SEP from django.db.models.expressions import F, OrderBy, Random, RawSQL, Ref, Value from django.db.models.functions import Cast from django.db.models.query_utils import Q, select_related_descend from django.db.models.sql.constants import ( CURSOR, GET_ITERATOR_CHUNK_SIZE, MULTI, NO_RESULTS, ORDER_DIR, SINGLE, ) from django.db.models.sql.query import Query, get_order_dir from django.db.transaction import TransactionManagementError from django.utils.functional import cached_property from django.utils.hashable import make_hashable from django.utils.regex_helper import _lazy_re_compile class SQLCompiler: # Multiline ordering SQL clause may appear from RawSQL. ordering_parts = _lazy_re_compile( r'^(.*)\s(?:ASC|DESC).*', re.MULTILINE | re.DOTALL, ) def __init__(self, query, connection, using): self.query = query self.connection = connection self.using = using self.quote_cache = {'*': '*'} # The select, klass_info, and annotations are needed by QuerySet.iterator() # these are set as a side-effect of executing the query. Note that we calculate # separately a list of extra select columns needed for grammatical correctness # of the query, but these columns are not included in self.select. self.select = None self.annotation_col_map = None self.klass_info = None self._meta_ordering = None def setup_query(self): if all(self.query.alias_refcount[a] == 0 for a in self.query.alias_map): self.query.get_initial_alias() self.select, self.klass_info, self.annotation_col_map = self.get_select() self.col_count = len(self.select) def pre_sql_setup(self): """ Do any necessary class setup immediately prior to producing SQL. This is for things that can't necessarily be done in __init__ because we might not have all the pieces in place at that time. """ self.setup_query() order_by = self.get_order_by() self.where, self.having = self.query.where.split_having() extra_select = self.get_extra_select(order_by, self.select) self.has_extra_select = bool(extra_select) group_by = self.get_group_by(self.select + extra_select, order_by) return extra_select, order_by, group_by def get_group_by(self, select, order_by): """ Return a list of 2-tuples of form (sql, params). The logic of what exactly the GROUP BY clause contains is hard to describe in other words than "if it passes the test suite, then it is correct". """ # Some examples: # SomeModel.objects.annotate(Count('somecol')) # GROUP BY: all fields of the model # # SomeModel.objects.values('name').annotate(Count('somecol')) # GROUP BY: name # # SomeModel.objects.annotate(Count('somecol')).values('name') # GROUP BY: all cols of the model # # SomeModel.objects.values('name', 'pk').annotate(Count('somecol')).values('pk') # GROUP BY: name, pk # # SomeModel.objects.values('name').annotate(Count('somecol')).values('pk') # GROUP BY: name, pk # # In fact, the self.query.group_by is the minimal set to GROUP BY. It # can't be ever restricted to a smaller set, but additional columns in # HAVING, ORDER BY, and SELECT clauses are added to it. 
Unfortunately # the end result is that it is impossible to force the query to have # a chosen GROUP BY clause - you can almost do this by using the form: # .values(*wanted_cols).annotate(AnAggregate()) # but any later annotations, extra selects, values calls that # refer some column outside of the wanted_cols, order_by, or even # filter calls can alter the GROUP BY clause. # The query.group_by is either None (no GROUP BY at all), True # (group by select fields), or a list of expressions to be added # to the group by. if self.query.group_by is None: return [] expressions = [] if self.query.group_by is not True: # If the group by is set to a list (by .values() call most likely), # then we need to add everything in it to the GROUP BY clause. # Backwards compatibility hack for setting query.group_by. Remove # when we have public API way of forcing the GROUP BY clause. # Converts string references to expressions. for expr in self.query.group_by: if not hasattr(expr, 'as_sql'): expressions.append(self.query.resolve_ref(expr)) else: expressions.append(expr) # Note that even if the group_by is set, it is only the minimal # set to group by. So, we need to add cols in select, order_by, and # having into the select in any case. ref_sources = { expr.source for expr in expressions if isinstance(expr, Ref) } for expr, _, _ in select: # Skip members of the select clause that are already included # by reference. if expr in ref_sources: continue cols = expr.get_group_by_cols() for col in cols: expressions.append(col) for expr, (sql, params, is_ref) in order_by: # Skip References to the select clause, as all expressions in the # select clause are already part of the group by. if not is_ref: expressions.extend(expr.get_group_by_cols()) having_group_by = self.having.get_group_by_cols() if self.having else () for expr in having_group_by: expressions.append(expr) result = [] seen = set() expressions = self.collapse_group_by(expressions, having_group_by) for expr in expressions: sql, params = self.compile(expr) sql, params = expr.select_format(self, sql, params) params_hash = make_hashable(params) if (sql, params_hash) not in seen: result.append((sql, params)) seen.add((sql, params_hash)) return result def collapse_group_by(self, expressions, having): # If the DB can group by primary key, then group by the primary key of # query's main model. Note that for PostgreSQL the GROUP BY clause must # include the primary key of every table, but for MySQL it is enough to # have the main table's primary key. if self.connection.features.allows_group_by_pk: # Determine if the main model's primary key is in the query. pk = None for expr in expressions: # Is this a reference to query's base table primary key? If the # expression isn't a Col-like, then skip the expression. if (getattr(expr, 'target', None) == self.query.model._meta.pk and getattr(expr, 'alias', None) == self.query.base_table): pk = expr break # If the main model's primary key is in the query, group by that # field, HAVING expressions, and expressions associated with tables # that don't have a primary key included in the grouped columns. if pk: pk_aliases = { expr.alias for expr in expressions if hasattr(expr, 'target') and expr.target.primary_key } expressions = [pk] + [ expr for expr in expressions if expr in having or ( getattr(expr, 'alias', None) is not None and expr.alias not in pk_aliases ) ] elif self.connection.features.allows_group_by_selected_pks: # Filter out all expressions associated with a table's primary key # present in the grouped columns. 
This is done by identifying all # tables that have their primary key included in the grouped # columns and removing non-primary key columns referring to them. # Unmanaged models are excluded because they could be representing # database views on which the optimization might not be allowed. pks = { expr for expr in expressions if ( hasattr(expr, 'target') and expr.target.primary_key and self.connection.features.allows_group_by_selected_pks_on_model(expr.target.model) ) } aliases = {expr.alias for expr in pks} expressions = [ expr for expr in expressions if expr in pks or getattr(expr, 'alias', None) not in aliases ] return expressions def get_select(self): """ Return three values: - a list of 3-tuples of (expression, (sql, params), alias) - a klass_info structure, - a dictionary of annotations The (sql, params) is what the expression will produce, and alias is the "AS alias" for the column (possibly None). The klass_info structure contains the following information: - The base model of the query. - Which columns for that model are present in the query (by position of the select clause). - related_klass_infos: [f, klass_info] to descent into The annotations is a dictionary of {'attname': column position} values. """ select = [] klass_info = None annotations = {} select_idx = 0 for alias, (sql, params) in self.query.extra_select.items(): annotations[alias] = select_idx select.append((RawSQL(sql, params), alias)) select_idx += 1 assert not (self.query.select and self.query.default_cols) if self.query.default_cols: cols = self.get_default_columns() else: # self.query.select is a special case. These columns never go to # any model. cols = self.query.select if cols: select_list = [] for col in cols: select_list.append(select_idx) select.append((col, None)) select_idx += 1 klass_info = { 'model': self.query.model, 'select_fields': select_list, } for alias, annotation in self.query.annotation_select.items(): annotations[alias] = select_idx select.append((annotation, alias)) select_idx += 1 if self.query.select_related: related_klass_infos = self.get_related_selections(select) klass_info['related_klass_infos'] = related_klass_infos def get_select_from_parent(klass_info): for ki in klass_info['related_klass_infos']: if ki['from_parent']: ki['select_fields'] = (klass_info['select_fields'] + ki['select_fields']) get_select_from_parent(ki) get_select_from_parent(klass_info) ret = [] for col, alias in select: try: sql, params = self.compile(col) except EmptyResultSet: # Select a predicate that's always False. sql, params = '0', () else: sql, params = col.select_format(self, sql, params) ret.append((col, (sql, params), alias)) return ret, klass_info, annotations def get_order_by(self): """ Return a list of 2-tuples of form (expr, (sql, params, is_ref)) for the ORDER BY clause. The order_by clause can alter the select clause (for example it can add aliases to clauses that do not yet have one, or it can add totally new select clauses). """ if self.query.extra_order_by: ordering = self.query.extra_order_by elif not self.query.default_ordering: ordering = self.query.order_by elif self.query.order_by: ordering = self.query.order_by elif self.query.get_meta().ordering: ordering = self.query.get_meta().ordering self._meta_ordering = ordering else: ordering = [] if self.query.standard_ordering: asc, desc = ORDER_DIR['ASC'] else: asc, desc = ORDER_DIR['DESC'] order_by = [] for field in ordering: if hasattr(field, 'resolve_expression'): if isinstance(field, Value): # output_field must be resolved for constants. 
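                    # (A bare '%s' placeholder is not a usable ORDER BY term
                    # on every backend, so the constant is rendered as a
                    # typed CAST expression instead.)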
field = Cast(field, field.output_field) if not isinstance(field, OrderBy): field = field.asc() if not self.query.standard_ordering: field = field.copy() field.reverse_ordering() order_by.append((field, False)) continue if field == '?': # random order_by.append((OrderBy(Random()), False)) continue col, order = get_order_dir(field, asc) descending = order == 'DESC' if col in self.query.annotation_select: # Reference to expression in SELECT clause order_by.append(( OrderBy(Ref(col, self.query.annotation_select[col]), descending=descending), True)) continue if col in self.query.annotations: # References to an expression which is masked out of the SELECT # clause. expr = self.query.annotations[col] if isinstance(expr, Value): # output_field must be resolved for constants. expr = Cast(expr, expr.output_field) order_by.append((OrderBy(expr, descending=descending), False)) continue if '.' in field: # This came in through an extra(order_by=...) addition. Pass it # on verbatim. table, col = col.split('.', 1) order_by.append(( OrderBy( RawSQL('%s.%s' % (self.quote_name_unless_alias(table), col), []), descending=descending ), False)) continue if not self.query.extra or col not in self.query.extra: # 'col' is of the form 'field' or 'field1__field2' or # '-field1__field2__field', etc. order_by.extend(self.find_ordering_name( field, self.query.get_meta(), default_order=asc)) else: if col not in self.query.extra_select: order_by.append(( OrderBy(RawSQL(*self.query.extra[col]), descending=descending), False)) else: order_by.append(( OrderBy(Ref(col, RawSQL(*self.query.extra[col])), descending=descending), True)) result = [] seen = set() for expr, is_ref in order_by: resolved = expr.resolve_expression(self.query, allow_joins=True, reuse=None) if self.query.combinator: src = resolved.get_source_expressions()[0] expr_src = expr.get_source_expressions()[0] # Relabel order by columns to raw numbers if this is a combined # query; necessary since the columns can't be referenced by the # fully qualified name and the simple column names may collide. for idx, (sel_expr, _, col_alias) in enumerate(self.select): if is_ref and col_alias == src.refs: src = src.source elif col_alias and not ( isinstance(expr_src, F) and col_alias == expr_src.name ): continue if src == sel_expr: resolved.set_source_expressions([RawSQL('%d' % (idx + 1), ())]) break else: if col_alias: raise DatabaseError('ORDER BY term does not match any column in the result set.') # Add column used in ORDER BY clause without an alias to # the selected columns. self.query.add_select_col(src) resolved.set_source_expressions([RawSQL('%d' % len(self.query.select), ())]) sql, params = self.compile(resolved) # Don't add the same column twice, but the order direction is # not taken into account so we strip it. When this entire method # is refactored into expressions, then we can check each part as we # generate it. 
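            # (e.g. '"name" ASC' and '"name" DESC' both strip to '"name"',
            # so a column already seen with either direction is skipped.)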
without_ordering = self.ordering_parts.search(sql)[1] params_hash = make_hashable(params) if (without_ordering, params_hash) in seen: continue seen.add((without_ordering, params_hash)) result.append((resolved, (sql, params, is_ref))) return result def get_extra_select(self, order_by, select): extra_select = [] if self.query.distinct and not self.query.distinct_fields: select_sql = [t[1] for t in select] for expr, (sql, params, is_ref) in order_by: without_ordering = self.ordering_parts.search(sql)[1] if not is_ref and (without_ordering, params) not in select_sql: extra_select.append((expr, (without_ordering, params), None)) return extra_select def quote_name_unless_alias(self, name): """ A wrapper around connection.ops.quote_name that doesn't quote aliases for table names. This avoids problems with some SQL dialects that treat quoted strings specially (e.g. PostgreSQL). """ if name in self.quote_cache: return self.quote_cache[name] if ((name in self.query.alias_map and name not in self.query.table_map) or name in self.query.extra_select or ( self.query.external_aliases.get(name) and name not in self.query.table_map)): self.quote_cache[name] = name return name r = self.connection.ops.quote_name(name) self.quote_cache[name] = r return r def compile(self, node): vendor_impl = getattr(node, 'as_' + self.connection.vendor, None) if vendor_impl: sql, params = vendor_impl(self, self.connection) else: sql, params = node.as_sql(self, self.connection) return sql, params def get_combinator_sql(self, combinator, all): features = self.connection.features compilers = [ query.get_compiler(self.using, self.connection) for query in self.query.combined_queries if not query.is_empty() ] if not features.supports_slicing_ordering_in_compound: for query, compiler in zip(self.query.combined_queries, compilers): if query.low_mark or query.high_mark: raise DatabaseError('LIMIT/OFFSET not allowed in subqueries of compound statements.') if compiler.get_order_by(): raise DatabaseError('ORDER BY not allowed in subqueries of compound statements.') parts = () for compiler in compilers: try: # If the columns list is limited, then all combined queries # must have the same columns list. Set the selects defined on # the query on all combined queries, if not already set. if not compiler.query.values_select and self.query.values_select: compiler.query = compiler.query.clone() compiler.query.set_values(( *self.query.extra_select, *self.query.values_select, *self.query.annotation_select, )) part_sql, part_args = compiler.as_sql() if compiler.query.combinator: # Wrap in a subquery if wrapping in parentheses isn't # supported. if not features.supports_parentheses_in_compound: part_sql = 'SELECT * FROM ({})'.format(part_sql) # Add parentheses when combining with compound query if not # already added for all compound queries. elif not features.supports_slicing_ordering_in_compound: part_sql = '({})'.format(part_sql) parts += ((part_sql, part_args),) except EmptyResultSet: # Omit the empty queryset with UNION and with DIFFERENCE if the # first queryset is nonempty. 
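                # (An empty operand of union(), or an empty operand of
                # difference() that isn't the first one, contributes no
                # rows; re-raising for intersection() correctly empties
                # the whole result.)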
if combinator == 'union' or (combinator == 'difference' and parts): continue raise if not parts: raise EmptyResultSet combinator_sql = self.connection.ops.set_operators[combinator] if all and combinator == 'union': combinator_sql += ' ALL' braces = '({})' if features.supports_slicing_ordering_in_compound else '{}' sql_parts, args_parts = zip(*((braces.format(sql), args) for sql, args in parts)) result = [' {} '.format(combinator_sql).join(sql_parts)] params = [] for part in args_parts: params.extend(part) return result, params def as_sql(self, with_limits=True, with_col_aliases=False): """ Create the SQL for this query. Return the SQL string and list of parameters. If 'with_limits' is False, any limit/offset information is not included in the query. """ refcounts_before = self.query.alias_refcount.copy() try: extra_select, order_by, group_by = self.pre_sql_setup() for_update_part = None # Is a LIMIT/OFFSET clause needed? with_limit_offset = with_limits and (self.query.high_mark is not None or self.query.low_mark) combinator = self.query.combinator features = self.connection.features if combinator: if not getattr(features, 'supports_select_{}'.format(combinator)): raise NotSupportedError('{} is not supported on this database backend.'.format(combinator)) result, params = self.get_combinator_sql(combinator, self.query.combinator_all) else: distinct_fields, distinct_params = self.get_distinct() # This must come after 'select', 'ordering', and 'distinct' # (see docstring of get_from_clause() for details). from_, f_params = self.get_from_clause() where, w_params = self.compile(self.where) if self.where is not None else ("", []) having, h_params = self.compile(self.having) if self.having is not None else ("", []) result = ['SELECT'] params = [] if self.query.distinct: distinct_result, distinct_params = self.connection.ops.distinct_sql( distinct_fields, distinct_params, ) result += distinct_result params += distinct_params out_cols = [] col_idx = 1 for _, (s_sql, s_params), alias in self.select + extra_select: if alias: s_sql = '%s AS %s' % (s_sql, self.connection.ops.quote_name(alias)) elif with_col_aliases: s_sql = '%s AS %s' % (s_sql, 'Col%d' % col_idx) col_idx += 1 params.extend(s_params) out_cols.append(s_sql) result += [', '.join(out_cols), 'FROM', *from_] params.extend(f_params) if self.query.select_for_update and self.connection.features.has_select_for_update: if self.connection.get_autocommit(): raise TransactionManagementError('select_for_update cannot be used outside of a transaction.') if with_limit_offset and not self.connection.features.supports_select_for_update_with_limit: raise NotSupportedError( 'LIMIT/OFFSET is not supported with ' 'select_for_update on this database backend.' ) nowait = self.query.select_for_update_nowait skip_locked = self.query.select_for_update_skip_locked of = self.query.select_for_update_of no_key = self.query.select_for_no_key_update # If it's a NOWAIT/SKIP LOCKED/OF/NO KEY query but the # backend doesn't support it, raise NotSupportedError to # prevent a possible deadlock. 
if nowait and not self.connection.features.has_select_for_update_nowait: raise NotSupportedError('NOWAIT is not supported on this database backend.') elif skip_locked and not self.connection.features.has_select_for_update_skip_locked: raise NotSupportedError('SKIP LOCKED is not supported on this database backend.') elif of and not self.connection.features.has_select_for_update_of: raise NotSupportedError('FOR UPDATE OF is not supported on this database backend.') elif no_key and not self.connection.features.has_select_for_no_key_update: raise NotSupportedError( 'FOR NO KEY UPDATE is not supported on this ' 'database backend.' ) for_update_part = self.connection.ops.for_update_sql( nowait=nowait, skip_locked=skip_locked, of=self.get_select_for_update_of_arguments(), no_key=no_key, ) if for_update_part and self.connection.features.for_update_after_from: result.append(for_update_part) if where: result.append('WHERE %s' % where) params.extend(w_params) grouping = [] for g_sql, g_params in group_by: grouping.append(g_sql) params.extend(g_params) if grouping: if distinct_fields: raise NotImplementedError('annotate() + distinct(fields) is not implemented.') order_by = order_by or self.connection.ops.force_no_ordering() result.append('GROUP BY %s' % ', '.join(grouping)) if self._meta_ordering: order_by = None if having: result.append('HAVING %s' % having) params.extend(h_params) if self.query.explain_query: result.insert(0, self.connection.ops.explain_query_prefix( self.query.explain_format, **self.query.explain_options )) if order_by: ordering = [] for _, (o_sql, o_params, _) in order_by: ordering.append(o_sql) params.extend(o_params) result.append('ORDER BY %s' % ', '.join(ordering)) if with_limit_offset: result.append(self.connection.ops.limit_offset_sql(self.query.low_mark, self.query.high_mark)) if for_update_part and not self.connection.features.for_update_after_from: result.append(for_update_part) if self.query.subquery and extra_select: # If the query is used as a subquery, the extra selects would # result in more columns than the left-hand side expression is # expecting. This can happen when a subquery uses a combination # of order_by() and distinct(), forcing the ordering expressions # to be selected as well. Wrap the query in another subquery # to exclude extraneous selects. sub_selects = [] sub_params = [] for index, (select, _, alias) in enumerate(self.select, start=1): if not alias and with_col_aliases: alias = 'col%d' % index if alias: sub_selects.append("%s.%s" % ( self.connection.ops.quote_name('subquery'), self.connection.ops.quote_name(alias), )) else: select_clone = select.relabeled_clone({select.alias: 'subquery'}) subselect, subparams = select_clone.as_sql(self, self.connection) sub_selects.append(subselect) sub_params.extend(subparams) return 'SELECT %s FROM (%s) subquery' % ( ', '.join(sub_selects), ' '.join(result), ), tuple(sub_params + params) return ' '.join(result), tuple(params) finally: # Finally do cleanup - get rid of the joins we created above. self.query.reset_refcounts(refcounts_before) def get_default_columns(self, start_alias=None, opts=None, from_parent=None): """ Compute the default columns for selecting every field in the base model. Will sometimes be called to pull in related models (e.g. via select_related), in which case "opts" and "start_alias" will be given to provide a starting point for the traversal. 
Return a list of strings, quoted appropriately for use in SQL directly, as well as a set of aliases used in the select statement (if 'as_pairs' is True, return a list of (alias, col_name) pairs instead of strings as the first component and None as the second component). """ result = [] if opts is None: opts = self.query.get_meta() only_load = self.deferred_to_columns() start_alias = start_alias or self.query.get_initial_alias() # The 'seen_models' is used to optimize checking the needed parent # alias for a given field. This also includes None -> start_alias to # be used by local fields. seen_models = {None: start_alias} for field in opts.concrete_fields: model = field.model._meta.concrete_model # A proxy model will have a different model and concrete_model. We # will assign None if the field belongs to this model. if model == opts.model: model = None if from_parent and model is not None and issubclass( from_parent._meta.concrete_model, model._meta.concrete_model): # Avoid loading data for already loaded parents. # We end up here in the case select_related() resolution # proceeds from parent model to child model. In that case the # parent model data is already present in the SELECT clause, # and we want to avoid reloading the same data again. continue if field.model in only_load and field.attname not in only_load[field.model]: continue alias = self.query.join_parent_model(opts, model, start_alias, seen_models) column = field.get_col(alias) result.append(column) return result def get_distinct(self): """ Return a quoted list of fields to use in DISTINCT ON part of the query. This method can alter the tables in the query, and thus it must be called before get_from_clause(). """ result = [] params = [] opts = self.query.get_meta() for name in self.query.distinct_fields: parts = name.split(LOOKUP_SEP) _, targets, alias, joins, path, _, transform_function = self._setup_joins(parts, opts, None) targets, alias, _ = self.query.trim_joins(targets, joins, path) for target in targets: if name in self.query.annotation_select: result.append(name) else: r, p = self.compile(transform_function(target, alias)) result.append(r) params.append(p) return result, params def find_ordering_name(self, name, opts, alias=None, default_order='ASC', already_seen=None): """ Return the table alias (the name might be ambiguous, the alias will not be) and column name for ordering by the given 'name' parameter. The 'name' is of the form 'field1__field2__...__fieldN'. """ name, order = get_order_dir(name, default_order) descending = order == 'DESC' pieces = name.split(LOOKUP_SEP) field, targets, alias, joins, path, opts, transform_function = self._setup_joins(pieces, opts, alias) # If we get to this point and the field is a relation to another model, # append the default ordering for that model unless it is the pk # shortcut or the attribute name of the field that is specified. if field.is_relation and opts.ordering and getattr(field, 'attname', None) != name and name != 'pk': # Firstly, avoid infinite loops. 
already_seen = already_seen or set() join_tuple = tuple(getattr(self.query.alias_map[j], 'join_cols', None) for j in joins) if join_tuple in already_seen: raise FieldError('Infinite loop caused by ordering.') already_seen.add(join_tuple) results = [] for item in opts.ordering: if hasattr(item, 'resolve_expression') and not isinstance(item, OrderBy): item = item.desc() if descending else item.asc() if isinstance(item, OrderBy): results.append((item, False)) continue results.extend(self.find_ordering_name(item, opts, alias, order, already_seen)) return results targets, alias, _ = self.query.trim_joins(targets, joins, path) return [(OrderBy(transform_function(t, alias), descending=descending), False) for t in targets] def _setup_joins(self, pieces, opts, alias): """ Helper method for get_order_by() and get_distinct(). get_ordering() and get_distinct() must produce same target columns on same input, as the prefixes of get_ordering() and get_distinct() must match. Executing SQL where this is not true is an error. """ alias = alias or self.query.get_initial_alias() field, targets, opts, joins, path, transform_function = self.query.setup_joins(pieces, opts, alias) alias = joins[-1] return field, targets, alias, joins, path, opts, transform_function def get_from_clause(self): """ Return a list of strings that are joined together to go after the "FROM" part of the query, as well as a list any extra parameters that need to be included. Subclasses, can override this to create a from-clause via a "select". This should only be called after any SQL construction methods that might change the tables that are needed. This means the select columns, ordering, and distinct must be done first. """ result = [] params = [] for alias in tuple(self.query.alias_map): if not self.query.alias_refcount[alias]: continue try: from_clause = self.query.alias_map[alias] except KeyError: # Extra tables can end up in self.tables, but not in the # alias_map if they aren't in a join. That's OK. We skip them. continue clause_sql, clause_params = self.compile(from_clause) result.append(clause_sql) params.extend(clause_params) for t in self.query.extra_tables: alias, _ = self.query.table_alias(t) # Only add the alias if it's not already present (the table_alias() # call increments the refcount, so an alias refcount of one means # this is the only reference). if alias not in self.query.alias_map or self.query.alias_refcount[alias] == 1: result.append(', %s' % self.quote_name_unless_alias(alias)) return result, params def get_related_selections(self, select, opts=None, root_alias=None, cur_depth=1, requested=None, restricted=None): """ Fill in the information needed for a select_related query. The current depth is measured as the number of connections away from the root model (for example, cur_depth=1 means we are looking at models with direct connections to the root model). """ def _get_field_choices(): direct_choices = (f.name for f in opts.fields if f.is_relation) reverse_choices = ( f.field.related_query_name() for f in opts.related_objects if f.field.unique ) return chain(direct_choices, reverse_choices, self.query._filtered_relations) related_klass_infos = [] if not restricted and cur_depth > self.query.max_depth: # We've recursed far enough; bail out. return related_klass_infos if not opts: opts = self.query.get_meta() root_alias = self.query.get_initial_alias() only_load = self.query.get_loaded_field_names() # Setup for the case when only particular related fields should be # included in the related selection. 
fields_found = set() if requested is None: restricted = isinstance(self.query.select_related, dict) if restricted: requested = self.query.select_related def get_related_klass_infos(klass_info, related_klass_infos): klass_info['related_klass_infos'] = related_klass_infos for f in opts.fields: field_model = f.model._meta.concrete_model fields_found.add(f.name) if restricted: next = requested.get(f.name, {}) if not f.is_relation: # If a non-related field is used like a relation, # or if a single non-relational field is given. if next or f.name in requested: raise FieldError( "Non-relational field given in select_related: '%s'. " "Choices are: %s" % ( f.name, ", ".join(_get_field_choices()) or '(none)', ) ) else: next = False if not select_related_descend(f, restricted, requested, only_load.get(field_model)): continue klass_info = { 'model': f.remote_field.model, 'field': f, 'reverse': False, 'local_setter': f.set_cached_value, 'remote_setter': f.remote_field.set_cached_value if f.unique else lambda x, y: None, 'from_parent': False, } related_klass_infos.append(klass_info) select_fields = [] _, _, _, joins, _, _ = self.query.setup_joins( [f.name], opts, root_alias) alias = joins[-1] columns = self.get_default_columns(start_alias=alias, opts=f.remote_field.model._meta) for col in columns: select_fields.append(len(select)) select.append((col, None)) klass_info['select_fields'] = select_fields next_klass_infos = self.get_related_selections( select, f.remote_field.model._meta, alias, cur_depth + 1, next, restricted) get_related_klass_infos(klass_info, next_klass_infos) if restricted: related_fields = [ (o.field, o.related_model) for o in opts.related_objects if o.field.unique and not o.many_to_many ] for f, model in related_fields: if not select_related_descend(f, restricted, requested, only_load.get(model), reverse=True): continue related_field_name = f.related_query_name() fields_found.add(related_field_name) join_info = self.query.setup_joins([related_field_name], opts, root_alias) alias = join_info.joins[-1] from_parent = issubclass(model, opts.model) and model is not opts.model klass_info = { 'model': model, 'field': f, 'reverse': True, 'local_setter': f.remote_field.set_cached_value, 'remote_setter': f.set_cached_value, 'from_parent': from_parent, } related_klass_infos.append(klass_info) select_fields = [] columns = self.get_default_columns( start_alias=alias, opts=model._meta, from_parent=opts.model) for col in columns: select_fields.append(len(select)) select.append((col, None)) klass_info['select_fields'] = select_fields next = requested.get(f.related_query_name(), {}) next_klass_infos = self.get_related_selections( select, model._meta, alias, cur_depth + 1, next, restricted) get_related_klass_infos(klass_info, next_klass_infos) def local_setter(obj, from_obj): # Set a reverse fk object when relation is non-empty. if from_obj: f.remote_field.set_cached_value(from_obj, obj) def remote_setter(name, obj, from_obj): setattr(from_obj, name, obj) for name in list(requested): # Filtered relations work only on the topmost level. 
if cur_depth > 1: break if name in self.query._filtered_relations: fields_found.add(name) f, _, join_opts, joins, _, _ = self.query.setup_joins([name], opts, root_alias) model = join_opts.model alias = joins[-1] from_parent = issubclass(model, opts.model) and model is not opts.model klass_info = { 'model': model, 'field': f, 'reverse': True, 'local_setter': local_setter, 'remote_setter': partial(remote_setter, name), 'from_parent': from_parent, } related_klass_infos.append(klass_info) select_fields = [] columns = self.get_default_columns( start_alias=alias, opts=model._meta, from_parent=opts.model, ) for col in columns: select_fields.append(len(select)) select.append((col, None)) klass_info['select_fields'] = select_fields next_requested = requested.get(name, {}) next_klass_infos = self.get_related_selections( select, opts=model._meta, root_alias=alias, cur_depth=cur_depth + 1, requested=next_requested, restricted=restricted, ) get_related_klass_infos(klass_info, next_klass_infos) fields_not_found = set(requested).difference(fields_found) if fields_not_found: invalid_fields = ("'%s'" % s for s in fields_not_found) raise FieldError( 'Invalid field name(s) given in select_related: %s. ' 'Choices are: %s' % ( ', '.join(invalid_fields), ', '.join(_get_field_choices()) or '(none)', ) ) return related_klass_infos def get_select_for_update_of_arguments(self): """ Return a quoted list of arguments for the SELECT FOR UPDATE OF part of the query. """ def _get_parent_klass_info(klass_info): for parent_model, parent_link in klass_info['model']._meta.parents.items(): parent_list = parent_model._meta.get_parent_list() yield { 'model': parent_model, 'field': parent_link, 'reverse': False, 'select_fields': [ select_index for select_index in klass_info['select_fields'] # Selected columns from a model or its parents. if ( self.select[select_index][0].target.model == parent_model or self.select[select_index][0].target.model in parent_list ) ], } def _get_first_selected_col_from_model(klass_info): """ Find the first selected column from a model. If it doesn't exist, don't lock a model. select_fields is filled recursively, so it also contains fields from the parent models. 
""" for select_index in klass_info['select_fields']: if self.select[select_index][0].target.model == klass_info['model']: return self.select[select_index][0] def _get_field_choices(): """Yield all allowed field paths in breadth-first search order.""" queue = collections.deque([(None, self.klass_info)]) while queue: parent_path, klass_info = queue.popleft() if parent_path is None: path = [] yield 'self' else: field = klass_info['field'] if klass_info['reverse']: field = field.remote_field path = parent_path + [field.name] yield LOOKUP_SEP.join(path) queue.extend( (path, klass_info) for klass_info in _get_parent_klass_info(klass_info) ) queue.extend( (path, klass_info) for klass_info in klass_info.get('related_klass_infos', []) ) result = [] invalid_names = [] for name in self.query.select_for_update_of: klass_info = self.klass_info if name == 'self': col = _get_first_selected_col_from_model(klass_info) else: for part in name.split(LOOKUP_SEP): klass_infos = ( *klass_info.get('related_klass_infos', []), *_get_parent_klass_info(klass_info), ) for related_klass_info in klass_infos: field = related_klass_info['field'] if related_klass_info['reverse']: field = field.remote_field if field.name == part: klass_info = related_klass_info break else: klass_info = None break if klass_info is None: invalid_names.append(name) continue col = _get_first_selected_col_from_model(klass_info) if col is not None: if self.connection.features.select_for_update_of_column: result.append(self.compile(col)[0]) else: result.append(self.quote_name_unless_alias(col.alias)) if invalid_names: raise FieldError( 'Invalid field name(s) given in select_for_update(of=(...)): %s. ' 'Only relational fields followed in the query are allowed. ' 'Choices are: %s.' % ( ', '.join(invalid_names), ', '.join(_get_field_choices()), ) ) return result def deferred_to_columns(self): """ Convert the self.deferred_loading data structure to mapping of table names to sets of column names which are to be loaded. Return the dictionary. """ columns = {} self.query.deferred_to_data(columns, self.query.get_loaded_field_names_cb) return columns def get_converters(self, expressions): converters = {} for i, expression in enumerate(expressions): if expression: backend_converters = self.connection.ops.get_db_converters(expression) field_converters = expression.get_db_converters(self.connection) if backend_converters or field_converters: converters[i] = (backend_converters + field_converters, expression) return converters def apply_converters(self, rows, converters): connection = self.connection converters = list(converters.items()) for row in map(list, rows): for pos, (convs, expression) in converters: value = row[pos] for converter in convs: value = converter(value, expression, connection) row[pos] = value yield row def results_iter(self, results=None, tuple_expected=False, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE): """Return an iterator over the results from executing this query.""" if results is None: results = self.execute_sql(MULTI, chunked_fetch=chunked_fetch, chunk_size=chunk_size) fields = [s[0] for s in self.select[0:self.col_count]] converters = self.get_converters(fields) rows = chain.from_iterable(results) if converters: rows = self.apply_converters(rows, converters) if tuple_expected: rows = map(tuple, rows) return rows def has_results(self): """ Backends (e.g. NoSQL) can override this in order to use optimized versions of "query has any results." 
""" # This is always executed on a query clone, so we can modify self.query self.query.add_extra({'a': 1}, None, None, None, None, None) self.query.set_extra_mask(['a']) return bool(self.execute_sql(SINGLE)) def execute_sql(self, result_type=MULTI, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE): """ Run the query against the database and return the result(s). The return value is a single data item if result_type is SINGLE, or an iterator over the results if the result_type is MULTI. result_type is either MULTI (use fetchmany() to retrieve all rows), SINGLE (only retrieve a single row), or None. In this last case, the cursor is returned if any query is executed, since it's used by subclasses such as InsertQuery). It's possible, however, that no query is needed, as the filters describe an empty set. In that case, None is returned, to avoid any unnecessary database interaction. """ result_type = result_type or NO_RESULTS try: sql, params = self.as_sql() if not sql: raise EmptyResultSet except EmptyResultSet: if result_type == MULTI: return iter([]) else: return if chunked_fetch: cursor = self.connection.chunked_cursor() else: cursor = self.connection.cursor() try: cursor.execute(sql, params) except Exception: # Might fail for server-side cursors (e.g. connection closed) cursor.close() raise if result_type == CURSOR: # Give the caller the cursor to process and close. return cursor if result_type == SINGLE: try: val = cursor.fetchone() if val: return val[0:self.col_count] return val finally: # done with the cursor cursor.close() if result_type == NO_RESULTS: cursor.close() return result = cursor_iter( cursor, self.connection.features.empty_fetchmany_value, self.col_count if self.has_extra_select else None, chunk_size, ) if not chunked_fetch or not self.connection.features.can_use_chunked_reads: try: # If we are using non-chunked reads, we return the same data # structure as normally, but ensure it is all read into memory # before going any further. Use chunked_fetch if requested, # unless the database doesn't support it. return list(result) finally: # done with the cursor cursor.close() return result def as_subquery_condition(self, alias, columns, compiler): qn = compiler.quote_name_unless_alias qn2 = self.connection.ops.quote_name for index, select_col in enumerate(self.query.select): lhs_sql, lhs_params = self.compile(select_col) rhs = '%s.%s' % (qn(alias), qn2(columns[index])) self.query.where.add( RawSQL('%s = %s' % (lhs_sql, rhs), lhs_params), 'AND') sql, params = self.as_sql() return 'EXISTS (%s)' % sql, params def explain_query(self): result = list(self.execute_sql()) # Some backends return 1 item tuples with strings, and others return # tuples with integers and strings. Flatten them out into strings. for row in result[0]: if not isinstance(row, str): yield ' '.join(str(c) for c in row) else: yield row class SQLInsertCompiler(SQLCompiler): returning_fields = None returning_params = tuple() def field_as_sql(self, field, val): """ Take a field and a value intended to be saved on that field, and return placeholder SQL and accompanying params. Check for raw values, expressions, and fields with get_placeholder() defined in that order. When field is None, consider the value raw and use it as the placeholder, with no corresponding parameters returned. """ if field is None: # A field value of None means the value is raw. sql, params = val, [] elif hasattr(val, 'as_sql'): # This is an expression, let's compile it. 
sql, params = self.compile(val) elif hasattr(field, 'get_placeholder'): # Some fields (e.g. geo fields) need special munging before # they can be inserted. sql, params = field.get_placeholder(val, self, self.connection), [val] else: # Return the common case for the placeholder sql, params = '%s', [val] # The following hook is only used by Oracle Spatial, which sometimes # needs to yield 'NULL' and [] as its placeholder and params instead # of '%s' and [None]. The 'NULL' placeholder is produced earlier by # OracleOperations.get_geom_placeholder(). The following line removes # the corresponding None parameter. See ticket #10888. params = self.connection.ops.modify_insert_params(sql, params) return sql, params def prepare_value(self, field, value): """ Prepare a value to be used in a query by resolving it if it is an expression and otherwise calling the field's get_db_prep_save(). """ if hasattr(value, 'resolve_expression'): value = value.resolve_expression(self.query, allow_joins=False, for_save=True) # Don't allow values containing Col expressions. They refer to # existing columns on a row, but in the case of insert the row # doesn't exist yet. if value.contains_column_references: raise ValueError( 'Failed to insert expression "%s" on %s. F() expressions ' 'can only be used to update, not to insert.' % (value, field) ) if value.contains_aggregate: raise FieldError( 'Aggregate functions are not allowed in this query ' '(%s=%r).' % (field.name, value) ) if value.contains_over_clause: raise FieldError( 'Window expressions are not allowed in this query (%s=%r).' % (field.name, value) ) else: value = field.get_db_prep_save(value, connection=self.connection) return value def pre_save_val(self, field, obj): """ Get the given field's value off the given obj. pre_save() is used for things like auto_now on DateTimeField. Skip it if this is a raw query. """ if self.query.raw: return getattr(obj, field.attname) return field.pre_save(obj, add=True) def assemble_as_sql(self, fields, value_rows): """ Take a sequence of N fields and a sequence of M rows of values, and generate placeholder SQL and parameters for each field and value. Return a pair containing: * a sequence of M rows of N SQL placeholder strings, and * a sequence of M rows of corresponding parameter values. Each placeholder string may contain any number of '%s' interpolation strings, and each parameter row will contain exactly as many params as the total number of '%s's in the corresponding placeholder row. """ if not value_rows: return [], [] # list of (sql, [params]) tuples for each object to be saved # Shape: [n_objs][n_fields][2] rows_of_fields_as_sql = ( (self.field_as_sql(field, v) for field, v in zip(fields, row)) for row in value_rows ) # tuple like ([sqls], [[params]s]) for each object to be saved # Shape: [n_objs][2][n_fields] sql_and_param_pair_rows = (zip(*row) for row in rows_of_fields_as_sql) # Extract separate lists for placeholders and params. # Each of these has shape [n_objs][n_fields] placeholder_rows, param_rows = zip(*sql_and_param_pair_rows) # Params for each field are still lists, and need to be flattened. param_rows = [[p for ps in row for p in ps] for row in param_rows] return placeholder_rows, param_rows def as_sql(self): # We don't need quote_name_unless_alias() here, since these are all # going to be column names (so we can avoid the extra overhead). 
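        # (INSERT statements never reference table aliases, so the
        # alias-aware quote_name_unless_alias() would be wasted work.)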
qn = self.connection.ops.quote_name opts = self.query.get_meta() insert_statement = self.connection.ops.insert_statement(ignore_conflicts=self.query.ignore_conflicts) result = ['%s %s' % (insert_statement, qn(opts.db_table))] fields = self.query.fields or [opts.pk] result.append('(%s)' % ', '.join(qn(f.column) for f in fields)) if self.query.fields: value_rows = [ [self.prepare_value(field, self.pre_save_val(field, obj)) for field in fields] for obj in self.query.objs ] else: # An empty object. value_rows = [[self.connection.ops.pk_default_value()] for _ in self.query.objs] fields = [None] # Currently the backends just accept values when generating bulk # queries and generate their own placeholders. Doing that isn't # necessary and it should be possible to use placeholders and # expressions in bulk inserts too. can_bulk = (not self.returning_fields and self.connection.features.has_bulk_insert) placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows) ignore_conflicts_suffix_sql = self.connection.ops.ignore_conflicts_suffix_sql( ignore_conflicts=self.query.ignore_conflicts ) if self.returning_fields and self.connection.features.can_return_columns_from_insert: if self.connection.features.can_return_rows_from_bulk_insert: result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows)) params = param_rows else: result.append("VALUES (%s)" % ", ".join(placeholder_rows[0])) params = [param_rows[0]] if ignore_conflicts_suffix_sql: result.append(ignore_conflicts_suffix_sql) # Skip empty r_sql to allow subclasses to customize behavior for # 3rd party backends. Refs #19096. r_sql, self.returning_params = self.connection.ops.return_insert_columns(self.returning_fields) if r_sql: result.append(r_sql) params += [self.returning_params] return [(" ".join(result), tuple(chain.from_iterable(params)))] if can_bulk: result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows)) if ignore_conflicts_suffix_sql: result.append(ignore_conflicts_suffix_sql) return [(" ".join(result), tuple(p for ps in param_rows for p in ps))] else: if ignore_conflicts_suffix_sql: result.append(ignore_conflicts_suffix_sql) return [ (" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals) for p, vals in zip(placeholder_rows, param_rows) ] def execute_sql(self, returning_fields=None): assert not ( returning_fields and len(self.query.objs) != 1 and not self.connection.features.can_return_rows_from_bulk_insert ) self.returning_fields = returning_fields with self.connection.cursor() as cursor: for sql, params in self.as_sql(): cursor.execute(sql, params) if not self.returning_fields: return [] if self.connection.features.can_return_rows_from_bulk_insert and len(self.query.objs) > 1: return self.connection.ops.fetch_returned_insert_rows(cursor) if self.connection.features.can_return_columns_from_insert: assert len(self.query.objs) == 1 return [self.connection.ops.fetch_returned_insert_columns(cursor, self.returning_params)] return [(self.connection.ops.last_insert_id( cursor, self.query.get_meta().db_table, self.query.get_meta().pk.column ),)] class SQLDeleteCompiler(SQLCompiler): @cached_property def single_alias(self): # Ensure base table is in aliases. 
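        # (On a freshly cloned query, alias_map may still be empty;
        # resolving the initial alias first keeps the refcount check below
        # from misclassifying a simple single-table delete.)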
self.query.get_initial_alias() return sum(self.query.alias_refcount[t] > 0 for t in self.query.alias_map) == 1 def _as_sql(self, query): result = [ 'DELETE FROM %s' % self.quote_name_unless_alias(query.base_table) ] where, params = self.compile(query.where) if where: result.append('WHERE %s' % where) return ' '.join(result), tuple(params) def as_sql(self): """ Create the SQL for this query. Return the SQL string and list of parameters. """ if self.single_alias: return self._as_sql(self.query) innerq = self.query.clone() innerq.__class__ = Query innerq.clear_select_clause() pk = self.query.model._meta.pk innerq.select = [ pk.get_col(self.query.get_initial_alias()) ] outerq = Query(self.query.model) outerq.where = self.query.where_class() outerq.add_q(Q(pk__in=innerq)) return self._as_sql(outerq) class SQLUpdateCompiler(SQLCompiler): def as_sql(self): """ Create the SQL for this query. Return the SQL string and list of parameters. """ self.pre_sql_setup() if not self.query.values: return '', () qn = self.quote_name_unless_alias values, update_params = [], [] for field, model, val in self.query.values: if hasattr(val, 'resolve_expression'): val = val.resolve_expression(self.query, allow_joins=False, for_save=True) if val.contains_aggregate: raise FieldError( 'Aggregate functions are not allowed in this query ' '(%s=%r).' % (field.name, val) ) if val.contains_over_clause: raise FieldError( 'Window expressions are not allowed in this query ' '(%s=%r).' % (field.name, val) ) elif hasattr(val, 'prepare_database_save'): if field.remote_field: val = field.get_db_prep_save( val.prepare_database_save(field), connection=self.connection, ) else: raise TypeError( "Tried to update field %s with a model instance, %r. " "Use a value compatible with %s." % (field, val, field.__class__.__name__) ) else: val = field.get_db_prep_save(val, connection=self.connection) # Getting the placeholder for the field. if hasattr(field, 'get_placeholder'): placeholder = field.get_placeholder(val, self, self.connection) else: placeholder = '%s' name = field.column if hasattr(val, 'as_sql'): sql, params = self.compile(val) values.append('%s = %s' % (qn(name), placeholder % sql)) update_params.extend(params) elif val is not None: values.append('%s = %s' % (qn(name), placeholder)) update_params.append(val) else: values.append('%s = NULL' % qn(name)) table = self.query.base_table result = [ 'UPDATE %s SET' % qn(table), ', '.join(values), ] where, params = self.compile(self.query.where) if where: result.append('WHERE %s' % where) return ' '.join(result), tuple(update_params + params) def execute_sql(self, result_type): """ Execute the specified update. Return the number of rows affected by the primary update query. The "primary update query" is the first non-empty query that is executed. Row counts for any subsequent, related queries are not available. """ cursor = super().execute_sql(result_type) try: rows = cursor.rowcount if cursor else 0 is_empty = cursor is None finally: if cursor: cursor.close() for query in self.query.get_related_updates(): aux_rows = query.get_compiler(self.using).execute_sql(result_type) if is_empty and aux_rows: rows = aux_rows is_empty = False return rows def pre_sql_setup(self): """ If the update depends on results from other tables, munge the "where" conditions to match the format required for (portable) SQL updates. If multiple updates are required, pull out the id values to update at this point so that they don't change as a result of the progressive updates. 
""" refcounts_before = self.query.alias_refcount.copy() # Ensure base table is in the query self.query.get_initial_alias() count = self.query.count_active_tables() if not self.query.related_updates and count == 1: return query = self.query.chain(klass=Query) query.select_related = False query.clear_ordering(True) query.extra = {} query.select = [] query.add_fields([query.get_meta().pk.name]) super().pre_sql_setup() must_pre_select = count > 1 and not self.connection.features.update_can_self_select # Now we adjust the current query: reset the where clause and get rid # of all the tables we don't need (since they're in the sub-select). self.query.where = self.query.where_class() if self.query.related_updates or must_pre_select: # Either we're using the idents in multiple update queries (so # don't want them to change), or the db backend doesn't support # selecting from the updating table (e.g. MySQL). idents = [] for rows in query.get_compiler(self.using).execute_sql(MULTI): idents.extend(r[0] for r in rows) self.query.add_filter(('pk__in', idents)) self.query.related_ids = idents else: # The fast path. Filters and updates in one query. self.query.add_filter(('pk__in', query)) self.query.reset_refcounts(refcounts_before) class SQLAggregateCompiler(SQLCompiler): def as_sql(self): """ Create the SQL for this query. Return the SQL string and list of parameters. """ sql, params = [], [] for annotation in self.query.annotation_select.values(): ann_sql, ann_params = self.compile(annotation) ann_sql, ann_params = annotation.select_format(self, ann_sql, ann_params) sql.append(ann_sql) params.extend(ann_params) self.col_count = len(self.query.annotation_select) sql = ', '.join(sql) params = tuple(params) sql = 'SELECT %s FROM (%s) subquery' % (sql, self.query.subquery) params = params + self.query.sub_params return sql, params def cursor_iter(cursor, sentinel, col_count, itersize): """ Yield blocks of rows from a cursor and ensure the cursor is closed when done. """ try: for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel): yield rows if col_count is None else [r[:col_count] for r in rows] finally: cursor.close()
4c69d61d39abb5fa8ac8890a53613fa5f6310057077122fa566fe11b3419a2a9
import asyncio import logging import types from asgiref.sync import async_to_sync, sync_to_async from django.conf import settings from django.core.exceptions import ImproperlyConfigured, MiddlewareNotUsed from django.core.signals import request_finished from django.db import connections, transaction from django.urls import get_resolver, set_urlconf from django.utils.log import log_response from django.utils.module_loading import import_string from .exception import convert_exception_to_response logger = logging.getLogger('django.request') class BaseHandler: _view_middleware = None _template_response_middleware = None _exception_middleware = None _middleware_chain = None def load_middleware(self, is_async=False): """ Populate middleware lists from settings.MIDDLEWARE. Must be called after the environment is fixed (see __call__ in subclasses). """ self._view_middleware = [] self._template_response_middleware = [] self._exception_middleware = [] get_response = self._get_response_async if is_async else self._get_response handler = convert_exception_to_response(get_response) handler_is_async = is_async for middleware_path in reversed(settings.MIDDLEWARE): middleware = import_string(middleware_path) middleware_can_sync = getattr(middleware, 'sync_capable', True) middleware_can_async = getattr(middleware, 'async_capable', False) if not middleware_can_sync and not middleware_can_async: raise RuntimeError( 'Middleware %s must have at least one of ' 'sync_capable/async_capable set to True.' % middleware_path ) elif not handler_is_async and middleware_can_sync: middleware_is_async = False else: middleware_is_async = middleware_can_async try: # Adapt handler, if needed. handler = self.adapt_method_mode( middleware_is_async, handler, handler_is_async, debug=settings.DEBUG, name='middleware %s' % middleware_path, ) mw_instance = middleware(handler) except MiddlewareNotUsed as exc: if settings.DEBUG: if str(exc): logger.debug('MiddlewareNotUsed(%r): %s', middleware_path, exc) else: logger.debug('MiddlewareNotUsed: %r', middleware_path) continue if mw_instance is None: raise ImproperlyConfigured( 'Middleware factory %s returned None.' % middleware_path ) if hasattr(mw_instance, 'process_view'): self._view_middleware.insert( 0, self.adapt_method_mode(is_async, mw_instance.process_view), ) if hasattr(mw_instance, 'process_template_response'): self._template_response_middleware.append( self.adapt_method_mode(is_async, mw_instance.process_template_response), ) if hasattr(mw_instance, 'process_exception'): # The exception-handling stack is still always synchronous for # now, so adapt that way. self._exception_middleware.append( self.adapt_method_mode(False, mw_instance.process_exception), ) handler = convert_exception_to_response(mw_instance) handler_is_async = middleware_is_async # Adapt the top of the stack, if needed. handler = self.adapt_method_mode(is_async, handler, handler_is_async) # We only assign to this when initialization is complete as it is used # as a flag for initialization being complete. 
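        # (The chain itself may just have been wrapped by adapt_method_mode()
        # above, so only the fully adapted callable is published.)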
self._middleware_chain = handler def adapt_method_mode( self, is_async, method, method_is_async=None, debug=False, name=None, ): """ Adapt a method to be in the correct "mode": - If is_async is False: - Synchronous methods are left alone - Asynchronous methods are wrapped with async_to_sync - If is_async is True: - Synchronous methods are wrapped with sync_to_async() - Asynchronous methods are left alone """ if method_is_async is None: method_is_async = asyncio.iscoroutinefunction(method) if debug and not name: name = name or 'method %s()' % method.__qualname__ if is_async: if not method_is_async: if debug: logger.debug('Synchronous %s adapted.', name) return sync_to_async(method, thread_sensitive=True) elif method_is_async: if debug: logger.debug('Asynchronous %s adapted.', name) return async_to_sync(method) return method def get_response(self, request): """Return an HttpResponse object for the given HttpRequest.""" # Setup default url resolver for this thread set_urlconf(settings.ROOT_URLCONF) response = self._middleware_chain(request) response._resource_closers.append(request.close) if response.status_code >= 400: log_response( '%s: %s', response.reason_phrase, request.path, response=response, request=request, ) return response async def get_response_async(self, request): """ Asynchronous version of get_response. Funneling everything, including WSGI, into a single async get_response() is too slow. Avoid the context switch by using a separate async response path. """ # Setup default url resolver for this thread. set_urlconf(settings.ROOT_URLCONF) response = await self._middleware_chain(request) response._resource_closers.append(request.close) if response.status_code >= 400: await sync_to_async(log_response)( '%s: %s', response.reason_phrase, request.path, response=response, request=request, ) return response def _get_response(self, request): """ Resolve and call the view, then apply view, exception, and template_response middleware. This method is everything that happens inside the request/response middleware. """ response = None callback, callback_args, callback_kwargs = self.resolve_request(request) # Apply view middleware for middleware_method in self._view_middleware: response = middleware_method(request, callback, callback_args, callback_kwargs) if response: break if response is None: wrapped_callback = self.make_view_atomic(callback) # If it is an asynchronous view, run it in a subthread. if asyncio.iscoroutinefunction(wrapped_callback): wrapped_callback = async_to_sync(wrapped_callback) try: response = wrapped_callback(request, *callback_args, **callback_kwargs) except Exception as e: response = self.process_exception_by_middleware(e, request) if response is None: raise # Complain if the view returned None (a common error). self.check_response(response, callback) # If the response supports deferred rendering, apply template # response middleware and then render the response if hasattr(response, 'render') and callable(response.render): for middleware_method in self._template_response_middleware: response = middleware_method(request, response) # Complain if the template response middleware returned None (a common error). 
self.check_response( response, middleware_method, name='%s.process_template_response' % ( middleware_method.__self__.__class__.__name__, ) ) try: response = response.render() except Exception as e: response = self.process_exception_by_middleware(e, request) if response is None: raise return response async def _get_response_async(self, request): """ Resolve and call the view, then apply view, exception, and template_response middleware. This method is everything that happens inside the request/response middleware. """ response = None callback, callback_args, callback_kwargs = self.resolve_request(request) # Apply view middleware. for middleware_method in self._view_middleware: response = await middleware_method(request, callback, callback_args, callback_kwargs) if response: break if response is None: wrapped_callback = self.make_view_atomic(callback) # If it is a synchronous view, run it in a subthread if not asyncio.iscoroutinefunction(wrapped_callback): wrapped_callback = sync_to_async(wrapped_callback, thread_sensitive=True) try: response = await wrapped_callback(request, *callback_args, **callback_kwargs) except Exception as e: response = await sync_to_async( self.process_exception_by_middleware, thread_sensitive=True, )(e, request) if response is None: raise # Complain if the view returned None or an uncalled coroutine. self.check_response(response, callback) # If the response supports deferred rendering, apply template # response middleware and then render the response if hasattr(response, 'render') and callable(response.render): for middleware_method in self._template_response_middleware: response = await middleware_method(request, response) # Complain if the template response middleware returned None or # an uncalled coroutine. self.check_response( response, middleware_method, name='%s.process_template_response' % ( middleware_method.__self__.__class__.__name__, ) ) try: if asyncio.iscoroutinefunction(response.render): response = await response.render() else: response = await sync_to_async(response.render, thread_sensitive=True)() except Exception as e: response = await sync_to_async( self.process_exception_by_middleware, thread_sensitive=True, )(e, request) if response is None: raise # Make sure the response is not a coroutine if asyncio.iscoroutine(response): raise RuntimeError('Response is still a coroutine.') return response def resolve_request(self, request): """ Retrieve/set the urlconf for the request. Return the view resolved, with its args and kwargs. """ # Work out the resolver. if hasattr(request, 'urlconf'): urlconf = request.urlconf set_urlconf(urlconf) resolver = get_resolver(urlconf) else: resolver = get_resolver() # Resolve the view, and assign the match object back to the request. resolver_match = resolver.resolve(request.path_info) request.resolver_match = resolver_match return resolver_match def check_response(self, response, callback, name=None): """ Raise an error if the view returned None or an uncalled coroutine. """ if not(response is None or asyncio.iscoroutine(response)): return if not name: if isinstance(callback, types.FunctionType): # FBV name = 'The view %s.%s' % (callback.__module__, callback.__name__) else: # CBV name = 'The view %s.%s.__call__' % ( callback.__module__, callback.__class__.__name__, ) if response is None: raise ValueError( "%s didn't return an HttpResponse object. It returned None " "instead." % name ) elif asyncio.iscoroutine(response): raise ValueError( "%s didn't return an HttpResponse object. 
It returned an " "unawaited coroutine instead. You may need to add an 'await' " "into your view." % name ) # Other utility methods. def make_view_atomic(self, view): non_atomic_requests = getattr(view, '_non_atomic_requests', set()) for db in connections.all(): if db.settings_dict['ATOMIC_REQUESTS'] and db.alias not in non_atomic_requests: if asyncio.iscoroutinefunction(view): raise RuntimeError( 'You cannot use ATOMIC_REQUESTS with async views.' ) view = transaction.atomic(using=db.alias)(view) return view def process_exception_by_middleware(self, exception, request): """ Pass the exception to the exception middleware. If no middleware return a response for this exception, return None. """ for middleware_method in self._exception_middleware: response = middleware_method(request, exception) if response: return response return None def reset_urlconf(sender, **kwargs): """Reset the URLconf after each request is finished.""" set_urlconf(None) request_finished.connect(reset_urlconf)
4dcb8c2c69ccf3dde9e71d73b024f90148bf8da5e44429ab01539ba12d4181c1
from django.contrib.postgres.signals import (
    get_citext_oids, get_hstore_oids, register_type_handlers,
)
from django.db import NotSupportedError, router
from django.db.migrations import AddIndex, RemoveIndex
from django.db.migrations.operations.base import Operation


class CreateExtension(Operation):
    reversible = True

    def __init__(self, name):
        self.name = name

    def state_forwards(self, app_label, state):
        pass

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        if (
            schema_editor.connection.vendor != 'postgresql' or
            not router.allow_migrate(schema_editor.connection.alias, app_label)
        ):
            return
        if not self.extension_exists(schema_editor, self.name):
            schema_editor.execute(
                'CREATE EXTENSION IF NOT EXISTS %s' % schema_editor.quote_name(self.name)
            )
        # Clear cached, stale oids.
        get_hstore_oids.cache_clear()
        get_citext_oids.cache_clear()
        # Registering new type handlers cannot be done before the extension is
        # installed, otherwise a subsequent data migration would use the same
        # connection.
        register_type_handlers(schema_editor.connection)

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        if not router.allow_migrate(schema_editor.connection.alias, app_label):
            return
        if self.extension_exists(schema_editor, self.name):
            schema_editor.execute(
                'DROP EXTENSION IF EXISTS %s' % schema_editor.quote_name(self.name)
            )
        # Clear cached, stale oids.
        get_hstore_oids.cache_clear()
        get_citext_oids.cache_clear()

    def extension_exists(self, schema_editor, extension):
        with schema_editor.connection.cursor() as cursor:
            cursor.execute(
                'SELECT 1 FROM pg_extension WHERE extname = %s',
                [extension],
            )
            return bool(cursor.fetchone())

    def describe(self):
        return "Creates extension %s" % self.name

    @property
    def migration_name_fragment(self):
        return 'create_extension_%s' % self.name


class BloomExtension(CreateExtension):

    def __init__(self):
        self.name = 'bloom'


class BtreeGinExtension(CreateExtension):

    def __init__(self):
        self.name = 'btree_gin'


class BtreeGistExtension(CreateExtension):

    def __init__(self):
        self.name = 'btree_gist'


class CITextExtension(CreateExtension):

    def __init__(self):
        self.name = 'citext'


class CryptoExtension(CreateExtension):

    def __init__(self):
        self.name = 'pgcrypto'


class HStoreExtension(CreateExtension):

    def __init__(self):
        self.name = 'hstore'


class TrigramExtension(CreateExtension):

    def __init__(self):
        self.name = 'pg_trgm'


class UnaccentExtension(CreateExtension):

    def __init__(self):
        self.name = 'unaccent'


class NotInTransactionMixin:
    def _ensure_not_in_transaction(self, schema_editor):
        if schema_editor.connection.in_atomic_block:
            raise NotSupportedError(
                'The %s operation cannot be executed inside a transaction '
                '(set atomic = False on the migration).'
                % self.__class__.__name__
            )


class AddIndexConcurrently(NotInTransactionMixin, AddIndex):
    """Create an index using PostgreSQL's CREATE INDEX CONCURRENTLY syntax."""
    atomic = False

    def describe(self):
        return 'Concurrently create index %s on field(s) %s of model %s' % (
            self.index.name,
            ', '.join(self.index.fields),
            self.model_name,
        )

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        self._ensure_not_in_transaction(schema_editor)
        model = to_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, model):
            schema_editor.add_index(model, self.index, concurrently=True)

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        self._ensure_not_in_transaction(schema_editor)
        model = from_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, model):
            schema_editor.remove_index(model, self.index, concurrently=True)


class RemoveIndexConcurrently(NotInTransactionMixin, RemoveIndex):
    """Remove an index using PostgreSQL's DROP INDEX CONCURRENTLY syntax."""
    atomic = False

    def describe(self):
        return 'Concurrently remove index %s from %s' % (self.name, self.model_name)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        self._ensure_not_in_transaction(schema_editor)
        model = from_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, model):
            from_model_state = from_state.models[app_label, self.model_name_lower]
            index = from_model_state.get_index_by_name(self.name)
            schema_editor.remove_index(model, index, concurrently=True)

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        self._ensure_not_in_transaction(schema_editor)
        model = to_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, model):
            to_model_state = to_state.models[app_label, self.model_name_lower]
            index = to_model_state.get_index_by_name(self.name)
            schema_editor.add_index(model, index, concurrently=True)
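
# Illustrative sketch, not part of the module above, of how these operations
# are typically combined in a project migration; the app label, dependency,
# model, and index names below are hypothetical. atomic = False is required
# because CREATE/DROP INDEX CONCURRENTLY cannot run inside a transaction,
# which is exactly what _ensure_not_in_transaction() enforces.
from django.db import migrations, models


class _ExampleConcurrentIndexMigration(migrations.Migration):
    atomic = False  # mandatory for the concurrent index operations above
    dependencies = [('blog', '0002_post')]  # hypothetical dependency
    operations = [
        TrigramExtension(),  # issues CREATE EXTENSION IF NOT EXISTS pg_trgm
        AddIndexConcurrently(
            'Post',
            models.Index(fields=['title'], name='blog_post_title_idx'),
        ),
    ]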
63a27e0500b8373a2c998597a110029846d6fb28f937f4ed46193e18cbf8527b
import re
from datetime import date, datetime
from decimal import Decimal

from django import template
from django.template import defaultfilters
from django.utils.formats import number_format
from django.utils.safestring import mark_safe
from django.utils.timezone import is_aware, utc
from django.utils.translation import (
    gettext as _, gettext_lazy, ngettext, ngettext_lazy, npgettext_lazy,
    pgettext, round_away_from_one,
)

register = template.Library()


@register.filter(is_safe=True)
def ordinal(value):
    """
    Convert an integer to its ordinal as a string. 1 is '1st', 2 is '2nd',
    3 is '3rd', etc. Works for any integer.
    """
    try:
        value = int(value)
    except (TypeError, ValueError):
        return value
    if value % 100 in (11, 12, 13):
        # Translators: Ordinal format for 11 (11th), 12 (12th), and 13 (13th).
        value = pgettext('ordinal 11, 12, 13', '{}th').format(value)
    else:
        templates = (
            # Translators: Ordinal format when value ends with 0, e.g. 80th.
            pgettext('ordinal 0', '{}th'),
            # Translators: Ordinal format when value ends with 1, e.g. 81st, except 11.
            pgettext('ordinal 1', '{}st'),
            # Translators: Ordinal format when value ends with 2, e.g. 82nd, except 12.
            pgettext('ordinal 2', '{}nd'),
            # Translators: Ordinal format when value ends with 3, e.g. 83rd, except 13.
            pgettext('ordinal 3', '{}rd'),
            # Translators: Ordinal format when value ends with 4, e.g. 84th.
            pgettext('ordinal 4', '{}th'),
            # Translators: Ordinal format when value ends with 5, e.g. 85th.
            pgettext('ordinal 5', '{}th'),
            # Translators: Ordinal format when value ends with 6, e.g. 86th.
            pgettext('ordinal 6', '{}th'),
            # Translators: Ordinal format when value ends with 7, e.g. 87th.
            pgettext('ordinal 7', '{}th'),
            # Translators: Ordinal format when value ends with 8, e.g. 88th.
            pgettext('ordinal 8', '{}th'),
            # Translators: Ordinal format when value ends with 9, e.g. 89th.
            pgettext('ordinal 9', '{}th'),
        )
        value = templates[value % 10].format(value)
    # Mark value safe so i18n does not break with <sup> or <sub>; see #19988.
    return mark_safe(value)


@register.filter(is_safe=True)
def intcomma(value, use_l10n=True):
    """
    Convert an integer to a string containing commas every three digits.
    For example, 3000 becomes '3,000' and 45000 becomes '45,000'.
    """
    if use_l10n:
        try:
            if not isinstance(value, (float, Decimal)):
                value = int(value)
        except (TypeError, ValueError):
            return intcomma(value, False)
        else:
            return number_format(value, use_l10n=True, force_grouping=True)
    orig = str(value)
    new = re.sub(r"^(-?\d+)(\d{3})", r'\g<1>,\g<2>', orig)
    if orig == new:
        return new
    else:
        return intcomma(new, use_l10n)


# A tuple mapping each power of ten to the converter for its large-number name.
intword_converters = (
    (6, lambda number: ngettext('%(value)s million', '%(value)s million', number)),
    (9, lambda number: ngettext('%(value)s billion', '%(value)s billion', number)),
    (12, lambda number: ngettext('%(value)s trillion', '%(value)s trillion', number)),
    (15, lambda number: ngettext('%(value)s quadrillion', '%(value)s quadrillion', number)),
    (18, lambda number: ngettext('%(value)s quintillion', '%(value)s quintillion', number)),
    (21, lambda number: ngettext('%(value)s sextillion', '%(value)s sextillion', number)),
    (24, lambda number: ngettext('%(value)s septillion', '%(value)s septillion', number)),
    (27, lambda number: ngettext('%(value)s octillion', '%(value)s octillion', number)),
    (30, lambda number: ngettext('%(value)s nonillion', '%(value)s nonillion', number)),
    (33, lambda number: ngettext('%(value)s decillion', '%(value)s decillion', number)),
    (100, lambda number: ngettext('%(value)s googol', '%(value)s googol', number)),
)


@register.filter(is_safe=False)
def intword(value):
    """
    Convert a large integer to a friendly text representation. Works best
    for numbers over 1 million. For example, 1000000 becomes '1.0 million',
    1200000 becomes '1.2 million' and 1200000000 becomes '1.2 billion'.
    """
    try:
        value = int(value)
    except (TypeError, ValueError):
        return value
    abs_value = abs(value)
    if abs_value < 1000000:
        return value

    for exponent, converter in intword_converters:
        large_number = 10 ** exponent
        if abs_value < large_number * 1000:
            new_value = value / large_number
            rounded_value = round_away_from_one(new_value)
            return converter(abs(rounded_value)) % {
                'value': defaultfilters.floatformat(new_value, 1),
            }
    return value


@register.filter(is_safe=True)
def apnumber(value):
    """
    For numbers 1-9, return the number spelled out. Otherwise, return the
    number. This follows Associated Press style.
    """
    try:
        value = int(value)
    except (TypeError, ValueError):
        return value
    if not 0 < value < 10:
        return value
    return (_('one'), _('two'), _('three'), _('four'), _('five'),
            _('six'), _('seven'), _('eight'), _('nine'))[value - 1]


# Perform the comparison in the default time zone when USE_TZ = True
# (unless a specific time zone has been applied with the |timezone filter).
@register.filter(expects_localtime=True)
def naturalday(value, arg=None):
    """
    For date values that are tomorrow, today, or yesterday compared to the
    present day, return a matching string ('today', 'tomorrow', or
    'yesterday'). Otherwise, return a string formatted according to
    settings.DATE_FORMAT.
    """
    tzinfo = getattr(value, 'tzinfo', None)
    try:
        value = date(value.year, value.month, value.day)
    except AttributeError:
        # Passed value wasn't a date object.
        return value
    today = datetime.now(tzinfo).date()
    delta = value - today
    if delta.days == 0:
        return _('today')
    elif delta.days == 1:
        return _('tomorrow')
    elif delta.days == -1:
        return _('yesterday')
    return defaultfilters.date(value, arg)


# This filter doesn't require expects_localtime=True because it deals properly
# with both naive and aware datetimes. Therefore avoid the cost of conversion.
@register.filter
def naturaltime(value):
    """
    For date and time values, show how many seconds, minutes, or hours ago
    they occurred compared to the current timestamp, returning a
    representative string.
    """
    return NaturalTimeFormatter.string_for(value)


class NaturalTimeFormatter:
    time_strings = {
        # Translators: delta will contain a string like '2 months' or '1 month, 2 weeks'
        'past-day': gettext_lazy('%(delta)s ago'),
        # Translators: please keep a non-breaking space (U+00A0) between count
        # and time unit.
        'past-hour': ngettext_lazy('an hour ago', '%(count)s hours ago', 'count'),
        # Translators: please keep a non-breaking space (U+00A0) between count
        # and time unit.
        'past-minute': ngettext_lazy('a minute ago', '%(count)s minutes ago', 'count'),
        # Translators: please keep a non-breaking space (U+00A0) between count
        # and time unit.
        'past-second': ngettext_lazy('a second ago', '%(count)s seconds ago', 'count'),
        'now': gettext_lazy('now'),
        # Translators: please keep a non-breaking space (U+00A0) between count
        # and time unit.
        'future-second': ngettext_lazy('a second from now', '%(count)s seconds from now', 'count'),
        # Translators: please keep a non-breaking space (U+00A0) between count
        # and time unit.
        'future-minute': ngettext_lazy('a minute from now', '%(count)s minutes from now', 'count'),
        # Translators: please keep a non-breaking space (U+00A0) between count
        # and time unit.
        'future-hour': ngettext_lazy('an hour from now', '%(count)s hours from now', 'count'),
        # Translators: delta will contain a string like '2 months' or '1 month, 2 weeks'
        'future-day': gettext_lazy('%(delta)s from now'),
    }
    past_substrings = {
        # Translators: 'naturaltime-past' strings will be included in '%(delta)s ago'
        'year': npgettext_lazy('naturaltime-past', '%d year', '%d years'),
        'month': npgettext_lazy('naturaltime-past', '%d month', '%d months'),
        'week': npgettext_lazy('naturaltime-past', '%d week', '%d weeks'),
        'day': npgettext_lazy('naturaltime-past', '%d day', '%d days'),
        'hour': npgettext_lazy('naturaltime-past', '%d hour', '%d hours'),
        'minute': npgettext_lazy('naturaltime-past', '%d minute', '%d minutes'),
    }
    future_substrings = {
        # Translators: 'naturaltime-future' strings will be included in '%(delta)s from now'
        'year': npgettext_lazy('naturaltime-future', '%d year', '%d years'),
        'month': npgettext_lazy('naturaltime-future', '%d month', '%d months'),
        'week': npgettext_lazy('naturaltime-future', '%d week', '%d weeks'),
        'day': npgettext_lazy('naturaltime-future', '%d day', '%d days'),
        'hour': npgettext_lazy('naturaltime-future', '%d hour', '%d hours'),
        'minute': npgettext_lazy('naturaltime-future', '%d minute', '%d minutes'),
    }

    @classmethod
    def string_for(cls, value):
        if not isinstance(value, date):  # datetime is a subclass of date
            return value

        now = datetime.now(utc if is_aware(value) else None)
        if value < now:
            delta = now - value
            if delta.days != 0:
                return cls.time_strings['past-day'] % {
                    'delta': defaultfilters.timesince(value, now, time_strings=cls.past_substrings),
                }
            elif delta.seconds == 0:
                return cls.time_strings['now']
            elif delta.seconds < 60:
                return cls.time_strings['past-second'] % {'count': delta.seconds}
            elif delta.seconds // 60 < 60:
                count = delta.seconds // 60
                return cls.time_strings['past-minute'] % {'count': count}
            else:
                count = delta.seconds // 60 // 60
                return cls.time_strings['past-hour'] % {'count': count}
        else:
            delta = value - now
            if delta.days != 0:
                return cls.time_strings['future-day'] % {
                    'delta': defaultfilters.timeuntil(value, now, time_strings=cls.future_substrings),
                }
            elif delta.seconds == 0:
                return cls.time_strings['now']
            elif delta.seconds < 60:
                return cls.time_strings['future-second'] % {'count': delta.seconds}
            elif delta.seconds // 60 < 60:
                count = delta.seconds // 60
                return cls.time_strings['future-minute'] % {'count': count}
            else:
                count = delta.seconds // 60 // 60
                return cls.time_strings['future-hour'] % {'count': count}
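
# Illustrative usage sketch, not part of the module above: the filters
# called as plain functions. It assumes a configured settings module
# (most of these filters go through the translation machinery), and the
# expected outputs assume the default English locale.
def _demo_humanize_filters():
    assert ordinal(3) == '3rd'
    assert ordinal(111) == '111th'  # 11/12/13 endings always take 'th'
    assert intcomma(4500000, use_l10n=False) == '4,500,000'
    assert apnumber(7) == 'seven'  # AP style spells out one through nine
    assert intword(1200000) == '1.2 million'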
1dd5ae0fb49ff72bbb7336142bf10df8052a329814aeb6c57e635277559f38e7
import datetime
import importlib
import io
import os
import sys
from unittest import mock

from django.apps import apps
from django.core.management import CommandError, call_command
from django.db import (
    ConnectionHandler, DatabaseError, OperationalError, connection,
    connections, models,
)
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.utils import truncate_name
from django.db.migrations.exceptions import InconsistentMigrationHistory
from django.db.migrations.recorder import MigrationRecorder
from django.test import TestCase, override_settings, skipUnlessDBFeature

from .models import UnicodeModel, UnserializableModel
from .routers import TestRouter
from .test_base import MigrationTestBase


class MigrateTests(MigrationTestBase):
    """
    Tests running the migrate command.
    """
    databases = {'default', 'other'}

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_migrate(self):
        """
        Tests basic usage of the migrate command.
        """
        # No tables are created
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        self.assertTableNotExists("migrations_book")
        # Run the migrations to 0001 only
        stdout = io.StringIO()
        call_command('migrate', 'migrations', '0001', verbosity=1, stdout=stdout, no_color=True)
        stdout = stdout.getvalue()
        self.assertIn('Target specific migration: 0001_initial, from migrations', stdout)
        self.assertIn('Applying migrations.0001_initial... OK', stdout)
        # The correct tables exist
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_tribble")
        self.assertTableNotExists("migrations_book")
        # Run migrations all the way
        call_command("migrate", verbosity=0)
        # The correct tables exist
        self.assertTableExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        self.assertTableExists("migrations_book")
        # Unmigrate everything
        stdout = io.StringIO()
        call_command('migrate', 'migrations', 'zero', verbosity=1, stdout=stdout, no_color=True)
        stdout = stdout.getvalue()
        self.assertIn('Unapply all migrations: migrations', stdout)
        self.assertIn('Unapplying migrations.0002_second... OK', stdout)
        # Tables are gone
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        self.assertTableNotExists("migrations_book")

    @override_settings(INSTALLED_APPS=[
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'migrations.migrations_test_apps.migrated_app',
    ])
    def test_migrate_with_system_checks(self):
        out = io.StringIO()
        call_command('migrate', skip_checks=False, no_color=True, stdout=out)
        self.assertIn('Apply all migrations: migrated_app', out.getvalue())

    @override_settings(INSTALLED_APPS=['migrations', 'migrations.migrations_test_apps.unmigrated_app_syncdb'])
    def test_app_without_migrations(self):
        msg = "App 'unmigrated_app_syncdb' does not have migrations."
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', app_label='unmigrated_app_syncdb')

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_clashing_prefix'})
    def test_ambiguous_prefix(self):
        msg = (
            "More than one migration matches 'a' in app 'migrations'. Please "
            "be more specific."
        )
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', app_label='migrations', migration_name='a')

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'})
    def test_unknown_prefix(self):
        msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'."
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', app_label='migrations', migration_name='nonexistent')

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_initial_false"})
    def test_migrate_initial_false(self):
        """
        `Migration.initial = False` skips fake-initial detection.
        """
        # Make sure no tables are created
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Run the migrations to 0001 only
        call_command("migrate", "migrations", "0001", verbosity=0)
        # Fake rollback
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
        # Make sure fake-initial detection does not run
        with self.assertRaises(DatabaseError):
            call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0)
        call_command("migrate", "migrations", "0001", fake=True, verbosity=0)
        # Real rollback
        call_command("migrate", "migrations", "zero", verbosity=0)
        # Make sure it's all gone
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        self.assertTableNotExists("migrations_book")

    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations"},
        DATABASE_ROUTERS=['migrations.routers.TestRouter'],
    )
    def test_migrate_fake_initial(self):
        """
        --fake-initial only works if all tables created in the initial
        migration of an app exist. Database routers must be obeyed when doing
        that check.
        """
        # Make sure no tables are created
        for db in self.databases:
            self.assertTableNotExists("migrations_author", using=db)
            self.assertTableNotExists("migrations_tribble", using=db)
        # Run the migrations to 0001 only
        call_command("migrate", "migrations", "0001", verbosity=0)
        call_command("migrate", "migrations", "0001", verbosity=0, database="other")
        # Make sure the right tables exist
        self.assertTableExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Also check the "other" database
        self.assertTableNotExists("migrations_author", using="other")
        self.assertTableExists("migrations_tribble", using="other")

        # Fake a roll-back
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0, database="other")
        # Make sure the tables still exist
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_tribble", using="other")
        # Try to run initial migration
        with self.assertRaises(DatabaseError):
            call_command("migrate", "migrations", "0001", verbosity=0)
        # Run initial migration with an explicit --fake-initial
        out = io.StringIO()
        with mock.patch('django.core.management.color.supports_color', lambda *args: False):
            call_command("migrate", "migrations", "0001", fake_initial=True, stdout=out, verbosity=1)
            call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0, database="other")
        self.assertIn(
            "migrations.0001_initial... faked",
            out.getvalue().lower()
        )
        try:
            # Run migrations all the way.
            call_command('migrate', verbosity=0)
            call_command('migrate', verbosity=0, database="other")
            self.assertTableExists('migrations_author')
            self.assertTableNotExists('migrations_tribble')
            self.assertTableExists('migrations_book')
            self.assertTableNotExists('migrations_author', using='other')
            self.assertTableNotExists('migrations_tribble', using='other')
            self.assertTableNotExists('migrations_book', using='other')
            # Fake a roll-back.
            call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0)
            call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0, database='other')
            self.assertTableExists('migrations_author')
            self.assertTableNotExists('migrations_tribble')
            self.assertTableExists('migrations_book')
            # Run initial migration.
            with self.assertRaises(DatabaseError):
                call_command('migrate', 'migrations', verbosity=0)
            # Run initial migration with an explicit --fake-initial.
            with self.assertRaises(DatabaseError):
                # Fails because "migrations_tribble" does not exist but needs
                # to in order to make --fake-initial work.
                call_command('migrate', 'migrations', fake_initial=True, verbosity=0)
            # Fake an apply.
            call_command('migrate', 'migrations', fake=True, verbosity=0)
            call_command('migrate', 'migrations', fake=True, verbosity=0, database='other')
        finally:
            # Unmigrate everything.
            call_command('migrate', 'migrations', 'zero', verbosity=0)
            call_command('migrate', 'migrations', 'zero', verbosity=0, database='other')
        # Make sure it's all gone
        for db in self.databases:
            self.assertTableNotExists("migrations_author", using=db)
            self.assertTableNotExists("migrations_tribble", using=db)
            self.assertTableNotExists("migrations_book", using=db)

    @skipUnlessDBFeature('ignores_table_name_case')
    def test_migrate_fake_initial_case_insensitive(self):
        with override_settings(MIGRATION_MODULES={
            'migrations': 'migrations.test_fake_initial_case_insensitive.initial',
        }):
            call_command('migrate', 'migrations', '0001', verbosity=0)
            call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0)

        with override_settings(MIGRATION_MODULES={
            'migrations': 'migrations.test_fake_initial_case_insensitive.fake_initial',
        }):
            out = io.StringIO()
            call_command(
                'migrate',
                'migrations',
                '0001',
                fake_initial=True,
                stdout=out,
                verbosity=1,
                no_color=True,
            )
            self.assertIn(
                'migrations.0001_initial... faked',
                out.getvalue().lower(),
            )

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_fake_split_initial"})
    def test_migrate_fake_split_initial(self):
        """
        Split initial migrations can be faked with --fake-initial.
        """
        call_command("migrate", "migrations", "0002", verbosity=0)
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
        out = io.StringIO()
        with mock.patch('django.core.management.color.supports_color', lambda *args: False):
            call_command("migrate", "migrations", "0002", fake_initial=True, stdout=out, verbosity=1)
        value = out.getvalue().lower()
        self.assertIn("migrations.0001_initial... faked", value)
        self.assertIn("migrations.0002_second... faked", value)
        # Fake an apply
        call_command("migrate", "migrations", fake=True, verbosity=0)
        # Unmigrate everything
        call_command("migrate", "migrations", "zero", verbosity=0)

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"})
    def test_migrate_conflict_exit(self):
        """
        migrate exits if it detects a conflict.
        """
        with self.assertRaisesMessage(CommandError, "Conflicting migrations detected"):
            call_command("migrate", "migrations")

    @override_settings(MIGRATION_MODULES={
        'migrations': 'migrations.test_migrations',
    })
    def test_migrate_check(self):
        with self.assertRaises(SystemExit):
            call_command('migrate', 'migrations', '0001', check_unapplied=True)
        self.assertTableNotExists('migrations_author')
        self.assertTableNotExists('migrations_tribble')
        self.assertTableNotExists('migrations_book')

    @override_settings(MIGRATION_MODULES={
        'migrations': 'migrations.test_migrations_plan',
    })
    def test_migrate_check_plan(self):
        out = io.StringIO()
        with self.assertRaises(SystemExit):
            call_command(
                'migrate',
                'migrations',
                '0001',
                check_unapplied=True,
                plan=True,
                stdout=out,
                no_color=True,
            )
        self.assertEqual(
            'Planned operations:\n'
            'migrations.0001_initial\n'
            '    Create model Salamander\n'
            '    Raw Python operation -> Grow salamander tail.\n',
            out.getvalue(),
        )

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_showmigrations_list(self):
        """
        showmigrations --list displays migrations and whether or not they're
        applied.
        """
        out = io.StringIO()
        with mock.patch('django.core.management.color.supports_color', lambda *args: True):
            call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False)
        self.assertEqual(
            '\x1b[1mmigrations\n\x1b[0m'
            ' [ ] 0001_initial\n'
            ' [ ] 0002_second\n',
            out.getvalue().lower()
        )
        call_command("migrate", "migrations", "0001", verbosity=0)

        out = io.StringIO()
        # Giving the explicit app_label tests for selective `show_list` in the command
        call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True)
        self.assertEqual(
            'migrations\n'
            ' [x] 0001_initial\n'
            ' [ ] 0002_second\n',
            out.getvalue().lower()
        )
        out = io.StringIO()
        # Applied datetimes are displayed at verbosity 2+.
        call_command('showmigrations', 'migrations', stdout=out, verbosity=2, no_color=True)
        migration1 = MigrationRecorder(connection).migration_qs.get(app='migrations', name='0001_initial')
        self.assertEqual(
            'migrations\n'
            ' [x] 0001_initial (applied at %s)\n'
            ' [ ] 0002_second\n' % migration1.applied.strftime('%Y-%m-%d %H:%M:%S'),
            out.getvalue().lower()
        )
        # Cleanup by unmigrating everything
        call_command("migrate", "migrations", "zero", verbosity=0)

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"})
    def test_showmigrations_plan(self):
        """
        Tests --plan output of showmigrations command.
        """
        out = io.StringIO()
        call_command("showmigrations", format='plan', stdout=out)
        self.assertEqual(
            "[ ]  migrations.0001_initial\n"
            "[ ]  migrations.0003_third\n"
            "[ ]  migrations.0002_second\n",
            out.getvalue().lower()
        )
        out = io.StringIO()
        call_command("showmigrations", format='plan', stdout=out, verbosity=2)
        self.assertEqual(
            "[ ]  migrations.0001_initial\n"
            "[ ]  migrations.0003_third ... (migrations.0001_initial)\n"
            "[ ]  migrations.0002_second ... (migrations.0001_initial, migrations.0003_third)\n",
            out.getvalue().lower()
        )
        call_command("migrate", "migrations", "0003", verbosity=0)

        out = io.StringIO()
        call_command("showmigrations", format='plan', stdout=out)
        self.assertEqual(
            "[x]  migrations.0001_initial\n"
            "[x]  migrations.0003_third\n"
            "[ ]  migrations.0002_second\n",
            out.getvalue().lower()
        )
        out = io.StringIO()
        call_command("showmigrations", format='plan', stdout=out, verbosity=2)
        self.assertEqual(
            "[x]  migrations.0001_initial\n"
            "[x]  migrations.0003_third ... (migrations.0001_initial)\n"
            "[ ]  migrations.0002_second ... (migrations.0001_initial, migrations.0003_third)\n",
            out.getvalue().lower()
        )
        # Cleanup by unmigrating everything
        call_command("migrate", "migrations", "zero", verbosity=0)

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_plan'})
    def test_migrate_plan(self):
        """Tests migrate --plan output."""
        out = io.StringIO()
        # Show the plan up to the third migration.
        call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True)
        self.assertEqual(
            'Planned operations:\n'
            'migrations.0001_initial\n'
            '    Create model Salamander\n'
            '    Raw Python operation -> Grow salamander tail.\n'
            'migrations.0002_second\n'
            '    Create model Book\n'
            "    Raw SQL operation -> ['SELECT * FROM migrations_book']\n"
            'migrations.0003_third\n'
            '    Create model Author\n'
            "    Raw SQL operation -> ['SELECT * FROM migrations_author']\n",
            out.getvalue()
        )
        try:
            # Migrate to the third migration.
            call_command('migrate', 'migrations', '0003', verbosity=0)
            out = io.StringIO()
            # Show the plan for when there is nothing to apply.
            call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True)
            self.assertEqual(
                'Planned operations:\n'
                '  No planned migration operations.\n',
                out.getvalue()
            )
            out = io.StringIO()
            # Show the plan for reverse migration back to 0001.
            call_command('migrate', 'migrations', '0001', plan=True, stdout=out, no_color=True)
            self.assertEqual(
                'Planned operations:\n'
                'migrations.0003_third\n'
                '    Undo Create model Author\n'
                "    Raw SQL operation -> ['SELECT * FROM migrations_book']\n"
                'migrations.0002_second\n'
                '    Undo Create model Book\n'
                "    Raw SQL operation -> ['SELECT * FROM migrations_salamand…\n",
                out.getvalue()
            )
            out = io.StringIO()
            # Show the migration plan to fourth, with truncated details.
            call_command('migrate', 'migrations', '0004', plan=True, stdout=out, no_color=True)
            self.assertEqual(
                'Planned operations:\n'
                'migrations.0004_fourth\n'
                '    Raw SQL operation -> SELECT * FROM migrations_author WHE…\n',
                out.getvalue()
            )
            # Show the plan when an operation is irreversible.
            # Migrate to the fourth migration.
            call_command('migrate', 'migrations', '0004', verbosity=0)
            out = io.StringIO()
            call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True)
            self.assertEqual(
                'Planned operations:\n'
                'migrations.0004_fourth\n'
                '    Raw SQL operation -> IRREVERSIBLE\n',
                out.getvalue()
            )
            out = io.StringIO()
            call_command('migrate', 'migrations', '0005', plan=True, stdout=out, no_color=True)
            # Operation is marked as irreversible only in the revert plan.
            self.assertEqual(
                'Planned operations:\n'
                'migrations.0005_fifth\n'
                '    Raw Python operation\n'
                '    Raw Python operation\n'
                '    Raw Python operation -> Feed salamander.\n',
                out.getvalue()
            )
            call_command('migrate', 'migrations', '0005', verbosity=0)
            out = io.StringIO()
            call_command('migrate', 'migrations', '0004', plan=True, stdout=out, no_color=True)
            self.assertEqual(
                'Planned operations:\n'
                'migrations.0005_fifth\n'
                '    Raw Python operation -> IRREVERSIBLE\n'
                '    Raw Python operation -> IRREVERSIBLE\n'
                '    Raw Python operation\n',
                out.getvalue()
            )
        finally:
            # Cleanup by unmigrating everything: fake the irreversible, then
            # migrate all to zero.
            call_command('migrate', 'migrations', '0003', fake=True, verbosity=0)
            call_command('migrate', 'migrations', 'zero', verbosity=0)

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_empty'})
    def test_showmigrations_no_migrations(self):
        out = io.StringIO()
        call_command('showmigrations', stdout=out, no_color=True)
        self.assertEqual('migrations\n (no migrations)\n', out.getvalue().lower())

    @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app'])
    def test_showmigrations_unmigrated_app(self):
        out = io.StringIO()
        call_command('showmigrations', 'unmigrated_app', stdout=out, no_color=True)
        self.assertEqual('unmigrated_app\n (no migrations)\n', out.getvalue().lower())

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"})
    def test_showmigrations_plan_no_migrations(self):
        """
        Tests --plan output of showmigrations command without migrations.
        """
        out = io.StringIO()
        call_command('showmigrations', format='plan', stdout=out, no_color=True)
        self.assertEqual('(no migrations)\n', out.getvalue().lower())
        out = io.StringIO()
        call_command('showmigrations', format='plan', stdout=out, verbosity=2, no_color=True)
        self.assertEqual('(no migrations)\n', out.getvalue().lower())

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"})
    def test_showmigrations_plan_squashed(self):
        """
        Tests --plan output of showmigrations command with squashed migrations.
        """
        out = io.StringIO()
        call_command("showmigrations", format='plan', stdout=out)
        self.assertEqual(
            "[ ]  migrations.1_auto\n"
            "[ ]  migrations.2_auto\n"
            "[ ]  migrations.3_squashed_5\n"
            "[ ]  migrations.6_auto\n"
            "[ ]  migrations.7_auto\n",
            out.getvalue().lower()
        )
        out = io.StringIO()
        call_command("showmigrations", format='plan', stdout=out, verbosity=2)
        self.assertEqual(
            "[ ]  migrations.1_auto\n"
            "[ ]  migrations.2_auto ... (migrations.1_auto)\n"
            "[ ]  migrations.3_squashed_5 ... (migrations.2_auto)\n"
            "[ ]  migrations.6_auto ... (migrations.3_squashed_5)\n"
            "[ ]  migrations.7_auto ... (migrations.6_auto)\n",
            out.getvalue().lower()
        )
        call_command("migrate", "migrations", "3_squashed_5", verbosity=0)

        out = io.StringIO()
        call_command("showmigrations", format='plan', stdout=out)
        self.assertEqual(
            "[x]  migrations.1_auto\n"
            "[x]  migrations.2_auto\n"
            "[x]  migrations.3_squashed_5\n"
            "[ ]  migrations.6_auto\n"
            "[ ]  migrations.7_auto\n",
            out.getvalue().lower()
        )
        out = io.StringIO()
        call_command("showmigrations", format='plan', stdout=out, verbosity=2)
        self.assertEqual(
            "[x]  migrations.1_auto\n"
            "[x]  migrations.2_auto ... (migrations.1_auto)\n"
            "[x]  migrations.3_squashed_5 ... (migrations.2_auto)\n"
            "[ ]  migrations.6_auto ... (migrations.3_squashed_5)\n"
            "[ ]  migrations.7_auto ... (migrations.6_auto)\n",
            out.getvalue().lower()
        )

    @override_settings(INSTALLED_APPS=[
        'migrations.migrations_test_apps.mutate_state_b',
        'migrations.migrations_test_apps.alter_fk.author_app',
        'migrations.migrations_test_apps.alter_fk.book_app',
    ])
    def test_showmigrations_plan_single_app_label(self):
        """
        `showmigrations --plan app_label` output with a single app_label.
        """
        # Single app with no dependencies on other apps.
        out = io.StringIO()
        call_command('showmigrations', 'mutate_state_b', format='plan', stdout=out)
        self.assertEqual(
            '[ ]  mutate_state_b.0001_initial\n'
            '[ ]  mutate_state_b.0002_add_field\n',
            out.getvalue()
        )
        # Single app with dependencies.
        out = io.StringIO()
        call_command('showmigrations', 'author_app', format='plan', stdout=out)
        self.assertEqual(
            '[ ]  author_app.0001_initial\n'
            '[ ]  book_app.0001_initial\n'
            '[ ]  author_app.0002_alter_id\n',
            out.getvalue()
        )
        # Some migrations already applied.
        call_command('migrate', 'author_app', '0001', verbosity=0)
        out = io.StringIO()
        call_command('showmigrations', 'author_app', format='plan', stdout=out)
        self.assertEqual(
            '[X]  author_app.0001_initial\n'
            '[ ]  book_app.0001_initial\n'
            '[ ]  author_app.0002_alter_id\n',
            out.getvalue()
        )
        # Cleanup by unmigrating author_app.
        call_command('migrate', 'author_app', 'zero', verbosity=0)

    @override_settings(INSTALLED_APPS=[
        'migrations.migrations_test_apps.mutate_state_b',
        'migrations.migrations_test_apps.alter_fk.author_app',
        'migrations.migrations_test_apps.alter_fk.book_app',
    ])
    def test_showmigrations_plan_multiple_app_labels(self):
        """
        `showmigrations --plan app_label` output with multiple app_labels.
        """
        # Multiple apps: author_app depends on book_app; mutate_state_b doesn't
        # depend on other apps.
        out = io.StringIO()
        call_command('showmigrations', 'mutate_state_b', 'author_app', format='plan', stdout=out)
        self.assertEqual(
            '[ ]  author_app.0001_initial\n'
            '[ ]  book_app.0001_initial\n'
            '[ ]  author_app.0002_alter_id\n'
            '[ ]  mutate_state_b.0001_initial\n'
            '[ ]  mutate_state_b.0002_add_field\n',
            out.getvalue()
        )
        # Multiple apps: args order shouldn't matter (the same result is
        # expected as above).
        out = io.StringIO()
        call_command('showmigrations', 'author_app', 'mutate_state_b', format='plan', stdout=out)
        self.assertEqual(
            '[ ]  author_app.0001_initial\n'
            '[ ]  book_app.0001_initial\n'
            '[ ]  author_app.0002_alter_id\n'
            '[ ]  mutate_state_b.0001_initial\n'
            '[ ]  mutate_state_b.0002_add_field\n',
            out.getvalue()
        )

    @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app'])
    def test_showmigrations_plan_app_label_no_migrations(self):
        out = io.StringIO()
        call_command('showmigrations', 'unmigrated_app', format='plan', stdout=out, no_color=True)
        self.assertEqual('(no migrations)\n', out.getvalue())

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_sqlmigrate_forwards(self):
        """
        sqlmigrate outputs forward-looking SQL.
        """
        out = io.StringIO()
        call_command("sqlmigrate", "migrations", "0001", stdout=out)
        output = out.getvalue().lower()

        index_tx_start = output.find(connection.ops.start_transaction_sql().lower())
        index_op_desc_author = output.find('-- create model author')
        index_create_table = output.find('create table')
        index_op_desc_tribble = output.find('-- create model tribble')
        index_op_desc_unique_together = output.find('-- alter unique_together')
        index_tx_end = output.find(connection.ops.end_transaction_sql().lower())

        if connection.features.can_rollback_ddl:
            self.assertGreater(index_tx_start, -1, "Transaction start not found")
            self.assertGreater(
                index_tx_end, index_op_desc_unique_together,
                "Transaction end not found or found before operation description (unique_together)"
            )

        self.assertGreater(
            index_op_desc_author, index_tx_start,
            "Operation description (author) not found or found before transaction start"
        )
        self.assertGreater(
            index_create_table, index_op_desc_author,
            "CREATE TABLE not found or found before operation description (author)"
        )
        self.assertGreater(
            index_op_desc_tribble, index_create_table,
            "Operation description (tribble) not found or found before CREATE TABLE (author)"
        )
        self.assertGreater(
            index_op_desc_unique_together, index_op_desc_tribble,
            "Operation description (unique_together) not found or found before operation description (tribble)"
        )

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_sqlmigrate_backwards(self):
        """
        sqlmigrate outputs reverse-looking SQL.
        """
        # Cannot generate the reverse SQL unless we've applied the migration.
        call_command("migrate", "migrations", verbosity=0)

        out = io.StringIO()
        call_command("sqlmigrate", "migrations", "0001", stdout=out, backwards=True)
        output = out.getvalue().lower()

        index_tx_start = output.find(connection.ops.start_transaction_sql().lower())
        index_op_desc_unique_together = output.find('-- alter unique_together')
        index_op_desc_tribble = output.find('-- create model tribble')
        index_op_desc_author = output.find('-- create model author')
        index_drop_table = output.rfind('drop table')
        index_tx_end = output.find(connection.ops.end_transaction_sql().lower())

        if connection.features.can_rollback_ddl:
            self.assertGreater(index_tx_start, -1, "Transaction start not found")
            self.assertGreater(
                index_tx_end, index_op_desc_unique_together,
                "Transaction end not found or found before DROP TABLE"
            )
        self.assertGreater(
            index_op_desc_unique_together, index_tx_start,
            "Operation description (unique_together) not found or found before transaction start"
        )
        self.assertGreater(
            index_op_desc_tribble, index_op_desc_unique_together,
            "Operation description (tribble) not found or found before operation description (unique_together)"
        )
        self.assertGreater(
            index_op_desc_author, index_op_desc_tribble,
            "Operation description (author) not found or found before operation description (tribble)"
        )
        self.assertGreater(
            index_drop_table, index_op_desc_author,
            "DROP TABLE not found or found before operation description (author)"
        )
        # Cleanup by unmigrating everything
        call_command("migrate", "migrations", "zero", verbosity=0)

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"})
    def test_sqlmigrate_for_non_atomic_migration(self):
        """
        Transaction wrappers aren't shown for non-atomic migrations.
        """
        out = io.StringIO()
        call_command("sqlmigrate", "migrations", "0001", stdout=out)
        output = out.getvalue().lower()
        queries = [q.strip() for q in output.splitlines()]
        if connection.ops.start_transaction_sql():
            self.assertNotIn(connection.ops.start_transaction_sql().lower(), queries)
        self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries)

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'})
    def test_sqlmigrate_for_non_transactional_databases(self):
        """
        Transaction wrappers aren't shown for databases that don't support
        transactional DDL.
        """
        out = io.StringIO()
        with mock.patch.object(connection.features, 'can_rollback_ddl', False):
            call_command('sqlmigrate', 'migrations', '0001', stdout=out)
        output = out.getvalue().lower()
        queries = [q.strip() for q in output.splitlines()]
        start_transaction_sql = connection.ops.start_transaction_sql()
        if start_transaction_sql:
            self.assertNotIn(start_transaction_sql.lower(), queries)
        self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries)

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'})
    def test_sqlmigrate_ambiguous_prefix_squashed_migrations(self):
        msg = (
            "More than one migration matches '0001' in app 'migrations'. "
            "Please be more specific."
        )
        with self.assertRaisesMessage(CommandError, msg):
            call_command('sqlmigrate', 'migrations', '0001')

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'})
    def test_sqlmigrate_squashed_migration(self):
        out = io.StringIO()
        call_command('sqlmigrate', 'migrations', '0001_squashed_0002', stdout=out)
        output = out.getvalue().lower()
        self.assertIn('-- create model author', output)
        self.assertIn('-- create model book', output)
        self.assertNotIn('-- create model tribble', output)

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'})
    def test_sqlmigrate_replaced_migration(self):
        out = io.StringIO()
        call_command('sqlmigrate', 'migrations', '0001_initial', stdout=out)
        output = out.getvalue().lower()
        self.assertIn('-- create model author', output)
        self.assertIn('-- create model tribble', output)

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_no_operations'})
    def test_migrations_no_operations(self):
        err = io.StringIO()
        call_command('sqlmigrate', 'migrations', '0001_initial', stderr=err)
        self.assertEqual(err.getvalue(), 'No operations found.\n')

    @override_settings(
        INSTALLED_APPS=[
            "migrations.migrations_test_apps.migrated_app",
            "migrations.migrations_test_apps.migrated_unapplied_app",
            "migrations.migrations_test_apps.unmigrated_app",
        ],
    )
    def test_regression_22823_unmigrated_fk_to_migrated_model(self):
        """
        Assuming you have 3 apps, `A`, `B`, and `C`, such that:

        * `A` has migrations
        * `B` has a migration we want to apply
        * `C` has no migrations, but has an FK to `A`

        When we try to migrate "B", an exception occurs because the "B" was
        not included in the ProjectState that is used to detect soft-applied
        migrations (#22823).
        """
        call_command('migrate', 'migrated_unapplied_app', verbosity=0)

        # unmigrated_app.SillyModel has a foreign key to 'migrations.Tribble',
        # but that model is only defined in a migration, so the global app
        # registry never sees it and the reference is left dangling. Remove it
        # to avoid problems in subsequent tests.
        del apps._pending_operations[('migrations', 'tribble')]

    @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app_syncdb'])
    def test_migrate_syncdb_deferred_sql_executed_with_schemaeditor(self):
        """
        For an app without migrations, editor.execute() is used for executing
        the syncdb deferred SQL.
        """
        stdout = io.StringIO()
        with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute:
            call_command('migrate', run_syncdb=True, verbosity=1, stdout=stdout, no_color=True)
            create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)])
            self.assertEqual(create_table_count, 2)
            # There's at least one deferred SQL for creating the foreign key
            # index.
            self.assertGreater(len(execute.mock_calls), 2)
        stdout = stdout.getvalue()
        self.assertIn('Synchronize unmigrated apps: unmigrated_app_syncdb', stdout)
        self.assertIn('Creating tables...', stdout)
        table_name = truncate_name('unmigrated_app_syncdb_classroom', connection.ops.max_name_length())
        self.assertIn('Creating table %s' % table_name, stdout)

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'})
    def test_migrate_syncdb_app_with_migrations(self):
        msg = "Can't use run_syncdb with app 'migrations' as it has migrations."
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', 'migrations', run_syncdb=True, verbosity=0)

    @override_settings(INSTALLED_APPS=[
        'migrations.migrations_test_apps.unmigrated_app_syncdb',
        'migrations.migrations_test_apps.unmigrated_app_simple',
    ])
    def test_migrate_syncdb_app_label(self):
        """
        Running migrate --run-syncdb with an app_label only creates tables for
        the specified app.
        """
        stdout = io.StringIO()
        with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute:
            call_command('migrate', 'unmigrated_app_syncdb', run_syncdb=True, stdout=stdout)
            create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)])
            self.assertEqual(create_table_count, 2)
            self.assertGreater(len(execute.mock_calls), 2)
        self.assertIn('Synchronize unmigrated app: unmigrated_app_syncdb', stdout.getvalue())

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
    def test_migrate_record_replaced(self):
        """
        Running a single squashed migration should record all of the original
        replaced migrations as run.
        """
        recorder = MigrationRecorder(connection)
        out = io.StringIO()
        call_command("migrate", "migrations", verbosity=0)
        call_command("showmigrations", "migrations", stdout=out, no_color=True)
        self.assertEqual(
            'migrations\n'
            ' [x] 0001_squashed_0002 (2 squashed migrations)\n',
            out.getvalue().lower()
        )
        applied_migrations = recorder.applied_migrations()
        self.assertIn(("migrations", "0001_initial"), applied_migrations)
        self.assertIn(("migrations", "0002_second"), applied_migrations)
        self.assertIn(("migrations", "0001_squashed_0002"), applied_migrations)
        # Rollback changes
        call_command("migrate", "migrations", "zero", verbosity=0)

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
    def test_migrate_record_squashed(self):
        """
        Running migrate for a squashed migration should record it as run if
        all of the replaced migrations have been run (#25231).
        """
        recorder = MigrationRecorder(connection)
        recorder.record_applied("migrations", "0001_initial")
        recorder.record_applied("migrations", "0002_second")
        out = io.StringIO()
        call_command("migrate", "migrations", verbosity=0)
        call_command("showmigrations", "migrations", stdout=out, no_color=True)
        self.assertEqual(
            'migrations\n'
            ' [x] 0001_squashed_0002 (2 squashed migrations)\n',
            out.getvalue().lower()
        )
        self.assertIn(
            ("migrations", "0001_squashed_0002"),
            recorder.applied_migrations()
        )
        # No changes were actually applied so there is nothing to rollback

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'})
    def test_migrate_inconsistent_history(self):
        """
        Running migrate with some migrations applied before their dependencies
        should not be allowed.
        """
        recorder = MigrationRecorder(connection)
        recorder.record_applied("migrations", "0002_second")
        msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial"
        with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
            call_command("migrate")
        applied_migrations = recorder.applied_migrations()
        self.assertNotIn(("migrations", "0001_initial"), applied_migrations)


class MakeMigrationsTests(MigrationTestBase):
    """
    Tests running the makemigrations command.
    """

    def setUp(self):
        super().setUp()
        self._old_models = apps.app_configs['migrations'].models.copy()

    def tearDown(self):
        apps.app_configs['migrations'].models = self._old_models
        apps.all_models['migrations'] = self._old_models
        apps.clear_cache()
        super().tearDown()

    def test_files_content(self):
        self.assertTableNotExists("migrations_unicodemodel")
        apps.register_model('migrations', UnicodeModel)
        with self.temporary_migration_module() as migration_dir:
            call_command("makemigrations", "migrations", verbosity=0)

            # Check for empty __init__.py file in migrations folder
            init_file = os.path.join(migration_dir, "__init__.py")
            self.assertTrue(os.path.exists(init_file))

            with open(init_file) as fp:
                content = fp.read()
            self.assertEqual(content, '')

            # Check for existing 0001_initial.py file in migration folder
            initial_file = os.path.join(migration_dir, "0001_initial.py")
            self.assertTrue(os.path.exists(initial_file))

            with open(initial_file, encoding='utf-8') as fp:
                content = fp.read()
                self.assertIn('migrations.CreateModel', content)
                self.assertIn('initial = True', content)

                self.assertIn('úñí©óðé µóðéø', content)  # Meta.verbose_name
                self.assertIn('úñí©óðé µóðéøß', content)  # Meta.verbose_name_plural
                self.assertIn('ÚÑÍ¢ÓÐÉ', content)  # title.verbose_name
                self.assertIn('“Ðjáñgó”', content)  # title.default

    def test_makemigrations_order(self):
        """
        makemigrations should recognize number-only migrations (0001.py).
        """
        module = 'migrations.test_migrations_order'
        with self.temporary_migration_module(module=module) as migration_dir:
            if hasattr(importlib, 'invalidate_caches'):
                # importlib caches os.listdir() on some platforms like macOS
                # (#23850).
                importlib.invalidate_caches()
            call_command('makemigrations', 'migrations', '--empty', '-n', 'a', '-v', '0')
            self.assertTrue(os.path.exists(os.path.join(migration_dir, '0002_a.py')))

    def test_makemigrations_empty_connections(self):
        empty_connections = ConnectionHandler({'default': {}})
        with mock.patch('django.core.management.commands.makemigrations.connections', new=empty_connections):
            # with no apps
            out = io.StringIO()
            call_command('makemigrations', stdout=out)
            self.assertIn('No changes detected', out.getvalue())
            # with an app
            with self.temporary_migration_module() as migration_dir:
                call_command('makemigrations', 'migrations', verbosity=0)
                init_file = os.path.join(migration_dir, '__init__.py')
                self.assertTrue(os.path.exists(init_file))

    @override_settings(INSTALLED_APPS=['migrations', 'migrations2'])
    def test_makemigrations_consistency_checks_respect_routers(self):
        """
        The history consistency checks in makemigrations respect
        settings.DATABASE_ROUTERS.
        """
        def patched_has_table(migration_recorder):
            if migration_recorder.connection is connections['other']:
                raise Exception('Other connection')
            else:
                return mock.DEFAULT

        self.assertTableNotExists('migrations_unicodemodel')
        apps.register_model('migrations', UnicodeModel)
        with mock.patch.object(
                MigrationRecorder, 'has_table',
                autospec=True, side_effect=patched_has_table) as has_table:
            with self.temporary_migration_module() as migration_dir:
                call_command("makemigrations", "migrations", verbosity=0)
                initial_file = os.path.join(migration_dir, "0001_initial.py")
                self.assertTrue(os.path.exists(initial_file))
                self.assertEqual(has_table.call_count, 1)  # 'default' is checked

                # Router says not to migrate 'other' so consistency shouldn't
                # be checked.
                with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']):
                    call_command('makemigrations', 'migrations', verbosity=0)
                self.assertEqual(has_table.call_count, 2)  # 'default' again

                # With a router that doesn't prohibit migrating 'other',
                # consistency is checked.
                with self.settings(DATABASE_ROUTERS=['migrations.routers.DefaultOtherRouter']):
                    with self.assertRaisesMessage(Exception, 'Other connection'):
                        call_command('makemigrations', 'migrations', verbosity=0)
                self.assertEqual(has_table.call_count, 4)  # 'default' and 'other'

                # With a router that doesn't allow migrating on any database,
                # no consistency checks are made.
                with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']):
                    with mock.patch.object(TestRouter, 'allow_migrate', return_value=False) as allow_migrate:
                        call_command('makemigrations', 'migrations', verbosity=0)
                allow_migrate.assert_any_call('other', 'migrations', model_name='UnicodeModel')
                # allow_migrate() is called with the correct arguments.
                self.assertGreater(len(allow_migrate.mock_calls), 0)
                called_aliases = set()
                for mock_call in allow_migrate.mock_calls:
                    _, call_args, call_kwargs = mock_call
                    connection_alias, app_name = call_args
                    called_aliases.add(connection_alias)
                    # Raises an error if invalid app_name/model_name occurs.
                    apps.get_app_config(app_name).get_model(call_kwargs['model_name'])
                self.assertEqual(called_aliases, set(connections))
                self.assertEqual(has_table.call_count, 4)

    def test_failing_migration(self):
        # If a migration fails to serialize, it shouldn't generate an empty file. #21280
        apps.register_model('migrations', UnserializableModel)

        with self.temporary_migration_module() as migration_dir:
            with self.assertRaisesMessage(ValueError, 'Cannot serialize'):
                call_command("makemigrations", "migrations", verbosity=0)

            initial_file = os.path.join(migration_dir, "0001_initial.py")
            self.assertFalse(os.path.exists(initial_file))

    def test_makemigrations_conflict_exit(self):
        """
        makemigrations exits if it detects a conflict.
        """
        with self.temporary_migration_module(module="migrations.test_migrations_conflict"):
            with self.assertRaises(CommandError) as context:
                call_command("makemigrations")
        exception_message = str(context.exception)
        self.assertIn(
            'Conflicting migrations detected; multiple leaf nodes '
            'in the migration graph:',
            exception_message
        )
        self.assertIn('0002_second', exception_message)
        self.assertIn('0002_conflicting_second', exception_message)
        self.assertIn('in migrations', exception_message)
        self.assertIn("To fix them run 'python manage.py makemigrations --merge'", exception_message)

    def test_makemigrations_merge_no_conflict(self):
        """
        makemigrations exits if in merge mode with no conflicts.
        """
        out = io.StringIO()
        with self.temporary_migration_module(module="migrations.test_migrations"):
            call_command("makemigrations", merge=True, stdout=out)
            self.assertIn("No conflicts detected to merge.", out.getvalue())

    def test_makemigrations_empty_no_app_specified(self):
        """
        makemigrations exits if no app is specified with 'empty' mode.
        """
        msg = 'You must supply at least one app label when using --empty.'
        with self.assertRaisesMessage(CommandError, msg):
            call_command("makemigrations", empty=True)

    def test_makemigrations_empty_migration(self):
        """
        makemigrations properly constructs an empty migration.
        """
        with self.temporary_migration_module() as migration_dir:
            call_command("makemigrations", "migrations", empty=True, verbosity=0)

            # Check for existing 0001_initial.py file in migration folder
            initial_file = os.path.join(migration_dir, "0001_initial.py")
            self.assertTrue(os.path.exists(initial_file))

            with open(initial_file, encoding='utf-8') as fp:
                content = fp.read()

                # Remove all whitespace to check for empty dependencies and operations
                content = content.replace(' ', '')
                self.assertIn('dependencies=[\n]', content)
                self.assertIn('operations=[\n]', content)

    @override_settings(MIGRATION_MODULES={"migrations": None})
    def test_makemigrations_disabled_migrations_for_app(self):
        """
        makemigrations raises a nice error when migrations are disabled for an
        app.
        """
        msg = (
            "Django can't create migrations for app 'migrations' because migrations "
            "have been disabled via the MIGRATION_MODULES setting."
        )
        with self.assertRaisesMessage(ValueError, msg):
            call_command("makemigrations", "migrations", empty=True, verbosity=0)

    def test_makemigrations_no_changes_no_apps(self):
        """
        makemigrations exits when there are no changes and no apps are
        specified.
        """
        out = io.StringIO()
        call_command("makemigrations", stdout=out)
        self.assertIn("No changes detected", out.getvalue())

    def test_makemigrations_no_changes(self):
        """
        makemigrations exits when there are no changes to an app.
        """
        out = io.StringIO()
        with self.temporary_migration_module(module="migrations.test_migrations_no_changes"):
            call_command("makemigrations", "migrations", stdout=out)
        self.assertIn("No changes detected in app 'migrations'", out.getvalue())

    def test_makemigrations_no_apps_initial(self):
        """
        makemigrations should detect initial is needed on empty migration
        modules if no app provided.
""" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_empty"): call_command("makemigrations", stdout=out) self.assertIn("0001_initial.py", out.getvalue()) def test_makemigrations_no_init(self): """Migration directories without an __init__.py file are allowed.""" out = io.StringIO() with self.temporary_migration_module(module='migrations.test_migrations_no_init'): call_command('makemigrations', stdout=out) self.assertIn('0001_initial.py', out.getvalue()) def test_makemigrations_migrations_announce(self): """ makemigrations announces the migration at the default verbosity level. """ out = io.StringIO() with self.temporary_migration_module(): call_command("makemigrations", "migrations", stdout=out) self.assertIn("Migrations for 'migrations'", out.getvalue()) def test_makemigrations_no_common_ancestor(self): """ makemigrations fails to merge migrations with no common ancestor. """ with self.assertRaises(ValueError) as context: with self.temporary_migration_module(module="migrations.test_migrations_no_ancestor"): call_command("makemigrations", "migrations", merge=True) exception_message = str(context.exception) self.assertIn("Could not find common ancestor of", exception_message) self.assertIn("0002_second", exception_message) self.assertIn("0002_conflicting_second", exception_message) def test_makemigrations_interactive_reject(self): """ makemigrations enters and exits interactive mode properly. """ # Monkeypatch interactive questioner to auto reject with mock.patch('builtins.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, verbosity=0) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) def test_makemigrations_interactive_accept(self): """ makemigrations enters interactive mode and merges properly. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) @mock.patch('django.db.migrations.utils.datetime') def test_makemigrations_default_merge_name(self, mock_datetime): mock_datetime.datetime.now.return_value = datetime.datetime(2016, 1, 2, 3, 4) with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge_20160102_0304.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) def test_makemigrations_non_interactive_not_null_addition(self): """ Non-interactive makemigrations fails when a default is missing on a new not-null field. 
""" class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_int = models.IntegerField() class Meta: app_label = "migrations" with self.assertRaises(SystemExit): with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False) def test_makemigrations_non_interactive_not_null_alteration(self): """ Non-interactive makemigrations fails when a default is missing on a field changed to not-null. """ class Author(models.Model): name = models.CharField(max_length=255) slug = models.SlugField() age = models.IntegerField(default=0) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Alter field slug on author", out.getvalue()) def test_makemigrations_non_interactive_no_model_rename(self): """ makemigrations adds and removes a possible model rename in non-interactive mode. """ class RenamedModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Delete model SillyModel", out.getvalue()) self.assertIn("Create model RenamedModel", out.getvalue()) def test_makemigrations_non_interactive_no_field_rename(self): """ makemigrations adds and removes a possible field rename in non-interactive mode. """ class SillyModel(models.Model): silly_rename = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Remove field silly_field from sillymodel", out.getvalue()) self.assertIn("Add field silly_rename to sillymodel", out.getvalue()) def test_makemigrations_handle_merge(self): """ makemigrations properly merges the conflicting migrations with --noinput. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=False, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertIn("Created new merge migration", output) def test_makemigration_merge_dry_run(self): """ makemigrations respects --dry-run option when fixing migration conflicts (#24427). 
""" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) def test_makemigration_merge_dry_run_verbosity_3(self): """ `makemigrations --merge --dry-run` writes the merge migration file to stdout with `verbosity == 3` (#24427). """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, verbosity=3, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) # Additional output caused by verbosity 3 # The complete merge migration file that would be written self.assertIn("class Migration(migrations.Migration):", output) self.assertIn("dependencies = [", output) self.assertIn("('migrations', '0002_second')", output) self.assertIn("('migrations', '0002_conflicting_second')", output) self.assertIn("operations = [", output) self.assertIn("]", output) def test_makemigrations_dry_run(self): """ `makemigrations --dry-run` should not ask for defaults. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_date = models.DateField() # Added field without a default class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out) # Output the expected changes directly, without asking for defaults self.assertIn("Add field silly_date to sillymodel", out.getvalue()) def test_makemigrations_dry_run_verbosity_3(self): """ Allow `makemigrations --dry-run` to output the migrations file to stdout (with verbosity == 3). 
""" class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_char = models.CharField(default="") class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out, verbosity=3) # Normal --dry-run output self.assertIn("- Add field silly_char to sillymodel", out.getvalue()) # Additional output caused by verbosity 3 # The complete migrations file that would be written self.assertIn("class Migration(migrations.Migration):", out.getvalue()) self.assertIn("dependencies = [", out.getvalue()) self.assertIn("('migrations', '0001_initial'),", out.getvalue()) self.assertIn("migrations.AddField(", out.getvalue()) self.assertIn("model_name='sillymodel',", out.getvalue()) self.assertIn("name='silly_char',", out.getvalue()) def test_makemigrations_migrations_modules_path_not_exist(self): """ makemigrations creates migrations when specifying a custom location for migration files using MIGRATION_MODULES if the custom path doesn't already exist. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() migration_module = "migrations.test_migrations_path_doesnt_exist.foo.bar" with self.temporary_migration_module(module=migration_module) as migration_dir: call_command("makemigrations", "migrations", stdout=out) # Migrations file is actually created in the expected path. initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) # Command output indicates the migration is created. self.assertIn(" - Create model SillyModel", out.getvalue()) @override_settings(MIGRATION_MODULES={'migrations': 'some.nonexistent.path'}) def test_makemigrations_migrations_modules_nonexistent_toplevel_package(self): msg = ( 'Could not locate an appropriate location to create migrations ' 'package some.nonexistent.path. Make sure the toplevel package ' 'exists and can be imported.' ) with self.assertRaisesMessage(ValueError, msg): call_command('makemigrations', 'migrations', empty=True, verbosity=0) def test_makemigrations_interactive_by_default(self): """ The user is prompted to merge by default if there are conflicts and merge is True. Answer negative to differentiate it from behavior when --noinput is specified. """ # Monkeypatch interactive questioner to auto reject out = io.StringIO() with mock.patch('builtins.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') # This will fail if interactive is False by default self.assertFalse(os.path.exists(merge_file)) self.assertNotIn("Created new merge migration", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_no_merge(self): """ makemigrations does not raise a CommandError when an unspecified app has conflicting migrations. 
""" with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "migrations", merge=False, verbosity=0) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_merge(self): """ makemigrations does not create a merge for an unspecified app even if it has conflicting migrations. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(app_label="migrated_app") as migration_dir: call_command("makemigrations", "migrated_app", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) self.assertIn("No conflicts detected to merge.", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.conflicting_app_with_dependencies"]) def test_makemigrations_merge_dont_output_dependency_operations(self): """ makemigrations --merge does not output any operations from apps that don't belong to a given app. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='N')): out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command( "makemigrations", "conflicting_app_with_dependencies", merge=True, interactive=True, stdout=out ) val = out.getvalue().lower() self.assertIn('merging conflicting_app_with_dependencies\n', val) self.assertIn( ' branch 0002_conflicting_second\n' ' - create model something\n', val ) self.assertIn( ' branch 0002_second\n' ' - delete model tribble\n' ' - remove field silly_field from author\n' ' - add field rating to author\n' ' - create model book\n', val ) def test_makemigrations_with_custom_name(self): """ makemigrations --name generate a custom migration name. """ with self.temporary_migration_module() as migration_dir: def cmd(migration_count, migration_name, *args): call_command("makemigrations", "migrations", "--verbosity", "0", "--name", migration_name, *args) migration_file = os.path.join(migration_dir, "%s_%s.py" % (migration_count, migration_name)) # Check for existing migration file in migration folder self.assertTrue(os.path.exists(migration_file)) with open(migration_file, encoding='utf-8') as fp: content = fp.read() content = content.replace(" ", "") return content # generate an initial migration migration_name_0001 = "my_initial_migration" content = cmd("0001", migration_name_0001) self.assertIn("dependencies=[\n]", content) # importlib caches os.listdir() on some platforms like macOS # (#23850). if hasattr(importlib, 'invalidate_caches'): importlib.invalidate_caches() # generate an empty migration migration_name_0002 = "my_custom_migration" content = cmd("0002", migration_name_0002, "--empty") self.assertIn("dependencies=[\n('migrations','0001_%s'),\n]" % migration_name_0001, content) self.assertIn("operations=[\n]", content) def test_makemigrations_with_invalid_custom_name(self): msg = 'The migration name must be a valid Python identifier.' 
with self.assertRaisesMessage(CommandError, msg): call_command('makemigrations', 'migrations', '--name', 'invalid name', '--empty') def test_makemigrations_check(self): """ makemigrations --check should exit with a non-zero status when there are changes to an app requiring migrations. """ with self.temporary_migration_module(): with self.assertRaises(SystemExit): call_command("makemigrations", "--check", "migrations", verbosity=0) with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "--check", "migrations", verbosity=0) def test_makemigrations_migration_path_output(self): """ makemigrations should print the relative paths to the migrations unless they are outside of the current tree, in which case the absolute path should be shown. """ out = io.StringIO() apps.register_model('migrations', UnicodeModel) with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_migration_path_output_valueerror(self): """ makemigrations prints the absolute path if os.path.relpath() raises a ValueError when it's impossible to obtain a relative path, e.g. on Windows if Django is installed on a different drive than where the migration files are created. """ out = io.StringIO() with self.temporary_migration_module() as migration_dir: with mock.patch('os.path.relpath', side_effect=ValueError): call_command('makemigrations', 'migrations', stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_inconsistent_history(self): """ makemigrations should raise InconsistentMigrationHistory exception if there are some migrations applied before their dependencies. """ recorder = MigrationRecorder(connection) recorder.record_applied('migrations', '0002_second') msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial" with self.temporary_migration_module(module="migrations.test_migrations"): with self.assertRaisesMessage(InconsistentMigrationHistory, msg): call_command("makemigrations") def test_makemigrations_inconsistent_history_db_failure(self): msg = ( "Got an error checking a consistent migration history performed " "for database connection 'default': could not connect to server" ) with mock.patch( 'django.db.migrations.loader.MigrationLoader.check_consistent_history', side_effect=OperationalError('could not connect to server'), ): with self.temporary_migration_module(): with self.assertWarnsMessage(RuntimeWarning, msg): call_command('makemigrations', verbosity=0) @mock.patch('builtins.input', return_value='1') @mock.patch('django.db.migrations.questioner.sys.stdin', mock.MagicMock(encoding=sys.getdefaultencoding())) def test_makemigrations_auto_now_add_interactive(self, *args): """ makemigrations prompts the user when adding auto_now_add to an existing model. 
""" class Entry(models.Model): title = models.CharField(max_length=255) creation_date = models.DateTimeField(auto_now_add=True) class Meta: app_label = 'migrations' # Monkeypatch interactive questioner to auto accept with mock.patch('django.db.migrations.questioner.sys.stdout', new_callable=io.StringIO) as prompt_stdout: out = io.StringIO() with self.temporary_migration_module(module='migrations.test_auto_now_add'): call_command('makemigrations', 'migrations', interactive=True, stdout=out) output = out.getvalue() prompt_output = prompt_stdout.getvalue() self.assertIn("You can accept the default 'timezone.now' by pressing 'Enter'", prompt_output) self.assertIn("Add field creation_date to entry", output) class SquashMigrationsTests(MigrationTestBase): """ Tests running the squashmigrations command. """ def test_squashmigrations_squashes(self): """ squashmigrations squashes migrations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command('squashmigrations', 'migrations', '0002', interactive=False, stdout=out, no_color=True) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") self.assertTrue(os.path.exists(squashed_migration_file)) self.assertEqual( out.getvalue(), 'Will squash the following migrations:\n' ' - 0001_initial\n' ' - 0002_second\n' 'Optimizing...\n' ' Optimized from 8 operations to 2 operations.\n' 'Created new squashed migration %s\n' ' You should commit this migration but leave the old ones in place;\n' ' the new migration will be used for new installs. Once you are sure\n' ' all instances of the codebase have applied the migrations you squashed,\n' ' you can delete them.\n' % squashed_migration_file ) def test_squashmigrations_initial_attribute(self): with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=0) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") with open(squashed_migration_file, encoding='utf-8') as fp: content = fp.read() self.assertIn("initial = True", content) def test_squashmigrations_optimizes(self): """ squashmigrations optimizes operations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, stdout=out) self.assertIn("Optimized from 8 operations to 2 operations.", out.getvalue()) def test_ticket_23799_squashmigrations_no_optimize(self): """ squashmigrations --no-optimize doesn't optimize operations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, no_optimize=True, stdout=out) self.assertIn("Skipping optimization", out.getvalue()) def test_squashmigrations_valid_start(self): """ squashmigrations accepts a starting migration. 
""" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_changes") as migration_dir: call_command("squashmigrations", "migrations", "0002", "0003", interactive=False, verbosity=1, stdout=out) squashed_migration_file = os.path.join(migration_dir, "0002_second_squashed_0003_third.py") with open(squashed_migration_file, encoding='utf-8') as fp: content = fp.read() self.assertIn(" ('migrations', '0001_initial')", content) self.assertNotIn("initial = True", content) out = out.getvalue() self.assertNotIn(" - 0001_initial", out) self.assertIn(" - 0002_second", out) self.assertIn(" - 0003_third", out) def test_squashmigrations_invalid_start(self): """ squashmigrations doesn't accept a starting migration after the ending migration. """ with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): msg = ( "The migration 'migrations.0003_third' cannot be found. Maybe " "it comes after the migration 'migrations.0002_second'" ) with self.assertRaisesMessage(CommandError, msg): call_command("squashmigrations", "migrations", "0003", "0002", interactive=False, verbosity=0) def test_squashed_name_with_start_migration_name(self): """--squashed-name specifies the new migration's name.""" squashed_name = 'squashed_name' with self.temporary_migration_module(module='migrations.test_migrations') as migration_dir: call_command( 'squashmigrations', 'migrations', '0001', '0002', squashed_name=squashed_name, interactive=False, verbosity=0, ) squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name) self.assertTrue(os.path.exists(squashed_migration_file)) def test_squashed_name_without_start_migration_name(self): """--squashed-name also works if a start migration is omitted.""" squashed_name = 'squashed_name' with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command( 'squashmigrations', 'migrations', '0001', squashed_name=squashed_name, interactive=False, verbosity=0, ) squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name) self.assertTrue(os.path.exists(squashed_migration_file)) class AppLabelErrorTests(TestCase): """ This class inherits TestCase because MigrationTestBase uses `available_apps = ['migrations']` which means that it's the only installed app. 'django.contrib.auth' must be in INSTALLED_APPS for some of these tests. """ nonexistent_app_error = "No installed app with label 'nonexistent_app'." did_you_mean_auth_error = ( "No installed app with label 'django.contrib.auth'. Did you mean " "'auth'?" 
) def test_makemigrations_nonexistent_app_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('makemigrations', 'nonexistent_app', stderr=err) self.assertIn(self.nonexistent_app_error, err.getvalue()) def test_makemigrations_app_name_specified_as_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('makemigrations', 'django.contrib.auth', stderr=err) self.assertIn(self.did_you_mean_auth_error, err.getvalue()) def test_migrate_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('migrate', 'nonexistent_app') def test_migrate_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('migrate', 'django.contrib.auth') def test_showmigrations_nonexistent_app_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('showmigrations', 'nonexistent_app', stderr=err) self.assertIn(self.nonexistent_app_error, err.getvalue()) def test_showmigrations_app_name_specified_as_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('showmigrations', 'django.contrib.auth', stderr=err) self.assertIn(self.did_you_mean_auth_error, err.getvalue()) def test_sqlmigrate_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('sqlmigrate', 'nonexistent_app', '0002') def test_sqlmigrate_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('sqlmigrate', 'django.contrib.auth', '0002') def test_squashmigrations_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('squashmigrations', 'nonexistent_app', '0002') def test_squashmigrations_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('squashmigrations', 'django.contrib.auth', '0002')
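# Note the split in the assertions above: makemigrations and showmigrations
# report a bad app label on stderr and raise SystemExit, while migrate,
# sqlmigrate, and squashmigrations raise CommandError instead. A minimal
# sketch of the stderr-capture pattern (buffer name illustrative only):
#
#     err = io.StringIO()
#     with self.assertRaises(SystemExit):
#         call_command('showmigrations', 'nonexistent_app', stderr=err)
#     self.assertIn("No installed app with label 'nonexistent_app'.", err.getvalue())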
d41c9b067529a9271621f5c305da9af2fdcdd40d8c7511c97cc641f567eb974e
from django.core.exceptions import FieldDoesNotExist from django.db import ( IntegrityError, connection, migrations, models, transaction, ) from django.db.migrations.migration import Migration from django.db.migrations.operations.fields import FieldOperation from django.db.migrations.state import ModelState, ProjectState from django.db.transaction import atomic from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature from .models import FoodManager, FoodQuerySet, UnicodeModel from .test_base import OperationTestBase class Mixin: pass class OperationTests(OperationTestBase): """ Tests running the operations and making sure they do what they say they do. Each test looks at their state changing, and then their database operation - both forwards and backwards. """ def test_create_model(self): """ Tests the CreateModel operation. Most other tests use this operation as part of setup, so check failures here first. """ operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) self.assertEqual(operation.describe(), "Create model Pony") self.assertEqual(operation.migration_name_fragment, 'pony') # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crmo", editor, project_state, new_state) self.assertTableExists("test_crmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["fields", "name"]) # And default manager not in set operation = migrations.CreateModel("Foo", fields=[], managers=[("objects", models.Manager())]) definition = operation.deconstruct() self.assertNotIn('managers', definition[2]) def test_create_model_with_duplicate_field_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value pink in CreateModel fields argument.'): migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.TextField()), ("pink", models.IntegerField(default=1)), ], ) def test_create_model_with_duplicate_base(self): message = 'Found duplicate value test_crmo.pony in CreateModel bases argument.' with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.Pony",), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.pony",), ) message = 'Found duplicate value migrations.unicodemodel in CreateModel bases argument.' 
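        # Base references are normalized to a lowercased "app_label.modelname"
        # string before comparison, so a model class and an equivalent string
        # (in any casing) are treated as the same value below.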
with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, UnicodeModel,), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.unicodemodel',), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.UnicodeModel',), ) message = "Found duplicate value <class 'django.db.models.base.Model'> in CreateModel bases argument." with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(models.Model, models.Model,), ) message = "Found duplicate value <class 'migrations.test_operations.Mixin'> in CreateModel bases argument." with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(Mixin, Mixin,), ) def test_create_model_with_duplicate_manager_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value objects in CreateModel managers argument.'): migrations.CreateModel( "Pony", fields=[], managers=[ ("objects", models.Manager()), ("objects", models.Manager()), ], ) def test_create_model_with_unique_after(self): """ Tests the CreateModel operation directly followed by an AlterUniqueTogether (bug #22844 - sqlite remake issues) """ operation1 = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) operation2 = migrations.CreateModel( "Rider", [ ("id", models.AutoField(primary_key=True)), ("number", models.IntegerField(default=1)), ("pony", models.ForeignKey("test_crmoua.Pony", models.CASCADE)), ], ) operation3 = migrations.AlterUniqueTogether( "Rider", [ ("number", "pony"), ], ) # Test the database alteration project_state = ProjectState() self.assertTableNotExists("test_crmoua_pony") self.assertTableNotExists("test_crmoua_rider") with connection.schema_editor() as editor: new_state = project_state.clone() operation1.state_forwards("test_crmoua", new_state) operation1.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation2.state_forwards("test_crmoua", new_state) operation2.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation3.state_forwards("test_crmoua", new_state) operation3.database_forwards("test_crmoua", editor, project_state, new_state) self.assertTableExists("test_crmoua_pony") self.assertTableExists("test_crmoua_rider") def test_create_model_m2m(self): """ Test the creation of a model with a ManyToMany field and the auto-created "through" model. 
""" project_state = self.set_up_test_model("test_crmomm") operation = migrations.CreateModel( "Stable", [ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("Pony", related_name="stables")) ] ) # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_crmomm", new_state) # Test the database alteration self.assertTableNotExists("test_crmomm_stable_ponies") with connection.schema_editor() as editor: operation.database_forwards("test_crmomm", editor, project_state, new_state) self.assertTableExists("test_crmomm_stable") self.assertTableExists("test_crmomm_stable_ponies") self.assertColumnNotExists("test_crmomm_stable", "ponies") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_crmomm", "Pony") Stable = new_state.apps.get_model("test_crmomm", "Stable") stable = Stable.objects.create() p1 = Pony.objects.create(pink=False, weight=4.55) p2 = Pony.objects.create(pink=True, weight=5.43) stable.ponies.add(p1, p2) self.assertEqual(stable.ponies.count(), 2) stable.ponies.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmomm", editor, new_state, project_state) self.assertTableNotExists("test_crmomm_stable") self.assertTableNotExists("test_crmomm_stable_ponies") def test_create_model_inheritance(self): """ Tests the CreateModel operation on a multi-table inheritance setup. """ project_state = self.set_up_test_model("test_crmoih") # Test the state alteration operation = migrations.CreateModel( "ShetlandPony", [ ('pony_ptr', models.OneToOneField( 'test_crmoih.Pony', models.CASCADE, auto_created=True, primary_key=True, to_field='id', serialize=False, )), ("cuteness", models.IntegerField(default=1)), ], ) new_state = project_state.clone() operation.state_forwards("test_crmoih", new_state) self.assertIn(("test_crmoih", "shetlandpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crmoih_shetlandpony") with connection.schema_editor() as editor: operation.database_forwards("test_crmoih", editor, project_state, new_state) self.assertTableExists("test_crmoih_shetlandpony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmoih", editor, new_state, project_state) self.assertTableNotExists("test_crmoih_shetlandpony") def test_create_proxy_model(self): """ CreateModel ignores proxy models. 
""" project_state = self.set_up_test_model("test_crprmo") # Test the state alteration operation = migrations.CreateModel( "ProxyPony", [], options={"proxy": True}, bases=("test_crprmo.Pony",), ) self.assertEqual(operation.describe(), "Create proxy model ProxyPony") new_state = project_state.clone() operation.state_forwards("test_crprmo", new_state) self.assertIn(("test_crprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crprmo", editor, project_state, new_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crprmo", editor, new_state, project_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["bases", "fields", "name", "options"]) def test_create_unmanaged_model(self): """ CreateModel ignores unmanaged models. """ project_state = self.set_up_test_model("test_crummo") # Test the state alteration operation = migrations.CreateModel( "UnmanagedPony", [], options={"proxy": True}, bases=("test_crummo.Pony",), ) self.assertEqual(operation.describe(), "Create proxy model UnmanagedPony") new_state = project_state.clone() operation.state_forwards("test_crummo", new_state) self.assertIn(("test_crummo", "unmanagedpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crummo", editor, project_state, new_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crummo", editor, new_state, project_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") @skipUnlessDBFeature('supports_table_check_constraints') def test_create_model_with_constraint(self): where = models.Q(pink__gt=2) check_constraint = models.CheckConstraint(check=where, name='test_constraint_pony_pink_gt_2') operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=3)), ], options={'constraints': [check_constraint]}, ) # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) # Test database alteration self.assertTableNotExists("test_crmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crmo", editor, project_state, new_state) self.assertTableExists("test_crmo_pony") with connection.cursor() as cursor: with self.assertRaises(IntegrityError): cursor.execute("INSERT INTO test_crmo_pony (id, pink) VALUES (1, 1)") # Test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") # Test deconstruction definition = operation.deconstruct() 
self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(definition[2]['options']['constraints'], [check_constraint]) def test_create_model_with_partial_unique_constraint(self): partial_unique_constraint = models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ('weight', models.FloatField()), ], options={'constraints': [partial_unique_constraint]}, ) # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) # Test database alteration self.assertTableNotExists('test_crmo_pony') with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') # Test constraint works Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test reversal with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Test deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual(definition[2]['options']['constraints'], [partial_unique_constraint]) def test_create_model_with_deferred_unique_constraint(self): deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferrable_pink_constraint', deferrable=models.Deferrable.DEFERRED, ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ], options={'constraints': [deferred_unique_constraint]}, ) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) self.assertTableNotExists('test_crmo_pony') # Create table. with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1) obj.pink = 3 obj.save() else: Pony.objects.create(pink=1) # Reversal. with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Deconstruction. 
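        # The deferrable constraint should round-trip through deconstruct()
        # unchanged so the operation can be written back to a migration file.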
definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual( definition[2]['options']['constraints'], [deferred_unique_constraint], ) def test_create_model_managers(self): """ The managers on a model are set. """ project_state = self.set_up_test_model("test_cmoma") # Test the state alteration operation = migrations.CreateModel( "Food", fields=[ ("id", models.AutoField(primary_key=True)), ], managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Create model Food") new_state = project_state.clone() operation.state_forwards("test_cmoma", new_state) self.assertIn(("test_cmoma", "food"), new_state.models) managers = new_state.models["test_cmoma", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) def test_delete_model(self): """ Tests the DeleteModel operation. """ project_state = self.set_up_test_model("test_dlmo") # Test the state alteration operation = migrations.DeleteModel("Pony") self.assertEqual(operation.describe(), "Delete model Pony") self.assertEqual(operation.migration_name_fragment, 'delete_pony') new_state = project_state.clone() operation.state_forwards("test_dlmo", new_state) self.assertNotIn(("test_dlmo", "pony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dlmo", editor, project_state, new_state) self.assertTableNotExists("test_dlmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlmo", editor, new_state, project_state) self.assertTableExists("test_dlmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "DeleteModel") self.assertEqual(definition[1], []) self.assertEqual(list(definition[2]), ["name"]) def test_delete_proxy_model(self): """ Tests the DeleteModel operation ignores proxy models. 
""" project_state = self.set_up_test_model("test_dlprmo", proxy_model=True) # Test the state alteration operation = migrations.DeleteModel("ProxyPony") new_state = project_state.clone() operation.state_forwards("test_dlprmo", new_state) self.assertIn(("test_dlprmo", "proxypony"), project_state.models) self.assertNotIn(("test_dlprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") with connection.schema_editor() as editor: operation.database_forwards("test_dlprmo", editor, project_state, new_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlprmo", editor, new_state, project_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") def test_delete_mti_model(self): project_state = self.set_up_test_model('test_dlmtimo', mti_model=True) # Test the state alteration operation = migrations.DeleteModel('ShetlandPony') new_state = project_state.clone() operation.state_forwards('test_dlmtimo', new_state) self.assertIn(('test_dlmtimo', 'shetlandpony'), project_state.models) self.assertNotIn(('test_dlmtimo', 'shetlandpony'), new_state.models) # Test the database alteration self.assertTableExists('test_dlmtimo_pony') self.assertTableExists('test_dlmtimo_shetlandpony') self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id') with connection.schema_editor() as editor: operation.database_forwards('test_dlmtimo', editor, project_state, new_state) self.assertTableExists('test_dlmtimo_pony') self.assertTableNotExists('test_dlmtimo_shetlandpony') # And test reversal with connection.schema_editor() as editor: operation.database_backwards('test_dlmtimo', editor, new_state, project_state) self.assertTableExists('test_dlmtimo_pony') self.assertTableExists('test_dlmtimo_shetlandpony') self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id') def test_rename_model(self): """ Tests the RenameModel operation. 
""" project_state = self.set_up_test_model("test_rnmo", related_model=True) # Test the state alteration operation = migrations.RenameModel("Pony", "Horse") self.assertEqual(operation.describe(), "Rename model Pony to Horse") self.assertEqual(operation.migration_name_fragment, 'rename_pony_horse') # Test initial state and database self.assertIn(("test_rnmo", "pony"), project_state.models) self.assertNotIn(("test_rnmo", "horse"), project_state.models) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate forwards new_state = project_state.clone() atomic_rename = connection.features.supports_atomic_references_rename new_state = self.apply_operations("test_rnmo", new_state, [operation], atomic=atomic_rename) # Test new state and database self.assertNotIn(("test_rnmo", "pony"), new_state.models) self.assertIn(("test_rnmo", "horse"), new_state.models) # RenameModel also repoints all incoming FKs and M2Ms self.assertEqual( new_state.models['test_rnmo', 'rider'].fields['pony'].remote_field.model, 'test_rnmo.Horse', ) self.assertTableNotExists("test_rnmo_pony") self.assertTableExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate backwards original_state = self.unapply_operations("test_rnmo", project_state, [operation], atomic=atomic_rename) # Test original state and database self.assertIn(("test_rnmo", "pony"), original_state.models) self.assertNotIn(("test_rnmo", "horse"), original_state.models) self.assertEqual( original_state.models['test_rnmo', 'rider'].fields['pony'].remote_field.model, 'Pony', ) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameModel") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'old_name': "Pony", 'new_name': "Horse"}) def test_rename_model_state_forwards(self): """ RenameModel operations shouldn't trigger the caching of rendered apps on state without prior apps. """ state = ProjectState() state.add_model(ModelState('migrations', 'Foo', [])) operation = migrations.RenameModel('Foo', 'Bar') operation.state_forwards('migrations', state) self.assertNotIn('apps', state.__dict__) self.assertNotIn(('migrations', 'foo'), state.models) self.assertIn(('migrations', 'bar'), state.models) # Now with apps cached. apps = state.apps operation = migrations.RenameModel('Bar', 'Foo') operation.state_forwards('migrations', state) self.assertIs(state.apps, apps) self.assertNotIn(('migrations', 'bar'), state.models) self.assertIn(('migrations', 'foo'), state.models) def test_rename_model_with_self_referential_fk(self): """ Tests the RenameModel operation on model with self referential FK. 
""" project_state = self.set_up_test_model("test_rmwsrf", related_model=True) # Test the state alteration operation = migrations.RenameModel("Rider", "HorseRider") self.assertEqual(operation.describe(), "Rename model Rider to HorseRider") new_state = project_state.clone() operation.state_forwards("test_rmwsrf", new_state) self.assertNotIn(("test_rmwsrf", "rider"), new_state.models) self.assertIn(("test_rmwsrf", "horserider"), new_state.models) # Remember, RenameModel also repoints all incoming FKs and M2Ms self.assertEqual( 'self', new_state.models["test_rmwsrf", "horserider"].fields['friend'].remote_field.model ) HorseRider = new_state.apps.get_model('test_rmwsrf', 'horserider') self.assertIs(HorseRider._meta.get_field('horserider').remote_field.model, HorseRider) # Test the database alteration self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_forwards("test_rmwsrf", editor, project_state, new_state) self.assertTableNotExists("test_rmwsrf_rider") self.assertTableExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_horserider", "id")) # And test reversal with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_backwards("test_rmwsrf", editor, new_state, project_state) self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) def test_rename_model_with_superclass_fk(self): """ Tests the RenameModel operation on a model which has a superclass that has a foreign key. 
""" project_state = self.set_up_test_model("test_rmwsc", related_model=True, mti_model=True) # Test the state alteration operation = migrations.RenameModel("ShetlandPony", "LittleHorse") self.assertEqual(operation.describe(), "Rename model ShetlandPony to LittleHorse") new_state = project_state.clone() operation.state_forwards("test_rmwsc", new_state) self.assertNotIn(("test_rmwsc", "shetlandpony"), new_state.models) self.assertIn(("test_rmwsc", "littlehorse"), new_state.models) # RenameModel shouldn't repoint the superclass's relations, only local ones self.assertEqual( project_state.models['test_rmwsc', 'rider'].fields['pony'].remote_field.model, new_state.models['test_rmwsc', 'rider'].fields['pony'].remote_field.model, ) # Before running the migration we have a table for Shetland Pony, not Little Horse self.assertTableExists("test_rmwsc_shetlandpony") self.assertTableNotExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # and the foreign key on rider points to pony, not shetland pony self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_shetlandpony", "id")) with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: operation.database_forwards("test_rmwsc", editor, project_state, new_state) # Now we have a little horse table, not shetland pony self.assertTableNotExists("test_rmwsc_shetlandpony") self.assertTableExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # but the Foreign keys still point at pony, not little horse self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_littlehorse", "id")) def test_rename_model_with_self_referential_m2m(self): app_label = "test_rename_model_with_self_referential_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("ReflexivePony", fields=[ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("self")), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("ReflexivePony", "ReflexivePony2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "ReflexivePony2") pony = Pony.objects.create() pony.ponies.add(pony) def test_rename_model_with_m2m(self): app_label = "test_rename_model_with_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ("riders", models.ManyToManyField("Rider")), ]), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("Pony", "Pony2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "Pony2") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) self.assertEqual(Pony.objects.count(), 2) self.assertEqual(Rider.objects.count(), 2) 
self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2) def test_rename_m2m_target_model(self): app_label = "test_rename_m2m_target_model" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ("riders", models.ManyToManyField("Rider")), ]), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("Rider", "Rider2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider2") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) self.assertEqual(Pony.objects.count(), 2) self.assertEqual(Rider.objects.count(), 2) self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2) def test_rename_m2m_through_model(self): app_label = "test_rename_through" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("PonyRider", fields=[ ("id", models.AutoField(primary_key=True)), ("rider", models.ForeignKey("test_rename_through.Rider", models.CASCADE)), ("pony", models.ForeignKey("test_rename_through.Pony", models.CASCADE)), ]), migrations.AddField( "Pony", "riders", models.ManyToManyField("test_rename_through.Rider", through="test_rename_through.PonyRider"), ), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") PonyRider = project_state.apps.get_model(app_label, "PonyRider") pony = Pony.objects.create() rider = Rider.objects.create() PonyRider.objects.create(pony=pony, rider=rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("PonyRider", "PonyRider2"), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") PonyRider = project_state.apps.get_model(app_label, "PonyRider2") pony = Pony.objects.first() rider = Rider.objects.create() PonyRider.objects.create(pony=pony, rider=rider) self.assertEqual(Pony.objects.count(), 1) self.assertEqual(Rider.objects.count(), 2) self.assertEqual(PonyRider.objects.count(), 2) self.assertEqual(pony.riders.count(), 2) def test_rename_m2m_model_after_rename_field(self): """RenameModel renames a many-to-many column after a RenameField.""" app_label = 'test_rename_multiple' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=20)), ]), migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('test_rename_multiple.Pony', models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('riders', models.ManyToManyField('Rider')), ]), migrations.RenameField(model_name='pony', old_name='name', 
                                   new_name='fancy_name'),
            migrations.RenameModel(old_name='Rider', new_name='Jockey'),
        ], atomic=connection.features.supports_atomic_references_rename)
        Pony = project_state.apps.get_model(app_label, 'Pony')
        Jockey = project_state.apps.get_model(app_label, 'Jockey')
        PonyRider = project_state.apps.get_model(app_label, 'PonyRider')
        # No "no such column" error means the column was renamed correctly.
        pony = Pony.objects.create(fancy_name='a good name')
        jockey = Jockey.objects.create(pony=pony)
        ponyrider = PonyRider.objects.create()
        ponyrider.riders.add(jockey)

    def test_add_field(self):
        """
        Tests the AddField operation.
        """
        # Test the state alteration
        operation = migrations.AddField(
            "Pony",
            "height",
            models.FloatField(null=True, default=5),
        )
        self.assertEqual(operation.describe(), "Add field height to Pony")
        self.assertEqual(operation.migration_name_fragment, 'pony_height')
        project_state, new_state = self.make_test_state("test_adfl", operation)
        self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4)
        field = new_state.models['test_adfl', 'pony'].fields['height']
        self.assertEqual(field.default, 5)
        # Test the database alteration
        self.assertColumnNotExists("test_adfl_pony", "height")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_adfl", editor, project_state, new_state)
        self.assertColumnExists("test_adfl_pony", "height")
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_adfl", editor, new_state, project_state)
        self.assertColumnNotExists("test_adfl_pony", "height")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddField")
        self.assertEqual(definition[1], [])
        self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])

    def test_add_charfield(self):
        """
        Tests the AddField operation on CharField.
        """
        project_state = self.set_up_test_model("test_adchfl")

        Pony = project_state.apps.get_model("test_adchfl", "Pony")
        pony = Pony.objects.create(weight=42)

        new_state = self.apply_operations("test_adchfl", project_state, [
            migrations.AddField(
                "Pony",
                "text",
                models.CharField(max_length=10, default="some text"),
            ),
            migrations.AddField(
                "Pony",
                "empty",
                models.CharField(max_length=10, default=""),
            ),
            # If not properly quoted, digits would be interpreted as an int.
            migrations.AddField(
                "Pony",
                "digits",
                models.CharField(max_length=10, default="42"),
            ),
            # Manual quoting is fragile and could trip on quotes. Refs #xyz.
            migrations.AddField(
                "Pony",
                "quotes",
                models.CharField(max_length=10, default='"\'"'),
            ),
        ])

        Pony = new_state.apps.get_model("test_adchfl", "Pony")
        pony = Pony.objects.get(pk=pony.pk)
        self.assertEqual(pony.text, "some text")
        self.assertEqual(pony.empty, "")
        self.assertEqual(pony.digits, "42")
        self.assertEqual(pony.quotes, '"\'"')

    def test_add_textfield(self):
        """
        Tests the AddField operation on TextField.
        """
        project_state = self.set_up_test_model("test_adtxtfl")

        Pony = project_state.apps.get_model("test_adtxtfl", "Pony")
        pony = Pony.objects.create(weight=42)

        new_state = self.apply_operations("test_adtxtfl", project_state, [
            migrations.AddField(
                "Pony",
                "text",
                models.TextField(default="some text"),
            ),
            migrations.AddField(
                "Pony",
                "empty",
                models.TextField(default=""),
            ),
            # If not properly quoted, digits would be interpreted as an int.
            migrations.AddField(
                "Pony",
                "digits",
                models.TextField(default="42"),
            ),
            # Manual quoting is fragile and could trip on quotes. Refs #xyz.
            migrations.AddField(
                "Pony",
                "quotes",
                models.TextField(default='"\'"'),
            ),
        ])

        Pony = new_state.apps.get_model("test_adtxtfl", "Pony")
        pony = Pony.objects.get(pk=pony.pk)
        self.assertEqual(pony.text, "some text")
        self.assertEqual(pony.empty, "")
        self.assertEqual(pony.digits, "42")
        self.assertEqual(pony.quotes, '"\'"')

    def test_add_binaryfield(self):
        """
        Tests the AddField operation on BinaryField.
        """
        project_state = self.set_up_test_model("test_adbinfl")

        Pony = project_state.apps.get_model("test_adbinfl", "Pony")
        pony = Pony.objects.create(weight=42)

        new_state = self.apply_operations("test_adbinfl", project_state, [
            migrations.AddField(
                "Pony",
                "blob",
                models.BinaryField(default=b"some text"),
            ),
            migrations.AddField(
                "Pony",
                "empty",
                models.BinaryField(default=b""),
            ),
            # If not properly quoted, digits would be interpreted as an int.
            migrations.AddField(
                "Pony",
                "digits",
                models.BinaryField(default=b"42"),
            ),
            # Manual quoting is fragile and could trip on quotes. Refs #xyz.
            migrations.AddField(
                "Pony",
                "quotes",
                models.BinaryField(default=b'"\'"'),
            ),
        ])

        Pony = new_state.apps.get_model("test_adbinfl", "Pony")
        pony = Pony.objects.get(pk=pony.pk)
        # SQLite returns buffer/memoryview, cast to bytes for checking.
        self.assertEqual(bytes(pony.blob), b"some text")
        self.assertEqual(bytes(pony.empty), b"")
        self.assertEqual(bytes(pony.digits), b"42")
        self.assertEqual(bytes(pony.quotes), b'"\'"')

    def test_column_name_quoting(self):
        """
        Column names that are SQL keywords shouldn't cause problems when used
        in migrations (#22168).
        """
        project_state = self.set_up_test_model("test_regr22168")
        operation = migrations.AddField(
            "Pony",
            "order",
            models.IntegerField(default=0),
        )
        new_state = project_state.clone()
        operation.state_forwards("test_regr22168", new_state)
        with connection.schema_editor() as editor:
            operation.database_forwards("test_regr22168", editor, project_state, new_state)
        self.assertColumnExists("test_regr22168_pony", "order")

    def test_add_field_preserve_default(self):
        """
        Tests the AddField operation's state alteration
        when preserve_default = False.
        """
        project_state = self.set_up_test_model("test_adflpd")
        # Test the state alteration
        operation = migrations.AddField(
            "Pony",
            "height",
            models.FloatField(null=True, default=4),
            preserve_default=False,
        )
        new_state = project_state.clone()
        operation.state_forwards("test_adflpd", new_state)
        self.assertEqual(len(new_state.models["test_adflpd", "pony"].fields), 4)
        field = new_state.models['test_adflpd', 'pony'].fields['height']
        self.assertEqual(field.default, models.NOT_PROVIDED)
        # Test the database alteration
        project_state.apps.get_model("test_adflpd", "pony").objects.create(
            weight=4,
        )
        self.assertColumnNotExists("test_adflpd_pony", "height")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_adflpd", editor, project_state, new_state)
        self.assertColumnExists("test_adflpd_pony", "height")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddField")
        self.assertEqual(definition[1], [])
        self.assertEqual(sorted(definition[2]), ["field", "model_name", "name", "preserve_default"])

    def test_add_field_m2m(self):
        """
        Tests the AddField operation with a ManyToManyField.
""" project_state = self.set_up_test_model("test_adflmm", second_model=True) # Test the state alteration operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) new_state = project_state.clone() operation.state_forwards("test_adflmm", new_state) self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4) # Test the database alteration self.assertTableNotExists("test_adflmm_pony_stables") with connection.schema_editor() as editor: operation.database_forwards("test_adflmm", editor, project_state, new_state) self.assertTableExists("test_adflmm_pony_stables") self.assertColumnNotExists("test_adflmm_pony", "stables") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_adflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.stables.create() self.assertEqual(p.stables.count(), 1) p.stables.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adflmm", editor, new_state, project_state) self.assertTableNotExists("test_adflmm_pony_stables") def test_alter_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertFalse(Pony._meta.get_field('stables').blank) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField( "Pony", "stables", models.ManyToManyField(to="Stable", related_name="ponies", blank=True) ) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertTrue(Pony._meta.get_field('stables').blank) def test_repoint_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True, third_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "places", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField("Pony", "places", models.ManyToManyField(to="Van", related_name="ponies")) ]) # Ensure the new field actually works Pony = project_state.apps.get_model("test_alflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.places.create() self.assertEqual(p.places.count(), 1) p.places.all().delete() def test_remove_field_m2m(self): project_state = self.set_up_test_model("test_rmflmm", second_model=True) project_state = self.apply_operations("test_rmflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) self.assertTableExists("test_rmflmm_pony_stables") with_field_state = project_state.clone() operations = [migrations.RemoveField("Pony", "stables")] project_state = self.apply_operations("test_rmflmm", project_state, operations=operations) self.assertTableNotExists("test_rmflmm_pony_stables") # And test reversal self.unapply_operations("test_rmflmm", with_field_state, operations=operations) self.assertTableExists("test_rmflmm_pony_stables") def test_remove_field_m2m_with_through(self): project_state = self.set_up_test_model("test_rmflmmwt", second_model=True) self.assertTableNotExists("test_rmflmmwt_ponystables") project_state = 
self.apply_operations("test_rmflmmwt", project_state, operations=[ migrations.CreateModel("PonyStables", fields=[ ("pony", models.ForeignKey('test_rmflmmwt.Pony', models.CASCADE)), ("stable", models.ForeignKey('test_rmflmmwt.Stable', models.CASCADE)), ]), migrations.AddField( "Pony", "stables", models.ManyToManyField("Stable", related_name="ponies", through='test_rmflmmwt.PonyStables') ) ]) self.assertTableExists("test_rmflmmwt_ponystables") operations = [migrations.RemoveField("Pony", "stables"), migrations.DeleteModel("PonyStables")] self.apply_operations("test_rmflmmwt", project_state, operations=operations) def test_remove_field(self): """ Tests the RemoveField operation. """ project_state = self.set_up_test_model("test_rmfl") # Test the state alteration operation = migrations.RemoveField("Pony", "pink") self.assertEqual(operation.describe(), "Remove field pink from Pony") self.assertEqual(operation.migration_name_fragment, 'remove_pony_pink') new_state = project_state.clone() operation.state_forwards("test_rmfl", new_state) self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2) # Test the database alteration self.assertColumnExists("test_rmfl_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_rmfl", editor, project_state, new_state) self.assertColumnNotExists("test_rmfl_pony", "pink") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rmfl", editor, new_state, project_state) self.assertColumnExists("test_rmfl_pony", "pink") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RemoveField") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': 'pink'}) def test_remove_fk(self): """ Tests the RemoveField operation on a foreign key. """ project_state = self.set_up_test_model("test_rfk", related_model=True) self.assertColumnExists("test_rfk_rider", "pony_id") operation = migrations.RemoveField("Rider", "pony") new_state = project_state.clone() operation.state_forwards("test_rfk", new_state) with connection.schema_editor() as editor: operation.database_forwards("test_rfk", editor, project_state, new_state) self.assertColumnNotExists("test_rfk_rider", "pony_id") with connection.schema_editor() as editor: operation.database_backwards("test_rfk", editor, new_state, project_state) self.assertColumnExists("test_rfk_rider", "pony_id") def test_alter_model_table(self): """ Tests the AlterModelTable operation. 
""" project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony_2") self.assertEqual(operation.describe(), "Rename table for Pony to test_almota_pony_2") self.assertEqual(operation.migration_name_fragment, 'alter_pony_table') new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony_2") # Test the database alteration self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableNotExists("test_almota_pony") self.assertTableExists("test_almota_pony_2") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelTable") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'table': "test_almota_pony_2"}) def test_alter_model_table_none(self): """ Tests the AlterModelTable operation if the table name is set to None. """ operation = migrations.AlterModelTable("Pony", None) self.assertEqual(operation.describe(), "Rename table for Pony to (default)") def test_alter_model_table_noop(self): """ Tests the AlterModelTable operation if the table name is not changed. """ project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony") new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony") # Test the database alteration self.assertTableExists("test_almota_pony") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableExists("test_almota_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") def test_alter_model_table_m2m(self): """ AlterModelTable should rename auto-generated M2M tables. """ app_label = "test_talflmltlm2m" pony_db_table = 'pony_foo' project_state = self.set_up_test_model(app_label, second_model=True, db_table=pony_db_table) # Add the M2M field first_state = project_state.clone() operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable")) operation.state_forwards(app_label, first_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, first_state) original_m2m_table = "%s_%s" % (pony_db_table, "stables") new_m2m_table = "%s_%s" % (app_label, "pony_stables") self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) # Rename the Pony db_table which should also rename the m2m table. 
second_state = first_state.clone() operation = migrations.AlterModelTable(name='pony', table=None) operation.state_forwards(app_label, second_state) atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_forwards(app_label, editor, first_state, second_state) self.assertTableExists(new_m2m_table) self.assertTableNotExists(original_m2m_table) # And test reversal with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_backwards(app_label, editor, second_state, first_state) self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) def test_alter_field(self): """ Tests the AlterField operation. """ project_state = self.set_up_test_model("test_alfl") # Test the state alteration operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) self.assertEqual(operation.describe(), "Alter field pink on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_pink') new_state = project_state.clone() operation.state_forwards("test_alfl", new_state) self.assertIs(project_state.models['test_alfl', 'pony'].fields['pink'].null, False) self.assertIs(new_state.models['test_alfl', 'pony'].fields['pink'].null, True) # Test the database alteration self.assertColumnNotNull("test_alfl_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_alfl", editor, project_state, new_state) self.assertColumnNull("test_alfl_pony", "pink") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alfl", editor, new_state, project_state) self.assertColumnNotNull("test_alfl_pony", "pink") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"]) def test_alter_field_pk(self): """ Tests the AlterField operation on primary keys (for things like PostgreSQL's SERIAL weirdness) """ project_state = self.set_up_test_model("test_alflpk") # Test the state alteration operation = migrations.AlterField("Pony", "id", models.IntegerField(primary_key=True)) new_state = project_state.clone() operation.state_forwards("test_alflpk", new_state) self.assertIsInstance( project_state.models['test_alflpk', 'pony'].fields['id'], models.AutoField, ) self.assertIsInstance( new_state.models['test_alflpk', 'pony'].fields['id'], models.IntegerField, ) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alflpk", editor, project_state, new_state) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflpk", editor, new_state, project_state) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_pk_fk(self): """ Tests the AlterField operation on primary keys changes any FKs pointing to it. 
""" project_state = self.set_up_test_model("test_alflpkfk", related_model=True) project_state = self.apply_operations('test_alflpkfk', project_state, [ migrations.CreateModel('Stable', fields=[ ('ponies', models.ManyToManyField('Pony')), ]), migrations.AddField( 'Pony', 'stables', models.ManyToManyField('Stable'), ), ]) # Test the state alteration operation = migrations.AlterField("Pony", "id", models.FloatField(primary_key=True)) new_state = project_state.clone() operation.state_forwards("test_alflpkfk", new_state) self.assertIsInstance( project_state.models['test_alflpkfk', 'pony'].fields['id'], models.AutoField, ) self.assertIsInstance( new_state.models['test_alflpkfk', 'pony'].fields['id'], models.FloatField, ) def assertIdTypeEqualsFkType(): with connection.cursor() as cursor: id_type, id_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_pony") if c.name == "id" ][0] fk_type, fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_rider") if c.name == "pony_id" ][0] m2m_fk_type, m2m_fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description( cursor, 'test_alflpkfk_pony_stables', ) if c.name == 'pony_id' ][0] remote_m2m_fk_type, remote_m2m_fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description( cursor, 'test_alflpkfk_stable_ponies', ) if c.name == 'pony_id' ][0] self.assertEqual(id_type, fk_type) self.assertEqual(id_type, m2m_fk_type) self.assertEqual(id_type, remote_m2m_fk_type) self.assertEqual(id_null, fk_null) self.assertEqual(id_null, m2m_fk_null) self.assertEqual(id_null, remote_m2m_fk_null) assertIdTypeEqualsFkType() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alflpkfk", editor, project_state, new_state) assertIdTypeEqualsFkType() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflpkfk", editor, new_state, project_state) assertIdTypeEqualsFkType() @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_reloads_state_on_fk_with_to_field_target_type_change(self): app_label = 'test_alflrsfkwtflttc' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('code', models.IntegerField(unique=True)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='code')), ]), ]) operation = migrations.AlterField( 'Rider', 'code', models.CharField(max_length=100, unique=True), ) self.apply_operations(app_label, project_state, operations=[operation]) id_type, id_null = [ (c.type_code, c.null_ok) for c in self.get_table_description('%s_rider' % app_label) if c.name == 'code' ][0] fk_type, fk_null = [ (c.type_code, c.null_ok) for c in self.get_table_description('%s_pony' % app_label) if c.name == 'rider_id' ][0] self.assertEqual(id_type, fk_type) self.assertEqual(id_null, fk_null) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_reloads_state_on_fk_with_to_field_related_name_target_type_change(self): app_label = 'test_alflrsfkwtflrnttc' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('code', models.PositiveIntegerField(unique=True)), ]), 
migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('rider', models.ForeignKey( '%s.Rider' % app_label, models.CASCADE, to_field='code', related_name='+', )), ]), ]) operation = migrations.AlterField( 'Rider', 'code', models.CharField(max_length=100, unique=True), ) self.apply_operations(app_label, project_state, operations=[operation]) def test_alter_field_reloads_state_on_fk_target_changes(self): """ If AlterField doesn't reload state appropriately, the second AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. """ app_label = 'alter_alter_field_reloads_state_on_fk_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.AlterField('Rider', 'id', models.CharField(primary_key=True, max_length=99)), migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)), ]) def test_alter_field_reloads_state_on_fk_with_to_field_target_changes(self): """ If AlterField doesn't reload state appropriately, the second AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. """ app_label = 'alter_alter_field_reloads_state_on_fk_with_to_field_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('slug', models.CharField(unique=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='slug')), ('slug', models.CharField(unique=True, max_length=100)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE, to_field='slug')), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.AlterField('Rider', 'slug', models.CharField(unique=True, max_length=99)), migrations.AlterField('Pony', 'slug', models.CharField(unique=True, max_length=99)), ]) def test_rename_field_reloads_state_on_fk_target_changes(self): """ If RenameField doesn't reload state appropriately, the AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. 
""" app_label = 'alter_rename_field_reloads_state_on_fk_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameField('Rider', 'id', 'id2'), migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)), ], atomic=connection.features.supports_atomic_references_rename) def test_rename_field(self): """ Tests the RenameField operation. """ project_state = self.set_up_test_model("test_rnfl", unique_together=True, index_together=True) # Test the state alteration operation = migrations.RenameField("Pony", "pink", "blue") self.assertEqual(operation.describe(), "Rename field pink on Pony to blue") self.assertEqual(operation.migration_name_fragment, 'rename_pink_pony_blue') new_state = project_state.clone() operation.state_forwards("test_rnfl", new_state) self.assertIn("blue", new_state.models["test_rnfl", "pony"].fields) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].fields) # Make sure the unique_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) # Make sure the index_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['index_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['index_together'][0]) # Test the database alteration self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") with connection.schema_editor() as editor: operation.database_forwards("test_rnfl", editor, project_state, new_state) self.assertColumnExists("test_rnfl_pony", "blue") self.assertColumnNotExists("test_rnfl_pony", "pink") # Ensure the unique constraint has been ported over with connection.cursor() as cursor: cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") with self.assertRaises(IntegrityError): with atomic(): cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_rnfl_pony") # Ensure the index constraint has been ported over self.assertIndexExists("test_rnfl_pony", ["weight", "blue"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rnfl", editor, new_state, project_state) self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") # Ensure the index constraint has been reset self.assertIndexExists("test_rnfl_pony", ["weight", "pink"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameField") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'old_name': "pink", 'new_name': "blue"}) def test_rename_missing_field(self): state = ProjectState() state.add_model(ModelState('app', 'model', [])) with self.assertRaisesMessage(FieldDoesNotExist, "app.model has no field 
named 'field'"): migrations.RenameField('model', 'field', 'new_field').state_forwards('app', state) def test_rename_referenced_field_state_forward(self): state = ProjectState() state.add_model(ModelState('app', 'Model', [ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField(unique=True)), ])) state.add_model(ModelState('app', 'OtherModel', [ ('id', models.AutoField(primary_key=True)), ('fk', models.ForeignKey('Model', models.CASCADE, to_field='field')), ('fo', models.ForeignObject('Model', models.CASCADE, from_fields=('fk',), to_fields=('field',))), ])) operation = migrations.RenameField('Model', 'field', 'renamed') new_state = state.clone() operation.state_forwards('app', new_state) self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].remote_field.field_name, 'renamed') self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].from_fields, ['self']) self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].to_fields, ('renamed',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].from_fields, ('fk',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].to_fields, ('renamed',)) operation = migrations.RenameField('OtherModel', 'fk', 'renamed_fk') new_state = state.clone() operation.state_forwards('app', new_state) self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].remote_field.field_name, 'renamed') self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].from_fields, ('self',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].to_fields, ('renamed',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].from_fields, ('renamed_fk',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].to_fields, ('renamed',)) def test_alter_unique_together(self): """ Tests the AlterUniqueTogether operation. 
""" project_state = self.set_up_test_model("test_alunto") # Test the state alteration operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")]) self.assertEqual(operation.describe(), "Alter unique_together for Pony (1 constraint(s))") self.assertEqual( operation.migration_name_fragment, 'alter_pony_unique_together', ) new_state = project_state.clone() operation.state_forwards("test_alunto", new_state) self.assertEqual(len(project_state.models["test_alunto", "pony"].options.get("unique_together", set())), 0) self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) # Make sure we can insert duplicate rows with connection.cursor() as cursor: cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alunto", editor, project_state, new_state) cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") with self.assertRaises(IntegrityError): with atomic(): cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alunto", editor, new_state, project_state) cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # Test flat unique_together operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight")) operation.state_forwards("test_alunto", new_state) self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterUniqueTogether") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'unique_together': {("pink", "weight")}}) def test_alter_unique_together_remove(self): operation = migrations.AlterUniqueTogether("Pony", None) self.assertEqual(operation.describe(), "Alter unique_together for Pony (0 constraint(s))") def test_add_index(self): """ Test the AddIndex operation. """ project_state = self.set_up_test_model("test_adin") msg = ( "Indexes passed to AddIndex operations require a name argument. " "<Index: fields='pink'> doesn't have one." 
) with self.assertRaisesMessage(ValueError, msg): migrations.AddIndex("Pony", models.Index(fields=["pink"])) index = models.Index(fields=["pink"], name="test_adin_pony_pink_idx") operation = migrations.AddIndex("Pony", index) self.assertEqual(operation.describe(), "Create index test_adin_pony_pink_idx on field(s) pink of model Pony") self.assertEqual( operation.migration_name_fragment, 'pony_test_adin_pony_pink_idx', ) new_state = project_state.clone() operation.state_forwards("test_adin", new_state) # Test the database alteration self.assertEqual(len(new_state.models["test_adin", "pony"].options['indexes']), 1) self.assertIndexNotExists("test_adin_pony", ["pink"]) with connection.schema_editor() as editor: operation.database_forwards("test_adin", editor, project_state, new_state) self.assertIndexExists("test_adin_pony", ["pink"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adin", editor, new_state, project_state) self.assertIndexNotExists("test_adin_pony", ["pink"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AddIndex") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'index': index}) def test_remove_index(self): """ Test the RemoveIndex operation. """ project_state = self.set_up_test_model("test_rmin", multicol_index=True) self.assertTableExists("test_rmin_pony") self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) operation = migrations.RemoveIndex("Pony", "pony_test_idx") self.assertEqual(operation.describe(), "Remove index pony_test_idx from Pony") self.assertEqual( operation.migration_name_fragment, 'remove_pony_pony_test_idx', ) new_state = project_state.clone() operation.state_forwards("test_rmin", new_state) # Test the state alteration self.assertEqual(len(new_state.models["test_rmin", "pony"].options['indexes']), 0) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_rmin", editor, project_state, new_state) self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rmin", editor, new_state, project_state) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RemoveIndex") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': "pony_test_idx"}) # Also test a field dropped with index - sqlite remake issue operations = [ migrations.RemoveIndex("Pony", "pony_test_idx"), migrations.RemoveField("Pony", "pink"), ] self.assertColumnExists("test_rmin_pony", "pink") self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # Test database alteration new_state = project_state.clone() self.apply_operations('test_rmin', new_state, operations=operations) self.assertColumnNotExists("test_rmin_pony", "pink") self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"]) # And test reversal self.unapply_operations("test_rmin", project_state, operations=operations) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) def test_add_index_state_forwards(self): project_state = self.set_up_test_model('test_adinsf') index = models.Index(fields=['pink'], name='test_adinsf_pony_pink_idx') old_model = project_state.apps.get_model('test_adinsf', 'Pony') new_state = project_state.clone() 
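        # state_forwards() should re-render the model, so the cached old
        # model class must be replaced; the identity check below relies on
        # that.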
operation = migrations.AddIndex('Pony', index) operation.state_forwards('test_adinsf', new_state) new_model = new_state.apps.get_model('test_adinsf', 'Pony') self.assertIsNot(old_model, new_model) def test_remove_index_state_forwards(self): project_state = self.set_up_test_model('test_rminsf') index = models.Index(fields=['pink'], name='test_rminsf_pony_pink_idx') migrations.AddIndex('Pony', index).state_forwards('test_rminsf', project_state) old_model = project_state.apps.get_model('test_rminsf', 'Pony') new_state = project_state.clone() operation = migrations.RemoveIndex('Pony', 'test_rminsf_pony_pink_idx') operation.state_forwards('test_rminsf', new_state) new_model = new_state.apps.get_model('test_rminsf', 'Pony') self.assertIsNot(old_model, new_model) def test_alter_field_with_index(self): """ Test AlterField operation with an index to ensure indexes created via Meta.indexes don't get dropped with sqlite3 remake. """ project_state = self.set_up_test_model("test_alflin", index=True) operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) new_state = project_state.clone() operation.state_forwards("test_alflin", new_state) # Test the database alteration self.assertColumnNotNull("test_alflin_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_alflin", editor, project_state, new_state) # Index hasn't been dropped self.assertIndexExists("test_alflin_pony", ["pink"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflin", editor, new_state, project_state) # Ensure the index is still there self.assertIndexExists("test_alflin_pony", ["pink"]) def test_alter_index_together(self): """ Tests the AlterIndexTogether operation. """ project_state = self.set_up_test_model("test_alinto") # Test the state alteration operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")]) self.assertEqual(operation.describe(), "Alter index_together for Pony (1 constraint(s))") self.assertEqual( operation.migration_name_fragment, 'alter_pony_index_together', ) new_state = project_state.clone() operation.state_forwards("test_alinto", new_state) self.assertEqual(len(project_state.models["test_alinto", "pony"].options.get("index_together", set())), 0) self.assertEqual(len(new_state.models["test_alinto", "pony"].options.get("index_together", set())), 1) # Make sure there's no matching index self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alinto", editor, project_state, new_state) self.assertIndexExists("test_alinto_pony", ["pink", "weight"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alinto", editor, new_state, project_state) self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterIndexTogether") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'index_together': {("pink", "weight")}}) def test_alter_index_together_remove(self): operation = migrations.AlterIndexTogether("Pony", None) self.assertEqual(operation.describe(), "Alter index_together for Pony (0 constraint(s))") @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_alter_index_together_remove_with_unique_together(self): app_label = 'test_alintoremove_wunto' table_name = '%s_pony' % app_label 
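        # unique_together=True creates a unique constraint on (pink, weight);
        # removing the matching index_together must leave it untouched.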
project_state = self.set_up_test_model(app_label, unique_together=True) self.assertUniqueConstraintExists(table_name, ['pink', 'weight']) # Add index together. new_state = project_state.clone() operation = migrations.AlterIndexTogether('Pony', [('pink', 'weight')]) operation.state_forwards(app_label, new_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexExists(table_name, ['pink', 'weight']) # Remove index together. project_state = new_state new_state = project_state.clone() operation = migrations.AlterIndexTogether('Pony', set()) operation.state_forwards(app_label, new_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNotExists(table_name, ['pink', 'weight']) self.assertUniqueConstraintExists(table_name, ['pink', 'weight']) @skipUnlessDBFeature('supports_table_check_constraints') def test_add_constraint(self): project_state = self.set_up_test_model("test_addconstraint") gt_check = models.Q(pink__gt=2) gt_constraint = models.CheckConstraint(check=gt_check, name="test_add_constraint_pony_pink_gt_2") gt_operation = migrations.AddConstraint("Pony", gt_constraint) self.assertEqual( gt_operation.describe(), "Create constraint test_add_constraint_pony_pink_gt_2 on model Pony" ) self.assertEqual( gt_operation.migration_name_fragment, 'pony_test_add_constraint_pony_pink_gt_2', ) # Test the state alteration new_state = project_state.clone() gt_operation.state_forwards("test_addconstraint", new_state) self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 1) Pony = new_state.apps.get_model("test_addconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 1) # Test the database alteration with connection.schema_editor() as editor: gt_operation.database_forwards("test_addconstraint", editor, project_state, new_state) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=1.0) # Add another one. 
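        # With both constraints applied, pink is restricted to the open
        # interval (2, 100).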
        lt_check = models.Q(pink__lt=100)
        lt_constraint = models.CheckConstraint(check=lt_check, name="test_add_constraint_pony_pink_lt_100")
        lt_operation = migrations.AddConstraint("Pony", lt_constraint)
        lt_operation.state_forwards("test_addconstraint", new_state)
        self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 2)
        Pony = new_state.apps.get_model("test_addconstraint", "Pony")
        self.assertEqual(len(Pony._meta.constraints), 2)
        with connection.schema_editor() as editor:
            lt_operation.database_forwards("test_addconstraint", editor, project_state, new_state)
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=100, weight=1.0)
        # Test reversal
        with connection.schema_editor() as editor:
            gt_operation.database_backwards("test_addconstraint", editor, new_state, project_state)
        Pony.objects.create(pink=1, weight=1.0)
        # Test deconstruction
        definition = gt_operation.deconstruct()
        self.assertEqual(definition[0], "AddConstraint")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'model_name': "Pony", 'constraint': gt_constraint})

    @skipUnlessDBFeature('supports_table_check_constraints')
    def test_add_constraint_percent_escaping(self):
        app_label = 'add_constraint_string_quoting'
        operations = [
            migrations.CreateModel(
                'Author',
                fields=[
                    ('id', models.AutoField(primary_key=True)),
                    ('name', models.CharField(max_length=100)),
                    ('rebate', models.CharField(max_length=100)),
                ],
            ),
        ]
        from_state = self.apply_operations(app_label, ProjectState(), operations)
        # "%" generated in startswith lookup should be escaped in a way that
        # is considered a leading wildcard.
        check = models.Q(name__startswith='Albert')
        constraint = models.CheckConstraint(check=check, name='name_constraint')
        operation = migrations.AddConstraint('Author', constraint)
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Author = to_state.apps.get_model(app_label, 'Author')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Author.objects.create(name='Artur')
        # Literal "%" should be escaped in a way that is not considered a
        # wildcard.
check = models.Q(rebate__endswith='%') constraint = models.CheckConstraint(check=check, name='rebate_constraint') operation = migrations.AddConstraint('Author', constraint) from_state = to_state to_state = from_state.clone() operation.state_forwards(app_label, to_state) Author = to_state.apps.get_model(app_label, 'Author') with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, from_state, to_state) Author = to_state.apps.get_model(app_label, 'Author') with self.assertRaises(IntegrityError), transaction.atomic(): Author.objects.create(name='Albert', rebate='10$') author = Author.objects.create(name='Albert', rebate='10%') self.assertEqual(Author.objects.get(), author) @skipUnlessDBFeature('supports_table_check_constraints') def test_add_or_constraint(self): app_label = 'test_addorconstraint' constraint_name = 'add_constraint_or' from_state = self.set_up_test_model(app_label) check = models.Q(pink__gt=2, weight__gt=2) | models.Q(weight__lt=0) constraint = models.CheckConstraint(check=check, name=constraint_name) operation = migrations.AddConstraint('Pony', constraint) to_state = from_state.clone() operation.state_forwards(app_label, to_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, from_state, to_state) Pony = to_state.apps.get_model(app_label, 'Pony') with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=2, weight=3.0) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=3, weight=1.0) Pony.objects.bulk_create([ Pony(pink=3, weight=-1.0), Pony(pink=1, weight=-1.0), Pony(pink=3, weight=3.0), ]) @skipUnlessDBFeature('supports_table_check_constraints') def test_add_constraint_combinable(self): app_label = 'test_addconstraint_combinable' operations = [ migrations.CreateModel( 'Book', fields=[ ('id', models.AutoField(primary_key=True)), ('read', models.PositiveIntegerField()), ('unread', models.PositiveIntegerField()), ], ), ] from_state = self.apply_operations(app_label, ProjectState(), operations) constraint = models.CheckConstraint( check=models.Q(read=(100 - models.F('unread'))), name='test_addconstraint_combinable_sum_100', ) operation = migrations.AddConstraint('Book', constraint) to_state = from_state.clone() operation.state_forwards(app_label, to_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, from_state, to_state) Book = to_state.apps.get_model(app_label, 'Book') with self.assertRaises(IntegrityError), transaction.atomic(): Book.objects.create(read=70, unread=10) Book.objects.create(read=70, unread=30) @skipUnlessDBFeature('supports_table_check_constraints') def test_remove_constraint(self): project_state = self.set_up_test_model("test_removeconstraint", constraints=[ models.CheckConstraint(check=models.Q(pink__gt=2), name="test_remove_constraint_pony_pink_gt_2"), models.CheckConstraint(check=models.Q(pink__lt=100), name="test_remove_constraint_pony_pink_lt_100"), ]) gt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_gt_2") self.assertEqual( gt_operation.describe(), "Remove constraint test_remove_constraint_pony_pink_gt_2 from model Pony" ) self.assertEqual( gt_operation.migration_name_fragment, 'remove_pony_test_remove_constraint_pony_pink_gt_2', ) # Test state alteration new_state = project_state.clone() gt_operation.state_forwards("test_removeconstraint", new_state) self.assertEqual(len(new_state.models["test_removeconstraint", 
"pony"].options['constraints']), 1) Pony = new_state.apps.get_model("test_removeconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 1) # Test database alteration with connection.schema_editor() as editor: gt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state) Pony.objects.create(pink=1, weight=1.0).delete() with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=100, weight=1.0) # Remove the other one. lt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_lt_100") lt_operation.state_forwards("test_removeconstraint", new_state) self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 0) Pony = new_state.apps.get_model("test_removeconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 0) with connection.schema_editor() as editor: lt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state) Pony.objects.create(pink=100, weight=1.0).delete() # Test reversal with connection.schema_editor() as editor: gt_operation.database_backwards("test_removeconstraint", editor, new_state, project_state) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=1.0) # Test deconstruction definition = gt_operation.deconstruct() self.assertEqual(definition[0], "RemoveConstraint") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': "test_remove_constraint_pony_pink_gt_2"}) def test_add_partial_unique_constraint(self): project_state = self.set_up_test_model('test_addpartialuniqueconstraint') partial_unique_constraint = models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ) operation = migrations.AddConstraint('Pony', partial_unique_constraint) self.assertEqual( operation.describe(), 'Create constraint test_constraint_pony_pink_for_weight_gt_5_uniq ' 'on model Pony' ) # Test the state alteration new_state = project_state.clone() operation.state_forwards('test_addpartialuniqueconstraint', new_state) self.assertEqual(len(new_state.models['test_addpartialuniqueconstraint', 'pony'].options['constraints']), 1) Pony = new_state.apps.get_model('test_addpartialuniqueconstraint', 'Pony') self.assertEqual(len(Pony._meta.constraints), 1) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards('test_addpartialuniqueconstraint', editor, project_state, new_state) # Test constraint works Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test reversal with connection.schema_editor() as editor: operation.database_backwards('test_addpartialuniqueconstraint', editor, new_state, project_state) # Test constraint doesn't work Pony.objects.create(pink=1, weight=7.0) # Test deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'constraint': partial_unique_constraint}) def test_remove_partial_unique_constraint(self): project_state = self.set_up_test_model('test_removepartialuniqueconstraint', constraints=[ models.UniqueConstraint( 
fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ), ]) gt_operation = migrations.RemoveConstraint('Pony', 'test_constraint_pony_pink_for_weight_gt_5_uniq') self.assertEqual( gt_operation.describe(), 'Remove constraint test_constraint_pony_pink_for_weight_gt_5_uniq from model Pony' ) # Test state alteration new_state = project_state.clone() gt_operation.state_forwards('test_removepartialuniqueconstraint', new_state) self.assertEqual(len(new_state.models['test_removepartialuniqueconstraint', 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model('test_removepartialuniqueconstraint', 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) # Test database alteration with connection.schema_editor() as editor: gt_operation.database_forwards('test_removepartialuniqueconstraint', editor, project_state, new_state) # Test constraint doesn't work Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) Pony.objects.create(pink=1, weight=7.0).delete() # Test reversal with connection.schema_editor() as editor: gt_operation.database_backwards('test_removepartialuniqueconstraint', editor, new_state, project_state) # Test constraint works if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test deconstruction definition = gt_operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], { 'model_name': 'Pony', 'name': 'test_constraint_pony_pink_for_weight_gt_5_uniq', }) def test_add_deferred_unique_constraint(self): app_label = 'test_adddeferred_uc' project_state = self.set_up_test_model(app_label) deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferred_pink_constraint_add', deferrable=models.Deferrable.DEFERRED, ) operation = migrations.AddConstraint('Pony', deferred_unique_constraint) self.assertEqual( operation.describe(), 'Create constraint deferred_pink_constraint_add on model Pony', ) # Add constraint. new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 1) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 1) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) Pony.objects.create(pink=1, weight=4.0) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 3 obj.save() else: Pony.objects.create(pink=1, weight=4.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) # Deconstruction. 
definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual( definition[2], {'model_name': 'Pony', 'constraint': deferred_unique_constraint}, ) def test_remove_deferred_unique_constraint(self): app_label = 'test_removedeferred_uc' deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferred_pink_constraint_rm', deferrable=models.Deferrable.DEFERRED, ) project_state = self.set_up_test_model(app_label, constraints=[deferred_unique_constraint]) operation = migrations.RemoveConstraint('Pony', deferred_unique_constraint.name) self.assertEqual( operation.describe(), 'Remove constraint deferred_pink_constraint_rm from model Pony', ) # Remove constraint. new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0).delete() # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 3 obj.save() else: Pony.objects.create(pink=1, weight=4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], { 'model_name': 'Pony', 'name': 'deferred_pink_constraint_rm', }) def test_alter_model_options(self): """ Tests the AlterModelOptions operation. 
""" project_state = self.set_up_test_model("test_almoop") # Test the state alteration (no DB alteration to test) operation = migrations.AlterModelOptions("Pony", {"permissions": [("can_groom", "Can groom")]}) self.assertEqual(operation.describe(), "Change Meta options on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_options') new_state = project_state.clone() operation.state_forwards("test_almoop", new_state) self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 0) self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 1) self.assertEqual(new_state.models["test_almoop", "pony"].options["permissions"][0][0], "can_groom") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelOptions") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'options': {"permissions": [("can_groom", "Can groom")]}}) def test_alter_model_options_emptying(self): """ The AlterModelOptions operation removes keys from the dict (#23121) """ project_state = self.set_up_test_model("test_almoop", options=True) # Test the state alteration (no DB alteration to test) operation = migrations.AlterModelOptions("Pony", {}) self.assertEqual(operation.describe(), "Change Meta options on Pony") new_state = project_state.clone() operation.state_forwards("test_almoop", new_state) self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 1) self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 0) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelOptions") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'options': {}}) def test_alter_order_with_respect_to(self): """ Tests the AlterOrderWithRespectTo operation. 
""" project_state = self.set_up_test_model("test_alorwrtto", related_model=True) # Test the state alteration operation = migrations.AlterOrderWithRespectTo("Rider", "pony") self.assertEqual(operation.describe(), "Set order_with_respect_to on Rider to pony") self.assertEqual( operation.migration_name_fragment, 'alter_rider_order_with_respect_to', ) new_state = project_state.clone() operation.state_forwards("test_alorwrtto", new_state) self.assertIsNone( project_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None) ) self.assertEqual( new_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None), "pony" ) # Make sure there's no matching index self.assertColumnNotExists("test_alorwrtto_rider", "_order") # Create some rows before alteration rendered_state = project_state.apps pony = rendered_state.get_model("test_alorwrtto", "Pony").objects.create(weight=50) rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=1) rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=2) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alorwrtto", editor, project_state, new_state) self.assertColumnExists("test_alorwrtto_rider", "_order") # Check for correct value in rows updated_riders = new_state.apps.get_model("test_alorwrtto", "Rider").objects.all() self.assertEqual(updated_riders[0]._order, 0) self.assertEqual(updated_riders[1]._order, 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alorwrtto", editor, new_state, project_state) self.assertColumnNotExists("test_alorwrtto_rider", "_order") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterOrderWithRespectTo") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Rider", 'order_with_respect_to': "pony"}) def test_alter_model_managers(self): """ The managers on a model are set. """ project_state = self.set_up_test_model("test_almoma") # Test the state alteration operation = migrations.AlterModelManagers( "Pony", managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Change managers on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_managers') managers = project_state.models["test_almoma", "pony"].managers self.assertEqual(managers, []) new_state = project_state.clone() operation.state_forwards("test_almoma", new_state) self.assertIn(("test_almoma", "pony"), new_state.models) managers = new_state.models["test_almoma", "pony"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) rendered_state = new_state.apps model = rendered_state.get_model('test_almoma', 'pony') self.assertIsInstance(model.food_qs, models.Manager) self.assertIsInstance(model.food_mgr, FoodManager) self.assertIsInstance(model.food_mgr_kwargs, FoodManager) def test_alter_model_managers_emptying(self): """ The managers on a model are set. 
""" project_state = self.set_up_test_model("test_almomae", manager_model=True) # Test the state alteration operation = migrations.AlterModelManagers("Food", managers=[]) self.assertEqual(operation.describe(), "Change managers on Food") self.assertIn(("test_almomae", "food"), project_state.models) managers = project_state.models["test_almomae", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) new_state = project_state.clone() operation.state_forwards("test_almomae", new_state) managers = new_state.models["test_almomae", "food"].managers self.assertEqual(managers, []) def test_alter_fk(self): """ Creating and then altering an FK works correctly and deals with the pending SQL (#23091) """ project_state = self.set_up_test_model("test_alfk") # Test adding and then altering the FK in one go create_operation = migrations.CreateModel( name="Rider", fields=[ ("id", models.AutoField(primary_key=True)), ("pony", models.ForeignKey("Pony", models.CASCADE)), ], ) create_state = project_state.clone() create_operation.state_forwards("test_alfk", create_state) alter_operation = migrations.AlterField( model_name='Rider', name='pony', field=models.ForeignKey("Pony", models.CASCADE, editable=False), ) alter_state = create_state.clone() alter_operation.state_forwards("test_alfk", alter_state) with connection.schema_editor() as editor: create_operation.database_forwards("test_alfk", editor, project_state, create_state) alter_operation.database_forwards("test_alfk", editor, create_state, alter_state) def test_alter_fk_non_fk(self): """ Altering an FK to a non-FK works (#23244) """ # Test the state alteration operation = migrations.AlterField( model_name="Rider", name="pony", field=models.FloatField(), ) project_state, new_state = self.make_test_state("test_afknfk", operation, related_model=True) # Test the database alteration self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") with connection.schema_editor() as editor: operation.database_forwards("test_afknfk", editor, project_state, new_state) self.assertColumnExists("test_afknfk_rider", "pony") self.assertColumnNotExists("test_afknfk_rider", "pony_id") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_afknfk", editor, new_state, project_state) self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") def test_run_sql(self): """ Tests the RunSQL operation. 
""" project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( # Use a multi-line string with a comment to test splitting on SQLite and MySQL respectively "CREATE TABLE i_love_ponies (id int, special_thing varchar(15));\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'i love ponies'); -- this is magic!\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (2, 'i love django');\n" "UPDATE i_love_ponies SET special_thing = 'Ponies' WHERE special_thing LIKE '%%ponies';" "UPDATE i_love_ponies SET special_thing = 'Django' WHERE special_thing LIKE '%django';", # Run delete queries to test for parameter substitution failure # reported in #23426 "DELETE FROM i_love_ponies WHERE special_thing LIKE '%Django%';" "DELETE FROM i_love_ponies WHERE special_thing LIKE '%%Ponies%%';" "DROP TABLE i_love_ponies", state_operations=[migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])], ) self.assertEqual(operation.describe(), "Raw SQL operation") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_runsql", new_state) self.assertEqual(len(new_state.models["test_runsql", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test SQL collection with connection.schema_editor(collect_sql=True) as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%ponies';", "\n".join(editor.collected_sql)) operation.database_backwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%Ponies%%';", "\n".join(editor.collected_sql)) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 2) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Django'") self.assertEqual(cursor.fetchall()[0][0], 1) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Ponies'") self.assertEqual(cursor.fetchall()[0][0], 1) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunSQL") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["reverse_sql", "sql", "state_operations"]) # And elidable reduction self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunSQL('SELECT 1 FROM void;', elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_sql_params(self): """ #23426 - RunSQL should accept parameters. 
""" project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( ["CREATE TABLE i_love_ponies (id int, special_thing varchar(15));"], ["DROP TABLE i_love_ponies"], ) param_operation = migrations.RunSQL( # forwards ( "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'Django');", ["INSERT INTO i_love_ponies (id, special_thing) VALUES (2, %s);", ['Ponies']], ("INSERT INTO i_love_ponies (id, special_thing) VALUES (%s, %s);", (3, 'Python',)), ), # backwards [ "DELETE FROM i_love_ponies WHERE special_thing = 'Django';", ["DELETE FROM i_love_ponies WHERE special_thing = 'Ponies';", None], ("DELETE FROM i_love_ponies WHERE id = %s OR special_thing = %s;", [3, 'Python']), ] ) # Make sure there's no table self.assertTableNotExists("i_love_ponies") new_state = project_state.clone() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) # Test parameter passing with connection.schema_editor() as editor: param_operation.database_forwards("test_runsql", editor, project_state, new_state) # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 3) with connection.schema_editor() as editor: param_operation.database_backwards("test_runsql", editor, new_state, project_state) with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") def test_run_sql_params_invalid(self): """ #23426 - RunSQL should fail when a list of statements with an incorrect number of tuples is given. """ project_state = self.set_up_test_model("test_runsql") new_state = project_state.clone() operation = migrations.RunSQL( # forwards [ ["INSERT INTO foo (bar) VALUES ('buz');"] ], # backwards ( ("DELETE FROM foo WHERE bar = 'buz';", 'invalid', 'parameter count'), ), ) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 1"): operation.database_forwards("test_runsql", editor, project_state, new_state) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 3"): operation.database_backwards("test_runsql", editor, new_state, project_state) def test_run_sql_noop(self): """ #24098 - Tests no-op RunSQL operations. 
""" operation = migrations.RunSQL(migrations.RunSQL.noop, migrations.RunSQL.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, None, None) operation.database_backwards("test_runsql", editor, None, None) def test_run_python(self): """ Tests the RunPython operation """ project_state = self.set_up_test_model("test_runpython", mti_model=True) # Create the operation def inner_method(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.create(pink=1, weight=3.55) Pony.objects.create(weight=5) def inner_method_reverse(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.filter(pink=1, weight=3.55).delete() Pony.objects.filter(weight=5).delete() operation = migrations.RunPython(inner_method, reverse_code=inner_method_reverse) self.assertEqual(operation.describe(), "Raw Python operation") # Test the state alteration does nothing new_state = project_state.clone() operation.state_forwards("test_runpython", new_state) self.assertEqual(new_state, project_state) # Test the database alteration self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) # Now test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) # Now test we can't use a string with self.assertRaisesMessage(ValueError, 'RunPython must be supplied with a callable'): migrations.RunPython("print 'ahahaha'") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code", "reverse_code"]) # Also test reversal fails, with an operation identical to above but without reverse_code set no_reverse_operation = migrations.RunPython(inner_method) self.assertFalse(no_reverse_operation.reversible) with connection.schema_editor() as editor: no_reverse_operation.database_forwards("test_runpython", editor, project_state, new_state) with self.assertRaises(NotImplementedError): no_reverse_operation.database_backwards("test_runpython", editor, new_state, project_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) def create_ponies(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") pony1 = Pony.objects.create(pink=1, weight=3.55) self.assertIsNot(pony1.pk, None) pony2 = Pony.objects.create(weight=5) self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = migrations.RunPython(create_ponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 4) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code"]) def create_shetlandponies(models, schema_editor): ShetlandPony = models.get_model("test_runpython", "ShetlandPony") pony1 = ShetlandPony.objects.create(weight=4.0) self.assertIsNot(pony1.pk, 
None) pony2 = ShetlandPony.objects.create(weight=5.0) self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = migrations.RunPython(create_shetlandponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 6) self.assertEqual(project_state.apps.get_model("test_runpython", "ShetlandPony").objects.count(), 2) # And elidable reduction self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunPython(inner_method, elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_python_atomic(self): """ Tests the RunPython operation correctly handles the "atomic" keyword """ project_state = self.set_up_test_model("test_runpythonatomic", mti_model=True) def inner_method(models, schema_editor): Pony = models.get_model("test_runpythonatomic", "Pony") Pony.objects.create(pink=1, weight=3.55) raise ValueError("Adrian hates ponies.") # Verify atomicity when applying. atomic_migration = Migration("test", "test_runpythonatomic") atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method)] non_atomic_migration = Migration("test", "test_runpythonatomic") non_atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method, atomic=False)] # If we're a fully-transactional database, both versions should rollback if connection.features.can_rollback_ddl: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) # Otherwise, the non-atomic operation should leave a row there else: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1) # Reset object count to zero and verify atomicity when unapplying. project_state.apps.get_model("test_runpythonatomic", "Pony").objects.all().delete() # On a fully-transactional database, both versions rollback. 
if connection.features.can_rollback_ddl: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) # Otherwise, the non-atomic operation leaves a row there. else: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1) # Verify deconstruction. definition = non_atomic_migration.operations[0].deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["atomic", "code", "reverse_code"]) def test_run_python_related_assignment(self): """ #24282 - Model changes to a FK reverse side update the model on the FK side as well. """ def inner_method(models, schema_editor): Author = models.get_model("test_authors", "Author") Book = models.get_model("test_books", "Book") author = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author) create_author = migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey("test_authors.Author", models.CASCADE)) ], options={}, ) add_hometown = migrations.AddField( "Author", "hometown", models.CharField(max_length=100), ) create_old_man = migrations.RunPython(inner_method, inner_method) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_authors", new_state) create_author.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_books", new_state) create_book.database_forwards("test_books", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: add_hometown.state_forwards("test_authors", new_state) add_hometown.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_old_man.state_forwards("test_books", new_state) create_old_man.database_forwards("test_books", editor, project_state, new_state) def test_model_with_bigautofield(self): """ A model with BigAutoField can be created. 
""" def create_data(models, schema_editor): Author = models.get_model("test_author", "Author") Book = models.get_model("test_book", "Book") author1 = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author1) Book.objects.create(id=2 ** 33, title="A farewell to arms", author=author1) author2 = Author.objects.create(id=2 ** 33, name="Remarque") Book.objects.create(title="All quiet on the western front", author=author2) Book.objects.create(title="Arc de Triomphe", author=author2) create_author = migrations.CreateModel( "Author", [ ("id", models.BigAutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.BigAutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey(to="test_author.Author", on_delete=models.CASCADE)) ], options={}, ) fill_data = migrations.RunPython(create_data) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_author", new_state) create_author.database_forwards("test_author", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_book", new_state) create_book.database_forwards("test_book", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_data.state_forwards("fill_data", new_state) fill_data.database_forwards("fill_data", editor, project_state, new_state) def _test_autofield_foreignfield_growth(self, source_field, target_field, target_value): """ A field may be migrated in the following ways: - AutoField to BigAutoField - SmallAutoField to AutoField - SmallAutoField to BigAutoField """ def create_initial_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog = Blog.objects.create(name="web development done right") Article.objects.create(name="Frameworks", blog=blog) Article.objects.create(name="Programming Languages", blog=blog) def create_big_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog2 = Blog.objects.create(name="Frameworks", id=target_value) Article.objects.create(name="Django", blog=blog2) Article.objects.create(id=target_value, name="Django2", blog=blog2) create_blog = migrations.CreateModel( "Blog", [ ("id", source_field(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_article = migrations.CreateModel( "Article", [ ("id", source_field(primary_key=True)), ("blog", models.ForeignKey(to="test_blog.Blog", on_delete=models.CASCADE)), ("name", models.CharField(max_length=100)), ("data", models.TextField(default="")), ], options={}, ) fill_initial_data = migrations.RunPython(create_initial_data, create_initial_data) fill_big_data = migrations.RunPython(create_big_data, create_big_data) grow_article_id = migrations.AlterField('Article', 'id', target_field(primary_key=True)) grow_blog_id = migrations.AlterField('Blog', 'id', target_field(primary_key=True)) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_blog.state_forwards("test_blog", new_state) create_blog.database_forwards("test_blog", editor, project_state, new_state) project_state = 
new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_article.state_forwards("test_article", new_state) create_article.database_forwards("test_article", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_initial_data.state_forwards("fill_initial_data", new_state) fill_initial_data.database_forwards("fill_initial_data", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_article_id.state_forwards("test_article", new_state) grow_article_id.database_forwards("test_article", editor, project_state, new_state) state = new_state.clone() article = state.apps.get_model("test_article.Article") self.assertIsInstance(article._meta.pk, target_field) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_blog_id.state_forwards("test_blog", new_state) grow_blog_id.database_forwards("test_blog", editor, project_state, new_state) state = new_state.clone() blog = state.apps.get_model("test_blog.Blog") self.assertIsInstance(blog._meta.pk, target_field) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_big_data.state_forwards("fill_big_data", new_state) fill_big_data.database_forwards("fill_big_data", editor, project_state, new_state) def test_autofield__bigautofield_foreignfield_growth(self): """A field may be migrated from AutoField to BigAutoField.""" self._test_autofield_foreignfield_growth( models.AutoField, models.BigAutoField, 2 ** 33, ) def test_smallfield_autofield_foreignfield_growth(self): """A field may be migrated from SmallAutoField to AutoField.""" self._test_autofield_foreignfield_growth( models.SmallAutoField, models.AutoField, 2 ** 22, ) def test_smallfield_bigautofield_foreignfield_growth(self): """A field may be migrated from SmallAutoField to BigAutoField.""" self._test_autofield_foreignfield_growth( models.SmallAutoField, models.BigAutoField, 2 ** 33, ) def test_run_python_noop(self): """ #24098 - Tests no-op RunPython operations. """ project_state = ProjectState() new_state = project_state.clone() operation = migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) operation.database_backwards("test_runpython", editor, new_state, project_state) def test_separate_database_and_state(self): """ Tests the SeparateDatabaseAndState operation. 
""" project_state = self.set_up_test_model("test_separatedatabaseandstate") # Create the operation database_operation = migrations.RunSQL( "CREATE TABLE i_love_ponies (id int, special_thing int);", "DROP TABLE i_love_ponies;" ) state_operation = migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))]) operation = migrations.SeparateDatabaseAndState( state_operations=[state_operation], database_operations=[database_operation] ) self.assertEqual(operation.describe(), "Custom state/database change combination") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_separatedatabaseandstate", new_state) self.assertEqual(len(new_state.models["test_separatedatabaseandstate", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_separatedatabaseandstate", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_separatedatabaseandstate", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "SeparateDatabaseAndState") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["database_operations", "state_operations"]) def test_separate_database_and_state2(self): """ A complex SeparateDatabaseAndState operation: Multiple operations both for state and database. Verify the state dependencies within each list and that state ops don't affect the database. """ app_label = "test_separatedatabaseandstate2" project_state = self.set_up_test_model(app_label) # Create the operation database_operations = [ migrations.CreateModel( "ILovePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveponies"}, ), migrations.CreateModel( "ILoveMorePonies", # We use IntegerField and not AutoField because # the model is going to be deleted immediately # and with an AutoField this fails on Oracle [("id", models.IntegerField(primary_key=True))], options={"db_table": "ilovemoreponies"}, ), migrations.DeleteModel("ILoveMorePonies"), migrations.CreateModel( "ILoveEvenMorePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveevenmoreponies"}, ), ] state_operations = [ migrations.CreateModel( "SomethingElse", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingelse"}, ), migrations.DeleteModel("SomethingElse"), migrations.CreateModel( "SomethingCompletelyDifferent", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingcompletelydifferent"}, ), ] operation = migrations.SeparateDatabaseAndState( state_operations=state_operations, database_operations=database_operations, ) # Test the state alteration new_state = project_state.clone() operation.state_forwards(app_label, new_state) def assertModelsAndTables(after_db): # Tables and models exist, or don't, as they should: self.assertNotIn((app_label, "somethingelse"), new_state.models) self.assertEqual(len(new_state.models[app_label, "somethingcompletelydifferent"].fields), 1) self.assertNotIn((app_label, "iloveponiesonies"), new_state.models) self.assertNotIn((app_label, "ilovemoreponies"), new_state.models) self.assertNotIn((app_label, "iloveevenmoreponies"), new_state.models) 
self.assertTableNotExists("somethingelse") self.assertTableNotExists("somethingcompletelydifferent") self.assertTableNotExists("ilovemoreponies") if after_db: self.assertTableExists("iloveponies") self.assertTableExists("iloveevenmoreponies") else: self.assertTableNotExists("iloveponies") self.assertTableNotExists("iloveevenmoreponies") assertModelsAndTables(after_db=False) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) assertModelsAndTables(after_db=True) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) assertModelsAndTables(after_db=False) class SwappableOperationTests(OperationTestBase): """ Key operations ignore swappable models (we don't want to replicate all of them here, as the functionality is in a common base class anyway) """ available_apps = ['migrations'] @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_create_ignore_swapped(self): """ The CreateTable operation ignores swapped models. """ operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], options={ "swappable": "TEST_SWAP_MODEL", }, ) # Test the state alteration (it should still be there!) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crigsw", new_state) self.assertEqual(new_state.models["test_crigsw", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crigsw", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crigsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crigsw", editor, project_state, new_state) self.assertTableNotExists("test_crigsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crigsw", editor, new_state, project_state) self.assertTableNotExists("test_crigsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_delete_ignore_swapped(self): """ Tests the DeleteModel operation ignores swapped models. """ operation = migrations.DeleteModel("Pony") project_state, new_state = self.make_test_state("test_dligsw", operation) # Test the database alteration self.assertTableNotExists("test_dligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dligsw", editor, project_state, new_state) self.assertTableNotExists("test_dligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dligsw", editor, new_state, project_state) self.assertTableNotExists("test_dligsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_add_field_ignore_swapped(self): """ Tests the AddField operation. 
""" # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=5), ) project_state, new_state = self.make_test_state("test_adfligsw", operation) # Test the database alteration self.assertTableNotExists("test_adfligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_adfligsw", editor, project_state, new_state) self.assertTableNotExists("test_adfligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adfligsw", editor, new_state, project_state) self.assertTableNotExists("test_adfligsw_pony") @override_settings(TEST_SWAP_MODEL='migrations.SomeFakeModel') def test_indexes_ignore_swapped(self): """ Add/RemoveIndex operations ignore swapped models. """ operation = migrations.AddIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state('test_adinigsw', operation) with connection.schema_editor() as editor: # No database queries should be run for swapped models operation.database_forwards('test_adinigsw', editor, project_state, new_state) operation.database_backwards('test_adinigsw', editor, new_state, project_state) operation = migrations.RemoveIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state("test_rminigsw", operation) with connection.schema_editor() as editor: operation.database_forwards('test_rminigsw', editor, project_state, new_state) operation.database_backwards('test_rminigsw', editor, new_state, project_state) class TestCreateModel(SimpleTestCase): def test_references_model_mixin(self): migrations.CreateModel( 'name', fields=[], bases=(Mixin, models.Model), ).references_model('other_model', 'migrations') class FieldOperationTests(SimpleTestCase): def test_references_model(self): operation = FieldOperation('MoDel', 'field', models.ForeignKey('Other', models.CASCADE)) # Model name match. self.assertIs(operation.references_model('mOdEl', 'migrations'), True) # Referenced field. self.assertIs(operation.references_model('oTher', 'migrations'), True) # Doesn't reference. 
        self.assertIs(operation.references_model('Whatever', 'migrations'), False)

    def test_references_field_by_name(self):
        operation = FieldOperation('MoDel', 'field', models.BooleanField(default=False))
        self.assertIs(operation.references_field('model', 'field', 'migrations'), True)

    def test_references_field_by_remote_field_model(self):
        operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE))
        self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True)
        self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False)

    def test_references_field_by_from_fields(self):
        operation = FieldOperation(
            'Model', 'field', models.fields.related.ForeignObject('Other', models.CASCADE, ['from'], ['to'])
        )
        self.assertIs(operation.references_field('Model', 'from', 'migrations'), True)
        self.assertIs(operation.references_field('Model', 'to', 'migrations'), False)
        self.assertIs(operation.references_field('Other', 'from', 'migrations'), False)
        self.assertIs(operation.references_field('Other', 'to', 'migrations'), True)

    def test_references_field_by_to_fields(self):
        operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE, to_field='field'))
        self.assertIs(operation.references_field('Other', 'field', 'migrations'), True)
        self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), False)
        self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False)

    def test_references_field_by_through(self):
        operation = FieldOperation('Model', 'field', models.ManyToManyField('Other', through='Through'))
        self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True)
        self.assertIs(operation.references_field('Through', 'whatever', 'migrations'), True)
        self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False)

    def test_reference_field_by_through_fields(self):
        operation = FieldOperation(
            'Model', 'field', models.ManyToManyField('Other', through='Through', through_fields=('first', 'second'))
        )
        self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True)
        self.assertIs(operation.references_field('Through', 'whatever', 'migrations'), False)
        self.assertIs(operation.references_field('Through', 'first', 'migrations'), True)
        self.assertIs(operation.references_field('Through', 'second', 'migrations'), True)
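
    def test_references_field_remote_model_any_field(self):
        # Added illustrative check (not from the upstream suite; the test name
        # and field name are our own): a ForeignKey is treated as referencing
        # *every* field on the remote model, which is what lets the
        # autodetector order dependent operations conservatively.
        operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE))
        self.assertIs(operation.references_field('Other', 'any_field_at_all', 'migrations'), True)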
import functools import re from unittest import mock from django.apps import apps from django.conf import settings from django.contrib.auth.models import AbstractBaseUser from django.core.validators import RegexValidator, validate_slug from django.db import connection, migrations, models from django.db.migrations.autodetector import MigrationAutodetector from django.db.migrations.graph import MigrationGraph from django.db.migrations.loader import MigrationLoader from django.db.migrations.questioner import MigrationQuestioner from django.db.migrations.state import ModelState, ProjectState from django.test import SimpleTestCase, TestCase, override_settings from django.test.utils import isolate_lru_cache from .models import FoodManager, FoodQuerySet class DeconstructibleObject: """ A custom deconstructible object. """ def __init__(self, *args, **kwargs): self.args = args self.kwargs = kwargs def deconstruct(self): return ( self.__module__ + '.' + self.__class__.__name__, self.args, self.kwargs ) class AutodetectorTests(TestCase): """ Tests the migration autodetector. """ author_empty = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True))]) author_name = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ]) author_name_null = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, null=True)), ]) author_name_longer = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=400)), ]) author_name_renamed = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("names", models.CharField(max_length=200)), ]) author_name_default = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default='Ada Lovelace')), ]) author_name_check_constraint = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ], {'constraints': [models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')]}, ) author_dates_of_birth_auto_now = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("date_of_birth", models.DateField(auto_now=True)), ("date_time_of_birth", models.DateTimeField(auto_now=True)), ("time_of_birth", models.TimeField(auto_now=True)), ]) author_dates_of_birth_auto_now_add = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("date_of_birth", models.DateField(auto_now_add=True)), ("date_time_of_birth", models.DateTimeField(auto_now_add=True)), ("time_of_birth", models.TimeField(auto_now_add=True)), ]) author_name_deconstructible_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject())), ]) author_name_deconstructible_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject())), ]) author_name_deconstructible_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=models.IntegerField())), ]) author_name_deconstructible_4 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=models.IntegerField())), ]) 
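    # The *_deconstructible_* fixtures below differ only in (sometimes deeply
    # nested) deconstructible default values. The autodetector compares fields
    # by their deconstructed form, so a pair that deconstructs identically
    # should produce no operations, while any changed arg or kwarg should be
    # picked up as a field change.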
author_name_deconstructible_list_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])), ]) author_name_deconstructible_list_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])), ]) author_name_deconstructible_list_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 999])), ]) author_name_deconstructible_tuple_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))), ]) author_name_deconstructible_tuple_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))), ]) author_name_deconstructible_tuple_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 999))), ]) author_name_deconstructible_dict_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default={ 'item': DeconstructibleObject(), 'otheritem': 123 })), ]) author_name_deconstructible_dict_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default={ 'item': DeconstructibleObject(), 'otheritem': 123 })), ]) author_name_deconstructible_dict_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default={ 'item': DeconstructibleObject(), 'otheritem': 999 })), ]) author_name_nested_deconstructible_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_changed_arg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2-changed'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_extra_arg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), None, a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_changed_kwarg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", 
models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c-changed')), ))), ]) author_name_nested_deconstructible_extra_kwarg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), c=None, ))), ]) author_custom_pk = ModelState("testapp", "Author", [("pk_field", models.IntegerField(primary_key=True))]) author_with_biography_non_blank = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField()), ("biography", models.TextField()), ]) author_with_biography_blank = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(blank=True)), ("biography", models.TextField(blank=True)), ]) author_with_book = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) author_with_book_order_wrt = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ], options={"order_with_respect_to": "book"}) author_renamed_with_book = ModelState("testapp", "Writer", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) author_with_publisher_string = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("publisher_name", models.CharField(max_length=200)), ]) author_with_publisher = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)), ]) author_with_user = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("user", models.ForeignKey("auth.User", models.CASCADE)), ]) author_with_custom_user = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("user", models.ForeignKey("thirdapp.CustomUser", models.CASCADE)), ]) author_proxy = ModelState("testapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)) author_proxy_options = ModelState("testapp", "AuthorProxy", [], { "proxy": True, "verbose_name": "Super Author", }, ("testapp.author",)) author_proxy_notproxy = ModelState("testapp", "AuthorProxy", [], {}, ("testapp.author",)) author_proxy_third = ModelState("thirdapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)) author_proxy_third_notproxy = ModelState("thirdapp", "AuthorProxy", [], {}, ("testapp.author",)) author_proxy_proxy = ModelState("testapp", "AAuthorProxyProxy", [], {"proxy": True}, ("testapp.authorproxy",)) author_unmanaged = ModelState("testapp", "AuthorUnmanaged", [], {"managed": False}, ("testapp.author",)) author_unmanaged_managed = ModelState("testapp", "AuthorUnmanaged", [], {}, ("testapp.author",)) author_unmanaged_default_pk = ModelState("testapp", "Author", [("id", 
models.AutoField(primary_key=True))]) author_unmanaged_custom_pk = ModelState("testapp", "Author", [ ("pk_field", models.IntegerField(primary_key=True)), ]) author_with_m2m = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher")), ]) author_with_m2m_blank = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher", blank=True)), ]) author_with_m2m_through = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher", through="testapp.Contract")), ]) author_with_renamed_m2m_through = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher", through="testapp.Deal")), ]) author_with_former_m2m = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.CharField(max_length=100)), ]) author_with_options = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ], { "permissions": [('can_hire', 'Can hire')], "verbose_name": "Authi", }) author_with_db_table_options = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_one"}) author_with_new_db_table_options = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_two"}) author_renamed_with_db_table_options = ModelState("testapp", "NewAuthor", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_one"}) author_renamed_with_new_db_table_options = ModelState("testapp", "NewAuthor", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_three"}) contract = ModelState("testapp", "Contract", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)), ]) contract_renamed = ModelState("testapp", "Deal", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)), ]) publisher = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ]) publisher_with_author = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("name", models.CharField(max_length=100)), ]) publisher_with_aardvark_author = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Aardvark", models.CASCADE)), ("name", models.CharField(max_length=100)), ]) publisher_with_book = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("otherapp.Book", models.CASCADE)), ("name", models.CharField(max_length=100)), ]) other_pony = ModelState("otherapp", "Pony", [ ("id", models.AutoField(primary_key=True)), ]) other_pony_food = ModelState("otherapp", "Pony", [ ("id", models.AutoField(primary_key=True)), ], managers=[ ('food_qs', FoodQuerySet.as_manager()), ('food_mgr', FoodManager('a', 'b')), ('food_mgr_kwargs', FoodManager('x', 'y', 3, 4)), ]) other_stable = ModelState("otherapp", "Stable", [("id", models.AutoField(primary_key=True))]) third_thing = ModelState("thirdapp", "Thing", 
[("id", models.AutoField(primary_key=True))]) book = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_proxy_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("thirdapp.AuthorProxy", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_proxy_proxy_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.AAuthorProxyProxy", models.CASCADE)), ]) book_migrations_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("migrations.UnmigratedModel", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_with_no_author_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.IntegerField()), ("title", models.CharField(max_length=200)), ]) book_with_no_author = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("title", models.CharField(max_length=200)), ]) book_with_author_renamed = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Writer", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_with_field_and_author_renamed = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("writer", models.ForeignKey("testapp.Writer", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_with_multiple_authors = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("authors", models.ManyToManyField("testapp.Author")), ("title", models.CharField(max_length=200)), ]) book_with_multiple_authors_through_attribution = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("authors", models.ManyToManyField("testapp.Author", through="otherapp.Attribution")), ("title", models.CharField(max_length=200)), ]) book_indexes = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "indexes": [models.Index(fields=["author", "title"], name="book_title_author_idx")], }) book_unordered_indexes = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "indexes": [models.Index(fields=["title", "author"], name="book_author_title_idx")], }) book_foo_together = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("author", "title")}, "unique_together": {("author", "title")}, }) book_foo_together_2 = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("title", "author")}, "unique_together": {("title", "author")}, }) book_foo_together_3 = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("newfield", models.IntegerField()), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { 
"index_together": {("title", "newfield")}, "unique_together": {("title", "newfield")}, }) book_foo_together_4 = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("newfield2", models.IntegerField()), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("title", "newfield2")}, "unique_together": {("title", "newfield2")}, }) attribution = ModelState("otherapp", "Attribution", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) edition = ModelState("thirdapp", "Edition", [ ("id", models.AutoField(primary_key=True)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) custom_user = ModelState("thirdapp", "CustomUser", [ ("id", models.AutoField(primary_key=True)), ("username", models.CharField(max_length=255)), ], bases=(AbstractBaseUser,)) custom_user_no_inherit = ModelState("thirdapp", "CustomUser", [ ("id", models.AutoField(primary_key=True)), ("username", models.CharField(max_length=255)), ]) aardvark = ModelState("thirdapp", "Aardvark", [("id", models.AutoField(primary_key=True))]) aardvark_testapp = ModelState("testapp", "Aardvark", [("id", models.AutoField(primary_key=True))]) aardvark_based_on_author = ModelState("testapp", "Aardvark", [], bases=("testapp.Author",)) aardvark_pk_fk_author = ModelState("testapp", "Aardvark", [ ("id", models.OneToOneField("testapp.Author", models.CASCADE, primary_key=True)), ]) knight = ModelState("eggs", "Knight", [("id", models.AutoField(primary_key=True))]) rabbit = ModelState("eggs", "Rabbit", [ ("id", models.AutoField(primary_key=True)), ("knight", models.ForeignKey("eggs.Knight", models.CASCADE)), ("parent", models.ForeignKey("eggs.Rabbit", models.CASCADE)), ], { "unique_together": {("parent", "knight")}, "indexes": [models.Index(fields=["parent", "knight"], name='rabbit_circular_fk_index')], }) def repr_changes(self, changes, include_dependencies=False): output = "" for app_label, migrations_ in sorted(changes.items()): output += " %s:\n" % app_label for migration in migrations_: output += " %s\n" % migration.name for operation in migration.operations: output += " %s\n" % operation if include_dependencies: output += " Dependencies:\n" if migration.dependencies: for dep in migration.dependencies: output += " %s\n" % (dep,) else: output += " None\n" return output def assertNumberMigrations(self, changes, app_label, number): if len(changes.get(app_label, [])) != number: self.fail("Incorrect number of migrations (%s) for %s (expected %s)\n%s" % ( len(changes.get(app_label, [])), app_label, number, self.repr_changes(changes), )) def assertMigrationDependencies(self, changes, app_label, position, dependencies): if not changes.get(app_label): self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes))) if len(changes[app_label]) < position + 1: self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes))) migration = changes[app_label][position] if set(migration.dependencies) != set(dependencies): self.fail("Migration dependencies mismatch for %s.%s (expected %s):\n%s" % ( app_label, migration.name, dependencies, self.repr_changes(changes, include_dependencies=True), )) def assertOperationTypes(self, changes, app_label, position, types): if not changes.get(app_label): self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes))) 
if len(changes[app_label]) < position + 1: self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes))) migration = changes[app_label][position] real_types = [operation.__class__.__name__ for operation in migration.operations] if types != real_types: self.fail("Operation type mismatch for %s.%s (expected %s):\n%s" % ( app_label, migration.name, types, self.repr_changes(changes), )) def assertOperationAttributes(self, changes, app_label, position, operation_position, **attrs): if not changes.get(app_label): self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes))) if len(changes[app_label]) < position + 1: self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes))) migration = changes[app_label][position] if len(changes[app_label]) < position + 1: self.fail("No operation at index %s for %s.%s\n%s" % ( operation_position, app_label, migration.name, self.repr_changes(changes), )) operation = migration.operations[operation_position] for attr, value in attrs.items(): if getattr(operation, attr, None) != value: self.fail("Attribute mismatch for %s.%s op #%s, %s (expected %r, got %r):\n%s" % ( app_label, migration.name, operation_position, attr, value, getattr(operation, attr, None), self.repr_changes(changes), )) def assertOperationFieldAttributes(self, changes, app_label, position, operation_position, **attrs): if not changes.get(app_label): self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes))) if len(changes[app_label]) < position + 1: self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes))) migration = changes[app_label][position] if len(changes[app_label]) < position + 1: self.fail("No operation at index %s for %s.%s\n%s" % ( operation_position, app_label, migration.name, self.repr_changes(changes), )) operation = migration.operations[operation_position] if not hasattr(operation, 'field'): self.fail("No field attribute for %s.%s op #%s." 
% ( app_label, migration.name, operation_position, )) field = operation.field for attr, value in attrs.items(): if getattr(field, attr, None) != value: self.fail("Field attribute mismatch for %s.%s op #%s, field.%s (expected %r, got %r):\n%s" % ( app_label, migration.name, operation_position, attr, value, getattr(field, attr, None), self.repr_changes(changes), )) def make_project_state(self, model_states): "Shortcut to make ProjectStates from lists of predefined models" project_state = ProjectState() for model_state in model_states: project_state.add_model(model_state.clone()) return project_state def get_changes(self, before_states, after_states, questioner=None): return MigrationAutodetector( self.make_project_state(before_states), self.make_project_state(after_states), questioner, )._detect_changes() def test_arrange_for_graph(self): """Tests auto-naming of migrations for graph matching.""" # Make a fake graph graph = MigrationGraph() graph.add_node(("testapp", "0001_initial"), None) graph.add_node(("testapp", "0002_foobar"), None) graph.add_node(("otherapp", "0001_initial"), None) graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("testapp", "0001_initial")) graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("otherapp", "0001_initial")) # Use project state to make a new migration change set before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable]) autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes() # Run through arrange_for_graph changes = autodetector.arrange_for_graph(changes, graph) # Make sure there's a new name, deps match, etc. self.assertEqual(changes["testapp"][0].name, "0003_author") self.assertEqual(changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]) self.assertEqual(changes["otherapp"][0].name, "0002_pony_stable") self.assertEqual(changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]) def test_trim_apps(self): """ Trim does not remove dependencies but does remove unwanted apps. 
""" # Use project state to make a new migration change set before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable, self.third_thing]) autodetector = MigrationAutodetector(before, after, MigrationQuestioner({"ask_initial": True})) changes = autodetector._detect_changes() # Run through arrange_for_graph graph = MigrationGraph() changes = autodetector.arrange_for_graph(changes, graph) changes["testapp"][0].dependencies.append(("otherapp", "0001_initial")) changes = autodetector._trim_to_apps(changes, {"testapp"}) # Make sure there's the right set of migrations self.assertEqual(changes["testapp"][0].name, "0001_initial") self.assertEqual(changes["otherapp"][0].name, "0001_initial") self.assertNotIn("thirdapp", changes) def test_custom_migration_name(self): """Tests custom naming of migrations for graph matching.""" # Make a fake graph graph = MigrationGraph() graph.add_node(("testapp", "0001_initial"), None) graph.add_node(("testapp", "0002_foobar"), None) graph.add_node(("otherapp", "0001_initial"), None) graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("testapp", "0001_initial")) # Use project state to make a new migration change set before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable]) autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes() # Run through arrange_for_graph migration_name = 'custom_name' changes = autodetector.arrange_for_graph(changes, graph, migration_name) # Make sure there's a new name, deps match, etc. self.assertEqual(changes["testapp"][0].name, "0003_%s" % migration_name) self.assertEqual(changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]) self.assertEqual(changes["otherapp"][0].name, "0002_%s" % migration_name) self.assertEqual(changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]) def test_new_model(self): """Tests autodetection of new models.""" changes = self.get_changes([], [self.other_pony_food]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Pony") self.assertEqual([name for name, mgr in changes['otherapp'][0].operations[0].managers], ['food_qs', 'food_mgr', 'food_mgr_kwargs']) def test_old_model(self): """Tests deletion of old models.""" changes = self.get_changes([self.author_empty], []) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["DeleteModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author") def test_add_field(self): """Tests autodetection of new fields.""" changes = self.get_changes([self.author_empty], [self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name") @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_date_fields_with_auto_now_not_asking_for_default(self, mocked_ask_method): changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"]) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now=True) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_date_fields_with_auto_now_add_not_asking_for_null_addition(self, mocked_ask_method): changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now_add]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"]) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_auto_now_add_addition') def test_add_date_fields_with_auto_now_add_asking_for_default(self, mocked_ask_method): changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now_add]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"]) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True) self.assertEqual(mocked_ask_method.call_count, 3) def test_remove_field(self): """Tests autodetection of removed fields.""" changes = self.get_changes([self.author_name], [self.author_empty]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RemoveField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name") def test_alter_field(self): """Tests autodetection of altered fields.""" changes = self.get_changes([self.author_name], [self.author_name_longer]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True) def test_supports_functools_partial(self): def _content_file_name(instance, filename, key, **kwargs): return '{}/{}'.format(instance, filename) def content_file_name(key, **kwargs): return functools.partial(_content_file_name, key, **kwargs) # An unchanged partial reference. before = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('file'))), ])] after = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('file'))), ])] changes = self.get_changes(before, after) self.assertNumberMigrations(changes, 'testapp', 0) # A changed partial reference.
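# Added note (commentary, not from the original file): functools.partial objects don't compare equal
# by value, which is presumably why the autodetector compares their deconstructed form instead. Roughly:
#   p1 = functools.partial(_content_file_name, 'file')
#   p2 = functools.partial(_content_file_name, 'file')
#   p1 == p2  # False -- identity-based comparison
#   (p1.func, p1.args, p1.keywords) == (p2.func, p2.args, p2.keywords)  # True -- value-based
# Hence the unchanged partial reference above yields zero migrations.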
args_changed = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('other-file'))), ])] changes = self.get_changes(before, args_changed) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['AlterField']) # Can't use assertOperationFieldAttributes because we need the # deconstructed version, i.e., the exploded func/args/keywords rather # than the partial: we don't care if it's not the same instance of the # partial, only if it's the same source function, args, and keywords. value = changes['testapp'][0].operations[0].field.upload_to self.assertEqual( (_content_file_name, ('other-file',), {}), (value.func, value.args, value.keywords) ) kwargs_changed = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('file', spam='eggs'))), ])] changes = self.get_changes(before, kwargs_changed) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['AlterField']) value = changes['testapp'][0].operations[0].field.upload_to self.assertEqual( (_content_file_name, ('file',), {'spam': 'eggs'}), (value.func, value.args, value.keywords) ) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration', side_effect=AssertionError("Should not have prompted for not null addition")) def test_alter_field_to_not_null_with_default(self, mocked_ask_method): """ #23609 - Tests autodetection of nullable to non-nullable alterations. """ changes = self.get_changes([self.author_name_null], [self.author_name_default]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default='Ada Lovelace') @mock.patch( 'django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration', return_value=models.NOT_PROVIDED, ) def test_alter_field_to_not_null_without_default(self, mocked_ask_method): """ #23609 - Tests autodetection of nullable to non-nullable alterations. """ changes = self.get_changes([self.author_name_null], [self.author_name]) self.assertEqual(mocked_ask_method.call_count, 1) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default=models.NOT_PROVIDED) @mock.patch( 'django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration', return_value='Some Name', ) def test_alter_field_to_not_null_oneoff_default(self, mocked_ask_method): """ #23609 - Tests autodetection of nullable to non-nullable alterations. """ changes = self.get_changes([self.author_name_null], [self.author_name]) self.assertEqual(mocked_ask_method.call_count, 1) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=False) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default="Some Name") def test_rename_field(self): """Tests autodetection of renamed fields.""" changes = self.get_changes( [self.author_name], [self.author_name_renamed], MigrationQuestioner({"ask_rename": True}) ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="name", new_name="names") def test_rename_field_foreign_key_to_field(self): before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField(unique=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('foo', models.ForeignKey('app.Foo', models.CASCADE, to_field='field')), ]), ] after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('renamed_field', models.IntegerField(unique=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('foo', models.ForeignKey('app.Foo', models.CASCADE, to_field='renamed_field')), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) # Right number/type of migrations? self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField']) self.assertOperationAttributes(changes, 'app', 0, 0, old_name='field', new_name='renamed_field') def test_rename_foreign_object_fields(self): fields = ('first', 'second') renamed_fields = ('first_renamed', 'second_renamed') before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('first', models.IntegerField()), ('second', models.IntegerField()), ], options={'unique_together': {fields}}), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('first', models.IntegerField()), ('second', models.IntegerField()), ('foo', models.ForeignObject( 'app.Foo', models.CASCADE, from_fields=fields, to_fields=fields, )), ]), ] # Case 1: to_fields renames. after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('first_renamed', models.IntegerField()), ('second_renamed', models.IntegerField()), ], options={'unique_together': {renamed_fields}}), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('first', models.IntegerField()), ('second', models.IntegerField()), ('foo', models.ForeignObject( 'app.Foo', models.CASCADE, from_fields=fields, to_fields=renamed_fields, )), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'RenameField', 'AlterUniqueTogether']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='foo', old_name='first', new_name='first_renamed', ) self.assertOperationAttributes( changes, 'app', 0, 1, model_name='foo', old_name='second', new_name='second_renamed', ) # Case 2: from_fields renames. 
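# Added note (commentary, not from the original file): contrast with Case 1 above -- renaming to_fields
# touched the referenced model Foo (including its unique_together), while this case only renames Bar's
# local columns, so no AlterUniqueTogether is expected here.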
after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('first', models.IntegerField()), ('second', models.IntegerField()), ], options={'unique_together': {fields}}), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('first_renamed', models.IntegerField()), ('second_renamed', models.IntegerField()), ('foo', models.ForeignObject( 'app.Foo', models.CASCADE, from_fields=renamed_fields, to_fields=fields, )), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'RenameField']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='bar', old_name='first', new_name='first_renamed', ) self.assertOperationAttributes( changes, 'app', 0, 1, model_name='bar', old_name='second', new_name='second_renamed', ) def test_rename_referenced_primary_key(self): before = [ ModelState('app', 'Foo', [ ('id', models.CharField(primary_key=True, serialize=False)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('foo', models.ForeignKey('app.Foo', models.CASCADE)), ]), ] after = [ ModelState('app', 'Foo', [ ('renamed_id', models.CharField(primary_key=True, serialize=False)) ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('foo', models.ForeignKey('app.Foo', models.CASCADE)), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField']) self.assertOperationAttributes(changes, 'app', 0, 0, old_name='id', new_name='renamed_id') def test_rename_field_preserved_db_column(self): """ RenameField is used if a field is renamed and db_column equal to the old field's column is added. 
""" before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField()), ]), ] after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('renamed_field', models.IntegerField(db_column='field')), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'AlterField']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='foo', old_name='field', new_name='renamed_field', ) self.assertOperationAttributes(changes, 'app', 0, 1, model_name='foo', name='renamed_field') self.assertEqual(changes['app'][0].operations[-1].field.deconstruct(), ( 'renamed_field', 'django.db.models.IntegerField', [], {'db_column': 'field'}, )) def test_rename_related_field_preserved_db_column(self): before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('foo', models.ForeignKey('app.Foo', models.CASCADE)), ]), ] after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('renamed_foo', models.ForeignKey('app.Foo', models.CASCADE, db_column='foo_id')), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'AlterField']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='bar', old_name='foo', new_name='renamed_foo', ) self.assertOperationAttributes(changes, 'app', 0, 1, model_name='bar', name='renamed_foo') self.assertEqual(changes['app'][0].operations[-1].field.deconstruct(), ( 'renamed_foo', 'django.db.models.ForeignKey', [], {'to': 'app.foo', 'on_delete': models.CASCADE, 'db_column': 'foo_id'}, )) def test_rename_model(self): """Tests autodetection of renamed models.""" changes = self.get_changes( [self.author_with_book, self.book], [self.author_renamed_with_book, self.book_with_author_renamed], MigrationQuestioner({"ask_rename_model": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="Author", new_name="Writer") # Now that RenameModel handles related fields too, there should be # no AlterField for the related field. self.assertNumberMigrations(changes, 'otherapp', 0) def test_rename_model_case(self): """ Model name is case-insensitive. Changing case doesn't lead to any autodetected operations. """ author_renamed = ModelState('testapp', 'author', [ ('id', models.AutoField(primary_key=True)), ]) changes = self.get_changes( [self.author_empty, self.book], [author_renamed, self.book], questioner=MigrationQuestioner({'ask_rename_model': True}), ) self.assertNumberMigrations(changes, 'testapp', 0) self.assertNumberMigrations(changes, 'otherapp', 0) def test_rename_m2m_through_model(self): """ Tests autodetection of renamed models that are used in M2M relations as through models. """ changes = self.get_changes( [self.author_with_m2m_through, self.publisher, self.contract], [self.author_with_renamed_m2m_through, self.publisher, self.contract_renamed], MigrationQuestioner({'ask_rename_model': True}) ) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['RenameModel']) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name='Contract', new_name='Deal') def test_rename_model_with_renamed_rel_field(self): """ Tests autodetection of renamed models while simultaneously renaming one of the fields that relate to the renamed model. """ changes = self.get_changes( [self.author_with_book, self.book], [self.author_renamed_with_book, self.book_with_field_and_author_renamed], MigrationQuestioner({"ask_rename": True, "ask_rename_model": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="Author", new_name="Writer") # Right number/type of migrations for related field rename? # Alter is already taken care of. self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["RenameField"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, old_name="author", new_name="writer") def test_rename_model_with_fks_in_different_position(self): """ #24537 - The order of fields in a model does not influence the RenameModel detection. """ before = [ ModelState("testapp", "EntityA", [ ("id", models.AutoField(primary_key=True)), ]), ModelState("testapp", "EntityB", [ ("id", models.AutoField(primary_key=True)), ("some_label", models.CharField(max_length=255)), ("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)), ]), ] after = [ ModelState("testapp", "EntityA", [ ("id", models.AutoField(primary_key=True)), ]), ModelState("testapp", "RenamedEntityB", [ ("id", models.AutoField(primary_key=True)), ("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)), ("some_label", models.CharField(max_length=255)), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({"ask_rename_model": True})) self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="EntityB", new_name="RenamedEntityB") def test_rename_model_reverse_relation_dependencies(self): """ The migration to rename a model pointed to by a foreign key in another app must run after the other app's migration that adds the foreign key with model's original name. Therefore, the renaming migration has a dependency on that other migration. 
""" before = [ ModelState('testapp', 'EntityA', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('otherapp', 'EntityB', [ ('id', models.AutoField(primary_key=True)), ('entity_a', models.ForeignKey('testapp.EntityA', models.CASCADE)), ]), ] after = [ ModelState('testapp', 'RenamedEntityA', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('otherapp', 'EntityB', [ ('id', models.AutoField(primary_key=True)), ('entity_a', models.ForeignKey('testapp.RenamedEntityA', models.CASCADE)), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename_model': True})) self.assertNumberMigrations(changes, 'testapp', 1) self.assertMigrationDependencies(changes, 'testapp', 0, [('otherapp', '__first__')]) self.assertOperationTypes(changes, 'testapp', 0, ['RenameModel']) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name='EntityA', new_name='RenamedEntityA') def test_fk_dependency(self): """Having a ForeignKey automatically adds a dependency.""" # Note that testapp (author) has no dependencies, # otherapp (book) depends on testapp (author), # thirdapp (edition) depends on otherapp (book) changes = self.get_changes([], [self.author_name, self.book, self.edition]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, []) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book") self.assertMigrationDependencies(changes, 'otherapp', 0, [("testapp", "auto_1")]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="Edition") self.assertMigrationDependencies(changes, 'thirdapp', 0, [("otherapp", "auto_1")]) def test_proxy_fk_dependency(self): """FK dependencies still work on proxy models.""" # Note that testapp (author) has no dependencies, # otherapp (book) depends on testapp (authorproxy) changes = self.get_changes([], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, []) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book") self.assertMigrationDependencies(changes, 'otherapp', 0, [("thirdapp", "auto_1")]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="AuthorProxy") self.assertMigrationDependencies(changes, 'thirdapp', 0, [("testapp", "auto_1")]) def test_same_app_no_fk_dependency(self): """ A migration with a FK between two models of the same app does not have a dependency to itself. 
""" changes = self.get_changes([], [self.author_with_publisher, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher") self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, []) def test_circular_fk_dependency(self): """ Having a circular ForeignKey dependency automatically resolves the situation into 2 migrations on one side and 1 on the other. """ changes = self.get_changes([], [self.author_with_book, self.book, self.publisher_with_book]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher") self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, [("otherapp", "auto_1")]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 2) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'otherapp', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'otherapp', 0, []) self.assertMigrationDependencies(changes, 'otherapp', 1, [("otherapp", "auto_1"), ("testapp", "auto_1")]) # both split migrations should be `initial` self.assertTrue(changes['otherapp'][0].initial) self.assertTrue(changes['otherapp'][1].initial) def test_same_app_circular_fk_dependency(self): """ A migration with a FK between two models of the same app does not have a dependency to itself. """ changes = self.get_changes([], [self.author_with_publisher, self.publisher_with_author]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel", "AddField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher") self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher") self.assertMigrationDependencies(changes, 'testapp', 0, []) def test_same_app_circular_fk_dependency_with_unique_together_and_indexes(self): """ #22275 - A migration with circular FK dependency does not try to create unique together constraint and indexes before creating all required fields first. """ changes = self.get_changes([], [self.knight, self.rabbit]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'eggs', 1) self.assertOperationTypes( changes, 'eggs', 0, ["CreateModel", "CreateModel", "AddIndex", "AlterUniqueTogether"] ) self.assertNotIn("unique_together", changes['eggs'][0].operations[0].options) self.assertNotIn("unique_together", changes['eggs'][0].operations[1].options) self.assertMigrationDependencies(changes, 'eggs', 0, []) def test_alter_db_table_add(self): """Tests detection for adding db_table in model's options.""" changes = self.get_changes([self.author_empty], [self.author_with_db_table_options]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table="author_one") def test_alter_db_table_change(self): """Tests detection for changing db_table in model's options.""" changes = self.get_changes([self.author_with_db_table_options], [self.author_with_new_db_table_options]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table="author_two") def test_alter_db_table_remove(self): """Tests detection for removing db_table in model's options.""" changes = self.get_changes([self.author_with_db_table_options], [self.author_empty]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table=None) def test_alter_db_table_no_changes(self): """ Alter db_table doesn't generate a migration if no changes have been made. """ changes = self.get_changes([self.author_with_db_table_options], [self.author_with_db_table_options]) # Right number of migrations? self.assertEqual(len(changes), 0) def test_keep_db_table_with_model_change(self): """ Tests that when a model is renamed but its db_table stays as-is, the autodetector creates no more than one operation. """ changes = self.get_changes( [self.author_with_db_table_options], [self.author_renamed_with_db_table_options], MigrationQuestioner({"ask_rename_model": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor") def test_alter_db_table_with_model_change(self): """ Tests that when both the model name and db_table change, the autodetector creates two operations. """ changes = self.get_changes( [self.author_with_db_table_options], [self.author_renamed_with_new_db_table_options], MigrationQuestioner({"ask_rename_model": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel", "AlterModelTable"]) self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor") self.assertOperationAttributes(changes, "testapp", 0, 1, name="newauthor", table="author_three") def test_identical_regex_doesnt_alter(self): from_state = ModelState( "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[ RegexValidator( re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid' ) ]))] ) to_state = ModelState( "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[validate_slug]))] ) changes = self.get_changes([from_state], [to_state]) # Right number/type of migrations?
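# Added note (commentary, not from the original file): field equality is judged on deconstruct() output,
# and a compiled pattern deconstructs to its source string and flags. The RegexValidator built above
# appears to match validate_slug exactly (same pattern, message, and code), so no AlterField should be
# detected; test_different_regex_does_alter below changes only the pattern and flags to force one.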
self.assertNumberMigrations(changes, "testapp", 0) def test_different_regex_does_alter(self): from_state = ModelState( "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[ RegexValidator( re.compile('^[a-z]+\\Z', 32), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid' ) ]))] ) to_state = ModelState( "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[validate_slug]))] ) changes = self.get_changes([from_state], [to_state]) self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterField"]) def test_empty_foo_together(self): """ #23452 - Empty unique/index_together shouldn't generate a migration. """ # Explicitly testing for not specified, since this is the case after # a CreateModel operation w/o any definition on the original model model_state_not_specified = ModelState("a", "model", [("id", models.AutoField(primary_key=True))]) # Explicitly testing for None, since this was the issue in #23452 after # an AlterFooTogether operation with e.g. () as value model_state_none = ModelState("a", "model", [ ("id", models.AutoField(primary_key=True)) ], { "index_together": None, "unique_together": None, }) # Explicitly testing for the empty set, since we now always have sets. # During removal (('col1', 'col2'),) --> () this becomes set([]) model_state_empty = ModelState("a", "model", [ ("id", models.AutoField(primary_key=True)) ], { "index_together": set(), "unique_together": set(), }) def test(from_state, to_state, msg): changes = self.get_changes([from_state], [to_state]) if changes: ops = ', '.join(o.__class__.__name__ for o in changes['a'][0].operations) self.fail('Created operation(s) %s from %s' % (ops, msg)) tests = ( (model_state_not_specified, model_state_not_specified, '"not specified" to "not specified"'), (model_state_not_specified, model_state_none, '"not specified" to "None"'), (model_state_not_specified, model_state_empty, '"not specified" to "empty"'), (model_state_none, model_state_not_specified, '"None" to "not specified"'), (model_state_none, model_state_none, '"None" to "None"'), (model_state_none, model_state_empty, '"None" to "empty"'), (model_state_empty, model_state_not_specified, '"empty" to "not specified"'), (model_state_empty, model_state_none, '"empty" to "None"'), (model_state_empty, model_state_empty, '"empty" to "empty"'), ) for t in tests: test(*t) def test_create_model_with_indexes(self): """Test creation of new model with indexes already defined.""" author = ModelState('otherapp', 'Author', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=200)), ], {'indexes': [models.Index(fields=['name'], name='create_model_with_indexes_idx')]}) changes = self.get_changes([], [author]) added_index = models.Index(fields=['name'], name='create_model_with_indexes_idx') # Right number of migrations? self.assertEqual(len(changes['otherapp']), 1) # Right number of actions? migration = changes['otherapp'][0] self.assertEqual(len(migration.operations), 2) # Right actions order? 
self.assertOperationTypes(changes, 'otherapp', 0, ['CreateModel', 'AddIndex']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='Author') self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='author', index=added_index) def test_add_indexes(self): """Test change detection of new indexes.""" changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_indexes]) self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['AddIndex']) added_index = models.Index(fields=['author', 'title'], name='book_title_author_idx') self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', index=added_index) def test_remove_indexes(self): """Test change detection of removed indexes.""" changes = self.get_changes([self.author_empty, self.book_indexes], [self.author_empty, self.book]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveIndex']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', name='book_title_author_idx') def test_order_fields_indexes(self): """Test change detection of reordering of fields in indexes.""" changes = self.get_changes( [self.author_empty, self.book_indexes], [self.author_empty, self.book_unordered_indexes] ) self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveIndex', 'AddIndex']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', name='book_title_author_idx') added_index = models.Index(fields=['title', 'author'], name='book_author_title_idx') self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='book', index=added_index) def test_create_model_with_check_constraint(self): """Test creation of new model with constraints already defined.""" author = ModelState('otherapp', 'Author', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=200)), ], {'constraints': [models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')]}) changes = self.get_changes([], [author]) added_constraint = models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob') # Right number of migrations? self.assertEqual(len(changes['otherapp']), 1) # Right number of actions? migration = changes['otherapp'][0] self.assertEqual(len(migration.operations), 2) # Right actions order? self.assertOperationTypes(changes, 'otherapp', 0, ['CreateModel', 'AddConstraint']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='Author') self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='author', constraint=added_constraint) def test_add_constraints(self): """Test change detection of new constraints.""" changes = self.get_changes([self.author_name], [self.author_name_check_constraint]) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['AddConstraint']) added_constraint = models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob') self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name='author', constraint=added_constraint) def test_remove_constraints(self): """Test change detection of removed constraints.""" changes = self.get_changes([self.author_name_check_constraint], [self.author_name]) # Right number/type of migrations? 
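# Added note (commentary, not from the original file): constraints are tracked by name, so the
# RemoveConstraint asserted below only needs model_name and the constraint's name -- the original
# check expression is not required to drop it.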
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['RemoveConstraint']) self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name='author', name='name_contains_bob') def test_add_foo_together(self): """Tests index/unique_together detection.""" changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_foo_together]) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together={("author", "title")}) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together={("author", "title")}) def test_remove_foo_together(self): """Tests index/unique_together detection.""" changes = self.get_changes([self.author_empty, self.book_foo_together], [self.author_empty, self.book]) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together=set()) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together=set()) def test_foo_together_remove_fk(self): """Tests unique_together and field removal detection & ordering""" changes = self.get_changes( [self.author_empty, self.book_foo_together], [self.author_empty, self.book_with_no_author] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, [ "AlterUniqueTogether", "AlterIndexTogether", "RemoveField" ]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together=set()) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together=set()) self.assertOperationAttributes(changes, "otherapp", 0, 2, model_name="book", name="author") def test_foo_together_no_changes(self): """ index/unique_together doesn't generate a migration if no changes have been made. """ changes = self.get_changes( [self.author_empty, self.book_foo_together], [self.author_empty, self.book_foo_together] ) # Right number of migrations? self.assertEqual(len(changes), 0) def test_foo_together_ordering(self): """ index/unique_together also triggers on ordering changes. """ changes = self.get_changes( [self.author_empty, self.book_foo_together], [self.author_empty, self.book_foo_together_2] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together={("title", "author")}) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together={("title", "author")}) def test_add_field_and_foo_together(self): """ Added fields will be created before using them in index/unique_together. """ changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_foo_together_3]) # Right number/type of migrations? 
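# Added note (commentary, not from the original file): ordering matters in the assertions below --
# AlterUniqueTogether/AlterIndexTogether reference "newfield", so the autodetector must emit the
# AddField first or the resulting migration would fail at the database level with an
# unknown-column error.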
self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AddField", "AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", unique_together={("title", "newfield")}) self.assertOperationAttributes(changes, "otherapp", 0, 2, name="book", index_together={("title", "newfield")}) def test_create_model_and_unique_together(self): author = ModelState("otherapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ]) book_with_author = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("otherapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("title", "author")}, "unique_together": {("title", "author")}, }) changes = self.get_changes([self.book_with_no_author], [author, book_with_author]) # Right number of migrations? self.assertEqual(len(changes['otherapp']), 1) # Right number of actions? migration = changes['otherapp'][0] self.assertEqual(len(migration.operations), 4) # Right actions order? self.assertOperationTypes( changes, 'otherapp', 0, ['CreateModel', 'AddField', 'AlterUniqueTogether', 'AlterIndexTogether'] ) def test_remove_field_and_foo_together(self): """ Removed fields will be removed after updating index/unique_together. """ changes = self.get_changes( [self.author_empty, self.book_foo_together_3], [self.author_empty, self.book_foo_together] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether", "RemoveField"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together={("author", "title")}) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together={("author", "title")}) self.assertOperationAttributes(changes, "otherapp", 0, 2, model_name="book", name="newfield") def test_rename_field_and_foo_together(self): """ Removed fields will be removed after updating index/unique_together. """ changes = self.get_changes( [self.author_empty, self.book_foo_together_3], [self.author_empty, self.book_foo_together_4], MigrationQuestioner({"ask_rename": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["RenameField", "AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", unique_together={ ("title", "newfield2") }) self.assertOperationAttributes(changes, "otherapp", 0, 2, name="book", index_together={("title", "newfield2")}) def test_proxy(self): """The autodetector correctly deals with proxy models.""" # First, we test adding a proxy model changes = self.get_changes([self.author_empty], [self.author_empty, self.author_proxy]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"]) self.assertOperationAttributes( changes, "testapp", 0, 0, name="AuthorProxy", options={"proxy": True, "indexes": [], "constraints": []} ) # Now, we test turning a proxy model into a non-proxy model # It should delete the proxy then make the real one changes = self.get_changes( [self.author_empty, self.author_proxy], [self.author_empty, self.author_proxy_notproxy] ) # Right number/type of migrations? 
self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel", "CreateModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="AuthorProxy") self.assertOperationAttributes(changes, "testapp", 0, 1, name="AuthorProxy", options={}) def test_proxy_custom_pk(self): """ #23415 - The autodetector must correctly deal with custom FK on proxy models. """ # First, we test the default pk field name changes = self.get_changes([], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]) # The field name the FK on the book model points to self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'id') # Now, we test the custom pk field name changes = self.get_changes([], [self.author_custom_pk, self.author_proxy_third, self.book_proxy_fk]) # The field name the FK on the book model points to self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'pk_field') def test_proxy_to_mti_with_fk_to_proxy(self): # First, test the pk table and field name. changes = self.get_changes( [], [self.author_empty, self.author_proxy_third, self.book_proxy_fk], ) self.assertEqual( changes['otherapp'][0].operations[0].fields[2][1].remote_field.model._meta.db_table, 'testapp_author', ) self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'id') # Change AuthorProxy to use MTI. changes = self.get_changes( [self.author_empty, self.author_proxy_third, self.book_proxy_fk], [self.author_empty, self.author_proxy_third_notproxy, self.book_proxy_fk], ) # Right number/type of migrations for the AuthorProxy model? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ['DeleteModel', 'CreateModel']) # Right number/type of migrations for the Book model with a FK to # AuthorProxy? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField']) # otherapp should depend on thirdapp. self.assertMigrationDependencies(changes, 'otherapp', 0, [('thirdapp', 'auto_1')]) # Now, test the pk table and field name. self.assertEqual( changes['otherapp'][0].operations[0].field.remote_field.model._meta.db_table, 'thirdapp_authorproxy', ) self.assertEqual(changes['otherapp'][0].operations[0].field.remote_field.field_name, 'author_ptr') def test_proxy_to_mti_with_fk_to_proxy_proxy(self): # First, test the pk table and field name. changes = self.get_changes( [], [self.author_empty, self.author_proxy, self.author_proxy_proxy, self.book_proxy_proxy_fk], ) self.assertEqual( changes['otherapp'][0].operations[0].fields[1][1].remote_field.model._meta.db_table, 'testapp_author', ) self.assertEqual(changes['otherapp'][0].operations[0].fields[1][1].remote_field.field_name, 'id') # Change AuthorProxy to use MTI. FK still points to AAuthorProxyProxy, # a proxy of AuthorProxy. changes = self.get_changes( [self.author_empty, self.author_proxy, self.author_proxy_proxy, self.book_proxy_proxy_fk], [self.author_empty, self.author_proxy_notproxy, self.author_proxy_proxy, self.book_proxy_proxy_fk], ) # Right number/type of migrations for the AuthorProxy model? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['DeleteModel', 'CreateModel']) # Right number/type of migrations for the Book model with a FK to # AAuthorProxyProxy? 
self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField']) # otherapp should depend on testapp. self.assertMigrationDependencies(changes, 'otherapp', 0, [('testapp', 'auto_1')]) # Now, test the pk table and field name. self.assertEqual( changes['otherapp'][0].operations[0].field.remote_field.model._meta.db_table, 'testapp_authorproxy', ) self.assertEqual(changes['otherapp'][0].operations[0].field.remote_field.field_name, 'author_ptr') def test_unmanaged_create(self): """The autodetector correctly deals with managed models.""" # First, we test adding an unmanaged model changes = self.get_changes([self.author_empty], [self.author_empty, self.author_unmanaged]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="AuthorUnmanaged", options={"managed": False}) def test_unmanaged_delete(self): changes = self.get_changes([self.author_empty, self.author_unmanaged], [self.author_empty]) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['DeleteModel']) def test_unmanaged_to_managed(self): # Now, we test turning an unmanaged model into a managed model changes = self.get_changes( [self.author_empty, self.author_unmanaged], [self.author_empty, self.author_unmanaged_managed] ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="authorunmanaged", options={}) def test_managed_to_unmanaged(self): # Now, we turn managed to unmanaged. changes = self.get_changes( [self.author_empty, self.author_unmanaged_managed], [self.author_empty, self.author_unmanaged] ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="authorunmanaged", options={"managed": False}) def test_unmanaged_custom_pk(self): """ #23415 - The autodetector must correctly deal with custom FK on unmanaged models. """ # First, we test the default pk field name changes = self.get_changes([], [self.author_unmanaged_default_pk, self.book]) # The field name the FK on the book model points to self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'id') # Now, we test the custom pk field name changes = self.get_changes([], [self.author_unmanaged_custom_pk, self.book]) # The field name the FK on the book model points to self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'pk_field') @override_settings(AUTH_USER_MODEL="thirdapp.CustomUser") def test_swappable(self): with isolate_lru_cache(apps.get_swappable_settings_name): changes = self.get_changes([self.custom_user], [self.custom_user, self.author_with_custom_user]) # Right number/type of migrations? 
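            # The dependency is recorded against the AUTH_USER_MODEL setting
            # rather than a concrete app, so swapping the user model later
            # doesn't invalidate the dependency graph.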
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, [("__setting__", "AUTH_USER_MODEL")]) def test_swappable_changed(self): with isolate_lru_cache(apps.get_swappable_settings_name): before = self.make_project_state([self.custom_user, self.author_with_user]) with override_settings(AUTH_USER_MODEL="thirdapp.CustomUser"): after = self.make_project_state([self.custom_user, self.author_with_custom_user]) autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes() # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name="author", name='user') fk_field = changes['testapp'][0].operations[0].field to_model = '%s.%s' % ( fk_field.remote_field.model._meta.app_label, fk_field.remote_field.model._meta.object_name, ) self.assertEqual(to_model, 'thirdapp.CustomUser') def test_add_field_with_default(self): """#22030 - Adding a field with a default should work.""" changes = self.get_changes([self.author_empty], [self.author_name_default]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="name") def test_custom_deconstructible(self): """ Two instances which deconstruct to the same value aren't considered a change. """ changes = self.get_changes([self.author_name_deconstructible_1], [self.author_name_deconstructible_2]) # Right number of migrations? self.assertEqual(len(changes), 0) def test_deconstruct_field_kwarg(self): """Field instances are handled correctly by nested deconstruction.""" changes = self.get_changes([self.author_name_deconstructible_3], [self.author_name_deconstructible_4]) self.assertEqual(changes, {}) def test_deconstructible_list(self): """Nested deconstruction descends into lists.""" # When lists contain items that deconstruct to identical values, those lists # should be considered equal for the purpose of detecting state changes # (even if the original items are unequal). changes = self.get_changes( [self.author_name_deconstructible_list_1], [self.author_name_deconstructible_list_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed lists should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_list_1], [self.author_name_deconstructible_list_3] ) self.assertEqual(len(changes), 1) def test_deconstructible_tuple(self): """Nested deconstruction descends into tuples.""" # When tuples contain items that deconstruct to identical values, those tuples # should be considered equal for the purpose of detecting state changes # (even if the original items are unequal). 
changes = self.get_changes( [self.author_name_deconstructible_tuple_1], [self.author_name_deconstructible_tuple_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed tuples should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_tuple_1], [self.author_name_deconstructible_tuple_3] ) self.assertEqual(len(changes), 1) def test_deconstructible_dict(self): """Nested deconstruction descends into dict values.""" # When dicts contain items whose values deconstruct to identical values, # those dicts should be considered equal for the purpose of detecting # state changes (even if the original values are unequal). changes = self.get_changes( [self.author_name_deconstructible_dict_1], [self.author_name_deconstructible_dict_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed dicts should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_dict_1], [self.author_name_deconstructible_dict_3] ) self.assertEqual(len(changes), 1) def test_nested_deconstructible_objects(self): """ Nested deconstruction is applied recursively to the args/kwargs of deconstructed objects. """ # If the items within a deconstructed object's args/kwargs have the same # deconstructed values - whether or not the items themselves are different # instances - then the object as a whole is regarded as unchanged. changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_2] ) self.assertEqual(changes, {}) # Differences that exist solely within the args list of a deconstructed object # should be reported as changes changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_changed_arg] ) self.assertEqual(len(changes), 1) # Additional args should also be reported as a change changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_extra_arg] ) self.assertEqual(len(changes), 1) # Differences that exist solely within the kwargs dict of a deconstructed object # should be reported as changes changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_changed_kwarg] ) self.assertEqual(len(changes), 1) # Additional kwargs should also be reported as a change changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_extra_kwarg] ) self.assertEqual(len(changes), 1) def test_deconstruct_type(self): """ #22951 -- Uninstantiated classes with deconstruct are correctly returned by deep_deconstruct during serialization. """ author = ModelState( "testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField( max_length=200, # IntegerField intentionally not instantiated. default=models.IntegerField, )) ], ) changes = self.get_changes([], [author]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) def test_replace_string_with_foreignkey(self): """ #22300 - Adding an FK in the same "spot" as a deleted CharField should work. """ changes = self.get_changes([self.author_with_publisher_string], [self.author_with_publisher, self.publisher]) # Right number/type of migrations? 
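        # A CharField can't be altered into a ForeignKey, so the autodetector
        # emits a RemoveField/AddField pair, creating Publisher first so the
        # new FK has a target.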
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "RemoveField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Publisher") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publisher_name") self.assertOperationAttributes(changes, 'testapp', 0, 2, name="publisher") def test_foreign_key_removed_before_target_model(self): """ Removing an FK and the model it targets in the same change must remove the FK field before the model to maintain consistency. """ changes = self.get_changes( [self.author_with_publisher, self.publisher], [self.author_name] ) # removes both the model and FK # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RemoveField", "DeleteModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publisher") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Publisher") @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_many_to_many(self, mocked_ask_method): """#22435 - Adding a ManyToManyField should not prompt for a default.""" changes = self.get_changes([self.author_empty, self.publisher], [self.author_with_m2m, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers") def test_alter_many_to_many(self): changes = self.get_changes( [self.author_with_m2m, self.publisher], [self.author_with_m2m_blank, self.publisher] ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers") def test_create_with_through_model(self): """ Adding a m2m with a through model and the models that use it should be ordered correctly. """ changes = self.get_changes([], [self.author_with_m2m_through, self.publisher, self.contract]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, [ 'CreateModel', 'CreateModel', 'CreateModel', 'AddField', ]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name='Author') self.assertOperationAttributes(changes, 'testapp', 0, 1, name='Publisher') self.assertOperationAttributes(changes, 'testapp', 0, 2, name='Contract') self.assertOperationAttributes(changes, 'testapp', 0, 3, model_name='author', name='publishers') def test_many_to_many_removed_before_through_model(self): """ Removing a ManyToManyField and the "through" model in the same change must remove the field before the model to maintain consistency. """ changes = self.get_changes( [self.book_with_multiple_authors_through_attribution, self.author_name, self.attribution], [self.book_with_no_author, self.author_name], ) # Remove both the through model and ManyToMany # Right number/type of migrations? 
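        # The M2M field must be removed before the through model it names;
        # deleting Attribution first would leave a dangling through= target.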
self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveField', 'DeleteModel']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='authors', model_name='book') self.assertOperationAttributes(changes, 'otherapp', 0, 1, name='Attribution') def test_many_to_many_removed_before_through_model_2(self): """ Removing a model that contains a ManyToManyField and the "through" model in the same change must remove the field before the model to maintain consistency. """ changes = self.get_changes( [self.book_with_multiple_authors_through_attribution, self.author_name, self.attribution], [self.author_name], ) # Remove both the through model and ManyToMany # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveField', 'DeleteModel', 'DeleteModel']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='authors', model_name='book') self.assertOperationAttributes(changes, 'otherapp', 0, 1, name='Attribution') self.assertOperationAttributes(changes, 'otherapp', 0, 2, name='Book') def test_m2m_w_through_multistep_remove(self): """ A model with a m2m field that specifies a "through" model cannot be removed in the same migration as that through model as the schema will pass through an inconsistent state. The autodetector should produce two migrations to avoid this issue. """ changes = self.get_changes([self.author_with_m2m_through, self.publisher, self.contract], [self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, [ "RemoveField", "RemoveField", "DeleteModel", "DeleteModel" ]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", model_name='contract') self.assertOperationAttributes(changes, "testapp", 0, 1, name="publisher", model_name='contract') self.assertOperationAttributes(changes, "testapp", 0, 2, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 3, name="Contract") def test_concrete_field_changed_to_many_to_many(self): """ #23938 - Changing a concrete field into a ManyToManyField first removes the concrete field and then adds the m2m field. """ changes = self.get_changes([self.author_with_former_m2m], [self.author_with_m2m, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "RemoveField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name='Publisher') self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 2, name="publishers", model_name='author') def test_many_to_many_changed_to_concrete_field(self): """ #23938 - Changing a ManyToManyField into a concrete field first removes the m2m field and then adds the concrete field. """ changes = self.get_changes([self.author_with_m2m, self.publisher], [self.author_with_former_m2m]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "AddField", "DeleteModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 2, name='Publisher') self.assertOperationFieldAttributes(changes, 'testapp', 0, 1, max_length=100) def test_non_circular_foreignkey_dependency_removal(self): """ If two models with a ForeignKey from one to the other are removed at the same time, the autodetector should remove them in the correct order. """ changes = self.get_changes([self.author_with_publisher, self.publisher_with_author], []) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "DeleteModel", "DeleteModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", model_name='publisher') self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 2, name="Publisher") def test_alter_model_options(self): """Changing a model's options should make a change.""" changes = self.get_changes([self.author_empty], [self.author_with_options]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, options={ "permissions": [('can_hire', 'Can hire')], "verbose_name": "Authi", }) # Changing them back to empty should also make a change changes = self.get_changes([self.author_with_options], [self.author_empty]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", options={}) def test_alter_model_options_proxy(self): """Changing a proxy model's options should also make a change.""" changes = self.get_changes( [self.author_proxy, self.author_empty], [self.author_proxy_options, self.author_empty] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="authorproxy", options={ "verbose_name": "Super Author" }) def test_set_alter_order_with_respect_to(self): """Setting order_with_respect_to adds a field.""" changes = self.get_changes([self.book, self.author_with_book], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterOrderWithRespectTo"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="author", order_with_respect_to="book") def test_add_alter_order_with_respect_to(self): """ Setting order_with_respect_to when adding the FK too does things in the right order. """ changes = self.get_changes([self.author_name], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AlterOrderWithRespectTo"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name="author", name="book") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="author", order_with_respect_to="book") def test_remove_alter_order_with_respect_to(self): """ Removing order_with_respect_to when removing the FK too does things in the right order. """ changes = self.get_changes([self.book, self.author_with_book_order_wrt], [self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterOrderWithRespectTo", "RemoveField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="author", order_with_respect_to=None) self.assertOperationAttributes(changes, 'testapp', 0, 1, model_name="author", name="book") def test_add_model_order_with_respect_to(self): """ Setting order_with_respect_to when adding the whole model does things in the right order. """ changes = self.get_changes([], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes( changes, 'testapp', 0, 0, name="Author", options={'order_with_respect_to': 'book'} ) self.assertNotIn("_order", [name for name, field in changes['testapp'][0].operations[0].fields]) def test_alter_model_managers(self): """ Changing the model managers adds a new operation. """ changes = self.get_changes([self.other_pony], [self.other_pony_food]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["AlterModelManagers"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="pony") self.assertEqual([name for name, mgr in changes['otherapp'][0].operations[0].managers], ['food_qs', 'food_mgr', 'food_mgr_kwargs']) self.assertEqual(changes['otherapp'][0].operations[0].managers[1][1].args, ('a', 'b', 1, 2)) self.assertEqual(changes['otherapp'][0].operations[0].managers[2][1].args, ('x', 'y', 3, 4)) def test_swappable_first_inheritance(self): """Swappable models get their CreateModel first.""" changes = self.get_changes([], [self.custom_user, self.aardvark]) # Right number/type of migrations? 
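        # CustomUser is the swappable model, so its CreateModel is ordered
        # ahead of Aardvark's within the migration.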
self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="CustomUser") self.assertOperationAttributes(changes, 'thirdapp', 0, 1, name="Aardvark") def test_default_related_name_option(self): model_state = ModelState('app', 'model', [ ('id', models.AutoField(primary_key=True)), ], options={'default_related_name': 'related_name'}) changes = self.get_changes([], [model_state]) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['CreateModel']) self.assertOperationAttributes( changes, 'app', 0, 0, name='model', options={'default_related_name': 'related_name'}, ) altered_model_state = ModelState('app', 'Model', [ ('id', models.AutoField(primary_key=True)), ]) changes = self.get_changes([model_state], [altered_model_state]) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['AlterModelOptions']) self.assertOperationAttributes(changes, 'app', 0, 0, name='model', options={}) @override_settings(AUTH_USER_MODEL="thirdapp.CustomUser") def test_swappable_first_setting(self): """Swappable models get their CreateModel first.""" with isolate_lru_cache(apps.get_swappable_settings_name): changes = self.get_changes([], [self.custom_user_no_inherit, self.aardvark]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="CustomUser") self.assertOperationAttributes(changes, 'thirdapp', 0, 1, name="Aardvark") def test_bases_first(self): """Bases of other models come first.""" changes = self.get_changes([], [self.aardvark_based_on_author, self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Aardvark") def test_multiple_bases(self): """#23956 - Inheriting models doesn't move *_ptr fields into AddField operations.""" A = ModelState("app", "A", [("a_id", models.AutoField(primary_key=True))]) B = ModelState("app", "B", [("b_id", models.AutoField(primary_key=True))]) C = ModelState("app", "C", [], bases=("app.A", "app.B")) D = ModelState("app", "D", [], bases=("app.A", "app.B")) E = ModelState("app", "E", [], bases=("app.A", "app.B")) changes = self.get_changes([], [A, B, C, D, E]) # Right number/type of migrations? self.assertNumberMigrations(changes, "app", 1) self.assertOperationTypes(changes, "app", 0, [ "CreateModel", "CreateModel", "CreateModel", "CreateModel", "CreateModel" ]) self.assertOperationAttributes(changes, "app", 0, 0, name="A") self.assertOperationAttributes(changes, "app", 0, 1, name="B") self.assertOperationAttributes(changes, "app", 0, 2, name="C") self.assertOperationAttributes(changes, "app", 0, 3, name="D") self.assertOperationAttributes(changes, "app", 0, 4, name="E") def test_proxy_bases_first(self): """Bases of proxies come first.""" changes = self.get_changes([], [self.author_empty, self.author_proxy, self.author_proxy_proxy]) # Right number/type of migrations? 
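        # Creation order follows the proxy chain: the concrete Author, then
        # AuthorProxy, then the proxy of the proxy.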
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel", "CreateModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author")
        self.assertOperationAttributes(changes, 'testapp', 0, 1, name="AuthorProxy")
        self.assertOperationAttributes(changes, 'testapp', 0, 2, name="AAuthorProxyProxy")

    def test_pk_fk_included(self):
        """
        A relation used as the primary key is kept as part of CreateModel.
        """
        changes = self.get_changes([], [self.aardvark_pk_fk_author, self.author_name])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author")
        self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Aardvark")

    def test_first_dependency(self):
        """
        A dependency to an app with no migrations uses __first__.
        """
        # Load graph
        loader = MigrationLoader(connection)
        before = self.make_project_state([])
        after = self.make_project_state([self.book_migrations_fk])
        after.real_apps = ["migrations"]
        autodetector = MigrationAutodetector(before, after)
        changes = autodetector._detect_changes(graph=loader.graph)
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book")
        self.assertMigrationDependencies(changes, 'otherapp', 0, [("migrations", "__first__")])

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_last_dependency(self):
        """
        A dependency to an app with existing migrations uses the last
        migration of that app.
        """
        # Load graph
        loader = MigrationLoader(connection)
        before = self.make_project_state([])
        after = self.make_project_state([self.book_migrations_fk])
        after.real_apps = ["migrations"]
        autodetector = MigrationAutodetector(before, after)
        changes = autodetector._detect_changes(graph=loader.graph)
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book")
        self.assertMigrationDependencies(changes, 'otherapp', 0, [("migrations", "0002_second")])

    def test_alter_fk_before_model_deletion(self):
        """
        ForeignKeys are altered _before_ the model they used to refer to is
        deleted.
        """
        changes = self.get_changes(
            [self.author_name, self.publisher_with_author],
            [self.aardvark_testapp, self.publisher_with_aardvark_author]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "AlterField", "DeleteModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Aardvark")
        self.assertOperationAttributes(changes, 'testapp', 0, 1, name="author")
        self.assertOperationAttributes(changes, 'testapp', 0, 2, name="Author")

    def test_fk_dependency_other_app(self):
        """
        #23100 - ForeignKeys correctly depend on other apps' models.
        """
        changes = self.get_changes([self.author_name, self.book], [self.author_with_book, self.book])
        # Right number/type of migrations?
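        # The new FK targets otherapp.Book, so testapp's migration has to
        # depend on otherapp's first migration.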
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="book") self.assertMigrationDependencies(changes, 'testapp', 0, [("otherapp", "__first__")]) def test_alter_field_to_fk_dependency_other_app(self): changes = self.get_changes( [self.author_empty, self.book_with_no_author_fk], [self.author_empty, self.book], ) self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField']) self.assertMigrationDependencies(changes, 'otherapp', 0, [('testapp', '__first__')]) def test_circular_dependency_mixed_addcreate(self): """ #23315 - The dependency resolver knows to put all CreateModel before AddField and not become unsolvable. """ address = ModelState("a", "Address", [ ("id", models.AutoField(primary_key=True)), ("country", models.ForeignKey("b.DeliveryCountry", models.CASCADE)), ]) person = ModelState("a", "Person", [ ("id", models.AutoField(primary_key=True)), ]) apackage = ModelState("b", "APackage", [ ("id", models.AutoField(primary_key=True)), ("person", models.ForeignKey("a.Person", models.CASCADE)), ]) country = ModelState("b", "DeliveryCountry", [ ("id", models.AutoField(primary_key=True)), ]) changes = self.get_changes([], [address, person, apackage, country]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'a', 0, ["CreateModel", "CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertOperationTypes(changes, 'b', 0, ["CreateModel", "CreateModel"]) @override_settings(AUTH_USER_MODEL="a.Tenant") def test_circular_dependency_swappable(self): """ #23322 - The dependency resolver knows to explicitly resolve swappable models. """ with isolate_lru_cache(apps.get_swappable_settings_name): tenant = ModelState("a", "Tenant", [ ("id", models.AutoField(primary_key=True)), ("primary_address", models.ForeignKey("b.Address", models.CASCADE))], bases=(AbstractBaseUser,) ) address = ModelState("b", "Address", [ ("id", models.AutoField(primary_key=True)), ("tenant", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)), ]) changes = self.get_changes([], [address, tenant]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'a', 0, []) self.assertMigrationDependencies(changes, 'a', 1, [('a', 'auto_1'), ('b', 'auto_1')]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'b', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'b', 0, [('__setting__', 'AUTH_USER_MODEL')]) @override_settings(AUTH_USER_MODEL="b.Tenant") def test_circular_dependency_swappable2(self): """ #23322 - The dependency resolver knows to explicitly resolve swappable models but with the swappable not being the first migrated model. 
""" with isolate_lru_cache(apps.get_swappable_settings_name): address = ModelState("a", "Address", [ ("id", models.AutoField(primary_key=True)), ("tenant", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)), ]) tenant = ModelState("b", "Tenant", [ ("id", models.AutoField(primary_key=True)), ("primary_address", models.ForeignKey("a.Address", models.CASCADE))], bases=(AbstractBaseUser,) ) changes = self.get_changes([], [address, tenant]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'a', 0, []) self.assertMigrationDependencies(changes, 'a', 1, [('__setting__', 'AUTH_USER_MODEL'), ('a', 'auto_1')]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'b', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'b', 0, [('a', 'auto_1')]) @override_settings(AUTH_USER_MODEL="a.Person") def test_circular_dependency_swappable_self(self): """ #23322 - The dependency resolver knows to explicitly resolve swappable models. """ with isolate_lru_cache(apps.get_swappable_settings_name): person = ModelState("a", "Person", [ ("id", models.AutoField(primary_key=True)), ("parent1", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE, related_name='children')) ]) changes = self.get_changes([], [person]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 1) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'a', 0, []) @override_settings(AUTH_USER_MODEL='a.User') def test_swappable_circular_multi_mti(self): with isolate_lru_cache(apps.get_swappable_settings_name): parent = ModelState('a', 'Parent', [ ('user', models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)) ]) child = ModelState('a', 'Child', [], bases=('a.Parent',)) user = ModelState('a', 'User', [], bases=(AbstractBaseUser, 'a.Child')) changes = self.get_changes([], [parent, child, user]) self.assertNumberMigrations(changes, 'a', 1) self.assertOperationTypes(changes, 'a', 0, ['CreateModel', 'CreateModel', 'CreateModel', 'AddField']) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_blank_textfield_and_charfield(self, mocked_ask_method): """ #23405 - Adding a NOT NULL and blank `CharField` or `TextField` without default should not prompt for a default. """ changes = self.get_changes([self.author_empty], [self.author_with_biography_blank]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition') def test_add_non_blank_textfield_and_charfield(self, mocked_ask_method): """ #23405 - Adding a NOT NULL and non-blank `CharField` or `TextField` without default should prompt for a default. """ changes = self.get_changes([self.author_empty], [self.author_with_biography_non_blank]) self.assertEqual(mocked_ask_method.call_count, 2) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0) def test_mti_inheritance_model_removal(self): Animal = ModelState('app', 'Animal', [ ("id", models.AutoField(primary_key=True)), ]) Dog = ModelState('app', 'Dog', [], bases=('app.Animal',)) changes = self.get_changes([Animal, Dog], [Animal]) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['DeleteModel']) self.assertOperationAttributes(changes, 'app', 0, 0, name='Dog') def test_add_model_with_field_removed_from_base_model(self): """ Removing a base field takes place before adding a new inherited model that has a field with the same name. """ before = [ ModelState('app', 'readable', [ ('id', models.AutoField(primary_key=True)), ('title', models.CharField(max_length=200)), ]), ] after = [ ModelState('app', 'readable', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('app', 'book', [ ('title', models.CharField(max_length=200)), ], bases=('app.readable',)), ] changes = self.get_changes(before, after) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RemoveField', 'CreateModel']) self.assertOperationAttributes(changes, 'app', 0, 0, name='title', model_name='readable') self.assertOperationAttributes(changes, 'app', 0, 1, name='book') class AutodetectorSuggestNameTests(SimpleTestCase): def test_single_operation(self): ops = [migrations.CreateModel('Person', fields=[])] self.assertEqual(MigrationAutodetector.suggest_name(ops), 'person') ops = [migrations.DeleteModel('Person')] self.assertEqual(MigrationAutodetector.suggest_name(ops), 'delete_person') def test_two_create_models(self): ops = [ migrations.CreateModel('Person', fields=[]), migrations.CreateModel('Animal', fields=[]), ] self.assertEqual(MigrationAutodetector.suggest_name(ops), 'animal_person') def test_none_name(self): ops = [migrations.RunSQL('SELECT 1 FROM person;')] suggest_name = MigrationAutodetector.suggest_name(ops) self.assertIs(suggest_name.startswith('auto_'), True) def test_auto(self): suggest_name = MigrationAutodetector.suggest_name([]) self.assertIs(suggest_name.startswith('auto_'), True)
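
# For orientation (a sketch, not part of the test suite): the get_changes()
# helper used above boils down to the direct autodetector calls already shown
# in test_swappable_changed() and test_first_dependency():
#
#     autodetector = MigrationAutodetector(before, after)
#     changes = autodetector._detect_changes()
#     # `changes` maps app labels to lists of generated Migration objects,
#     # e.g. changes['testapp'][0].operations.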
import datetime import os import re import unittest from unittest import mock from urllib.parse import parse_qsl, urljoin, urlparse import pytz from django.contrib.admin import AdminSite, ModelAdmin from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME from django.contrib.admin.models import ADDITION, DELETION, LogEntry from django.contrib.admin.options import TO_FIELD_VAR from django.contrib.admin.templatetags.admin_urls import add_preserved_filters from django.contrib.admin.tests import AdminSeleniumTestCase from django.contrib.admin.utils import quote from django.contrib.admin.views.main import IS_POPUP_VAR from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename from django.contrib.auth.models import Group, Permission, User from django.contrib.contenttypes.models import ContentType from django.core import mail from django.core.checks import Error from django.core.files import temp as tempfile from django.forms.utils import ErrorList from django.template.response import TemplateResponse from django.test import ( TestCase, modify_settings, override_settings, skipUnlessDBFeature, ) from django.test.utils import override_script_prefix from django.urls import NoReverseMatch, resolve, reverse from django.utils import formats, translation from django.utils.cache import get_max_age from django.utils.encoding import iri_to_uri from django.utils.html import escape from django.utils.http import urlencode from . import customadmin from .admin import CityAdmin, site, site2 from .models import ( Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField, AdminOrderedModelMethod, Album, Answer, Answer2, Article, BarAccount, Book, Bookmark, Category, Chapter, ChapterXtra1, ChapterXtra2, Character, Child, Choice, City, Collector, Color, ComplexSortedPerson, CoverLetter, CustomArticle, CyclicOne, CyclicTwo, DooHickey, Employee, EmptyModel, Fabric, FancyDoodad, FieldOverridePost, FilteredManager, FooAccount, FoodDelivery, FunkyTag, Gallery, Grommet, Inquisition, Language, Link, MainPrepopulated, Media, ModelWithStringPrimaryKey, OtherStory, Paper, Parent, ParentWithDependentChildren, ParentWithUUIDPK, Person, Persona, Picture, Pizza, Plot, PlotDetails, PluggableSearchPerson, Podcast, Post, PrePopulatedPost, Promo, Question, ReadablePizza, ReadOnlyPizza, Recommendation, Recommender, RelatedPrepopulated, RelatedWithUUIDPKModel, Report, Restaurant, RowLevelChangePermissionModel, SecretHideout, Section, ShortMessage, Simple, Song, State, Story, SuperSecretHideout, SuperVillain, Telegram, TitleTranslation, Topping, UnchangeableObject, UndeletableObject, UnorderedObject, UserProxy, Villain, Vodcast, Whatsit, Widget, Worker, WorkHour, ) ERROR_MESSAGE = "Please enter the correct username and password \ for a staff account. Note that both fields may be case-sensitive." MULTIPART_ENCTYPE = 'enctype="multipart/form-data"' class AdminFieldExtractionMixin: """ Helper methods for extracting data from AdminForm. """ def get_admin_form_fields(self, response): """ Return a list of AdminFields for the AdminForm in the response. """ fields = [] for fieldset in response.context['adminform']: for field_line in fieldset: fields.extend(field_line) return fields def get_admin_readonly_fields(self, response): """ Return the readonly fields for the response's AdminForm. """ return [f for f in self.get_admin_form_fields(response) if f.is_readonly] def get_admin_readonly_field(self, response, field_name): """ Return the readonly field for the given field_name. 
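        Return None if no matching readonly field is found.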
""" admin_readonly_fields = self.get_admin_readonly_fields(response) for field in admin_readonly_fields: if field.field['name'] == field_name: return field @override_settings(ROOT_URLCONF='admin_views.urls', USE_I18N=True, USE_L10N=False, LANGUAGE_CODE='en') class AdminViewBasicTestCase(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.color1 = Color.objects.create(value='Red', warm=True) cls.color2 = Color.objects.create(value='Orange', warm=True) cls.color3 = Color.objects.create(value='Blue', warm=False) cls.color4 = Color.objects.create(value='Green', warm=False) cls.fab1 = Fabric.objects.create(surface='x') cls.fab2 = Fabric.objects.create(surface='y') cls.fab3 = Fabric.objects.create(surface='plain') cls.b1 = Book.objects.create(name='Book 1') cls.b2 = Book.objects.create(name='Book 2') cls.pro1 = Promo.objects.create(name='Promo 1', book=cls.b1) cls.pro1 = Promo.objects.create(name='Promo 2', book=cls.b2) cls.chap1 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b1) cls.chap2 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b1) cls.chap3 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b2) cls.chap4 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b2) cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='ChapterXtra1 1') cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='ChapterXtra1 2') Actor.objects.create(name='Palin', age=27) # Post data for edit inline cls.inline_post_data = { "name": "Test section", # inline data "article_set-TOTAL_FORMS": "6", "article_set-INITIAL_FORMS": "3", "article_set-MAX_NUM_FORMS": "0", "article_set-0-id": cls.a1.pk, # there is no title in database, give one here or formset will fail. 
"article_set-0-title": "Norske bostaver æøå skaper problemer", "article_set-0-content": "&lt;p&gt;Middle content&lt;/p&gt;", "article_set-0-date_0": "2008-03-18", "article_set-0-date_1": "11:54:58", "article_set-0-section": cls.s1.pk, "article_set-1-id": cls.a2.pk, "article_set-1-title": "Need a title.", "article_set-1-content": "&lt;p&gt;Oldest content&lt;/p&gt;", "article_set-1-date_0": "2000-03-18", "article_set-1-date_1": "11:54:58", "article_set-2-id": cls.a3.pk, "article_set-2-title": "Need a title.", "article_set-2-content": "&lt;p&gt;Newest content&lt;/p&gt;", "article_set-2-date_0": "2009-03-18", "article_set-2-date_1": "11:54:58", "article_set-3-id": "", "article_set-3-title": "", "article_set-3-content": "", "article_set-3-date_0": "", "article_set-3-date_1": "", "article_set-4-id": "", "article_set-4-title": "", "article_set-4-content": "", "article_set-4-date_0": "", "article_set-4-date_1": "", "article_set-5-id": "", "article_set-5-title": "", "article_set-5-content": "", "article_set-5-date_0": "", "article_set-5-date_1": "", } def setUp(self): self.client.force_login(self.superuser) def assertContentBefore(self, response, text1, text2, failing_msg=None): """ Testing utility asserting that text1 appears before text2 in response content. """ self.assertEqual(response.status_code, 200) self.assertLess( response.content.index(text1.encode()), response.content.index(text2.encode()), (failing_msg or '') + '\nResponse:\n' + response.content.decode(response.charset) ) class AdminViewBasicTest(AdminViewBasicTestCase): def test_trailing_slash_required(self): """ If you leave off the trailing slash, app should redirect and add it. """ add_url = reverse('admin:admin_views_article_add') response = self.client.get(add_url[:-1]) self.assertRedirects(response, add_url, status_code=301) def test_basic_add_GET(self): """ A smoke test to ensure GET on the add_view works. """ response = self.client.get(reverse('admin:admin_views_section_add')) self.assertIsInstance(response, TemplateResponse) self.assertEqual(response.status_code, 200) def test_add_with_GET_args(self): response = self.client.get(reverse('admin:admin_views_section_add'), {'name': 'My Section'}) self.assertContains( response, 'value="My Section"', msg_prefix="Couldn't find an input with the right value in the response" ) def test_basic_edit_GET(self): """ A smoke test to ensure GET on the change_view works. """ response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,))) self.assertIsInstance(response, TemplateResponse) self.assertEqual(response.status_code, 200) def test_basic_edit_GET_string_PK(self): """ GET on the change_view (when passing a string as the PK argument for a model with an integer PK field) redirects to the index page with a message saying the object doesn't exist. """ response = self.client.get(reverse('admin:admin_views_section_change', args=(quote("abc/<b>"),)), follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['section with ID “abc/<b>” doesn’t exist. Perhaps it was deleted?'] ) def test_basic_edit_GET_old_url_redirect(self): """ The change URL changed in Django 1.9, but the old one still redirects. 
""" response = self.client.get( reverse('admin:admin_views_section_change', args=(self.s1.pk,)).replace('change/', '') ) self.assertRedirects(response, reverse('admin:admin_views_section_change', args=(self.s1.pk,))) def test_basic_inheritance_GET_string_PK(self): """ GET on the change_view (for inherited models) redirects to the index page with a message saying the object doesn't exist. """ response = self.client.get(reverse('admin:admin_views_supervillain_change', args=('abc',)), follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['super villain with ID “abc” doesn’t exist. Perhaps it was deleted?'] ) def test_basic_add_POST(self): """ A smoke test to ensure POST on add_view works. """ post_data = { "name": "Another Section", # inline data "article_set-TOTAL_FORMS": "3", "article_set-INITIAL_FORMS": "0", "article_set-MAX_NUM_FORMS": "0", } response = self.client.post(reverse('admin:admin_views_section_add'), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_popup_add_POST(self): """ Ensure http response from a popup is properly escaped. """ post_data = { '_popup': '1', 'title': 'title with a new\nline', 'content': 'some content', 'date_0': '2010-09-10', 'date_1': '14:55:39', } response = self.client.post(reverse('admin:admin_views_article_add'), post_data) self.assertContains(response, 'title with a new\\nline') def test_basic_edit_POST(self): """ A smoke test to ensure POST on edit_view works. """ url = reverse('admin:admin_views_section_change', args=(self.s1.pk,)) response = self.client.post(url, self.inline_post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_edit_save_as(self): """ Test "save as". """ post_data = self.inline_post_data.copy() post_data.update({ '_saveasnew': 'Save+as+new', "article_set-1-section": "1", "article_set-2-section": "1", "article_set-3-section": "1", "article_set-4-section": "1", "article_set-5-section": "1", }) response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_edit_save_as_delete_inline(self): """ Should be able to "Save as new" while also deleting an inline. """ post_data = self.inline_post_data.copy() post_data.update({ '_saveasnew': 'Save+as+new', "article_set-1-section": "1", "article_set-2-section": "1", "article_set-2-DELETE": "1", "article_set-3-section": "1", }) response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data) self.assertEqual(response.status_code, 302) # started with 3 articles, one was deleted. self.assertEqual(Section.objects.latest('id').article_set.count(), 2) def test_change_list_column_field_classes(self): response = self.client.get(reverse('admin:admin_views_article_changelist')) # callables display the callable name. self.assertContains(response, 'column-callable_year') self.assertContains(response, 'field-callable_year') # lambdas display as "lambda" + index that they appear in list_display. 
self.assertContains(response, 'column-lambda8') self.assertContains(response, 'field-lambda8') def test_change_list_sorting_callable(self): """ Ensure we can sort on a list_display field that is a callable (column 2 is callable_year in ArticleAdmin) """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 2}) self.assertContentBefore( response, 'Oldest content', 'Middle content', "Results of sorting on callable are out of order." ) self.assertContentBefore( response, 'Middle content', 'Newest content', "Results of sorting on callable are out of order." ) def test_change_list_sorting_property(self): """ Sort on a list_display field that is a property (column 10 is a property in Article model). """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 10}) self.assertContentBefore( response, 'Oldest content', 'Middle content', 'Results of sorting on property are out of order.', ) self.assertContentBefore( response, 'Middle content', 'Newest content', 'Results of sorting on property are out of order.', ) def test_change_list_sorting_callable_query_expression(self): """Query expressions may be used for admin_order_field.""" tests = [ ('order_by_expression', 9), ('order_by_f_expression', 12), ('order_by_orderby_expression', 13), ] for admin_order_field, index in tests: with self.subTest(admin_order_field): response = self.client.get( reverse('admin:admin_views_article_changelist'), {'o': index}, ) self.assertContentBefore( response, 'Oldest content', 'Middle content', 'Results of sorting on callable are out of order.' ) self.assertContentBefore( response, 'Middle content', 'Newest content', 'Results of sorting on callable are out of order.' ) def test_change_list_sorting_callable_query_expression_reverse(self): tests = [ ('order_by_expression', -9), ('order_by_f_expression', -12), ('order_by_orderby_expression', -13), ] for admin_order_field, index in tests: with self.subTest(admin_order_field): response = self.client.get( reverse('admin:admin_views_article_changelist'), {'o': index}, ) self.assertContentBefore( response, 'Middle content', 'Oldest content', 'Results of sorting on callable are out of order.' ) self.assertContentBefore( response, 'Newest content', 'Middle content', 'Results of sorting on callable are out of order.' ) def test_change_list_sorting_model(self): """ Ensure we can sort on a list_display field that is a Model method (column 3 is 'model_year' in ArticleAdmin) """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-3'}) self.assertContentBefore( response, 'Newest content', 'Middle content', "Results of sorting on Model method are out of order." ) self.assertContentBefore( response, 'Middle content', 'Oldest content', "Results of sorting on Model method are out of order." ) def test_change_list_sorting_model_admin(self): """ Ensure we can sort on a list_display field that is a ModelAdmin method (column 4 is 'modeladmin_year' in ArticleAdmin) """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '4'}) self.assertContentBefore( response, 'Oldest content', 'Middle content', "Results of sorting on ModelAdmin method are out of order." ) self.assertContentBefore( response, 'Middle content', 'Newest content', "Results of sorting on ModelAdmin method are out of order." ) def test_change_list_sorting_model_admin_reverse(self): """ Ensure we can sort on a list_display field that is a ModelAdmin method in reverse order (i.e. 
admin_order_field uses the '-' prefix) (column 6 is 'model_year_reverse' in ArticleAdmin) """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '6'}) self.assertContentBefore( response, '2009', '2008', "Results of sorting on ModelAdmin method are out of order." ) self.assertContentBefore( response, '2008', '2000', "Results of sorting on ModelAdmin method are out of order." ) # Let's make sure the ordering is right and that we don't get a # FieldError when we change to descending order response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-6'}) self.assertContentBefore( response, '2000', '2008', "Results of sorting on ModelAdmin method are out of order." ) self.assertContentBefore( response, '2008', '2009', "Results of sorting on ModelAdmin method are out of order." ) def test_change_list_sorting_multiple(self): p1 = Person.objects.create(name="Chris", gender=1, alive=True) p2 = Person.objects.create(name="Chris", gender=2, alive=True) p3 = Person.objects.create(name="Bob", gender=1, alive=True) link1 = reverse('admin:admin_views_person_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_person_change', args=(p2.pk,)) link3 = reverse('admin:admin_views_person_change', args=(p3.pk,)) # Sort by name, gender response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '1.2'}) self.assertContentBefore(response, link3, link1) self.assertContentBefore(response, link1, link2) # Sort by gender descending, name response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '-2.1'}) self.assertContentBefore(response, link2, link3) self.assertContentBefore(response, link3, link1) def test_change_list_sorting_preserve_queryset_ordering(self): """ If no ordering is defined in `ModelAdmin.ordering` or in the query string, then the underlying order of the queryset should not be changed, even if it is defined in `Modeladmin.get_queryset()`. Refs #11868, #7309. 
""" p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80) p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70) p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60) link1 = reverse('admin:admin_views_person_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_person_change', args=(p2.pk,)) link3 = reverse('admin:admin_views_person_change', args=(p3.pk,)) response = self.client.get(reverse('admin:admin_views_person_changelist'), {}) self.assertContentBefore(response, link3, link2) self.assertContentBefore(response, link2, link1) def test_change_list_sorting_model_meta(self): # Test ordering on Model Meta is respected l1 = Language.objects.create(iso='ur', name='Urdu') l2 = Language.objects.create(iso='ar', name='Arabic') link1 = reverse('admin:admin_views_language_change', args=(quote(l1.pk),)) link2 = reverse('admin:admin_views_language_change', args=(quote(l2.pk),)) response = self.client.get(reverse('admin:admin_views_language_changelist'), {}) self.assertContentBefore(response, link2, link1) # Test we can override with query string response = self.client.get(reverse('admin:admin_views_language_changelist'), {'o': '-1'}) self.assertContentBefore(response, link1, link2) def test_change_list_sorting_override_model_admin(self): # Test ordering on Model Admin is respected, and overrides Model Meta dt = datetime.datetime.now() p1 = Podcast.objects.create(name="A", release_date=dt) p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10)) link1 = reverse('admin:admin_views_podcast_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_podcast_change', args=(p2.pk,)) response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {}) self.assertContentBefore(response, link1, link2) def test_multiple_sort_same_field(self): # The changelist displays the correct columns if two columns correspond # to the same ordering field. dt = datetime.datetime.now() p1 = Podcast.objects.create(name="A", release_date=dt) p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10)) link1 = reverse('admin:admin_views_podcast_change', args=(quote(p1.pk),)) link2 = reverse('admin:admin_views_podcast_change', args=(quote(p2.pk),)) response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {}) self.assertContentBefore(response, link1, link2) p1 = ComplexSortedPerson.objects.create(name="Bob", age=10) p2 = ComplexSortedPerson.objects.create(name="Amy", age=20) link1 = reverse('admin:admin_views_complexsortedperson_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_complexsortedperson_change', args=(p2.pk,)) response = self.client.get(reverse('admin:admin_views_complexsortedperson_changelist'), {}) # Should have 5 columns (including action checkbox col) self.assertContains(response, '<th scope="col"', count=5) self.assertContains(response, 'Name') self.assertContains(response, 'Colored name') # Check order self.assertContentBefore(response, 'Name', 'Colored name') # Check sorting - should be by name self.assertContentBefore(response, link2, link1) def test_sort_indicators_admin_order(self): """ The admin shows default sort indicators for all kinds of 'ordering' fields: field names, method on the model admin and model itself, and other callables. See #17252. 
""" models = [(AdminOrderedField, 'adminorderedfield'), (AdminOrderedModelMethod, 'adminorderedmodelmethod'), (AdminOrderedAdminMethod, 'adminorderedadminmethod'), (AdminOrderedCallable, 'adminorderedcallable')] for model, url in models: model.objects.create(stuff='The Last Item', order=3) model.objects.create(stuff='The First Item', order=1) model.objects.create(stuff='The Middle Item', order=2) response = self.client.get(reverse('admin:admin_views_%s_changelist' % url), {}) self.assertEqual(response.status_code, 200) # Should have 3 columns including action checkbox col. self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url) # Check if the correct column was selected. 2 is the index of the # 'order' column in the model admin's 'list_display' with 0 being # the implicit 'action_checkbox' and 1 being the column 'stuff'. self.assertEqual(response.context['cl'].get_ordering_field_columns(), {2: 'asc'}) # Check order of records. self.assertContentBefore(response, 'The First Item', 'The Middle Item') self.assertContentBefore(response, 'The Middle Item', 'The Last Item') def test_has_related_field_in_list_display_fk(self): """Joins shouldn't be performed for <FK>_id fields in list display.""" state = State.objects.create(name='Karnataka') City.objects.create(state=state, name='Bangalore') response = self.client.get(reverse('admin:admin_views_city_changelist'), {}) response.context['cl'].list_display = ['id', 'name', 'state'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), True) response.context['cl'].list_display = ['id', 'name', 'state_id'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), False) def test_has_related_field_in_list_display_o2o(self): """Joins shouldn't be performed for <O2O>_id fields in list display.""" media = Media.objects.create(name='Foo') Vodcast.objects.create(media=media) response = self.client.get(reverse('admin:admin_views_vodcast_changelist'), {}) response.context['cl'].list_display = ['media'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), True) response.context['cl'].list_display = ['media_id'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), False) def test_limited_filter(self): """Ensure admin changelist filters do not contain objects excluded via limit_choices_to. This also tests relation-spanning filters (e.g. 'color__value'). 
""" response = self.client.get(reverse('admin:admin_views_thing_changelist')) self.assertContains( response, '<div id="changelist-filter">', msg_prefix="Expected filter not found in changelist view" ) self.assertNotContains( response, '<a href="?color__id__exact=3">Blue</a>', msg_prefix="Changelist filter not correctly limited by limit_choices_to" ) def test_relation_spanning_filters(self): changelist_url = reverse('admin:admin_views_chapterxtra1_changelist') response = self.client.get(changelist_url) self.assertContains(response, '<div id="changelist-filter">') filters = { 'chap__id__exact': { 'values': [c.id for c in Chapter.objects.all()], 'test': lambda obj, value: obj.chap.id == value, }, 'chap__title': { 'values': [c.title for c in Chapter.objects.all()], 'test': lambda obj, value: obj.chap.title == value, }, 'chap__book__id__exact': { 'values': [b.id for b in Book.objects.all()], 'test': lambda obj, value: obj.chap.book.id == value, }, 'chap__book__name': { 'values': [b.name for b in Book.objects.all()], 'test': lambda obj, value: obj.chap.book.name == value, }, 'chap__book__promo__id__exact': { 'values': [p.id for p in Promo.objects.all()], 'test': lambda obj, value: obj.chap.book.promo_set.filter(id=value).exists(), }, 'chap__book__promo__name': { 'values': [p.name for p in Promo.objects.all()], 'test': lambda obj, value: obj.chap.book.promo_set.filter(name=value).exists(), }, # A forward relation (book) after a reverse relation (promo). 'guest_author__promo__book__id__exact': { 'values': [p.id for p in Book.objects.all()], 'test': lambda obj, value: obj.guest_author.promo_set.filter(book=value).exists(), }, } for filter_path, params in filters.items(): for value in params['values']: query_string = urlencode({filter_path: value}) # ensure filter link exists self.assertContains(response, '<a href="?%s"' % query_string) # ensure link works filtered_response = self.client.get('%s?%s' % (changelist_url, query_string)) self.assertEqual(filtered_response.status_code, 200) # ensure changelist contains only valid objects for obj in filtered_response.context['cl'].queryset.all(): self.assertTrue(params['test'](obj, value)) def test_incorrect_lookup_parameters(self): """Ensure incorrect lookup parameters are handled gracefully.""" changelist_url = reverse('admin:admin_views_thing_changelist') response = self.client.get(changelist_url, {'notarealfield': '5'}) self.assertRedirects(response, '%s?e=1' % changelist_url) # Spanning relationships through a nonexistent related object (Refs #16716) response = self.client.get(changelist_url, {'notarealfield__whatever': '5'}) self.assertRedirects(response, '%s?e=1' % changelist_url) response = self.client.get(changelist_url, {'color__id__exact': 'StringNotInteger!'}) self.assertRedirects(response, '%s?e=1' % changelist_url) # Regression test for #18530 response = self.client.get(changelist_url, {'pub_date__gte': 'foo'}) self.assertRedirects(response, '%s?e=1' % changelist_url) def test_isnull_lookups(self): """Ensure is_null is handled correctly.""" Article.objects.create(title="I Could Go Anywhere", content="Versatile", date=datetime.datetime.now()) changelist_url = reverse('admin:admin_views_article_changelist') response = self.client.get(changelist_url) self.assertContains(response, '4 articles') response = self.client.get(changelist_url, {'section__isnull': 'false'}) self.assertContains(response, '3 articles') response = self.client.get(changelist_url, {'section__isnull': '0'}) self.assertContains(response, '3 articles') response = 
self.client.get(changelist_url, {'section__isnull': 'true'}) self.assertContains(response, '1 article') response = self.client.get(changelist_url, {'section__isnull': '1'}) self.assertContains(response, '1 article') def test_logout_and_password_change_URLs(self): response = self.client.get(reverse('admin:admin_views_article_changelist')) self.assertContains(response, '<a href="%s">' % reverse('admin:logout')) self.assertContains(response, '<a href="%s">' % reverse('admin:password_change')) def test_named_group_field_choices_change_list(self): """ Ensures the admin changelist shows correct values in the relevant column for rows corresponding to instances of a model in which a named group has been used in the choices option of a field. """ link1 = reverse('admin:admin_views_fabric_change', args=(self.fab1.pk,)) link2 = reverse('admin:admin_views_fabric_change', args=(self.fab2.pk,)) response = self.client.get(reverse('admin:admin_views_fabric_changelist')) fail_msg = ( "Changelist table isn't showing the right human-readable values " "set by a model field 'choices' option named group." ) self.assertContains(response, '<a href="%s">Horizontal</a>' % link1, msg_prefix=fail_msg, html=True) self.assertContains(response, '<a href="%s">Vertical</a>' % link2, msg_prefix=fail_msg, html=True) def test_named_group_field_choices_filter(self): """ Ensures the filter UI shows correctly when at least one named group has been used in the choices option of a model field. """ response = self.client.get(reverse('admin:admin_views_fabric_changelist')) fail_msg = ( "Changelist filter isn't showing options contained inside a model " "field 'choices' option named group." ) self.assertContains(response, '<div id="changelist-filter">') self.assertContains( response, '<a href="?surface__exact=x" title="Horizontal">Horizontal</a>', msg_prefix=fail_msg, html=True ) self.assertContains( response, '<a href="?surface__exact=y" title="Vertical">Vertical</a>', msg_prefix=fail_msg, html=True ) def test_change_list_null_boolean_display(self): Post.objects.create(public=None) response = self.client.get(reverse('admin:admin_views_post_changelist')) self.assertContains(response, 'icon-unknown.svg') def test_i18n_language_non_english_default(self): """ Check if the JavaScript i18n view returns an empty language catalog if the default language is non-English but the selected language is English. See #13388 and #3594 for more details. """ with self.settings(LANGUAGE_CODE='fr'), translation.override('en-us'): response = self.client.get(reverse('admin:jsi18n')) self.assertNotContains(response, 'Choisir une heure') def test_i18n_language_non_english_fallback(self): """ Makes sure that the fallback language is still working properly in cases where the selected language cannot be found. """ with self.settings(LANGUAGE_CODE='fr'), translation.override('none'): response = self.client.get(reverse('admin:jsi18n')) self.assertContains(response, 'Choisir une heure') def test_jsi18n_with_context(self): response = self.client.get(reverse('admin-extra-context:jsi18n')) self.assertEqual(response.status_code, 200) def test_L10N_deactivated(self): """ Check if L10N is deactivated, the JavaScript i18n view doesn't return localized date/time formats. Refs #14824. 
""" with self.settings(LANGUAGE_CODE='ru', USE_L10N=False), translation.override('none'): response = self.client.get(reverse('admin:jsi18n')) self.assertNotContains(response, '%d.%m.%Y %H:%M:%S') self.assertContains(response, '%Y-%m-%d %H:%M:%S') def test_disallowed_filtering(self): with self.assertLogs('django.security.DisallowedModelAdminLookup', 'ERROR'): response = self.client.get( "%s?owner__email__startswith=fuzzy" % reverse('admin:admin_views_album_changelist') ) self.assertEqual(response.status_code, 400) # Filters are allowed if explicitly included in list_filter response = self.client.get("%s?color__value__startswith=red" % reverse('admin:admin_views_thing_changelist')) self.assertEqual(response.status_code, 200) response = self.client.get("%s?color__value=red" % reverse('admin:admin_views_thing_changelist')) self.assertEqual(response.status_code, 200) # Filters should be allowed if they involve a local field without the # need to whitelist them in list_filter or date_hierarchy. response = self.client.get("%s?age__gt=30" % reverse('admin:admin_views_person_changelist')) self.assertEqual(response.status_code, 200) e1 = Employee.objects.create(name='Anonymous', gender=1, age=22, alive=True, code='123') e2 = Employee.objects.create(name='Visitor', gender=2, age=19, alive=True, code='124') WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1) WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2) response = self.client.get(reverse('admin:admin_views_workhour_changelist')) self.assertContains(response, 'employee__person_ptr__exact') response = self.client.get("%s?employee__person_ptr__exact=%d" % ( reverse('admin:admin_views_workhour_changelist'), e1.pk) ) self.assertEqual(response.status_code, 200) def test_disallowed_to_field(self): url = reverse('admin:admin_views_section_changelist') with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.get(url, {TO_FIELD_VAR: 'missing_field'}) self.assertEqual(response.status_code, 400) # Specifying a field that is not referred by any other model registered # to this admin site should raise an exception. with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.get(reverse('admin:admin_views_section_changelist'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) # #23839 - Primary key should always be allowed, even if the referenced model isn't registered. response = self.client.get(reverse('admin:admin_views_notreferenced_changelist'), {TO_FIELD_VAR: 'id'}) self.assertEqual(response.status_code, 200) # #23915 - Specifying a field referenced by another model though a m2m should be allowed. response = self.client.get(reverse('admin:admin_views_recipe_changelist'), {TO_FIELD_VAR: 'rname'}) self.assertEqual(response.status_code, 200) # #23604, #23915 - Specifying a field referenced through a reverse m2m relationship should be allowed. response = self.client.get(reverse('admin:admin_views_ingredient_changelist'), {TO_FIELD_VAR: 'iname'}) self.assertEqual(response.status_code, 200) # #23329 - Specifying a field that is not referred by any other model directly registered # to this admin site but registered through inheritance should be allowed. response = self.client.get(reverse('admin:admin_views_referencedbyparent_changelist'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 200) # #23431 - Specifying a field that is only referred to by a inline of a registered # model should be allowed. 
response = self.client.get(reverse('admin:admin_views_referencedbyinline_changelist'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 200) # #25622 - Specifying a field of a model only referred by a generic # relation should raise DisallowedModelAdminToField. url = reverse('admin:admin_views_referencedbygenrel_changelist') with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.get(url, {TO_FIELD_VAR: 'object_id'}) self.assertEqual(response.status_code, 400) # We also want to prevent the add, change, and delete views from # leaking a disallowed field value. with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.post(reverse('admin:admin_views_section_add'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) section = Section.objects.create() url = reverse('admin:admin_views_section_change', args=(section.pk,)) with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.post(url, {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) url = reverse('admin:admin_views_section_delete', args=(section.pk,)) with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.post(url, {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) def test_allowed_filtering_15103(self): """ Regressions test for ticket 15103 - filtering on fields defined in a ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields can break. """ # Filters should be allowed if they are defined on a ForeignKey pointing to this model url = "%s?leader__name=Palin&leader__age=27" % reverse('admin:admin_views_inquisition_changelist') response = self.client.get(url) self.assertEqual(response.status_code, 200) def test_popup_dismiss_related(self): """ Regression test for ticket 20664 - ensure the pk is properly quoted. """ actor = Actor.objects.create(name="Palin", age=27) response = self.client.get("%s?%s" % (reverse('admin:admin_views_actor_changelist'), IS_POPUP_VAR)) self.assertContains(response, 'data-popup-opener="%s"' % actor.pk) def test_hide_change_password(self): """ Tests if the "change password" link in the admin is hidden if the User does not have a usable password set. (against 9bea85795705d015cdadc82c68b99196a8554f5c) """ user = User.objects.get(username='super') user.set_unusable_password() user.save() self.client.force_login(user) response = self.client.get(reverse('admin:index')) self.assertNotContains( response, reverse('admin:password_change'), msg_prefix='The "change password" link should not be displayed if a user does not have a usable password.' ) def test_change_view_with_show_delete_extra_context(self): """ The 'show_delete' context variable in the admin's change view controls the display of the delete button. 
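        Here the ModelAdmin is expected to pass show_delete=False through
        extra_context, so the rendered change form must not contain the
        delete link.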
""" instance = UndeletableObject.objects.create(name='foo') response = self.client.get(reverse('admin:admin_views_undeletableobject_change', args=(instance.pk,))) self.assertNotContains(response, 'deletelink') def test_change_view_logs_m2m_field_changes(self): """Changes to ManyToManyFields are included in the object's history.""" pizza = ReadablePizza.objects.create(name='Cheese') cheese = Topping.objects.create(name='cheese') post_data = {'name': pizza.name, 'toppings': [cheese.pk]} response = self.client.post(reverse('admin:admin_views_readablepizza_change', args=(pizza.pk,)), post_data) self.assertRedirects(response, reverse('admin:admin_views_readablepizza_changelist')) pizza_ctype = ContentType.objects.get_for_model(ReadablePizza, for_concrete_model=False) log = LogEntry.objects.filter(content_type=pizza_ctype, object_id=pizza.pk).first() self.assertEqual(log.get_change_message(), 'Changed Toppings.') def test_allows_attributeerror_to_bubble_up(self): """ AttributeErrors are allowed to bubble when raised inside a change list view. Requires a model to be created so there's something to display. Refs: #16655, #18593, and #18747 """ Simple.objects.create() with self.assertRaises(AttributeError): self.client.get(reverse('admin:admin_views_simple_changelist')) def test_changelist_with_no_change_url(self): """ ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url for change_view is removed from get_urls (#20934). """ o = UnchangeableObject.objects.create() response = self.client.get(reverse('admin:admin_views_unchangeableobject_changelist')) self.assertEqual(response.status_code, 200) # Check the format of the shown object -- shouldn't contain a change link self.assertContains(response, '<th class="field-__str__">%s</th>' % o, html=True) def test_invalid_appindex_url(self): """ #21056 -- URL reversing shouldn't work for nonexistent apps. """ good_url = '/test_admin/admin/admin_views/' confirm_good_url = reverse('admin:app_list', kwargs={'app_label': 'admin_views'}) self.assertEqual(good_url, confirm_good_url) with self.assertRaises(NoReverseMatch): reverse('admin:app_list', kwargs={'app_label': 'this_should_fail'}) with self.assertRaises(NoReverseMatch): reverse('admin:app_list', args=('admin_views2',)) def test_resolve_admin_views(self): index_match = resolve('/test_admin/admin4/') list_match = resolve('/test_admin/admin4/auth/user/') self.assertIs(index_match.func.admin_site, customadmin.simple_site) self.assertIsInstance(list_match.func.model_admin, customadmin.CustomPwdTemplateUserAdmin) def test_adminsite_display_site_url(self): """ #13749 - Admin should display link to front-end site 'View site' """ url = reverse('admin:index') response = self.client.get(url) self.assertEqual(response.context['site_url'], '/my-site-url/') self.assertContains(response, '<a href="/my-site-url/">View site</a>') def test_date_hierarchy_empty_queryset(self): self.assertIs(Question.objects.exists(), False) response = self.client.get(reverse('admin:admin_views_answer2_changelist')) self.assertEqual(response.status_code, 200) @override_settings(TIME_ZONE='America/Sao_Paulo', USE_TZ=True) def test_date_hierarchy_timezone_dst(self): # This datetime doesn't exist in this timezone due to DST. 
date = pytz.timezone('America/Sao_Paulo').localize(datetime.datetime(2016, 10, 16, 15), is_dst=None) q = Question.objects.create(question='Why?', expires=date) Answer2.objects.create(question=q, answer='Because.') response = self.client.get(reverse('admin:admin_views_answer2_changelist')) self.assertEqual(response.status_code, 200) self.assertContains(response, 'question__expires__day=16') self.assertContains(response, 'question__expires__month=10') self.assertContains(response, 'question__expires__year=2016') @override_settings(TIME_ZONE='America/Los_Angeles', USE_TZ=True) def test_date_hierarchy_local_date_differ_from_utc(self): # This datetime is 2017-01-01 in UTC. date = pytz.timezone('America/Los_Angeles').localize(datetime.datetime(2016, 12, 31, 16)) q = Question.objects.create(question='Why?', expires=date) Answer2.objects.create(question=q, answer='Because.') response = self.client.get(reverse('admin:admin_views_answer2_changelist')) self.assertEqual(response.status_code, 200) self.assertContains(response, 'question__expires__day=31') self.assertContains(response, 'question__expires__month=12') self.assertContains(response, 'question__expires__year=2016') def test_sortable_by_columns_subset(self): expected_sortable_fields = ('date', 'callable_year') expected_not_sortable_fields = ( 'content', 'model_year', 'modeladmin_year', 'model_year_reversed', 'section', ) response = self.client.get(reverse('admin6:admin_views_article_changelist')) for field_name in expected_sortable_fields: self.assertContains(response, '<th scope="col" class="sortable column-%s">' % field_name) for field_name in expected_not_sortable_fields: self.assertContains(response, '<th scope="col" class="column-%s">' % field_name) def test_get_sortable_by_columns_subset(self): response = self.client.get(reverse('admin6:admin_views_actor_changelist')) self.assertContains(response, '<th scope="col" class="sortable column-age">') self.assertContains(response, '<th scope="col" class="column-name">') def test_sortable_by_no_column(self): expected_not_sortable_fields = ('title', 'book') response = self.client.get(reverse('admin6:admin_views_chapter_changelist')) for field_name in expected_not_sortable_fields: self.assertContains(response, '<th scope="col" class="column-%s">' % field_name) self.assertNotContains(response, '<th scope="col" class="sortable column') def test_get_sortable_by_no_column(self): response = self.client.get(reverse('admin6:admin_views_color_changelist')) self.assertContains(response, '<th scope="col" class="column-value">') self.assertNotContains(response, '<th scope="col" class="sortable column') @override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', # Put this app's and the shared tests templates dirs in DIRS to take precedence # over the admin's templates dir. 
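    # (Template lookup is first-match: directories in DIRS are searched
    # before app template directories, which is what lets the custom_admin/*
    # templates used by these tests shadow django.contrib.admin's own.)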
'DIRS': [ os.path.join(os.path.dirname(__file__), 'templates'), os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates'), ], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }]) class AdminCustomTemplateTests(AdminViewBasicTestCase): def test_custom_model_admin_templates(self): # Test custom change list template with custom extra context response = self.client.get(reverse('admin:admin_views_customarticle_changelist')) self.assertContains(response, "var hello = 'Hello!';") self.assertTemplateUsed(response, 'custom_admin/change_list.html') # Test custom add form template response = self.client.get(reverse('admin:admin_views_customarticle_add')) self.assertTemplateUsed(response, 'custom_admin/add_form.html') # Add an article so we can test delete, change, and history views post = self.client.post(reverse('admin:admin_views_customarticle_add'), { 'content': '<p>great article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39' }) self.assertRedirects(post, reverse('admin:admin_views_customarticle_changelist')) self.assertEqual(CustomArticle.objects.all().count(), 1) article_pk = CustomArticle.objects.all()[0].pk # Test custom delete, change, and object history templates # Test custom change form template response = self.client.get(reverse('admin:admin_views_customarticle_change', args=(article_pk,))) self.assertTemplateUsed(response, 'custom_admin/change_form.html') response = self.client.get(reverse('admin:admin_views_customarticle_delete', args=(article_pk,))) self.assertTemplateUsed(response, 'custom_admin/delete_confirmation.html') response = self.client.post(reverse('admin:admin_views_customarticle_changelist'), data={ 'index': 0, 'action': ['delete_selected'], '_selected_action': ['1'], }) self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html') response = self.client.get(reverse('admin:admin_views_customarticle_history', args=(article_pk,))) self.assertTemplateUsed(response, 'custom_admin/object_history.html') # A custom popup response template may be specified by # ModelAdmin.popup_response_template. response = self.client.post(reverse('admin:admin_views_customarticle_add') + '?%s=1' % IS_POPUP_VAR, { 'content': '<p>great article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', IS_POPUP_VAR: '1' }) self.assertEqual(response.template_name, 'custom_admin/popup_response.html') def test_extended_bodyclass_template_change_form(self): """ The admin/change_form.html template uses block.super in the bodyclass block. """ response = self.client.get(reverse('admin:admin_views_section_add')) self.assertContains(response, 'bodyclass_consistency_check ') def test_change_password_template(self): user = User.objects.get(username='super') response = self.client.get(reverse('admin:auth_user_password_change', args=(user.id,))) # The auth/user/change_password.html template uses super in the # bodyclass block. self.assertContains(response, 'bodyclass_consistency_check ') # When a site has multiple passwords in the browser's password manager, # a browser pop up asks which user the new password is for. To prevent # this, the username is added to the change password form. 
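        # The username input is rendered but hidden with inline CSS, so
        # password managers can associate the new password with the right
        # account without the user seeing an extra field.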
self.assertContains(response, '<input type="text" name="username" value="super" style="display: none">') def test_extended_bodyclass_template_index(self): """ The admin/index.html template uses block.super in the bodyclass block. """ response = self.client.get(reverse('admin:index')) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_change_list(self): """ The admin/change_list.html' template uses block.super in the bodyclass block. """ response = self.client.get(reverse('admin:admin_views_article_changelist')) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_template_login(self): """ The admin/login.html template uses block.super in the bodyclass block. """ self.client.logout() response = self.client.get(reverse('admin:login')) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_template_delete_confirmation(self): """ The admin/delete_confirmation.html template uses block.super in the bodyclass block. """ group = Group.objects.create(name="foogroup") response = self.client.get(reverse('admin:auth_group_delete', args=(group.id,))) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_template_delete_selected_confirmation(self): """ The admin/delete_selected_confirmation.html template uses block.super in bodyclass block. """ group = Group.objects.create(name="foogroup") post_data = { 'action': 'delete_selected', 'selected_across': '0', 'index': '0', '_selected_action': group.id } response = self.client.post(reverse('admin:auth_group_changelist'), post_data) self.assertEqual(response.context['site_header'], 'Django administration') self.assertContains(response, 'bodyclass_consistency_check ') def test_filter_with_custom_template(self): """ A custom template can be used to render an admin filter. """ response = self.client.get(reverse('admin:admin_views_color2_changelist')) self.assertTemplateUsed(response, 'custom_filter_template.html') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewFormUrlTest(TestCase): current_app = "admin3" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def setUp(self): self.client.force_login(self.superuser) def test_change_form_URL_has_correct_value(self): """ change_view has form_url in response.context """ response = self.client.get( reverse('admin:admin_views_section_change', args=(self.s1.pk,), current_app=self.current_app) ) self.assertIn('form_url', response.context, msg='form_url not present in response.context') self.assertEqual(response.context['form_url'], 'pony') def test_initial_data_can_be_overridden(self): """ The behavior for setting initial form data can be overridden in the ModelAdmin class. Usually, the initial value is set via the GET params. 
""" response = self.client.get( reverse('admin:admin_views_restaurant_add', current_app=self.current_app), {'name': 'test_value'} ) # this would be the usual behaviour self.assertNotContains(response, 'value="test_value"') # this is the overridden behaviour self.assertContains(response, 'value="overridden_value"') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminJavaScriptTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_js_minified_only_if_debug_is_false(self): """ The minified versions of the JS files are only used when DEBUG is False. """ with override_settings(DEBUG=False): response = self.client.get(reverse('admin:admin_views_section_add')) self.assertNotContains(response, 'vendor/jquery/jquery.js') self.assertContains(response, 'vendor/jquery/jquery.min.js') self.assertContains(response, 'prepopulate.js') self.assertContains(response, 'actions.js') self.assertContains(response, 'collapse.js') self.assertContains(response, 'inlines.js') with override_settings(DEBUG=True): response = self.client.get(reverse('admin:admin_views_section_add')) self.assertContains(response, 'vendor/jquery/jquery.js') self.assertNotContains(response, 'vendor/jquery/jquery.min.js') self.assertContains(response, 'prepopulate.js') self.assertContains(response, 'actions.js') self.assertContains(response, 'collapse.js') self.assertContains(response, 'inlines.js') @override_settings(ROOT_URLCONF='admin_views.urls') class SaveAsTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True) def setUp(self): self.client.force_login(self.superuser) def test_save_as_duplication(self): """'save as' creates a new person""" post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42} response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), post_data) self.assertEqual(len(Person.objects.filter(name='John M')), 1) self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1) new_person = Person.objects.latest('id') self.assertRedirects(response, reverse('admin:admin_views_person_change', args=(new_person.pk,))) def test_save_as_continue_false(self): """ Saving a new object using "Save as new" redirects to the changelist instead of the change view when ModelAdmin.save_as_continue=False. """ post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42} url = reverse('admin:admin_views_person_change', args=(self.per1.pk,), current_app=site2.name) response = self.client.post(url, post_data) self.assertEqual(len(Person.objects.filter(name='John M')), 1) self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1) self.assertRedirects(response, reverse('admin:admin_views_person_changelist', current_app=site2.name)) def test_save_as_new_with_validation_errors(self): """ When you click "Save as new" and have a validation error, you only see the "Save as new" button and not the other save buttons, and that only the "Save as" button is visible. 
""" response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), { '_saveasnew': '', 'gender': 'invalid', '_addanother': 'fail', }) self.assertContains(response, 'Please correct the errors below.') self.assertFalse(response.context['show_save_and_add_another']) self.assertFalse(response.context['show_save_and_continue']) self.assertTrue(response.context['show_save_as_new']) def test_save_as_new_with_validation_errors_with_inlines(self): parent = Parent.objects.create(name='Father') child = Child.objects.create(parent=parent, name='Child') response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), { '_saveasnew': 'Save as new', 'child_set-0-parent': parent.pk, 'child_set-0-id': child.pk, 'child_set-0-name': 'Child', 'child_set-INITIAL_FORMS': 1, 'child_set-MAX_NUM_FORMS': 1000, 'child_set-MIN_NUM_FORMS': 0, 'child_set-TOTAL_FORMS': 4, 'name': '_invalid', }) self.assertContains(response, 'Please correct the error below.') self.assertFalse(response.context['show_save_and_add_another']) self.assertFalse(response.context['show_save_and_continue']) self.assertTrue(response.context['show_save_as_new']) def test_save_as_new_with_inlines_with_validation_errors(self): parent = Parent.objects.create(name='Father') child = Child.objects.create(parent=parent, name='Child') response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), { '_saveasnew': 'Save as new', 'child_set-0-parent': parent.pk, 'child_set-0-id': child.pk, 'child_set-0-name': '_invalid', 'child_set-INITIAL_FORMS': 1, 'child_set-MAX_NUM_FORMS': 1000, 'child_set-MIN_NUM_FORMS': 0, 'child_set-TOTAL_FORMS': 4, 'name': 'Father', }) self.assertContains(response, 'Please correct the error below.') self.assertFalse(response.context['show_save_and_add_another']) self.assertFalse(response.context['show_save_and_continue']) self.assertTrue(response.context['show_save_as_new']) @override_settings(ROOT_URLCONF='admin_views.urls') class CustomModelAdminTest(AdminViewBasicTestCase): def test_custom_admin_site_login_form(self): self.client.logout() response = self.client.get(reverse('admin2:index'), follow=True) self.assertIsInstance(response, TemplateResponse) self.assertEqual(response.status_code, 200) login = self.client.post(reverse('admin2:login'), { REDIRECT_FIELD_NAME: reverse('admin2:index'), 'username': 'customform', 'password': 'secret', }, follow=True) self.assertIsInstance(login, TemplateResponse) self.assertEqual(login.status_code, 200) self.assertContains(login, 'custom form error') self.assertContains(login, 'path/to/media.css') def test_custom_admin_site_login_template(self): self.client.logout() response = self.client.get(reverse('admin2:index'), follow=True) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/login.html') self.assertContains(response, 'Hello from a custom login template') def test_custom_admin_site_logout_template(self): response = self.client.get(reverse('admin2:logout')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/logout.html') self.assertContains(response, 'Hello from a custom logout template') def test_custom_admin_site_index_view_and_template(self): response = self.client.get(reverse('admin2:index')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/index.html') self.assertContains(response, 'Hello from a custom index template *bar*') def 
test_custom_admin_site_app_index_view_and_template(self): response = self.client.get(reverse('admin2:app_list', args=('admin_views',))) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/app_index.html') self.assertContains(response, 'Hello from a custom app_index template') def test_custom_admin_site_password_change_template(self): response = self.client.get(reverse('admin2:password_change')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/password_change_form.html') self.assertContains(response, 'Hello from a custom password change form template') def test_custom_admin_site_password_change_with_extra_context(self): response = self.client.get(reverse('admin2:password_change')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/password_change_form.html') self.assertContains(response, 'eggs') def test_custom_admin_site_password_change_done_template(self): response = self.client.get(reverse('admin2:password_change_done')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/password_change_done.html') self.assertContains(response, 'Hello from a custom password change done template') def test_custom_admin_site_view(self): self.client.force_login(self.superuser) response = self.client.get(reverse('admin2:my_view')) self.assertEqual(response.content, b"Django is a magical pony!") def test_pwd_change_custom_template(self): self.client.force_login(self.superuser) su = User.objects.get(username='super') response = self.client.get(reverse('admin4:auth_user_password_change', args=(su.pk,))) self.assertEqual(response.status_code, 200) def get_perm(Model, codename): """Return the permission object, for the Model""" ct = ContentType.objects.get_for_model(Model, for_concrete_model=False) return Permission.objects.get(content_type=ct, codename=codename) @override_settings( ROOT_URLCONF='admin_views.urls', # Test with the admin's documented list of required context processors. 
TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }], ) class AdminViewPermissionsTest(TestCase): """Tests for Admin Views Permissions.""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.viewuser = User.objects.create_user(username='viewuser', password='secret', is_staff=True) cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True) cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True) cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret') cls.nostaffuser = User.objects.create_user(username='nostaff', password='secret') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1, another_section=cls.s1, ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') # Setup permissions, for our users who can add, change, and delete. opts = Article._meta # User who can view Articles cls.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('view', opts))) # User who can add Articles cls.adduser.user_permissions.add(get_perm(Article, get_permission_codename('add', opts))) # User who can change Articles cls.changeuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts))) cls.nostaffuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts))) # User who can delete Articles cls.deleteuser.user_permissions.add(get_perm(Article, get_permission_codename('delete', opts))) cls.deleteuser.user_permissions.add(get_perm(Section, get_permission_codename('delete', Section._meta))) # login POST dicts cls.index_url = reverse('admin:index') cls.super_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'super', 'password': 'secret', } cls.super_email_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': '[email protected]', 'password': 'secret', } cls.super_email_bad_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': '[email protected]', 'password': 'notsecret', } cls.adduser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'adduser', 'password': 'secret', } cls.changeuser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'changeuser', 'password': 'secret', } cls.deleteuser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'deleteuser', 'password': 'secret', } cls.nostaff_login = { REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'), 'username': 'nostaff', 'password': 'secret', } cls.joepublic_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'joepublic', 'password': 'secret', } cls.viewuser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'viewuser', 'password': 'secret', } cls.no_username_login = { REDIRECT_FIELD_NAME: 
cls.index_url, 'password': 'secret', } def test_login(self): """ Make sure only staff members can log in. Successful posts to the login page will redirect to the original url. Unsuccessful attempts will continue to render the login page with a 200 status code. """ login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index')) # Super User response = self.client.get(self.index_url) self.assertRedirects(response, login_url) login = self.client.post(login_url, self.super_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Test if user enters email address response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.super_email_login) self.assertContains(login, ERROR_MESSAGE) # only correct passwords get a username hint login = self.client.post(login_url, self.super_email_bad_login) self.assertContains(login, ERROR_MESSAGE) new_user = User(username='jondoe', password='secret', email='[email protected]') new_user.save() # check to ensure if there are multiple email addresses a user doesn't get a 500 login = self.client.post(login_url, self.super_email_login) self.assertContains(login, ERROR_MESSAGE) # View User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.viewuser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Add User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.adduser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Change User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.changeuser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Delete User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.deleteuser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Regular User should not be able to login. response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.joepublic_login) self.assertEqual(login.status_code, 200) self.assertContains(login, ERROR_MESSAGE) # Requests without username should not return 500 errors. response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.no_username_login) self.assertEqual(login.status_code, 200) self.assertFormError(login, 'form', 'username', ['This field is required.']) def test_login_redirect_for_direct_get(self): """ Login redirect should be to the admin index page when going directly to /admin/login/. """ response = self.client.get(reverse('admin:login')) self.assertEqual(response.status_code, 200) self.assertEqual(response.context[REDIRECT_FIELD_NAME], reverse('admin:index')) def test_login_has_permission(self): # Regular User should not be able to login. 
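        # ('has_permission_admin' is served by an AdminSite with a custom
        # has_permission() check, so is_staff alone doesn't gate access here;
        # that's why nostaff can log in below while joepublic can't.)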
response = self.client.get(reverse('has_permission_admin:index')) self.assertEqual(response.status_code, 302) login = self.client.post(reverse('has_permission_admin:login'), self.joepublic_login) self.assertEqual(login.status_code, 200) self.assertContains(login, 'permission denied') # User with permissions should be able to login. response = self.client.get(reverse('has_permission_admin:index')) self.assertEqual(response.status_code, 302) login = self.client.post(reverse('has_permission_admin:login'), self.nostaff_login) self.assertRedirects(login, reverse('has_permission_admin:index')) self.assertFalse(login.context) self.client.get(reverse('has_permission_admin:logout')) # Staff should be able to login. response = self.client.get(reverse('has_permission_admin:index')) self.assertEqual(response.status_code, 302) login = self.client.post(reverse('has_permission_admin:login'), { REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'), 'username': 'deleteuser', 'password': 'secret', }) self.assertRedirects(login, reverse('has_permission_admin:index')) self.assertFalse(login.context) self.client.get(reverse('has_permission_admin:logout')) def test_login_successfully_redirects_to_original_URL(self): response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) query_string = 'the-answer=42' redirect_url = '%s?%s' % (self.index_url, query_string) new_next = {REDIRECT_FIELD_NAME: redirect_url} post_data = self.super_login.copy() post_data.pop(REDIRECT_FIELD_NAME) login = self.client.post( '%s?%s' % (reverse('admin:login'), urlencode(new_next)), post_data) self.assertRedirects(login, redirect_url) def test_double_login_is_not_allowed(self): """Regression test for #19327""" login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index')) response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) # Establish a valid admin session login = self.client.post(login_url, self.super_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) # Logging in with non-admin user fails login = self.client.post(login_url, self.joepublic_login) self.assertEqual(login.status_code, 200) self.assertContains(login, ERROR_MESSAGE) # Establish a valid admin session login = self.client.post(login_url, self.super_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) # Logging in with admin user while already logged in login = self.client.post(login_url, self.super_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) def test_login_page_notice_for_non_staff_users(self): """ A logged-in non-staff user trying to access the admin index should be presented with the login page and a hint indicating that the current user doesn't have access to it. 
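        The hint reads 'You are authenticated as <username>' and must appear
        only for an authenticated non-staff user, never for an anonymous
        visitor.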
""" hint_template = 'You are authenticated as {}' # Anonymous user should not be shown the hint response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'login-form') self.assertNotContains(response, hint_template.format(''), status_code=200) # Non-staff user should be shown the hint self.client.force_login(self.nostaffuser) response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'login-form') self.assertContains(response, hint_template.format(self.nostaffuser.username), status_code=200) def test_add_view(self): """Test add view restricts access and actually adds items.""" add_dict = { 'title': 'Døm ikke', 'content': '<p>great article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', 'section': self.s1.pk, } # Change User should not have access to add articles self.client.force_login(self.changeuser) # make sure the view removes test cookie self.assertIs(self.client.session.test_cookie_worked(), False) response = self.client.get(reverse('admin:admin_views_article_add')) self.assertEqual(response.status_code, 403) # Try POST just to make sure post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) self.client.get(reverse('admin:logout')) # View User should not have access to add articles self.client.force_login(self.viewuser) response = self.client.get(reverse('admin:admin_views_article_add')) self.assertEqual(response.status_code, 403) # Try POST just to make sure post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) # Now give the user permission to add but not change. self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('add', Article._meta))) response = self.client.get(reverse('admin:admin_views_article_add')) self.assertContains(response, '<input type="submit" value="Save and view" name="_continue">') post = self.client.post(reverse('admin:admin_views_article_add'), add_dict, follow=False) self.assertEqual(post.status_code, 302) self.assertEqual(Article.objects.count(), 4) article = Article.objects.latest('pk') response = self.client.get(reverse('admin:admin_views_article_change', args=(article.pk,))) self.assertContains(response, '<li class="success">The article “Døm ikke” was added successfully.</li>') article.delete() self.client.get(reverse('admin:logout')) # Add user may login and POST to add view, then redirect to admin root self.client.force_login(self.adduser) addpage = self.client.get(reverse('admin:admin_views_article_add')) change_list_link = '&rsaquo; <a href="%s">Articles</a>' % reverse('admin:admin_views_article_changelist') self.assertNotContains( addpage, change_list_link, msg_prefix='User restricted to add permission is given link to change list view in breadcrumbs.' 
) post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertRedirects(post, self.index_url) self.assertEqual(Article.objects.count(), 4) self.assertEqual(len(mail.outbox), 2) self.assertEqual(mail.outbox[0].subject, 'Greetings from a created object') self.client.get(reverse('admin:logout')) # The addition was logged correctly addition_log = LogEntry.objects.all()[0] new_article = Article.objects.last() article_ct = ContentType.objects.get_for_model(Article) self.assertEqual(addition_log.user_id, self.adduser.pk) self.assertEqual(addition_log.content_type_id, article_ct.pk) self.assertEqual(addition_log.object_id, str(new_article.pk)) self.assertEqual(addition_log.object_repr, "Døm ikke") self.assertEqual(addition_log.action_flag, ADDITION) self.assertEqual(addition_log.get_change_message(), "Added.") # Super can add too, but is redirected to the change list view self.client.force_login(self.superuser) addpage = self.client.get(reverse('admin:admin_views_article_add')) self.assertContains( addpage, change_list_link, msg_prefix='Unrestricted user is not given link to change list view in breadcrumbs.' ) post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertRedirects(post, reverse('admin:admin_views_article_changelist')) self.assertEqual(Article.objects.count(), 5) self.client.get(reverse('admin:logout')) # 8509 - if a normal user is already logged in, it is possible # to change user into the superuser without error self.client.force_login(self.joepublicuser) # Check and make sure that if user expires, data still persists self.client.force_login(self.superuser) # make sure the view removes test cookie self.assertIs(self.client.session.test_cookie_worked(), False) @mock.patch('django.contrib.admin.options.InlineModelAdmin.has_change_permission') def test_add_view_with_view_only_inlines(self, has_change_permission): """User with add permission to a section but view-only for inlines.""" self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('add', Section._meta))) self.client.force_login(self.viewuser) # Valid POST creates a new section. data = { 'name': 'New obj', 'article_set-TOTAL_FORMS': 0, 'article_set-INITIAL_FORMS': 0, } response = self.client.post(reverse('admin:admin_views_section_add'), data) self.assertRedirects(response, reverse('admin:index')) self.assertEqual(Section.objects.latest('id').name, data['name']) # InlineModelAdmin.has_change_permission()'s obj argument is always # None during object add. self.assertEqual([obj for (request, obj), _ in has_change_permission.call_args_list], [None, None]) def test_change_view(self): """Change view should restrict access and allow users to edit items.""" change_dict = { 'title': 'Ikke fordømt', 'content': '<p>edited article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', 'section': self.s1.pk, } article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,)) article_changelist_url = reverse('admin:admin_views_article_changelist') # add user should not be able to view the list of article or change any of them self.client.force_login(self.adduser) response = self.client.get(article_changelist_url) self.assertEqual(response.status_code, 403) response = self.client.get(article_change_url) self.assertEqual(response.status_code, 403) post = self.client.post(article_change_url, change_dict) self.assertEqual(post.status_code, 403) self.client.get(reverse('admin:logout')) # view user can view articles but not make changes. 
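        # With only the 'view' permission, the changelist title becomes
        # "Select article to view", the change form renders read-only with a
        # Close link in place of save buttons, and POSTs are refused with 403.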
self.client.force_login(self.viewuser) response = self.client.get(article_changelist_url) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['title'], 'Select article to view') response = self.client.get(article_change_url) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['title'], 'View article') self.assertContains(response, '<label>Extra form field:</label>') self.assertContains(response, '<a href="/test_admin/admin/admin_views/article/" class="closelink">Close</a>') post = self.client.post(article_change_url, change_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>Middle content</p>') self.client.get(reverse('admin:logout')) # change user can view all items and edit them self.client.force_login(self.changeuser) response = self.client.get(article_changelist_url) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['title'], 'Select article to change') response = self.client.get(article_change_url) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['title'], 'Change article') post = self.client.post(article_change_url, change_dict) self.assertRedirects(post, article_changelist_url) self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>edited article</p>') # one error in form should produce singular error message, multiple errors plural change_dict['title'] = '' post = self.client.post(article_change_url, change_dict) self.assertContains( post, 'Please correct the error below.', msg_prefix='Singular error message not found in response to post with one error' ) change_dict['content'] = '' post = self.client.post(article_change_url, change_dict) self.assertContains( post, 'Please correct the errors below.', msg_prefix='Plural error message not found in response to post with multiple errors' ) self.client.get(reverse('admin:logout')) # Test redirection when using row-level change permissions. Refs #11513. 
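        # The row-level ModelAdmin (assumed to live in this app's admin
        # module) grants change permission only on even-id objects, and view
        # (but not change) permission on objects whose id is a multiple of
        # three -- which is exactly what the responses below assert.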
r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id") r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id") r3 = RowLevelChangePermissionModel.objects.create(id=3, name='odd id mult 3') r6 = RowLevelChangePermissionModel.objects.create(id=6, name='even id mult 3') change_url_1 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r1.pk,)) change_url_2 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r2.pk,)) change_url_3 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r3.pk,)) change_url_6 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r6.pk,)) logins = [self.superuser, self.viewuser, self.adduser, self.changeuser, self.deleteuser] for login_user in logins: with self.subTest(login_user.username): self.client.force_login(login_user) response = self.client.get(change_url_1) self.assertEqual(response.status_code, 403) response = self.client.post(change_url_1, {'name': 'changed'}) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id') self.assertEqual(response.status_code, 403) response = self.client.get(change_url_2) self.assertEqual(response.status_code, 200) response = self.client.post(change_url_2, {'name': 'changed'}) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed') self.assertRedirects(response, self.index_url) response = self.client.get(change_url_3) self.assertEqual(response.status_code, 200) response = self.client.post(change_url_3, {'name': 'changed'}) self.assertEqual(response.status_code, 403) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=3).name, 'odd id mult 3') response = self.client.get(change_url_6) self.assertEqual(response.status_code, 200) response = self.client.post(change_url_6, {'name': 'changed'}) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=6).name, 'changed') self.assertRedirects(response, self.index_url) self.client.get(reverse('admin:logout')) for login_user in [self.joepublicuser, self.nostaffuser]: with self.subTest(login_user.username): self.client.force_login(login_user) response = self.client.get(change_url_1, follow=True) self.assertContains(response, 'login-form') response = self.client.post(change_url_1, {'name': 'changed'}, follow=True) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id') self.assertContains(response, 'login-form') response = self.client.get(change_url_2, follow=True) self.assertContains(response, 'login-form') response = self.client.post(change_url_2, {'name': 'changed again'}, follow=True) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed') self.assertContains(response, 'login-form') self.client.get(reverse('admin:logout')) def test_change_view_without_object_change_permission(self): """ The object should be read-only if the user has permission to view it and change objects of that type but not to change the current object. """ change_url = reverse('admin9:admin_views_article_change', args=(self.a1.pk,)) self.client.force_login(self.viewuser) response = self.client.get(change_url) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['title'], 'View article') self.assertContains(response, '<a href="/test_admin/admin9/admin_views/article/" class="closelink">Close</a>') def test_change_view_save_as_new(self): """ 'Save as new' should raise PermissionDenied for users without the 'add' permission. 
""" change_dict_save_as_new = { '_saveasnew': 'Save as new', 'title': 'Ikke fordømt', 'content': '<p>edited article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', 'section': self.s1.pk, } article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,)) # Add user can perform "Save as new". article_count = Article.objects.count() self.client.force_login(self.adduser) post = self.client.post(article_change_url, change_dict_save_as_new) self.assertRedirects(post, self.index_url) self.assertEqual(Article.objects.count(), article_count + 1) self.client.logout() # Change user cannot perform "Save as new" (no 'add' permission). article_count = Article.objects.count() self.client.force_login(self.changeuser) post = self.client.post(article_change_url, change_dict_save_as_new) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), article_count) # User with both add and change permissions should be redirected to the # change page for the newly created object. article_count = Article.objects.count() self.client.force_login(self.superuser) post = self.client.post(article_change_url, change_dict_save_as_new) self.assertEqual(Article.objects.count(), article_count + 1) new_article = Article.objects.latest('id') self.assertRedirects(post, reverse('admin:admin_views_article_change', args=(new_article.pk,))) def test_change_view_with_view_only_inlines(self): """ User with change permission to a section but view-only for inlines. """ self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta))) self.client.force_login(self.viewuser) # GET shows inlines. response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,))) self.assertEqual(len(response.context['inline_admin_formsets']), 1) formset = response.context['inline_admin_formsets'][0] self.assertEqual(len(formset.forms), 3) # Valid POST changes the name. data = { 'name': 'Can edit name with view-only inlines', 'article_set-TOTAL_FORMS': 3, 'article_set-INITIAL_FORMS': 3 } response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertRedirects(response, reverse('admin:admin_views_section_changelist')) self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data['name']) # Invalid POST reshows inlines. del data['name'] response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.context['inline_admin_formsets']), 1) formset = response.context['inline_admin_formsets'][0] self.assertEqual(len(formset.forms), 3) def test_change_view_with_view_and_add_inlines(self): """User has view and add permissions on the inline model.""" self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta))) self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('add', Article._meta))) self.client.force_login(self.viewuser) # GET shows inlines. response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,))) self.assertEqual(len(response.context['inline_admin_formsets']), 1) formset = response.context['inline_admin_formsets'][0] self.assertEqual(len(formset.forms), 6) # Valid POST creates a new article. 
data = { 'name': 'Can edit name with view-only inlines', 'article_set-TOTAL_FORMS': 6, 'article_set-INITIAL_FORMS': 3, 'article_set-3-id': [''], 'article_set-3-title': ['A title'], 'article_set-3-content': ['Added content'], 'article_set-3-date_0': ['2008-3-18'], 'article_set-3-date_1': ['11:54:58'], 'article_set-3-section': [str(self.s1.pk)], } response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertRedirects(response, reverse('admin:admin_views_section_changelist')) self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data['name']) self.assertEqual(Article.objects.count(), 4) # Invalid POST reshows inlines. del data['name'] response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.context['inline_admin_formsets']), 1) formset = response.context['inline_admin_formsets'][0] self.assertEqual(len(formset.forms), 6) def test_change_view_with_view_and_delete_inlines(self): """User has view and delete permissions on the inline model.""" self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta))) self.client.force_login(self.viewuser) data = { 'name': 'Name is required.', 'article_set-TOTAL_FORMS': 6, 'article_set-INITIAL_FORMS': 3, 'article_set-0-id': [str(self.a1.pk)], 'article_set-0-DELETE': ['on'], } # Inline POST details are ignored without delete permission. response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertRedirects(response, reverse('admin:admin_views_section_changelist')) self.assertEqual(Article.objects.count(), 3) # Deletion successful when delete permission is added. self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('delete', Article._meta))) data = { 'name': 'Name is required.', 'article_set-TOTAL_FORMS': 6, 'article_set-INITIAL_FORMS': 3, 'article_set-0-id': [str(self.a1.pk)], 'article_set-0-DELETE': ['on'], } response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertRedirects(response, reverse('admin:admin_views_section_changelist')) self.assertEqual(Article.objects.count(), 2) def test_delete_view(self): """Delete view should restrict access and actually delete items.""" delete_dict = {'post': 'yes'} delete_url = reverse('admin:admin_views_article_delete', args=(self.a1.pk,)) # add user should not be able to delete articles self.client.force_login(self.adduser) response = self.client.get(delete_url) self.assertEqual(response.status_code, 403) post = self.client.post(delete_url, delete_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) self.client.logout() # view user should not be able to delete articles self.client.force_login(self.viewuser) response = self.client.get(delete_url) self.assertEqual(response.status_code, 403) post = self.client.post(delete_url, delete_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) self.client.logout() # Delete user can delete self.client.force_login(self.deleteuser) response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,))) self.assertContains(response, "<h2>Summary</h2>") self.assertContains(response, "<li>Articles: 3</li>") # test response contains link to related Article self.assertContains(response, "admin_views/article/%s/" % self.a1.pk) response = self.client.get(delete_url) 
self.assertContains(response, "admin_views/article/%s/" % self.a1.pk) self.assertContains(response, "<h2>Summary</h2>") self.assertContains(response, "<li>Articles: 1</li>") self.assertEqual(response.status_code, 200) post = self.client.post(delete_url, delete_dict) self.assertRedirects(post, self.index_url) self.assertEqual(Article.objects.count(), 2) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Greetings from a deleted object') article_ct = ContentType.objects.get_for_model(Article) logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION) self.assertEqual(logged.object_id, str(self.a1.pk)) def test_delete_view_with_no_default_permissions(self): """ The delete view allows users to delete collected objects without a 'delete' permission (ReadOnlyPizza.Meta.default_permissions is empty). """ pizza = ReadOnlyPizza.objects.create(name='Double Cheese') delete_url = reverse('admin:admin_views_readonlypizza_delete', args=(pizza.pk,)) self.client.force_login(self.adduser) response = self.client.get(delete_url) self.assertContains(response, 'admin_views/readonlypizza/%s/' % pizza.pk) self.assertContains(response, '<h2>Summary</h2>') self.assertContains(response, '<li>Read only pizzas: 1</li>') self.assertEqual(response.status_code, 200) post = self.client.post(delete_url, {'post': 'yes'}) self.assertRedirects(post, reverse('admin:admin_views_readonlypizza_changelist')) self.assertEqual(ReadOnlyPizza.objects.count(), 0) def test_delete_view_nonexistent_obj(self): self.client.force_login(self.deleteuser) url = reverse('admin:admin_views_article_delete', args=('nonexistent',)) response = self.client.get(url, follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['article with ID “nonexistent” doesn’t exist. Perhaps it was deleted?'] ) def test_history_view(self): """History view should restrict access.""" # add user should not be able to view the list of article or change any of them self.client.force_login(self.adduser) response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,))) self.assertEqual(response.status_code, 403) self.client.get(reverse('admin:logout')) # view user can view all items self.client.force_login(self.viewuser) response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,))) self.assertEqual(response.status_code, 200) self.client.get(reverse('admin:logout')) # change user can view all items and edit them self.client.force_login(self.changeuser) response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,))) self.assertEqual(response.status_code, 200) # Test redirection when using row-level change permissions. Refs #11513. 
rl1 = RowLevelChangePermissionModel.objects.create(name="odd id") rl2 = RowLevelChangePermissionModel.objects.create(name="even id") logins = [self.superuser, self.viewuser, self.adduser, self.changeuser, self.deleteuser] for login_user in logins: with self.subTest(login_user.username): self.client.force_login(login_user) url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,)) response = self.client.get(url) self.assertEqual(response.status_code, 403) url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,)) response = self.client.get(url) self.assertEqual(response.status_code, 200) self.client.get(reverse('admin:logout')) for login_user in [self.joepublicuser, self.nostaffuser]: with self.subTest(login_user.username): self.client.force_login(login_user) url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,)) response = self.client.get(url, follow=True) self.assertContains(response, 'login-form') url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,)) response = self.client.get(url, follow=True) self.assertContains(response, 'login-form') self.client.get(reverse('admin:logout')) def test_history_view_bad_url(self): self.client.force_login(self.changeuser) response = self.client.get(reverse('admin:admin_views_article_history', args=('foo',)), follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['article with ID “foo” doesn’t exist. Perhaps it was deleted?'] ) def test_conditionally_show_add_section_link(self): """ The foreign key widget should only show the "add related" button if the user has permission to add that related item. """ self.client.force_login(self.adduser) # The user can't add sections yet, so they shouldn't see the "add section" link. url = reverse('admin:admin_views_article_add') add_link_text = 'add_id_section' response = self.client.get(url) self.assertNotContains(response, add_link_text) # Allow the user to add sections too. Now they can see the "add section" link. user = User.objects.get(username='adduser') perm = get_perm(Section, get_permission_codename('add', Section._meta)) user.user_permissions.add(perm) response = self.client.get(url) self.assertContains(response, add_link_text) def test_conditionally_show_change_section_link(self): """ The foreign key widget should only show the "change related" button if the user has permission to change that related item. """ def get_change_related(response): return response.context['adminform'].form.fields['section'].widget.can_change_related self.client.force_login(self.adduser) # The user can't change sections yet, so they shouldn't see the "change section" link. url = reverse('admin:admin_views_article_add') change_link_text = 'change_id_section' response = self.client.get(url) self.assertFalse(get_change_related(response)) self.assertNotContains(response, change_link_text) # Allow the user to change sections too. Now they can see the "change section" link. user = User.objects.get(username='adduser') perm = get_perm(Section, get_permission_codename('change', Section._meta)) user.user_permissions.add(perm) response = self.client.get(url) self.assertTrue(get_change_related(response)) self.assertContains(response, change_link_text) def test_conditionally_show_delete_section_link(self): """ The foreign key widget should only show the "delete related" button if the user has permission to delete that related item. 
""" def get_delete_related(response): return response.context['adminform'].form.fields['sub_section'].widget.can_delete_related self.client.force_login(self.adduser) # The user can't delete sections yet, so they shouldn't see the "delete section" link. url = reverse('admin:admin_views_article_add') delete_link_text = 'delete_id_sub_section' response = self.client.get(url) self.assertFalse(get_delete_related(response)) self.assertNotContains(response, delete_link_text) # Allow the user to delete sections too. Now they can see the "delete section" link. user = User.objects.get(username='adduser') perm = get_perm(Section, get_permission_codename('delete', Section._meta)) user.user_permissions.add(perm) response = self.client.get(url) self.assertTrue(get_delete_related(response)) self.assertContains(response, delete_link_text) def test_disabled_permissions_when_logged_in(self): self.client.force_login(self.superuser) superuser = User.objects.get(username='super') superuser.is_active = False superuser.save() response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'id="login-form"') self.assertNotContains(response, 'Log out') response = self.client.get(reverse('secure_view'), follow=True) self.assertContains(response, 'id="login-form"') def test_disabled_staff_permissions_when_logged_in(self): self.client.force_login(self.superuser) superuser = User.objects.get(username='super') superuser.is_staff = False superuser.save() response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'id="login-form"') self.assertNotContains(response, 'Log out') response = self.client.get(reverse('secure_view'), follow=True) self.assertContains(response, 'id="login-form"') def test_app_list_permissions(self): """ If a user has no module perms, the app list returns a 404. """ opts = Article._meta change_user = User.objects.get(username='changeuser') permission = get_perm(Article, get_permission_codename('change', opts)) self.client.force_login(self.changeuser) # the user has no module permissions change_user.user_permissions.remove(permission) response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertEqual(response.status_code, 404) # the user now has module permissions change_user.user_permissions.add(permission) response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertEqual(response.status_code, 200) def test_shortcut_view_only_available_to_staff(self): """ Only admin users should be able to use the admin shortcut view. """ model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey) obj = ModelWithStringPrimaryKey.objects.create(string_pk='foo') shortcut_url = reverse('admin:view_on_site', args=(model_ctype.pk, obj.pk)) # Not logged in: we should see the login page. response = self.client.get(shortcut_url, follow=True) self.assertTemplateUsed(response, 'admin/login.html') # Logged in? Redirect. self.client.force_login(self.superuser) response = self.client.get(shortcut_url, follow=False) # Can't use self.assertRedirects() because User.get_absolute_url() is silly. self.assertEqual(response.status_code, 302) # Domain may depend on contrib.sites tests also run self.assertRegex(response.url, 'http://(testserver|example.com)/dummy/foo/') def test_has_module_permission(self): """ has_module_permission() returns True for all users who have any permission for that module (add, change, or delete), so that the module is displayed on the admin index page. 
""" self.client.force_login(self.superuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.viewuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.adduser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.changeuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.deleteuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') def test_overriding_has_module_permission(self): """ If has_module_permission() always returns False, the module shouldn't be displayed on the admin index page for any users. """ articles = Article._meta.verbose_name_plural.title() sections = Section._meta.verbose_name_plural.title() index_url = reverse('admin7:index') self.client.force_login(self.superuser) response = self.client.get(index_url) self.assertContains(response, sections) self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.viewuser) response = self.client.get(index_url) self.assertNotContains(response, 'admin_views') self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.adduser) response = self.client.get(index_url) self.assertNotContains(response, 'admin_views') self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.changeuser) response = self.client.get(index_url) self.assertNotContains(response, 'admin_views') self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.deleteuser) response = self.client.get(index_url) self.assertNotContains(response, articles) # The app list displays Sections but not Articles as the latter has # ModelAdmin.has_module_permission() = False. self.client.force_login(self.superuser) response = self.client.get(reverse('admin7:app_list', args=('admin_views',))) self.assertContains(response, sections) self.assertNotContains(response, articles) def test_post_save_message_no_forbidden_links_visible(self): """ Post-save message shouldn't contain a link to the change form if the user doesn't have the change permission. """ self.client.force_login(self.adduser) # Emulate Article creation for user with add-only permission. 
post_data = { "title": "Fun & games", "content": "Some content", "date_0": "2015-10-31", "date_1": "16:35:00", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_article_add'), post_data, follow=True) self.assertContains( response, '<li class="success">The article “Fun &amp; games” was added successfully.</li>', html=True ) @override_settings( ROOT_URLCONF='admin_views.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }], ) class AdminViewProxyModelPermissionsTests(TestCase): """Tests for proxy models permissions in the admin.""" @classmethod def setUpTestData(cls): cls.viewuser = User.objects.create_user(username='viewuser', password='secret', is_staff=True) cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True) cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True) # Setup permissions. opts = UserProxy._meta cls.viewuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('view', opts))) cls.adduser.user_permissions.add(get_perm(UserProxy, get_permission_codename('add', opts))) cls.changeuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('change', opts))) cls.deleteuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('delete', opts))) # UserProxy instances. cls.user_proxy = UserProxy.objects.create(username='user_proxy', password='secret') def test_add(self): self.client.force_login(self.adduser) url = reverse('admin:admin_views_userproxy_add') data = { 'username': 'can_add', 'password': 'secret', 'date_joined_0': '2019-01-15', 'date_joined_1': '16:59:10', } response = self.client.post(url, data, follow=True) self.assertEqual(response.status_code, 200) self.assertTrue(UserProxy.objects.filter(username='can_add').exists()) def test_view(self): self.client.force_login(self.viewuser) response = self.client.get(reverse('admin:admin_views_userproxy_changelist')) self.assertContains(response, '<h1>Select user proxy to view</h1>') self.assertEqual(response.status_code, 200) response = self.client.get(reverse('admin:admin_views_userproxy_change', args=(self.user_proxy.pk,))) self.assertContains(response, '<h1>View user proxy</h1>') self.assertContains(response, '<div class="readonly">user_proxy</div>') def test_change(self): self.client.force_login(self.changeuser) data = { 'password': self.user_proxy.password, 'username': self.user_proxy.username, 'date_joined_0': self.user_proxy.date_joined.strftime('%Y-%m-%d'), 'date_joined_1': self.user_proxy.date_joined.strftime('%H:%M:%S'), 'first_name': 'first_name', } url = reverse('admin:admin_views_userproxy_change', args=(self.user_proxy.pk,)) response = self.client.post(url, data) self.assertRedirects(response, reverse('admin:admin_views_userproxy_changelist')) self.assertEqual(UserProxy.objects.get(pk=self.user_proxy.pk).first_name, 'first_name') def test_delete(self): self.client.force_login(self.deleteuser) url = reverse('admin:admin_views_userproxy_delete', args=(self.user_proxy.pk,)) response = self.client.post(url, {'post': 'yes'}, follow=True) self.assertEqual(response.status_code, 200) 
self.assertFalse(UserProxy.objects.filter(pk=self.user_proxy.pk).exists()) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewsNoUrlTest(TestCase): """Regression test for #17333""" @classmethod def setUpTestData(cls): # User who can change Reports cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.changeuser.user_permissions.add(get_perm(Report, get_permission_codename('change', Report._meta))) def test_no_standard_modeladmin_urls(self): """Admin index views don't break when user's ModelAdmin removes standard urls""" self.client.force_login(self.changeuser) r = self.client.get(reverse('admin:index')) # we shouldn't get a 500 error caused by a NoReverseMatch self.assertEqual(r.status_code, 200) self.client.get(reverse('admin:logout')) @skipUnlessDBFeature('can_defer_constraint_checks') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewDeletedObjectsTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True) cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.v1 = Villain.objects.create(name='Adam') cls.v2 = Villain.objects.create(name='Sue') cls.sv1 = SuperVillain.objects.create(name='Bob') cls.pl1 = Plot.objects.create(name='World Domination', team_leader=cls.v1, contact=cls.v2) cls.pl2 = Plot.objects.create(name='World Peace', team_leader=cls.v2, contact=cls.v2) cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1) cls.pd1 = PlotDetails.objects.create(details='almost finished', plot=cls.pl1) cls.sh1 = SecretHideout.objects.create(location='underground bunker', villain=cls.v1) cls.sh2 = SecretHideout.objects.create(location='floating castle', villain=cls.sv1) cls.ssh1 = SuperSecretHideout.objects.create(location='super floating castle!', supervillain=cls.sv1) cls.cy1 = CyclicOne.objects.create(name='I am recursive', two_id=1) cls.cy2 = CyclicTwo.objects.create(name='I am recursive too', one_id=1) def setUp(self): self.client.force_login(self.superuser) def test_nesting(self): """ Objects should be nested to display the relationships that cause them to be scheduled for deletion. """ pattern = re.compile( r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*' r'<li>Plot details: <a href="%s">almost finished</a>' % ( reverse('admin:admin_views_plot_change', args=(self.pl1.pk,)), reverse('admin:admin_views_plotdetails_change', args=(self.pd1.pk,)), ) ) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,))) self.assertRegex(response.content.decode(), pattern) def test_cyclic(self): """ Cyclic relationships should still cause each object to only be listed once. 
""" one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % ( reverse('admin:admin_views_cyclicone_change', args=(self.cy1.pk,)), ) two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % ( reverse('admin:admin_views_cyclictwo_change', args=(self.cy2.pk,)), ) response = self.client.get(reverse('admin:admin_views_cyclicone_delete', args=(self.cy1.pk,))) self.assertContains(response, one, 1) self.assertContains(response, two, 1) def test_perms_needed(self): self.client.logout() delete_user = User.objects.get(username='deleteuser') delete_user.user_permissions.add(get_perm(Plot, get_permission_codename('delete', Plot._meta))) self.client.force_login(self.deleteuser) response = self.client.get(reverse('admin:admin_views_plot_delete', args=(self.pl1.pk,))) self.assertContains(response, "your account doesn't have permission to delete the following types of objects") self.assertContains(response, "<li>plot details</li>") def test_protected(self): q = Question.objects.create(question="Why?") a1 = Answer.objects.create(question=q, answer="Because.") a2 = Answer.objects.create(question=q, answer="Yes.") response = self.client.get(reverse('admin:admin_views_question_delete', args=(q.pk,))) self.assertContains(response, "would require deleting the following protected related objects") self.assertContains( response, '<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,)) ) self.assertContains( response, '<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,)) ) def test_post_delete_protected(self): """ A POST request to delete protected objects should display the page which says the deletion is prohibited. """ q = Question.objects.create(question='Why?') Answer.objects.create(question=q, answer='Because.') response = self.client.post(reverse('admin:admin_views_question_delete', args=(q.pk,)), {'post': 'yes'}) self.assertEqual(Question.objects.count(), 1) self.assertContains(response, "would require deleting the following protected related objects") def test_restricted(self): album = Album.objects.create(title='Amaryllis') song = Song.objects.create(album=album, name='Unity') response = self.client.get(reverse('admin:admin_views_album_delete', args=(album.pk,))) self.assertContains( response, 'would require deleting the following protected related objects', ) self.assertContains( response, '<li>Song: <a href="%s">Unity</a></li>' % reverse('admin:admin_views_song_change', args=(song.pk,)) ) def test_post_delete_restricted(self): album = Album.objects.create(title='Amaryllis') Song.objects.create(album=album, name='Unity') response = self.client.post( reverse('admin:admin_views_album_delete', args=(album.pk,)), {'post': 'yes'}, ) self.assertEqual(Album.objects.count(), 1) self.assertContains( response, 'would require deleting the following protected related objects', ) def test_not_registered(self): should_contain = """<li>Secret hideout: underground bunker""" response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,))) self.assertContains(response, should_contain, 1) def test_multiple_fkeys_to_same_model(self): """ If a deleted object has two relationships from another model, both of those should be followed in looking for related objects to delete. 
""" should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse( 'admin:admin_views_plot_change', args=(self.pl1.pk,) ) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,))) self.assertContains(response, should_contain) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,))) self.assertContains(response, should_contain) def test_multiple_fkeys_to_same_instance(self): """ If a deleted object has two relationships pointing to it from another object, the other object should still only be listed once. """ should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse( 'admin:admin_views_plot_change', args=(self.pl2.pk,) ) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,))) self.assertContains(response, should_contain, 1) def test_inheritance(self): """ In the case of an inherited model, if either the child or parent-model instance is deleted, both instances are listed for deletion, as well as any relationships they have. """ should_contain = [ '<li>Villain: <a href="%s">Bob</a>' % reverse('admin:admin_views_villain_change', args=(self.sv1.pk,)), '<li>Super villain: <a href="%s">Bob</a>' % reverse( 'admin:admin_views_supervillain_change', args=(self.sv1.pk,) ), '<li>Secret hideout: floating castle', '<li>Super secret hideout: super floating castle!', ] response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.sv1.pk,))) for should in should_contain: self.assertContains(response, should, 1) response = self.client.get(reverse('admin:admin_views_supervillain_delete', args=(self.sv1.pk,))) for should in should_contain: self.assertContains(response, should, 1) def test_generic_relations(self): """ If a deleted object has GenericForeignKeys pointing to it, those objects should be listed for deletion. """ plot = self.pl3 tag = FunkyTag.objects.create(content_object=plot, name='hott') should_contain = '<li>Funky tag: <a href="%s">hott' % reverse( 'admin:admin_views_funkytag_change', args=(tag.id,)) response = self.client.get(reverse('admin:admin_views_plot_delete', args=(plot.pk,))) self.assertContains(response, should_contain) def test_generic_relations_with_related_query_name(self): """ If a deleted object has GenericForeignKey with GenericRelation(related_query_name='...') pointing to it, those objects should be listed for deletion. """ bookmark = Bookmark.objects.create(name='djangoproject') tag = FunkyTag.objects.create(content_object=bookmark, name='django') tag_url = reverse('admin:admin_views_funkytag_change', args=(tag.id,)) should_contain = '<li>Funky tag: <a href="%s">django' % tag_url response = self.client.get(reverse('admin:admin_views_bookmark_delete', args=(bookmark.pk,))) self.assertContains(response, should_contain) def test_delete_view_uses_get_deleted_objects(self): """The delete view uses ModelAdmin.get_deleted_objects().""" book = Book.objects.create(name='Test Book') response = self.client.get(reverse('admin2:admin_views_book_delete', args=(book.pk,))) # BookAdmin.get_deleted_objects() returns custom text. 
self.assertContains(response, 'a deletable object') @override_settings(ROOT_URLCONF='admin_views.urls') class TestGenericRelations(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.v1 = Villain.objects.create(name='Adam') cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1) def setUp(self): self.client.force_login(self.superuser) def test_generic_content_object_in_list_display(self): FunkyTag.objects.create(content_object=self.pl3, name='hott') response = self.client.get(reverse('admin:admin_views_funkytag_changelist')) self.assertContains(response, "%s</td>" % self.pl3) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewStringPrimaryKeyTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.pk = ( "abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 " r"""-_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`""" ) cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk) content_type_pk = ContentType.objects.get_for_model(ModelWithStringPrimaryKey).pk user_pk = cls.superuser.pk LogEntry.objects.log_action(user_pk, content_type_pk, cls.pk, cls.pk, 2, change_message='Changed something') def setUp(self): self.client.force_login(self.superuser) def test_get_history_view(self): """ Retrieving the history for an object using urlencoded form of primary key should work. Refs #12349, #18550. 
""" response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_history', args=(self.pk,))) self.assertContains(response, escape(self.pk)) self.assertContains(response, 'Changed something') self.assertEqual(response.status_code, 200) def test_get_change_view(self): "Retrieving the object using urlencoded form of primary key should work" response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_change', args=(self.pk,))) self.assertContains(response, escape(self.pk)) self.assertEqual(response.status_code, 200) def test_changelist_to_changeform_link(self): "Link to the changeform of the object in changelist should use reverse() and be quoted -- #18072" response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_changelist')) # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding pk_final_url = escape(iri_to_uri(quote(self.pk))) change_url = reverse( 'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',) ).replace('__fk__', pk_final_url) should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (change_url, escape(self.pk)) self.assertContains(response, should_contain) def test_recentactions_link(self): "The link from the recent actions list referring to the changeform of the object should be quoted" response = self.client.get(reverse('admin:index')) link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),)) should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk)) self.assertContains(response, should_contain) def test_deleteconfirmation_link(self): "The link from the delete confirmation page referring back to the changeform of the object should be quoted" url = reverse('admin:admin_views_modelwithstringprimarykey_delete', args=(quote(self.pk),)) response = self.client.get(url) # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding change_url = reverse( 'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',) ).replace('__fk__', escape(iri_to_uri(quote(self.pk)))) should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk)) self.assertContains(response, should_contain) def test_url_conflicts_with_add(self): "A model with a primary key that ends with add or is `add` should be visible" add_model = ModelWithStringPrimaryKey.objects.create(pk="i have something to add") add_model.save() response = self.client.get( reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model.pk),)) ) should_contain = """<h1>Change model with string primary key</h1>""" self.assertContains(response, should_contain) add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add") add_url = reverse('admin:admin_views_modelwithstringprimarykey_add') change_url = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model2.pk),)) self.assertNotEqual(add_url, change_url) def test_url_conflicts_with_delete(self): "A model with a primary key that ends with delete should be visible" delete_model = ModelWithStringPrimaryKey(pk="delete") delete_model.save() response = self.client.get( reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(delete_model.pk),)) ) should_contain = """<h1>Change model with string primary key</h1>""" self.assertContains(response, should_contain) def test_url_conflicts_with_history(self): "A model with a primary key that ends with history should be visible" history_model = ModelWithStringPrimaryKey(pk="history") 
        history_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(history_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)

    def test_shortcut_view_with_escaping(self):
        "'View on site' should work properly with char fields"
        model = ModelWithStringPrimaryKey(pk='abc_123')
        model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(model.pk),))
        )
        should_contain = '/%s/" class="viewsitelink">' % model.pk
        self.assertContains(response, should_contain)

    def test_change_view_history_link(self):
        """Object history button link should work and contain the pk value quoted."""
        url = reverse(
            'admin:%s_modelwithstringprimarykey_change' % ModelWithStringPrimaryKey._meta.app_label,
            args=(quote(self.pk),)
        )
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        expected_link = reverse(
            'admin:%s_modelwithstringprimarykey_history' % ModelWithStringPrimaryKey._meta.app_label,
            args=(quote(self.pk),)
        )
        self.assertContains(response, '<a href="%s" class="historylink"' % escape(expected_link))

    def test_redirect_on_add_view_continue_button(self):
        """As soon as an object is added using the "Save and continue editing"
        button, the user should be redirected to the object's change_view.

        If the primary key is a string containing special characters like a
        slash or underscore, those characters must be escaped (see #22266)
        """
        response = self.client.post(
            reverse('admin:admin_views_modelwithstringprimarykey_add'),
            {
                'string_pk': '123/history',
                "_continue": "1",  # Save and continue editing
            }
        )
        self.assertEqual(response.status_code, 302)  # temporary redirect
        self.assertIn('/123_2Fhistory/', response['location'])  # PK is quoted


@override_settings(ROOT_URLCONF='admin_views.urls')
class SecureViewTests(TestCase):
    """
    Test behavior of a view protected by the staff_member_required decorator.
    """
    def test_secure_view_shows_login_if_not_logged_in(self):
        secure_url = reverse('secure_view')
        response = self.client.get(secure_url)
        self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), secure_url))
        response = self.client.get(secure_url, follow=True)
        self.assertTemplateUsed(response, 'admin/login.html')
        self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)

    def test_staff_member_required_decorator_works_with_argument(self):
        """
        The staff_member_required decorator works with an argument
        (redirect_field_name).
""" secure_url = '/test_admin/admin/secure-view2/' response = self.client.get(secure_url) self.assertRedirects(response, '%s?myfield=%s' % (reverse('admin:login'), secure_url)) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewUnicodeTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.b1 = Book.objects.create(name='Lærdommer') cls.p1 = Promo.objects.create(name='<Promo for Lærdommer>', book=cls.b1) cls.chap1 = Chapter.objects.create( title='Norske bostaver æøå skaper problemer', content='<p>Svært frustrerende med UnicodeDecodeErro</p>', book=cls.b1 ) cls.chap2 = Chapter.objects.create( title='Kjærlighet', content='<p>La kjærligheten til de lidende seire.</p>', book=cls.b1) cls.chap3 = Chapter.objects.create(title='Kjærlighet', content='<p>Noe innhold</p>', book=cls.b1) cls.chap4 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='<Xtra(1) Norske bostaver æøå skaper problemer>') cls.chap5 = ChapterXtra1.objects.create(chap=cls.chap2, xtra='<Xtra(1) Kjærlighet>') cls.chap6 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='<Xtra(1) Kjærlighet>') cls.chap7 = ChapterXtra2.objects.create(chap=cls.chap1, xtra='<Xtra(2) Norske bostaver æøå skaper problemer>') cls.chap8 = ChapterXtra2.objects.create(chap=cls.chap2, xtra='<Xtra(2) Kjærlighet>') cls.chap9 = ChapterXtra2.objects.create(chap=cls.chap3, xtra='<Xtra(2) Kjærlighet>') def setUp(self): self.client.force_login(self.superuser) def test_unicode_edit(self): """ A test to ensure that POST on edit_view handles non-ASCII characters. """ post_data = { "name": "Test lærdommer", # inline data "chapter_set-TOTAL_FORMS": "6", "chapter_set-INITIAL_FORMS": "3", "chapter_set-MAX_NUM_FORMS": "0", "chapter_set-0-id": self.chap1.pk, "chapter_set-0-title": "Norske bostaver æøå skaper problemer", "chapter_set-0-content": "&lt;p&gt;Svært frustrerende med UnicodeDecodeError&lt;/p&gt;", "chapter_set-1-id": self.chap2.id, "chapter_set-1-title": "Kjærlighet.", "chapter_set-1-content": "&lt;p&gt;La kjærligheten til de lidende seire.&lt;/p&gt;", "chapter_set-2-id": self.chap3.id, "chapter_set-2-title": "Need a title.", "chapter_set-2-content": "&lt;p&gt;Newest content&lt;/p&gt;", "chapter_set-3-id": "", "chapter_set-3-title": "", "chapter_set-3-content": "", "chapter_set-4-id": "", "chapter_set-4-title": "", "chapter_set-4-content": "", "chapter_set-5-id": "", "chapter_set-5-title": "", "chapter_set-5-content": "", } response = self.client.post(reverse('admin:admin_views_book_change', args=(self.b1.pk,)), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_unicode_delete(self): """ The delete_view handles non-ASCII characters """ delete_dict = {'post': 'yes'} delete_url = reverse('admin:admin_views_book_delete', args=(self.b1.pk,)) response = self.client.get(delete_url) self.assertEqual(response.status_code, 200) response = self.client.post(delete_url, delete_dict) self.assertRedirects(response, reverse('admin:admin_views_book_changelist')) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewListEditable(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( 
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True) cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False) cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True) def setUp(self): self.client.force_login(self.superuser) def test_inheritance(self): Podcast.objects.create(name="This Week in Django", release_date=datetime.date.today()) response = self.client.get(reverse('admin:admin_views_podcast_changelist')) self.assertEqual(response.status_code, 200) def test_inheritance_2(self): Vodcast.objects.create(name="This Week in Django", released=True) response = self.client.get(reverse('admin:admin_views_vodcast_changelist')) self.assertEqual(response.status_code, 200) def test_custom_pk(self): Language.objects.create(iso='en', name='English', english_name='English') response = self.client.get(reverse('admin:admin_views_language_changelist')) self.assertEqual(response.status_code, 200) def test_changelist_input_html(self): response = self.client.get(reverse('admin:admin_views_person_changelist')) # 2 inputs per object(the field and the hidden id field) = 6 # 4 management hidden fields = 4 # 4 action inputs (3 regular checkboxes, 1 checkbox to select all) # main form submit button = 1 # search field and search submit button = 2 # CSRF field = 1 # field to track 'select all' across paginated views = 1 # 6 + 4 + 4 + 1 + 2 + 1 + 1 = 19 inputs self.assertContains(response, "<input", count=19) # 1 select per object = 3 selects self.assertContains(response, "<select", count=4) def test_post_messages(self): # Ticket 12707: Saving inline editable should not show admin # action warnings data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": str(self.per1.pk), "form-1-gender": "2", "form-1-id": str(self.per2.pk), "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": str(self.per3.pk), "_save": "Save", } response = self.client.post(reverse('admin:admin_views_person_changelist'), data, follow=True) self.assertEqual(len(response.context['messages']), 1) def test_post_submission(self): data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": str(self.per1.pk), "form-1-gender": "2", "form-1-id": str(self.per2.pk), "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": str(self.per3.pk), "_save": "Save", } self.client.post(reverse('admin:admin_views_person_changelist'), data) self.assertIs(Person.objects.get(name="John Mauchly").alive, False) self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2) # test a filtered page data = { "form-TOTAL_FORMS": "2", "form-INITIAL_FORMS": "2", "form-MAX_NUM_FORMS": "0", "form-0-id": str(self.per1.pk), "form-0-gender": "1", "form-0-alive": "checked", "form-1-id": str(self.per3.pk), "form-1-gender": "1", "form-1-alive": "checked", "_save": "Save", } self.client.post(reverse('admin:admin_views_person_changelist') + '?gender__exact=1', data) self.assertIs(Person.objects.get(name="John Mauchly").alive, True) # test a searched page data = { "form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "1", "form-MAX_NUM_FORMS": "0", "form-0-id": 
str(self.per1.pk), "form-0-gender": "1", "_save": "Save", } self.client.post(reverse('admin:admin_views_person_changelist') + '?q=john', data) self.assertIs(Person.objects.get(name="John Mauchly").alive, False) def test_non_field_errors(self): """ Non-field errors are displayed for each of the forms in the changelist's formset. """ fd1 = FoodDelivery.objects.create(reference='123', driver='bill', restaurant='thai') fd2 = FoodDelivery.objects.create(reference='456', driver='bill', restaurant='india') fd3 = FoodDelivery.objects.create(reference='789', driver='bill', restaurant='pizza') data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-id": str(fd1.id), "form-0-reference": "123", "form-0-driver": "bill", "form-0-restaurant": "thai", # Same data as above: Forbidden because of unique_together! "form-1-id": str(fd2.id), "form-1-reference": "456", "form-1-driver": "bill", "form-1-restaurant": "thai", "form-2-id": str(fd3.id), "form-2-reference": "789", "form-2-driver": "bill", "form-2-restaurant": "pizza", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data) self.assertContains( response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery ' 'with this Driver and Restaurant already exists.</li></ul></td></tr>', 1, html=True ) data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-id": str(fd1.id), "form-0-reference": "123", "form-0-driver": "bill", "form-0-restaurant": "thai", # Same data as above: Forbidden because of unique_together! "form-1-id": str(fd2.id), "form-1-reference": "456", "form-1-driver": "bill", "form-1-restaurant": "thai", # Same data also. "form-2-id": str(fd3.id), "form-2-reference": "789", "form-2-driver": "bill", "form-2-restaurant": "thai", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data) self.assertContains( response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery ' 'with this Driver and Restaurant already exists.</li></ul></td></tr>', 2, html=True ) def test_non_form_errors(self): # test if non-form errors are handled; ticket #12716 data = { "form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "1", "form-MAX_NUM_FORMS": "0", "form-0-id": str(self.per2.pk), "form-0-alive": "1", "form-0-gender": "2", # The form processing understands this as a list_editable "Save" # and not an action "Go". 
"_save": "Save", } response = self.client.post(reverse('admin:admin_views_person_changelist'), data) self.assertContains(response, "Grace is not a Zombie") def test_non_form_errors_is_errorlist(self): # test if non-form errors are correctly handled; ticket #12878 data = { "form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "1", "form-MAX_NUM_FORMS": "0", "form-0-id": str(self.per2.pk), "form-0-alive": "1", "form-0-gender": "2", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_person_changelist'), data) non_form_errors = response.context['cl'].formset.non_form_errors() self.assertIsInstance(non_form_errors, ErrorList) self.assertEqual(str(non_form_errors), str(ErrorList(["Grace is not a Zombie"]))) def test_list_editable_ordering(self): collector = Collector.objects.create(id=1, name="Frederick Clegg") Category.objects.create(id=1, order=1, collector=collector) Category.objects.create(id=2, order=2, collector=collector) Category.objects.create(id=3, order=0, collector=collector) Category.objects.create(id=4, order=0, collector=collector) # NB: The order values must be changed so that the items are reordered. data = { "form-TOTAL_FORMS": "4", "form-INITIAL_FORMS": "4", "form-MAX_NUM_FORMS": "0", "form-0-order": "14", "form-0-id": "1", "form-0-collector": "1", "form-1-order": "13", "form-1-id": "2", "form-1-collector": "1", "form-2-order": "1", "form-2-id": "3", "form-2-collector": "1", "form-3-order": "0", "form-3-id": "4", "form-3-collector": "1", # The form processing understands this as a list_editable "Save" # and not an action "Go". "_save": "Save", } response = self.client.post(reverse('admin:admin_views_category_changelist'), data) # Successful post will redirect self.assertEqual(response.status_code, 302) # The order values have been applied to the right objects self.assertEqual(Category.objects.get(id=1).order, 14) self.assertEqual(Category.objects.get(id=2).order, 13) self.assertEqual(Category.objects.get(id=3).order, 1) self.assertEqual(Category.objects.get(id=4).order, 0) def test_list_editable_pagination(self): """ Pagination works for list_editable items. """ UnorderedObject.objects.create(id=1, name='Unordered object #1') UnorderedObject.objects.create(id=2, name='Unordered object #2') UnorderedObject.objects.create(id=3, name='Unordered object #3') response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist')) self.assertContains(response, 'Unordered object #3') self.assertContains(response, 'Unordered object #2') self.assertNotContains(response, 'Unordered object #1') response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist') + '?p=1') self.assertNotContains(response, 'Unordered object #3') self.assertNotContains(response, 'Unordered object #2') self.assertContains(response, 'Unordered object #1') def test_list_editable_action_submit(self): # List editable changes should not be executed if the action "Go" button is # used to submit the form. 
data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": "1", "form-1-gender": "2", "form-1-id": "2", "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": "3", "index": "0", "_selected_action": ['3'], "action": ['', 'delete_selected'], } self.client.post(reverse('admin:admin_views_person_changelist'), data) self.assertIs(Person.objects.get(name="John Mauchly").alive, True) self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1) def test_list_editable_action_choices(self): # List editable changes should be executed if the "Save" button is # used to submit the form - any action choices should be ignored. data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": str(self.per1.pk), "form-1-gender": "2", "form-1-id": str(self.per2.pk), "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": str(self.per3.pk), "_save": "Save", "_selected_action": ['1'], "action": ['', 'delete_selected'], } self.client.post(reverse('admin:admin_views_person_changelist'), data) self.assertIs(Person.objects.get(name="John Mauchly").alive, False) self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2) def test_list_editable_popup(self): """ Fields should not be list-editable in popups. """ response = self.client.get(reverse('admin:admin_views_person_changelist')) self.assertNotEqual(response.context['cl'].list_editable, ()) response = self.client.get(reverse('admin:admin_views_person_changelist') + '?%s' % IS_POPUP_VAR) self.assertEqual(response.context['cl'].list_editable, ()) def test_pk_hidden_fields(self): """ Hidden pk fields aren't displayed in the table body; their corresponding human-readable value is displayed instead. The hidden pk fields are rendered, but separately (not in the table) and only once. """ story1 = Story.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...') story2 = Story.objects.create( title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...', ) response = self.client.get(reverse('admin:admin_views_story_changelist')) # Only one hidden field, in a separate place from the table. self.assertContains(response, 'id="id_form-0-id"', 1) self.assertContains(response, 'id="id_form-1-id"', 1) self.assertContains( response, '<div class="hiddenfields">\n' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">' '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n</div>' % (story2.id, story1.id), html=True ) self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1) self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1) def test_pk_hidden_fields_with_list_display_links(self): """ Similar to test_pk_hidden_fields, but with the hidden pk fields referenced in list_display_links. Refs #12475. """ story1 = OtherStory.objects.create( title='The adventures of Guido', content='Once upon a time in Djangoland...', ) story2 = OtherStory.objects.create( title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...', ) link1 = reverse('admin:admin_views_otherstory_change', args=(story1.pk,)) link2 = reverse('admin:admin_views_otherstory_change', args=(story2.pk,)) response = self.client.get(reverse('admin:admin_views_otherstory_changelist')) # Only one hidden field, in a separate place from the table.
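# As in test_pk_hidden_fields: each row's pk input is rendered once inside
# the div.hiddenfields block, while the visible cell holds the
# list_display_links anchor instead of a hidden input.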
self.assertContains(response, 'id="id_form-0-id"', 1) self.assertContains(response, 'id="id_form-1-id"', 1) self.assertContains( response, '<div class="hiddenfields">\n' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">' '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n</div>' % (story2.id, story1.id), html=True ) self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id), 1) self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id), 1) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminSearchTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True) cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False) cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True) cls.t1 = Recommender.objects.create() cls.t2 = Recommendation.objects.create(the_recommender=cls.t1) cls.t3 = Recommender.objects.create() cls.t4 = Recommendation.objects.create(the_recommender=cls.t3) cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text='Bar') cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text='Foo') cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text='Few') cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text='Bas') def setUp(self): self.client.force_login(self.superuser) def test_search_on_sibling_models(self): "A search that mentions sibling models" response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar') # confirm the search returned 1 object self.assertContains(response, "\n1 recommendation\n") def test_with_fk_to_field(self): """ The to_field GET parameter is preserved when a search is performed. Refs #10918. 
""" response = self.client.get(reverse('admin:auth_user_changelist') + '?q=joe&%s=id' % TO_FIELD_VAR) self.assertContains(response, "\n1 user\n") self.assertContains(response, '<input type="hidden" name="%s" value="id">' % TO_FIELD_VAR, html=True) def test_exact_matches(self): response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar') # confirm the search returned one object self.assertContains(response, "\n1 recommendation\n") response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=ba') # confirm the search returned zero objects self.assertContains(response, "\n0 recommendations\n") def test_beginning_matches(self): response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui') # confirm the search returned one object self.assertContains(response, "\n1 person\n") self.assertContains(response, "Guido") response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=uido') # confirm the search returned zero objects self.assertContains(response, "\n0 persons\n") self.assertNotContains(response, "Guido") def test_pluggable_search(self): PluggableSearchPerson.objects.create(name="Bob", age=10) PluggableSearchPerson.objects.create(name="Amy", age=20) response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=Bob') # confirm the search returned one object self.assertContains(response, "\n1 pluggable search person\n") self.assertContains(response, "Bob") response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=20') # confirm the search returned one object self.assertContains(response, "\n1 pluggable search person\n") self.assertContains(response, "Amy") def test_reset_link(self): """ Test presence of reset link in search bar ("1 result (_x total_)"). """ # 1 query for session + 1 for fetching user # + 1 for filtered result + 1 for filtered count # + 1 for total count with self.assertNumQueries(5): response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui') self.assertContains( response, """<span class="small quiet">1 result (<a href="?">3 total</a>)</span>""", html=True ) def test_no_total_count(self): """ #8408 -- "Show all" should be displayed instead of the total count if ModelAdmin.show_full_result_count is False. """ # 1 query for session + 1 for fetching user # + 1 for filtered result + 1 for filtered count with self.assertNumQueries(4): response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar') self.assertContains( response, """<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""", html=True ) self.assertTrue(response.context['cl'].show_admin_actions) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminInheritedInlinesTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_inline(self): """ Inline models which inherit from a common parent are correctly handled. 
""" foo_user = "foo username" bar_user = "bar username" name_re = re.compile(b'name="(.*?)"') # test the add case response = self.client.get(reverse('admin:admin_views_persona_add')) names = name_re.findall(response.content) # make sure we have no duplicate HTML names self.assertEqual(len(names), len(set(names))) # test the add case post_data = { "name": "Test Name", # inline data "accounts-TOTAL_FORMS": "1", "accounts-INITIAL_FORMS": "0", "accounts-MAX_NUM_FORMS": "0", "accounts-0-username": foo_user, "accounts-2-TOTAL_FORMS": "1", "accounts-2-INITIAL_FORMS": "0", "accounts-2-MAX_NUM_FORMS": "0", "accounts-2-0-username": bar_user, } response = self.client.post(reverse('admin:admin_views_persona_add'), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere self.assertEqual(Persona.objects.count(), 1) self.assertEqual(FooAccount.objects.count(), 1) self.assertEqual(BarAccount.objects.count(), 1) self.assertEqual(FooAccount.objects.all()[0].username, foo_user) self.assertEqual(BarAccount.objects.all()[0].username, bar_user) self.assertEqual(Persona.objects.all()[0].accounts.count(), 2) persona_id = Persona.objects.all()[0].id foo_id = FooAccount.objects.all()[0].id bar_id = BarAccount.objects.all()[0].id # test the edit case response = self.client.get(reverse('admin:admin_views_persona_change', args=(persona_id,))) names = name_re.findall(response.content) # make sure we have no duplicate HTML names self.assertEqual(len(names), len(set(names))) post_data = { "name": "Test Name", "accounts-TOTAL_FORMS": "2", "accounts-INITIAL_FORMS": "1", "accounts-MAX_NUM_FORMS": "0", "accounts-0-username": "%s-1" % foo_user, "accounts-0-account_ptr": str(foo_id), "accounts-0-persona": str(persona_id), "accounts-2-TOTAL_FORMS": "2", "accounts-2-INITIAL_FORMS": "1", "accounts-2-MAX_NUM_FORMS": "0", "accounts-2-0-username": "%s-1" % bar_user, "accounts-2-0-account_ptr": str(bar_id), "accounts-2-0-persona": str(persona_id), } response = self.client.post(reverse('admin:admin_views_persona_change', args=(persona_id,)), post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Persona.objects.count(), 1) self.assertEqual(FooAccount.objects.count(), 1) self.assertEqual(BarAccount.objects.count(), 1) self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user) self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user) self.assertEqual(Persona.objects.all()[0].accounts.count(), 2) @override_settings(ROOT_URLCONF='admin_views.urls') class TestCustomChangeList(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_custom_changelist(self): """ Validate that a custom ChangeList class can be used (#9749) """ # Insert some data post_data = {"name": "First Gadget"} response = self.client.post(reverse('admin:admin_views_gadget_add'), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere # Hit the page once to get messages out of the queue message list response = self.client.get(reverse('admin:admin_views_gadget_changelist')) # Data is still not visible on the page response = self.client.get(reverse('admin:admin_views_gadget_changelist')) self.assertEqual(response.status_code, 200) self.assertNotContains(response, 'First Gadget') @override_settings(ROOT_URLCONF='admin_views.urls') class TestInlineNotEditable(TestCase): @classmethod def setUpTestData(cls): cls.superuser = 
User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_GET_parent_add(self): """ GET on the parent add view renders even though its inline is not editable (the original report was simply "InlineModelAdmin broken?"). """ response = self.client.get(reverse('admin:admin_views_parent_add')) self.assertEqual(response.status_code, 200) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminCustomQuerysetTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.pks = [EmptyModel.objects.create().id for i in range(3)] def setUp(self): self.client.force_login(self.superuser) self.super_login = { REDIRECT_FIELD_NAME: reverse('admin:index'), 'username': 'super', 'password': 'secret', } def test_changelist_view(self): response = self.client.get(reverse('admin:admin_views_emptymodel_changelist')) for i in self.pks: if i > 1: self.assertContains(response, 'Primary key = %s' % i) else: self.assertNotContains(response, 'Primary key = %s' % i) def test_changelist_view_count_queries(self): # create 2 Person objects Person.objects.create(name='person1', gender=1) Person.objects.create(name='person2', gender=2) changelist_url = reverse('admin:admin_views_person_changelist') # 5 queries are expected: 1 for the session, 1 for the user, # 2 for the counts and 1 for the objects on the page with self.assertNumQueries(5): resp = self.client.get(changelist_url) self.assertEqual(resp.context['selection_note'], '0 of 2 selected') self.assertEqual(resp.context['selection_note_all'], 'All 2 selected') with self.assertNumQueries(5): extra = {'q': 'not_in_name'} resp = self.client.get(changelist_url, extra) self.assertEqual(resp.context['selection_note'], '0 of 0 selected') self.assertEqual(resp.context['selection_note_all'], 'All 0 selected') with self.assertNumQueries(5): extra = {'q': 'person'} resp = self.client.get(changelist_url, extra) self.assertEqual(resp.context['selection_note'], '0 of 2 selected') self.assertEqual(resp.context['selection_note_all'], 'All 2 selected') with self.assertNumQueries(5): extra = {'gender__exact': '1'} resp = self.client.get(changelist_url, extra) self.assertEqual(resp.context['selection_note'], '0 of 1 selected') self.assertEqual(resp.context['selection_note_all'], '1 selected') def test_change_view(self): for i in self.pks: url = reverse('admin:admin_views_emptymodel_change', args=(i,)) response = self.client.get(url, follow=True) if i > 1: self.assertEqual(response.status_code, 200) else: self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['empty model with ID “1” doesn’t exist. Perhaps it was deleted?'] ) def test_add_model_modeladmin_defer_qs(self): # Test for #14529.
defer() is used in ModelAdmin.get_queryset() # model has __str__ method self.assertEqual(CoverLetter.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "author": "Candidate, Best", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_coverletter_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(CoverLetter.objects.count(), 1) # Message should contain non-ugly model verbose name pk = CoverLetter.objects.all()[0].pk self.assertContains( response, '<li class="success">The cover letter “<a href="%s">' 'Candidate, Best</a>” was added successfully.</li>' % reverse('admin:admin_views_coverletter_change', args=(pk,)), html=True ) # model has no __str__ method self.assertEqual(ShortMessage.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "content": "What's this SMS thing?", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_shortmessage_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(ShortMessage.objects.count(), 1) # Message should contain non-ugly model verbose name sm = ShortMessage.objects.all()[0] self.assertContains( response, '<li class="success">The short message “<a href="%s">' '%s</a>” was added successfully.</li>' % (reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), sm), html=True ) def test_add_model_modeladmin_only_qs(self): # Test for #14529. only() is used in ModelAdmin.get_queryset() # model has __str__ method self.assertEqual(Telegram.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "title": "Urgent telegram", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_telegram_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Telegram.objects.count(), 1) # Message should contain non-ugly model verbose name pk = Telegram.objects.all()[0].pk self.assertContains( response, '<li class="success">The telegram “<a href="%s">' 'Urgent telegram</a>” was added successfully.</li>' % reverse('admin:admin_views_telegram_change', args=(pk,)), html=True ) # model has no __str__ method self.assertEqual(Paper.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "title": "My Modified Paper Title", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_paper_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Paper.objects.count(), 1) # Message should contain non-ugly model verbose name p = Paper.objects.all()[0] self.assertContains( response, '<li class="success">The paper “<a href="%s">' '%s</a>” was added successfully.</li>' % (reverse('admin:admin_views_paper_change', args=(p.pk,)), p), html=True ) def test_edit_model_modeladmin_defer_qs(self): # Test for #14529. 
defer() is used in ModelAdmin.get_queryset() # model has __str__ method cl = CoverLetter.objects.create(author="John Doe") self.assertEqual(CoverLetter.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_coverletter_change', args=(cl.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "author": "John Doe II", "_save": "Save", } url = reverse('admin:admin_views_coverletter_change', args=(cl.pk,)) response = self.client.post(url, post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(CoverLetter.objects.count(), 1) # Message should contain non-ugly model verbose name. Instance # representation is set by model's __str__() self.assertContains( response, '<li class="success">The cover letter “<a href="%s">' 'John Doe II</a>” was changed successfully.</li>' % reverse('admin:admin_views_coverletter_change', args=(cl.pk,)), html=True ) # model has no __str__ method sm = ShortMessage.objects.create(content="This is expensive") self.assertEqual(ShortMessage.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "content": "Too expensive", "_save": "Save", } url = reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)) response = self.client.post(url, post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(ShortMessage.objects.count(), 1) # Message should contain non-ugly model verbose name. The ugly(!) # instance representation is set by __str__(). self.assertContains( response, '<li class="success">The short message “<a href="%s">' '%s</a>” was changed successfully.</li>' % (reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), sm), html=True ) def test_edit_model_modeladmin_only_qs(self): # Test for #14529. only() is used in ModelAdmin.get_queryset() # model has __str__ method t = Telegram.objects.create(title="First Telegram") self.assertEqual(Telegram.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_telegram_change', args=(t.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "title": "Telegram without typo", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_telegram_change', args=(t.pk,)), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Telegram.objects.count(), 1) # Message should contain non-ugly model verbose name. The instance # representation is set by model's __str__() self.assertContains( response, '<li class="success">The telegram “<a href="%s">' 'Telegram without typo</a>” was changed successfully.</li>' % reverse('admin:admin_views_telegram_change', args=(t.pk,)), html=True ) # model has no __str__ method p = Paper.objects.create(title="My Paper Title") self.assertEqual(Paper.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_paper_change', args=(p.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "title": "My Modified Paper Title", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_paper_change', args=(p.pk,)), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Paper.objects.count(), 1) # Message should contain non-ugly model verbose name. The ugly(!) # instance representation is set by __str__(). 
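# (Paper defines no custom __str__(), so Django's default Model.__str__()
# representation, e.g. 'Paper object (1)', is what appears in the message.)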
self.assertContains( response, '<li class="success">The paper “<a href="%s">' '%s</a>” was changed successfully.</li>' % (reverse('admin:admin_views_paper_change', args=(p.pk,)), p), html=True ) def test_history_view_custom_qs(self): """ Custom querysets are considered for the admin history view. """ self.client.post(reverse('admin:login'), self.super_login) FilteredManager.objects.create(pk=1) FilteredManager.objects.create(pk=2) response = self.client.get(reverse('admin:admin_views_filteredmanager_changelist')) self.assertContains(response, "PK=1") self.assertContains(response, "PK=2") self.assertEqual( self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(1,))).status_code, 200 ) self.assertEqual( self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(2,))).status_code, 200 ) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminInlineFileUploadTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') file1 = tempfile.NamedTemporaryFile(suffix=".file1") file1.write(b'a' * (2 ** 21)) filename = file1.name file1.close() cls.gallery = Gallery.objects.create(name='Test Gallery') cls.picture = Picture.objects.create( name='Test Picture', image=filename, gallery=cls.gallery, ) def setUp(self): self.client.force_login(self.superuser) def test_form_has_multipart_enctype(self): response = self.client.get( reverse('admin:admin_views_gallery_change', args=(self.gallery.id,)) ) self.assertIs(response.context['has_file_field'], True) self.assertContains(response, MULTIPART_ENCTYPE) def test_inline_file_upload_edit_validation_error_post(self): """ Inline file uploads correctly display prior data (#10002). """ post_data = { "name": "Test Gallery", "pictures-TOTAL_FORMS": "2", "pictures-INITIAL_FORMS": "1", "pictures-MAX_NUM_FORMS": "0", "pictures-0-id": str(self.picture.id), "pictures-0-gallery": str(self.gallery.id), "pictures-0-name": "Test Picture", "pictures-0-image": "", "pictures-1-id": "", "pictures-1-gallery": str(self.gallery.id), "pictures-1-name": "Test Picture 2", "pictures-1-image": "", } response = self.client.post( reverse('admin:admin_views_gallery_change', args=(self.gallery.id,)), post_data ) self.assertContains(response, b"Currently") @override_settings(ROOT_URLCONF='admin_views.urls') class AdminInlineTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.collector = Collector.objects.create(pk=1, name='John Fowles') def setUp(self): self.post_data = { "name": "Test Name", "widget_set-TOTAL_FORMS": "3", "widget_set-INITIAL_FORMS": "0", "widget_set-MAX_NUM_FORMS": "0", "widget_set-0-id": "", "widget_set-0-owner": "1", "widget_set-0-name": "", "widget_set-1-id": "", "widget_set-1-owner": "1", "widget_set-1-name": "", "widget_set-2-id": "", "widget_set-2-owner": "1", "widget_set-2-name": "", "doohickey_set-TOTAL_FORMS": "3", "doohickey_set-INITIAL_FORMS": "0", "doohickey_set-MAX_NUM_FORMS": "0", "doohickey_set-0-owner": "1", "doohickey_set-0-code": "", "doohickey_set-0-name": "", "doohickey_set-1-owner": "1", "doohickey_set-1-code": "", "doohickey_set-1-name": "", "doohickey_set-2-owner": "1", "doohickey_set-2-code": "", "doohickey_set-2-name": "", "grommet_set-TOTAL_FORMS": "3", "grommet_set-INITIAL_FORMS": "0", "grommet_set-MAX_NUM_FORMS": "0", "grommet_set-0-code": "", "grommet_set-0-owner": "1", "grommet_set-0-name": 
"", "grommet_set-1-code": "", "grommet_set-1-owner": "1", "grommet_set-1-name": "", "grommet_set-2-code": "", "grommet_set-2-owner": "1", "grommet_set-2-name": "", "whatsit_set-TOTAL_FORMS": "3", "whatsit_set-INITIAL_FORMS": "0", "whatsit_set-MAX_NUM_FORMS": "0", "whatsit_set-0-owner": "1", "whatsit_set-0-index": "", "whatsit_set-0-name": "", "whatsit_set-1-owner": "1", "whatsit_set-1-index": "", "whatsit_set-1-name": "", "whatsit_set-2-owner": "1", "whatsit_set-2-index": "", "whatsit_set-2-name": "", "fancydoodad_set-TOTAL_FORMS": "3", "fancydoodad_set-INITIAL_FORMS": "0", "fancydoodad_set-MAX_NUM_FORMS": "0", "fancydoodad_set-0-doodad_ptr": "", "fancydoodad_set-0-owner": "1", "fancydoodad_set-0-name": "", "fancydoodad_set-0-expensive": "on", "fancydoodad_set-1-doodad_ptr": "", "fancydoodad_set-1-owner": "1", "fancydoodad_set-1-name": "", "fancydoodad_set-1-expensive": "on", "fancydoodad_set-2-doodad_ptr": "", "fancydoodad_set-2-owner": "1", "fancydoodad_set-2-name": "", "fancydoodad_set-2-expensive": "on", "category_set-TOTAL_FORMS": "3", "category_set-INITIAL_FORMS": "0", "category_set-MAX_NUM_FORMS": "0", "category_set-0-order": "", "category_set-0-id": "", "category_set-0-collector": "1", "category_set-1-order": "", "category_set-1-id": "", "category_set-1-collector": "1", "category_set-2-order": "", "category_set-2-id": "", "category_set-2-collector": "1", } self.client.force_login(self.superuser) def test_simple_inline(self): "A simple model can be saved as inlines" # First add a new inline self.post_data['widget_set-0-name'] = "Widget 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Widget.objects.count(), 1) self.assertEqual(Widget.objects.all()[0].name, "Widget 1") widget_id = Widget.objects.all()[0].id # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="widget_set-0-id"') # No file or image fields, no enctype on the forms self.assertIs(response.context['has_file_field'], False) self.assertNotContains(response, MULTIPART_ENCTYPE) # Now resave that inline self.post_data['widget_set-INITIAL_FORMS'] = "1" self.post_data['widget_set-0-id'] = str(widget_id) self.post_data['widget_set-0-name'] = "Widget 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Widget.objects.count(), 1) self.assertEqual(Widget.objects.all()[0].name, "Widget 1") # Now modify that inline self.post_data['widget_set-INITIAL_FORMS'] = "1" self.post_data['widget_set-0-id'] = str(widget_id) self.post_data['widget_set-0-name'] = "Widget 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Widget.objects.count(), 1) self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated") def test_explicit_autofield_inline(self): "A model with an explicit autofield primary key can be saved as inlines. 
Regression for #8093" # First add a new inline self.post_data['grommet_set-0-name'] = "Grommet 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Grommet.objects.count(), 1) self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1") # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="grommet_set-0-code"') # Now resave that inline self.post_data['grommet_set-INITIAL_FORMS'] = "1" self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code) self.post_data['grommet_set-0-name'] = "Grommet 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Grommet.objects.count(), 1) self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1") # Now modify that inline self.post_data['grommet_set-INITIAL_FORMS'] = "1" self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code) self.post_data['grommet_set-0-name'] = "Grommet 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Grommet.objects.count(), 1) self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated") def test_char_pk_inline(self): "A model with a character PK can be saved as inlines. Regression for #10992" # First add a new inline self.post_data['doohickey_set-0-code'] = "DH1" self.post_data['doohickey_set-0-name'] = "Doohickey 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(DooHickey.objects.count(), 1) self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1") # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="doohickey_set-0-code"') # Now resave that inline self.post_data['doohickey_set-INITIAL_FORMS'] = "1" self.post_data['doohickey_set-0-code'] = "DH1" self.post_data['doohickey_set-0-name'] = "Doohickey 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(DooHickey.objects.count(), 1) self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1") # Now modify that inline self.post_data['doohickey_set-INITIAL_FORMS'] = "1" self.post_data['doohickey_set-0-code'] = "DH1" self.post_data['doohickey_set-0-name'] = "Doohickey 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(DooHickey.objects.count(), 1) self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated") def test_integer_pk_inline(self): "A model with an integer PK can be saved as inlines. 
Regression for #10992" # First add a new inline self.post_data['whatsit_set-0-index'] = "42" self.post_data['whatsit_set-0-name'] = "Whatsit 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Whatsit.objects.count(), 1) self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1") # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="whatsit_set-0-index"') # Now resave that inline self.post_data['whatsit_set-INITIAL_FORMS'] = "1" self.post_data['whatsit_set-0-index'] = "42" self.post_data['whatsit_set-0-name'] = "Whatsit 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Whatsit.objects.count(), 1) self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1") # Now modify that inline self.post_data['whatsit_set-INITIAL_FORMS'] = "1" self.post_data['whatsit_set-0-index'] = "42" self.post_data['whatsit_set-0-name'] = "Whatsit 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Whatsit.objects.count(), 1) self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated") def test_inherited_inline(self): "An inherited model can be saved as inlines. Regression for #11042" # First add a new inline self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(FancyDoodad.objects.count(), 1) self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1") doodad_pk = FancyDoodad.objects.all()[0].pk # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"') # Now resave that inline self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1" self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk) self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(FancyDoodad.objects.count(), 1) self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1") # Now modify that inline self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1" self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk) self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(FancyDoodad.objects.count(), 1) self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated") def test_ordered_inline(self): """ An inline with an editable ordering fields is updated correctly. """ # Create some objects with an initial ordering Category.objects.create(id=1, order=1, collector=self.collector) Category.objects.create(id=2, order=2, collector=self.collector) Category.objects.create(id=3, order=0, collector=self.collector) Category.objects.create(id=4, order=0, collector=self.collector) # NB: The order values must be changed so that the items are reordered. 
self.post_data.update({ "name": "Frederick Clegg", "category_set-TOTAL_FORMS": "7", "category_set-INITIAL_FORMS": "4", "category_set-MAX_NUM_FORMS": "0", "category_set-0-order": "14", "category_set-0-id": "1", "category_set-0-collector": "1", "category_set-1-order": "13", "category_set-1-id": "2", "category_set-1-collector": "1", "category_set-2-order": "1", "category_set-2-id": "3", "category_set-2-collector": "1", "category_set-3-order": "0", "category_set-3-id": "4", "category_set-3-collector": "1", "category_set-4-order": "", "category_set-4-id": "", "category_set-4-collector": "1", "category_set-5-order": "", "category_set-5-id": "", "category_set-5-collector": "1", "category_set-6-order": "", "category_set-6-id": "", "category_set-6-collector": "1", }) collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) # Successful post will redirect self.assertEqual(response.status_code, 302) # The order values have been applied to the right objects self.assertEqual(self.collector.category_set.count(), 4) self.assertEqual(Category.objects.get(id=1).order, 14) self.assertEqual(Category.objects.get(id=2).order, 13) self.assertEqual(Category.objects.get(id=3).order, 1) self.assertEqual(Category.objects.get(id=4).order, 0) @override_settings(ROOT_URLCONF='admin_views.urls') class NeverCacheTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') def setUp(self): self.client.force_login(self.superuser) def test_admin_index(self): "Check the never-cache status of the main index" response = self.client.get(reverse('admin:index')) self.assertEqual(get_max_age(response), 0) def test_app_index(self): "Check the never-cache status of an application index" response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertEqual(get_max_age(response), 0) def test_model_index(self): "Check the never-cache status of a model index" response = self.client.get(reverse('admin:admin_views_fabric_changelist')) self.assertEqual(get_max_age(response), 0) def test_model_add(self): "Check the never-cache status of a model add page" response = self.client.get(reverse('admin:admin_views_fabric_add')) self.assertEqual(get_max_age(response), 0) def test_model_view(self): "Check the never-cache status of a model edit page" response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,))) self.assertEqual(get_max_age(response), 0) def test_model_history(self): "Check the never-cache status of a model history page" response = self.client.get(reverse('admin:admin_views_section_history', args=(self.s1.pk,))) self.assertEqual(get_max_age(response), 0) def test_model_delete(self): "Check the never-cache status of a model delete page" response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,))) self.assertEqual(get_max_age(response), 0) def test_login(self): "Check the never-cache status of login views" self.client.logout() response = self.client.get(reverse('admin:index')) self.assertEqual(get_max_age(response), 0) def test_logout(self): "Check the never-cache status of logout view" response = self.client.get(reverse('admin:logout')) self.assertEqual(get_max_age(response), 0) def test_password_change(self): "Check the never-cache status of the password change view" self.client.logout() response = 
self.client.get(reverse('admin:password_change')) self.assertIsNone(get_max_age(response)) def test_password_change_done(self): "Check the never-cache status of the password change done view" response = self.client.get(reverse('admin:password_change_done')) self.assertIsNone(get_max_age(response)) def test_JS_i18n(self): "Check the never-cache status of the JavaScript i18n view" response = self.client.get(reverse('admin:jsi18n')) self.assertIsNone(get_max_age(response)) @override_settings(ROOT_URLCONF='admin_views.urls') class PrePopulatedTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def setUp(self): self.client.force_login(self.superuser) def test_prepopulated_on(self): response = self.client.get(reverse('admin:admin_views_prepopulatedpost_add')) self.assertContains(response, "&quot;id&quot;: &quot;#id_slug&quot;") self.assertContains(response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]") self.assertContains(response, "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;") def test_prepopulated_off(self): response = self.client.get(reverse('admin:admin_views_prepopulatedpost_change', args=(self.p1.pk,))) self.assertContains(response, "A Long Title") self.assertNotContains(response, "&quot;id&quot;: &quot;#id_slug&quot;") self.assertNotContains(response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]") self.assertNotContains( response, "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;" ) @override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True) def test_prepopulated_maxlength_localized(self): """ Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure that maxLength (in the JavaScript) is rendered without separators. """ response = self.client.get(reverse('admin:admin_views_prepopulatedpostlargeslug_add')) self.assertContains(response, "&quot;maxLength&quot;: 1000") # instead of 1,000 def test_view_only_add_form(self): """ PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug' which is present in the add view, even if the ModelAdmin.has_change_permission() returns False. """ response = self.client.get(reverse('admin7:admin_views_prepopulatedpost_add')) self.assertContains(response, 'data-prepopulated-fields=') self.assertContains(response, '&quot;id&quot;: &quot;#id_slug&quot;') def test_view_only_change_form(self): """ PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'. That doesn't break a view-only change view. """ response = self.client.get(reverse('admin7:admin_views_prepopulatedpost_change', args=(self.p1.pk,))) self.assertContains(response, 'data-prepopulated-fields="[]"') self.assertContains(response, '<div class="readonly">%s</div>' % self.p1.slug) @override_settings(ROOT_URLCONF='admin_views.urls') class SeleniumTests(AdminSeleniumTestCase): available_apps = ['admin_views'] + AdminSeleniumTestCase.available_apps def setUp(self): self.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') self.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def test_prepopulated_fields(self): """ The JavaScript-automated prepopulated fields work with the main form and with stacked and tabular inlines. Refs #13068, #9264, #9983, #9784. 
""" self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(self.live_server_url + reverse('admin:admin_views_mainprepopulated_add')) self.wait_for('.select2') # Main form ---------------------------------------------------------- self.selenium.find_element_by_id('id_pubdate').send_keys('2012-02-18') self.select_option('#id_status', 'option two') self.selenium.find_element_by_id('id_name').send_keys(' the mAin nÀMë and it\'s awεšomeıııİ') slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value') slug3 = self.selenium.find_element_by_id('id_slug3').get_attribute('value') self.assertEqual(slug1, 'the-main-name-and-its-awesomeiiii-2012-02-18') self.assertEqual(slug2, 'option-two-the-main-name-and-its-awesomeiiii') self.assertEqual(slug3, 'the-main-n\xe0m\xeb-and-its-aw\u03b5\u0161ome\u0131\u0131\u0131i') # Stacked inlines ---------------------------------------------------- # Initial inline self.selenium.find_element_by_id('id_relatedprepopulated_set-0-pubdate').send_keys('2011-12-17') self.select_option('#id_relatedprepopulated_set-0-status', 'option one') self.selenium.find_element_by_id('id_relatedprepopulated_set-0-name').send_keys( ' here is a sŤāÇkeð inline ! ' ) slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug2').get_attribute('value') self.assertEqual(slug1, 'here-is-a-stacked-inline-2011-12-17') self.assertEqual(slug2, 'option-one-here-is-a-stacked-inline') initial_select2_inputs = self.selenium.find_elements_by_class_name('select2-selection') # Inline formsets have empty/invisible forms. # Only the 4 visible select2 inputs are initialized. num_initial_select2_inputs = len(initial_select2_inputs) self.assertEqual(num_initial_select2_inputs, 4) # Add an inline self.selenium.find_elements_by_link_text('Add another Related prepopulated')[0].click() self.assertEqual( len(self.selenium.find_elements_by_class_name('select2-selection')), num_initial_select2_inputs + 2 ) self.selenium.find_element_by_id('id_relatedprepopulated_set-1-pubdate').send_keys('1999-01-25') self.select_option('#id_relatedprepopulated_set-1-status', 'option two') self.selenium.find_element_by_id('id_relatedprepopulated_set-1-name').send_keys( ' now you haVe anöther sŤāÇkeð inline with a very ... ' 'loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog text... ' ) slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug2').get_attribute('value') # 50 characters maximum for slug1 field self.assertEqual(slug1, 'now-you-have-another-stacked-inline-with-a-very-lo') # 60 characters maximum for slug2 field self.assertEqual(slug2, 'option-two-now-you-have-another-stacked-inline-with-a-very-l') # Tabular inlines ---------------------------------------------------- # Initial inline self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-pubdate').send_keys('1234-12-07') self.select_option('#id_relatedprepopulated_set-2-0-status', 'option two') self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-name').send_keys( 'And now, with a tÃbűlaŘ inline !!!' 
) slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug2').get_attribute('value') self.assertEqual(slug1, 'and-now-with-a-tabular-inline-1234-12-07') self.assertEqual(slug2, 'option-two-and-now-with-a-tabular-inline') # Add an inline # Button may be outside the browser frame. element = self.selenium.find_elements_by_link_text('Add another Related prepopulated')[1] self.selenium.execute_script('window.scrollTo(0, %s);' % element.location['y']) element.click() self.assertEqual( len(self.selenium.find_elements_by_class_name('select2-selection')), num_initial_select2_inputs + 4 ) self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-pubdate').send_keys('1981-08-22') self.select_option('#id_relatedprepopulated_set-2-1-status', 'option one') self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-name').send_keys( r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters' ) slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug2').get_attribute('value') self.assertEqual(slug1, 'tabular-inline-with-ignored-characters-1981-08-22') self.assertEqual(slug2, 'option-one-tabular-inline-with-ignored-characters') # Add an inline without an initial inline. # The button is outside of the browser frame. self.selenium.execute_script("window.scrollTo(0, document.body.scrollHeight);") self.selenium.find_elements_by_link_text('Add another Related prepopulated')[2].click() self.assertEqual( len(self.selenium.find_elements_by_class_name('select2-selection')), num_initial_select2_inputs + 6 ) # Save and check that everything is properly stored in the database with self.wait_page_loaded(): self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.assertEqual(MainPrepopulated.objects.all().count(), 1) MainPrepopulated.objects.get( name=' the mAin nÀMë and it\'s awεšomeıııİ', pubdate='2012-02-18', status='option two', slug1='the-main-name-and-its-awesomeiiii-2012-02-18', slug2='option-two-the-main-name-and-its-awesomeiiii', slug3='the-main-nàmë-and-its-awεšomeıııi', ) self.assertEqual(RelatedPrepopulated.objects.all().count(), 4) RelatedPrepopulated.objects.get( name=' here is a sŤāÇkeð inline ! ', pubdate='2011-12-17', status='option one', slug1='here-is-a-stacked-inline-2011-12-17', slug2='option-one-here-is-a-stacked-inline', ) RelatedPrepopulated.objects.get( # 75 characters in name field name=' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooo', pubdate='1999-01-25', status='option two', slug1='now-you-have-another-stacked-inline-with-a-very-lo', slug2='option-two-now-you-have-another-stacked-inline-with-a-very-l', ) RelatedPrepopulated.objects.get( name='And now, with a tÃbűlaŘ inline !!!', pubdate='1234-12-07', status='option two', slug1='and-now-with-a-tabular-inline-1234-12-07', slug2='option-two-and-now-with-a-tabular-inline', ) RelatedPrepopulated.objects.get( name=r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters', pubdate='1981-08-22', status='option one', slug1='tabular-inline-with-ignored-characters-1981-08-22', slug2='option-one-tabular-inline-with-ignored-characters', ) def test_populate_existing_object(self): """ The prepopulation works for existing objects too, as long as the original field is empty (#19082). """ # Slugs are empty to start with. 
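# Prepopulation on a change form only kicks in while the target field is
# still empty (the behaviour #19082 introduced, and the one under test here).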
item = MainPrepopulated.objects.create( name=' this is the mAin nÀMë', pubdate='2012-02-18', status='option two', slug1='', slug2='', ) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) object_url = self.live_server_url + reverse('admin:admin_views_mainprepopulated_change', args=(item.id,)) self.selenium.get(object_url) self.selenium.find_element_by_id('id_name').send_keys(' the best') # The slugs got prepopulated since they were originally empty slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value') self.assertEqual(slug1, 'this-is-the-main-name-the-best-2012-02-18') self.assertEqual(slug2, 'option-two-this-is-the-main-name-the-best') # Save the object with self.wait_page_loaded(): self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.selenium.get(object_url) self.selenium.find_element_by_id('id_name').send_keys(' hello') # The slugs didn't change since they were originally not empty slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value') slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value') self.assertEqual(slug1, 'this-is-the-main-name-the-best-2012-02-18') self.assertEqual(slug2, 'option-two-this-is-the-main-name-the-best') def test_collapsible_fieldset(self): """ The 'collapse' class in a fieldset definition allows showing/hiding the appropriate field section. """ self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_add')) self.assertFalse(self.selenium.find_element_by_id('id_title').is_displayed()) self.selenium.find_elements_by_link_text('Show')[0].click() self.assertTrue(self.selenium.find_element_by_id('id_title').is_displayed()) self.assertEqual(self.selenium.find_element_by_id('fieldsetcollapser0').text, "Hide") def test_first_field_focus(self): """JavaScript-assisted auto-focus on first usable form field.""" # First form field has a single widget self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) with self.wait_page_loaded(): self.selenium.get(self.live_server_url + reverse('admin:admin_views_picture_add')) self.assertEqual( self.selenium.switch_to.active_element, self.selenium.find_element_by_id('id_name') ) # First form field has a MultiWidget with self.wait_page_loaded(): self.selenium.get(self.live_server_url + reverse('admin:admin_views_reservation_add')) self.assertEqual( self.selenium.switch_to.active_element, self.selenium.find_element_by_id('id_start_date_0') ) def test_cancel_delete_confirmation(self): "Cancelling the deletion of an object takes the user back one page." pizza = Pizza.objects.create(name="Double Cheese") url = reverse('admin:admin_views_pizza_change', args=(pizza.id,)) full_url = self.live_server_url + url self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(full_url) self.selenium.find_element_by_class_name('deletelink').click() # Click 'cancel' on the delete page. self.selenium.find_element_by_class_name('cancel-link').click() # Wait until we're back on the change page.
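# (The cancel link is handled by the admin's cancel JavaScript, which, as
# of this era of Django, navigates back in browser history via
# window.history.back() in admin/js/cancel.js; that's why cancelling
# "takes the user back one page".)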
self.wait_for_text('#content h1', 'Change pizza') self.assertEqual(self.selenium.current_url, full_url) self.assertEqual(Pizza.objects.count(), 1) def test_cancel_delete_related_confirmation(self): """ Cancelling the deletion of an object with relations takes the user back one page. """ pizza = Pizza.objects.create(name="Double Cheese") topping1 = Topping.objects.create(name="Cheddar") topping2 = Topping.objects.create(name="Mozzarella") pizza.toppings.add(topping1, topping2) url = reverse('admin:admin_views_pizza_change', args=(pizza.id,)) full_url = self.live_server_url + url self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(full_url) self.selenium.find_element_by_class_name('deletelink').click() # Click 'cancel' on the delete page. self.selenium.find_element_by_class_name('cancel-link').click() # Wait until we're back on the change page. self.wait_for_text('#content h1', 'Change pizza') self.assertEqual(self.selenium.current_url, full_url) self.assertEqual(Pizza.objects.count(), 1) self.assertEqual(Topping.objects.count(), 2) def test_list_editable_popups(self): """ list_editable foreign keys have add/change popups. """ from selenium.webdriver.support.ui import Select s1 = Section.objects.create(name='Test section') Article.objects.create( title='foo', content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=s1, ) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_changelist')) # Change popup self.selenium.find_element_by_id('change_id_form-0-section').click() self.wait_for_and_switch_to_popup() self.wait_for_text('#content h1', 'Change section') name_input = self.selenium.find_element_by_id('id_name') name_input.clear() name_input.send_keys('<i>edited section</i>') self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) select = Select(self.selenium.find_element_by_id('id_form-0-section')) self.assertEqual(select.first_selected_option.text, '<i>edited section</i>') # Rendered select2 input. select2_display = self.selenium.find_element_by_class_name('select2-selection__rendered') # Clear button (×\n) is included in text. self.assertEqual(select2_display.text, '×\n<i>edited section</i>') # Add popup self.selenium.find_element_by_id('add_id_form-0-section').click() self.wait_for_and_switch_to_popup() self.wait_for_text('#content h1', 'Add section') self.selenium.find_element_by_id('id_name').send_keys('new section') self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) select = Select(self.selenium.find_element_by_id('id_form-0-section')) self.assertEqual(select.first_selected_option.text, 'new section') select2_display = self.selenium.find_element_by_class_name('select2-selection__rendered') # Clear button (×\n) is included in text. 
self.assertEqual(select2_display.text, '×\nnew section') def test_inline_uuid_pk_edit_with_popup(self): from selenium.webdriver.support.ui import Select parent = ParentWithUUIDPK.objects.create(title='test') related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,)) self.selenium.get(self.live_server_url + change_url) self.selenium.find_element_by_id('change_id_parent').click() self.wait_for_and_switch_to_popup() self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) select = Select(self.selenium.find_element_by_id('id_parent')) self.assertEqual(select.first_selected_option.text, str(parent.id)) self.assertEqual(select.first_selected_option.get_attribute('value'), str(parent.id)) def test_inline_uuid_pk_add_with_popup(self): from selenium.webdriver.support.ui import Select self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(self.live_server_url + reverse('admin:admin_views_relatedwithuuidpkmodel_add')) self.selenium.find_element_by_id('add_id_parent').click() self.wait_for_and_switch_to_popup() self.selenium.find_element_by_id('id_title').send_keys('test') self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) select = Select(self.selenium.find_element_by_id('id_parent')) uuid_id = str(ParentWithUUIDPK.objects.first().id) self.assertEqual(select.first_selected_option.text, uuid_id) self.assertEqual(select.first_selected_option.get_attribute('value'), uuid_id) def test_inline_uuid_pk_delete_with_popup(self): from selenium.webdriver.support.ui import Select parent = ParentWithUUIDPK.objects.create(title='test') related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,)) self.selenium.get(self.live_server_url + change_url) self.selenium.find_element_by_id('delete_id_parent').click() self.wait_for_and_switch_to_popup() self.selenium.find_element_by_xpath('//input[@value="Yes, I’m sure"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) select = Select(self.selenium.find_element_by_id('id_parent')) self.assertEqual(ParentWithUUIDPK.objects.count(), 0) self.assertEqual(select.first_selected_option.text, '---------') self.assertEqual(select.first_selected_option.get_attribute('value'), '') def test_inline_with_popup_cancel_delete(self): """Clicking "No, take me back" on a delete popup closes the window.""" parent = ParentWithUUIDPK.objects.create(title='test') related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,)) self.selenium.get(self.live_server_url + change_url) self.selenium.find_element_by_id('delete_id_parent').click() self.wait_for_and_switch_to_popup() self.selenium.find_element_by_xpath('//a[text()="No, take me back"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0])
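# Cancelling closed the popup itself, so only the original browser window
# should remain open.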
self.assertEqual(len(self.selenium.window_handles), 1) def test_list_editable_raw_id_fields(self): parent = ParentWithUUIDPK.objects.create(title='test') parent2 = ParentWithUUIDPK.objects.create(title='test2') RelatedWithUUIDPKModel.objects.create(parent=parent) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_changelist', current_app=site2.name) self.selenium.get(self.live_server_url + change_url) self.selenium.find_element_by_id('lookup_id_form-0-parent').click() self.wait_for_and_switch_to_popup() # Select "parent2" in the popup. self.selenium.find_element_by_link_text(str(parent2.pk)).click() self.selenium.switch_to.window(self.selenium.window_handles[0]) # The newly selected pk should appear in the raw id input. value = self.selenium.find_element_by_id('id_form-0-parent').get_attribute('value') self.assertEqual(value, str(parent2.pk)) @override_settings(ROOT_URLCONF='admin_views.urls') class ReadonlyTest(AdminFieldExtractionMixin, TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_readonly_get(self): response = self.client.get(reverse('admin:admin_views_post_add')) self.assertEqual(response.status_code, 200) self.assertNotContains(response, 'name="posted"') # 3 fields + 2 submit buttons + 5 inline management form fields, + 2 # hidden fields for inlines + 1 field for the inline + 2 empty form self.assertContains(response, "<input", count=15) self.assertContains(response, formats.localize(datetime.date.today())) self.assertContains(response, "<label>Awesomeness level:</label>") self.assertContains(response, "Very awesome.") self.assertContains(response, "Unknown coolness.") self.assertContains(response, "foo") # Multiline text in a readonly field gets <br> tags self.assertContains(response, 'Multiline<br>test<br>string') self.assertContains(response, '<div class="readonly">Multiline<br>html<br>content</div>', html=True) self.assertContains(response, 'InlineMultiline<br>test<br>string') self.assertContains(response, formats.localize(datetime.date.today() - datetime.timedelta(days=7))) self.assertContains(response, '<div class="form-row field-coolness">') self.assertContains(response, '<div class="form-row field-awesomeness_level">') self.assertContains(response, '<div class="form-row field-posted">') self.assertContains(response, '<div class="form-row field-value">') self.assertContains(response, '<div class="form-row">') self.assertContains(response, '<div class="help">', 3) self.assertContains( response, '<div class="help">Some help text for the title (with Unicode ŠĐĆŽćžšđ)</div>', html=True ) self.assertContains( response, '<div class="help">Some help text for the content (with Unicode ŠĐĆŽćžšđ)</div>', html=True ) self.assertContains( response, '<div class="help">Some help text for the date (with Unicode ŠĐĆŽćžšđ)</div>', html=True ) p = Post.objects.create(title="I worked on readonly_fields", content="Its good stuff") response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,))) self.assertContains(response, "%d amount of cool" % p.pk) def test_readonly_text_field(self): p = Post.objects.create( title="Readonly test", content="test", readonly_content='test\r\n\r\ntest\r\n\r\ntest\r\n\r\ntest', ) Link.objects.create( url="http://www.djangoproject.com", post=p, 
readonly_link_content="test\r\nlink", ) response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,))) # Checking readonly field. self.assertContains(response, 'test<br><br>test<br><br>test<br><br>test') # Checking readonly field in inline. self.assertContains(response, 'test<br>link') def test_readonly_post(self): data = { "title": "Django Got Readonly Fields", "content": "This is an incredible development.", "link_set-TOTAL_FORMS": "1", "link_set-INITIAL_FORMS": "0", "link_set-MAX_NUM_FORMS": "0", } response = self.client.post(reverse('admin:admin_views_post_add'), data) self.assertEqual(response.status_code, 302) self.assertEqual(Post.objects.count(), 1) p = Post.objects.get() self.assertEqual(p.posted, datetime.date.today()) data["posted"] = "10-8-1990" # some date that's not today response = self.client.post(reverse('admin:admin_views_post_add'), data) self.assertEqual(response.status_code, 302) self.assertEqual(Post.objects.count(), 2) p = Post.objects.order_by('-id')[0] self.assertEqual(p.posted, datetime.date.today()) def test_readonly_manytomany(self): "Regression test for #13004" response = self.client.get(reverse('admin:admin_views_pizza_add')) self.assertEqual(response.status_code, 200) def test_user_password_change_limited_queryset(self): su = User.objects.filter(is_superuser=True)[0] response = self.client.get(reverse('admin2:auth_user_password_change', args=(su.pk,))) self.assertEqual(response.status_code, 404) def test_change_form_renders_correct_null_choice_value(self): """ Regression test for #17911. """ choice = Choice.objects.create(choice=None) response = self.client.get(reverse('admin:admin_views_choice_change', args=(choice.pk,))) self.assertContains(response, '<div class="readonly">No opinion</div>', html=True) def test_readonly_manytomany_backwards_ref(self): """ Regression test for #16433 - backwards references for related objects broke if the related field is read-only due to the help_text attribute """ topping = Topping.objects.create(name='Salami') pizza = Pizza.objects.create(name='Americano') pizza.toppings.add(topping) response = self.client.get(reverse('admin:admin_views_topping_add')) self.assertEqual(response.status_code, 200) def test_readonly_manytomany_forwards_ref(self): topping = Topping.objects.create(name='Salami') pizza = Pizza.objects.create(name='Americano') pizza.toppings.add(topping) response = self.client.get(reverse('admin:admin_views_pizza_change', args=(pizza.pk,))) self.assertContains(response, '<label>Toppings:</label>', html=True) self.assertContains(response, '<div class="readonly">Salami</div>', html=True) def test_readonly_onetoone_backwards_ref(self): """ Can reference a reverse OneToOneField in ModelAdmin.readonly_fields. """ v1 = Villain.objects.create(name='Adam') pl = Plot.objects.create(name='Test Plot', team_leader=v1, contact=v1) pd = PlotDetails.objects.create(details='Brand New Plot', plot=pl) response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,))) field = self.get_admin_readonly_field(response, 'plotdetails') self.assertEqual(field.contents(), 'Brand New Plot') # The reverse relation also works if the OneToOneField is null. 
pd.plot = None pd.save() response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,))) field = self.get_admin_readonly_field(response, 'plotdetails') self.assertEqual(field.contents(), '-') # default empty value def test_readonly_field_overrides(self): """ Regression test for #22087 - ModelForm Meta overrides are ignored by AdminReadonlyField """ p = FieldOverridePost.objects.create(title="Test Post", content="Test Content") response = self.client.get(reverse('admin:admin_views_fieldoverridepost_change', args=(p.pk,))) self.assertContains(response, '<div class="help">Overridden help text for the date</div>') self.assertContains(response, '<label for="id_public">Overridden public label:</label>', html=True) self.assertNotContains(response, 'Some help text for the date (with Unicode ŠĐĆŽćžšđ)') def test_correct_autoescaping(self): """ Make sure that non-field readonly elements are properly autoescaped (#24461) """ section = Section.objects.create(name='<a>evil</a>') response = self.client.get(reverse('admin:admin_views_section_change', args=(section.pk,))) self.assertNotContains(response, "<a>evil</a>", status_code=200) self.assertContains(response, "&lt;a&gt;evil&lt;/a&gt;", status_code=200) def test_label_suffix_translated(self): pizza = Pizza.objects.create(name='Americano') url = reverse('admin:admin_views_pizza_change', args=(pizza.pk,)) with self.settings(LANGUAGE_CODE='fr'): response = self.client.get(url) self.assertContains(response, '<label>Toppings\u00A0:</label>', html=True) @override_settings(ROOT_URLCONF='admin_views.urls') class LimitChoicesToInAdminTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_limit_choices_to_as_callable(self): """Test for ticket 2445 changes to admin.""" threepwood = Character.objects.create( username='threepwood', last_action=datetime.datetime.today() + datetime.timedelta(days=1), ) marley = Character.objects.create( username='marley', last_action=datetime.datetime.today() - datetime.timedelta(days=1), ) response = self.client.get(reverse('admin:admin_views_stumpjoke_add')) # The allowed option should appear twice; the limited option should not appear. self.assertContains(response, threepwood.username, count=2) self.assertNotContains(response, marley.username) @override_settings(ROOT_URLCONF='admin_views.urls') class RawIdFieldsTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_limit_choices_to(self): """Regression test for 14880""" actor = Actor.objects.create(name="Palin", age=27) Inquisition.objects.create(expected=True, leader=actor, country="England") Inquisition.objects.create(expected=False, leader=actor, country="Spain") response = self.client.get(reverse('admin:admin_views_sketch_add')) # Find the link m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"', response.content) self.assertTrue(m) # Got a match popup_url = m[1].decode().replace('&amp;', '&') # Handle relative links popup_url = urljoin(response.request['PATH_INFO'], popup_url) # Get the popup and verify the correct objects show up in the resulting # page. 
This step also tests integers, strings and booleans in the
        # lookup query string; in the model, the inquisition field has a
        # limit_choices_to option that includes a filter on a string field
        # (inquisition__actor__name), a filter on an integer field
        # (inquisition__actor__age), and a filter on a boolean field
        # (inquisition__expected).
        response2 = self.client.get(popup_url)
        self.assertContains(response2, "Spain")
        self.assertNotContains(response2, "England")

    def test_limit_choices_to_isnull_false(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        # Find the link
        m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"', response.content)
        self.assertTrue(m)  # Got a match
        popup_url = m[1].decode().replace('&amp;', '&')

        # Handle relative links
        popup_url = urljoin(response.request['PATH_INFO'], popup_url)
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step tests that field__isnull=0 is parsed correctly from
        # the lookup query string; in the model, the defendant0 field has a
        # limit_choices_to option that includes "actor__title__isnull=False".
        response2 = self.client.get(popup_url)
        self.assertContains(response2, "Kilbraken")
        self.assertNotContains(response2, "Palin")

    def test_limit_choices_to_isnull_true(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        # Find the link
        m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"', response.content)
        self.assertTrue(m)  # Got a match
        popup_url = m[1].decode().replace('&amp;', '&')

        # Handle relative links
        popup_url = urljoin(response.request['PATH_INFO'], popup_url)
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step tests that field__isnull=1 is parsed correctly from
        # the lookup query string; in the model, the defendant1 field has a
        # limit_choices_to option that includes "actor__title__isnull=True".
        response2 = self.client.get(popup_url)
        self.assertNotContains(response2, "Kilbraken")
        self.assertContains(response2, "Palin")

    def test_list_display_method_same_name_as_reverse_accessor(self):
        """
        Should be able to use a ModelAdmin method in list_display that has the
        same name as a reverse model field ("sketch" in this case).
        """
        actor = Actor.objects.create(name="Palin", age=27)
        Inquisition.objects.create(expected=True, leader=actor, country="England")
        response = self.client.get(reverse('admin:admin_views_inquisition_changelist'))
        self.assertContains(response, 'list-display-sketch')


@override_settings(ROOT_URLCONF='admin_views.urls')
class UserAdminTest(TestCase):
    """
    Tests user CRUD functionality.
""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True) cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True) cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False) cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True) def setUp(self): self.client.force_login(self.superuser) def test_save_button(self): user_count = User.objects.count() response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', }) new_user = User.objects.get(username='newuser') self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,))) self.assertEqual(User.objects.count(), user_count + 1) self.assertTrue(new_user.has_usable_password()) def test_save_continue_editing_button(self): user_count = User.objects.count() response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', '_continue': '1', }) new_user = User.objects.get(username='newuser') new_user_url = reverse('admin:auth_user_change', args=(new_user.pk,)) self.assertRedirects(response, new_user_url, fetch_redirect_response=False) self.assertEqual(User.objects.count(), user_count + 1) self.assertTrue(new_user.has_usable_password()) response = self.client.get(new_user_url) self.assertContains( response, '<li class="success">The user “<a href="%s">' '%s</a>” was added successfully. 
You may edit it again below.</li>' % (new_user_url, new_user), html=True, ) def test_password_mismatch(self): response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'mismatch', }) self.assertEqual(response.status_code, 200) self.assertFormError(response, 'adminform', 'password', []) self.assertFormError(response, 'adminform', 'password2', ['The two password fields didn’t match.']) def test_user_fk_add_popup(self): """User addition through a FK popup should return the appropriate JavaScript response.""" response = self.client.get(reverse('admin:admin_views_album_add')) self.assertContains(response, reverse('admin:auth_user_add')) self.assertContains(response, 'class="related-widget-wrapper-link add-related" id="add_id_owner"') response = self.client.get(reverse('admin:auth_user_add') + '?_popup=1') self.assertNotContains(response, 'name="_continue"') self.assertNotContains(response, 'name="_addanother"') data = { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', '_popup': '1', '_save': '1', } response = self.client.post(reverse('admin:auth_user_add') + '?_popup=1', data, follow=True) self.assertContains(response, '&quot;obj&quot;: &quot;newuser&quot;') def test_user_fk_change_popup(self): """User change through a FK popup should return the appropriate JavaScript response.""" response = self.client.get(reverse('admin:admin_views_album_add')) self.assertContains(response, reverse('admin:auth_user_change', args=('__fk__',))) self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"') user = User.objects.get(username='changeuser') url = reverse('admin:auth_user_change', args=(user.pk,)) + '?_popup=1' response = self.client.get(url) self.assertNotContains(response, 'name="_continue"') self.assertNotContains(response, 'name="_addanother"') data = { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', 'last_login_0': '2007-05-30', 'last_login_1': '13:20:10', 'date_joined_0': '2007-05-30', 'date_joined_1': '13:20:10', '_popup': '1', '_save': '1', } response = self.client.post(url, data, follow=True) self.assertContains(response, '&quot;obj&quot;: &quot;newuser&quot;') self.assertContains(response, '&quot;action&quot;: &quot;change&quot;') def test_user_fk_delete_popup(self): """User deletion through a FK popup should return the appropriate JavaScript response.""" response = self.client.get(reverse('admin:admin_views_album_add')) self.assertContains(response, reverse('admin:auth_user_delete', args=('__fk__',))) self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"') user = User.objects.get(username='changeuser') url = reverse('admin:auth_user_delete', args=(user.pk,)) + '?_popup=1' response = self.client.get(url) self.assertEqual(response.status_code, 200) data = { 'post': 'yes', '_popup': '1', } response = self.client.post(url, data, follow=True) self.assertContains(response, '&quot;action&quot;: &quot;delete&quot;') def test_save_add_another_button(self): user_count = User.objects.count() response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', '_addanother': '1', }) new_user = User.objects.order_by('-id')[0] self.assertRedirects(response, reverse('admin:auth_user_add')) self.assertEqual(User.objects.count(), user_count + 1) self.assertTrue(new_user.has_usable_password()) def 
test_user_permission_performance(self): u = User.objects.all()[0] # Don't depend on a warm cache, see #17377. ContentType.objects.clear_cache() with self.assertNumQueries(10): response = self.client.get(reverse('admin:auth_user_change', args=(u.pk,))) self.assertEqual(response.status_code, 200) def test_form_url_present_in_context(self): u = User.objects.all()[0] response = self.client.get(reverse('admin3:auth_user_password_change', args=(u.pk,))) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['form_url'], 'pony') @override_settings(ROOT_URLCONF='admin_views.urls') class GroupAdminTest(TestCase): """ Tests group CRUD functionality. """ @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_save_button(self): group_count = Group.objects.count() response = self.client.post(reverse('admin:auth_group_add'), { 'name': 'newgroup', }) Group.objects.order_by('-id')[0] self.assertRedirects(response, reverse('admin:auth_group_changelist')) self.assertEqual(Group.objects.count(), group_count + 1) def test_group_permission_performance(self): g = Group.objects.create(name="test_group") # Ensure no queries are skipped due to cached content type for Group. ContentType.objects.clear_cache() with self.assertNumQueries(8): response = self.client.get(reverse('admin:auth_group_change', args=(g.pk,))) self.assertEqual(response.status_code, 200) @override_settings(ROOT_URLCONF='admin_views.urls') class CSSTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def setUp(self): self.client.force_login(self.superuser) def test_field_prefix_css_classes(self): """ Fields have a CSS class name with a 'field-' prefix. """ response = self.client.get(reverse('admin:admin_views_post_add')) # The main form self.assertContains(response, 'class="form-row field-title"') self.assertContains(response, 'class="form-row field-content"') self.assertContains(response, 'class="form-row field-public"') self.assertContains(response, 'class="form-row field-awesomeness_level"') self.assertContains(response, 'class="form-row field-coolness"') self.assertContains(response, 'class="form-row field-value"') self.assertContains(response, 'class="form-row"') # The lambda function # The tabular inline self.assertContains(response, '<td class="field-url">') self.assertContains(response, '<td class="field-posted">') def test_index_css_classes(self): """ CSS class names are used for each app and model on the admin index pages (#17050). 
""" # General index page response = self.client.get(reverse('admin:index')) self.assertContains(response, '<div class="app-admin_views module') self.assertContains(response, '<tr class="model-actor">') self.assertContains(response, '<tr class="model-album">') # App index page response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertContains(response, '<div class="app-admin_views module') self.assertContains(response, '<tr class="model-actor">') self.assertContains(response, '<tr class="model-album">') def test_app_model_in_form_body_class(self): """ Ensure app and model tag are correctly read by change_form template """ response = self.client.get(reverse('admin:admin_views_section_add')) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_app_model_in_list_body_class(self): """ Ensure app and model tag are correctly read by change_list template """ response = self.client.get(reverse('admin:admin_views_section_changelist')) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_app_model_in_delete_confirmation_body_class(self): """ Ensure app and model tag are correctly read by delete_confirmation template """ response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,))) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_app_model_in_app_index_body_class(self): """ Ensure app and model tag are correctly read by app_index template """ response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertContains(response, '<body class=" dashboard app-admin_views') def test_app_model_in_delete_selected_confirmation_body_class(self): """ Ensure app and model tag are correctly read by delete_selected_confirmation template """ action_data = { ACTION_CHECKBOX_NAME: [self.s1.pk], 'action': 'delete_selected', 'index': 0, } response = self.client.post(reverse('admin:admin_views_section_changelist'), action_data) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_changelist_field_classes(self): """ Cells of the change list table should contain the field name in their class attribute Refs #11195. 
""" Podcast.objects.create(name="Django Dose", release_date=datetime.date.today()) response = self.client.get(reverse('admin:admin_views_podcast_changelist')) self.assertContains(response, '<th class="field-name">') self.assertContains(response, '<td class="field-release_date nowrap">') self.assertContains(response, '<td class="action-checkbox">') try: import docutils except ImportError: docutils = None @unittest.skipUnless(docutils, "no docutils installed.") @override_settings(ROOT_URLCONF='admin_views.urls') @modify_settings(INSTALLED_APPS={'append': ['django.contrib.admindocs', 'django.contrib.flatpages']}) class AdminDocsTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_tags(self): response = self.client.get(reverse('django-admindocs-tags')) # The builtin tag group exists self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True) # A builtin tag exists in both the index and detail self.assertContains(response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True) self.assertContains(response, '<li><a href="#built_in-autoescape">autoescape</a></li>', html=True) # An app tag exists in both the index and detail self.assertContains(response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True) self.assertContains(response, '<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>', html=True) # The admin list tag group exists self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True) # An admin list tag exists in both the index and detail self.assertContains(response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True) self.assertContains(response, '<li><a href="#admin_list-admin_actions">admin_actions</a></li>', html=True) def test_filters(self): response = self.client.get(reverse('django-admindocs-filters')) # The builtin filter group exists self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True) # A builtin filter exists in both the index and detail self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True) self.assertContains(response, '<li><a href="#built_in-add">add</a></li>', html=True) @override_settings( ROOT_URLCONF='admin_views.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }], USE_I18N=False, ) class ValidXHTMLTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_lang_name_present(self): response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertNotContains(response, ' lang=""') self.assertNotContains(response, ' xml:lang=""') @override_settings(ROOT_URLCONF='admin_views.urls', USE_THOUSAND_SEPARATOR=True, USE_L10N=True) class DateHierarchyTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def assert_non_localized_year(self, response, year): """ 
The year is not localized with USE_THOUSAND_SEPARATOR (#15234). """ self.assertNotContains(response, formats.number_format(year)) def assert_contains_year_link(self, response, date): self.assertContains(response, '?release_date__year=%d"' % date.year) def assert_contains_month_link(self, response, date): self.assertContains( response, '?release_date__month=%d&amp;release_date__year=%d"' % ( date.month, date.year)) def assert_contains_day_link(self, response, date): self.assertContains( response, '?release_date__day=%d&amp;' 'release_date__month=%d&amp;release_date__year=%d"' % ( date.day, date.month, date.year)) def test_empty(self): """ No date hierarchy links display with empty changelist. """ response = self.client.get( reverse('admin:admin_views_podcast_changelist')) self.assertNotContains(response, 'release_date__year=') self.assertNotContains(response, 'release_date__month=') self.assertNotContains(response, 'release_date__day=') def test_single(self): """ Single day-level date hierarchy appears for single object. """ DATE = datetime.date(2000, 6, 30) Podcast.objects.create(release_date=DATE) url = reverse('admin:admin_views_podcast_changelist') response = self.client.get(url) self.assert_contains_day_link(response, DATE) self.assert_non_localized_year(response, 2000) def test_within_month(self): """ day-level links appear for changelist within single month. """ DATES = (datetime.date(2000, 6, 30), datetime.date(2000, 6, 15), datetime.date(2000, 6, 3)) for date in DATES: Podcast.objects.create(release_date=date) url = reverse('admin:admin_views_podcast_changelist') response = self.client.get(url) for date in DATES: self.assert_contains_day_link(response, date) self.assert_non_localized_year(response, 2000) def test_within_year(self): """ month-level links appear for changelist within single year. """ DATES = (datetime.date(2000, 1, 30), datetime.date(2000, 3, 15), datetime.date(2000, 5, 3)) for date in DATES: Podcast.objects.create(release_date=date) url = reverse('admin:admin_views_podcast_changelist') response = self.client.get(url) # no day-level links self.assertNotContains(response, 'release_date__day=') for date in DATES: self.assert_contains_month_link(response, date) self.assert_non_localized_year(response, 2000) def test_multiple_years(self): """ year-level links appear for year-spanning changelist. 
""" DATES = (datetime.date(2001, 1, 30), datetime.date(2003, 3, 15), datetime.date(2005, 5, 3)) for date in DATES: Podcast.objects.create(release_date=date) response = self.client.get( reverse('admin:admin_views_podcast_changelist')) # no day/month-level links self.assertNotContains(response, 'release_date__day=') self.assertNotContains(response, 'release_date__month=') for date in DATES: self.assert_contains_year_link(response, date) # and make sure GET parameters still behave correctly for date in DATES: url = '%s?release_date__year=%d' % ( reverse('admin:admin_views_podcast_changelist'), date.year) response = self.client.get(url) self.assert_contains_month_link(response, date) self.assert_non_localized_year(response, 2000) self.assert_non_localized_year(response, 2003) self.assert_non_localized_year(response, 2005) url = '%s?release_date__year=%d&release_date__month=%d' % ( reverse('admin:admin_views_podcast_changelist'), date.year, date.month) response = self.client.get(url) self.assert_contains_day_link(response, date) self.assert_non_localized_year(response, 2000) self.assert_non_localized_year(response, 2003) self.assert_non_localized_year(response, 2005) def test_related_field(self): questions_data = ( # (posted data, number of answers), (datetime.date(2001, 1, 30), 0), (datetime.date(2003, 3, 15), 1), (datetime.date(2005, 5, 3), 2), ) for date, answer_count in questions_data: question = Question.objects.create(posted=date) for i in range(answer_count): question.answer_set.create() response = self.client.get(reverse('admin:admin_views_answer_changelist')) for date, answer_count in questions_data: link = '?question__posted__year=%d"' % date.year if answer_count > 0: self.assertContains(response, link) else: self.assertNotContains(response, link) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminCustomSaveRelatedTests(TestCase): """ One can easily customize the way related objects are saved. Refs #16115. 
""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_should_be_able_to_edit_related_objects_on_add_view(self): post = { 'child_set-TOTAL_FORMS': '3', 'child_set-INITIAL_FORMS': '0', 'name': 'Josh Stone', 'child_set-0-name': 'Paul', 'child_set-1-name': 'Catherine', } self.client.post(reverse('admin:admin_views_parent_add'), post) self.assertEqual(1, Parent.objects.count()) self.assertEqual(2, Child.objects.count()) children_names = list(Child.objects.order_by('name').values_list('name', flat=True)) self.assertEqual('Josh Stone', Parent.objects.latest('id').name) self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names) def test_should_be_able_to_edit_related_objects_on_change_view(self): parent = Parent.objects.create(name='Josh Stone') paul = Child.objects.create(parent=parent, name='Paul') catherine = Child.objects.create(parent=parent, name='Catherine') post = { 'child_set-TOTAL_FORMS': '5', 'child_set-INITIAL_FORMS': '2', 'name': 'Josh Stone', 'child_set-0-name': 'Paul', 'child_set-0-id': paul.id, 'child_set-1-name': 'Catherine', 'child_set-1-id': catherine.id, } self.client.post(reverse('admin:admin_views_parent_change', args=(parent.id,)), post) children_names = list(Child.objects.order_by('name').values_list('name', flat=True)) self.assertEqual('Josh Stone', Parent.objects.latest('id').name) self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names) def test_should_be_able_to_edit_related_objects_on_changelist_view(self): parent = Parent.objects.create(name='Josh Rock') Child.objects.create(parent=parent, name='Paul') Child.objects.create(parent=parent, name='Catherine') post = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '1', 'form-MAX_NUM_FORMS': '0', 'form-0-id': parent.id, 'form-0-name': 'Josh Stone', '_save': 'Save' } self.client.post(reverse('admin:admin_views_parent_changelist'), post) children_names = list(Child.objects.order_by('name').values_list('name', flat=True)) self.assertEqual('Josh Stone', Parent.objects.latest('id').name) self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewLogoutTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def test_logout(self): self.client.force_login(self.superuser) response = self.client.get(reverse('admin:logout')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'registration/logged_out.html') self.assertEqual(response.request['PATH_INFO'], reverse('admin:logout')) self.assertFalse(response.context['has_permission']) self.assertNotContains(response, 'user-tools') # user-tools div shouldn't visible. def test_client_logout_url_can_be_used_to_login(self): response = self.client.get(reverse('admin:logout')) self.assertEqual(response.status_code, 302) # we should be redirected to the login page. # follow the redirect and test results. 
response = self.client.get(reverse('admin:logout'), follow=True) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'admin/login.html') self.assertEqual(response.request['PATH_INFO'], reverse('admin:login')) self.assertContains(response, '<input type="hidden" name="next" value="%s">' % reverse('admin:index')) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminUserMessageTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def send_message(self, level): """ Helper that sends a post to the dummy test methods and asserts that a message with the level has appeared in the response. """ action_data = { ACTION_CHECKBOX_NAME: [1], 'action': 'message_%s' % level, 'index': 0, } response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'), action_data, follow=True) self.assertContains(response, '<li class="%s">Test %s</li>' % (level, level), html=True) @override_settings(MESSAGE_LEVEL=10) # Set to DEBUG for this request def test_message_debug(self): self.send_message('debug') def test_message_info(self): self.send_message('info') def test_message_success(self): self.send_message('success') def test_message_warning(self): self.send_message('warning') def test_message_error(self): self.send_message('error') def test_message_extra_tags(self): action_data = { ACTION_CHECKBOX_NAME: [1], 'action': 'message_extra_tags', 'index': 0, } response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'), action_data, follow=True) self.assertContains(response, '<li class="extra_tag info">Test tags</li>', html=True) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminKeepChangeListFiltersTests(TestCase): admin_site = site @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret') def setUp(self): self.client.force_login(self.superuser) def assertURLEqual(self, url1, url2, msg_prefix=''): """ Assert that two URLs are equal despite the ordering of their querystring. Refs #22360. """ parsed_url1 = urlparse(url1) path1 = parsed_url1.path parsed_qs1 = dict(parse_qsl(parsed_url1.query)) parsed_url2 = urlparse(url2) path2 = parsed_url2.path parsed_qs2 = dict(parse_qsl(parsed_url2.query)) for parsed_qs in [parsed_qs1, parsed_qs2]: if '_changelist_filters' in parsed_qs: changelist_filters = parsed_qs['_changelist_filters'] parsed_filters = dict(parse_qsl(changelist_filters)) parsed_qs['_changelist_filters'] = parsed_filters self.assertEqual(path1, path2) self.assertEqual(parsed_qs1, parsed_qs2) def test_assert_url_equal(self): # Test equality. change_user_url = reverse('admin:auth_user_change', args=(self.joepublicuser.pk,)) self.assertURLEqual( 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ), 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ) ) # Test inequality. with self.assertRaises(AssertionError): self.assertURLEqual( 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ), 'http://testserver{}?_changelist_filters=is_staff__exact%3D1%26is_superuser__exact%3D1'.format( change_user_url ) ) # Ignore scheme and host. 
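        # The URLs below differ only in the http://testserver prefix, so they
        # compare as equal. Note that _changelist_filters is itself a
        # URL-encoded querystring nested inside the outer one, which is why
        # assertURLEqual() above decodes it a second time before comparing.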
self.assertURLEqual( 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ), '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url) ) # Ignore ordering of querystring. self.assertURLEqual( '{}?is_staff__exact=0&is_superuser__exact=0'.format(reverse('admin:auth_user_changelist')), '{}?is_superuser__exact=0&is_staff__exact=0'.format(reverse('admin:auth_user_changelist')) ) # Ignore ordering of _changelist_filters. self.assertURLEqual( '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url), '{}?_changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0'.format(change_user_url) ) def get_changelist_filters(self): return { 'is_superuser__exact': 0, 'is_staff__exact': 0, } def get_changelist_filters_querystring(self): return urlencode(self.get_changelist_filters()) def get_preserved_filters_querystring(self): return urlencode({ '_changelist_filters': self.get_changelist_filters_querystring() }) def get_sample_user_id(self): return self.joepublicuser.pk def get_changelist_url(self): return '%s?%s' % ( reverse('admin:auth_user_changelist', current_app=self.admin_site.name), self.get_changelist_filters_querystring(), ) def get_add_url(self, add_preserved_filters=True): url = reverse('admin:auth_user_add', current_app=self.admin_site.name) if add_preserved_filters: url = '%s?%s' % (url, self.get_preserved_filters_querystring()) return url def get_change_url(self, user_id=None, add_preserved_filters=True): if user_id is None: user_id = self.get_sample_user_id() url = reverse('admin:auth_user_change', args=(user_id,), current_app=self.admin_site.name) if add_preserved_filters: url = '%s?%s' % (url, self.get_preserved_filters_querystring()) return url def get_history_url(self, user_id=None): if user_id is None: user_id = self.get_sample_user_id() return "%s?%s" % ( reverse('admin:auth_user_history', args=(user_id,), current_app=self.admin_site.name), self.get_preserved_filters_querystring(), ) def get_delete_url(self, user_id=None): if user_id is None: user_id = self.get_sample_user_id() return "%s?%s" % ( reverse('admin:auth_user_delete', args=(user_id,), current_app=self.admin_site.name), self.get_preserved_filters_querystring(), ) def test_changelist_view(self): response = self.client.get(self.get_changelist_url()) self.assertEqual(response.status_code, 200) # Check the `change_view` link has the correct querystring. detail_link = re.search( '<a href="(.*?)">{}</a>'.format(self.joepublicuser.username), response.content.decode() ) self.assertURLEqual(detail_link[1], self.get_change_url()) def test_change_view(self): # Get the `change_view`. response = self.client.get(self.get_change_url()) self.assertEqual(response.status_code, 200) # Check the form action. form_action = re.search( '<form action="(.*?)" method="post" id="user_form" novalidate>', response.content.decode() ) self.assertURLEqual(form_action[1], '?%s' % self.get_preserved_filters_querystring()) # Check the history link. history_link = re.search( '<a href="(.*?)" class="historylink">History</a>', response.content.decode() ) self.assertURLEqual(history_link[1], self.get_history_url()) # Check the delete link. delete_link = re.search( '<a href="(.*?)" class="deletelink">Delete</a>', response.content.decode() ) self.assertURLEqual(delete_link[1], self.get_delete_url()) # Test redirect on "Save". 
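        # Each save action below should redirect to a URL that still carries
        # the changelist filters, either directly on the changelist URL or via
        # the _changelist_filters parameter on the add/change URLs.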
post_data = { 'username': 'joepublic', 'last_login_0': '2007-05-30', 'last_login_1': '13:20:10', 'date_joined_0': '2007-05-30', 'date_joined_1': '13:20:10', } post_data['_save'] = 1 response = self.client.post(self.get_change_url(), data=post_data) self.assertRedirects(response, self.get_changelist_url()) post_data.pop('_save') # Test redirect on "Save and continue". post_data['_continue'] = 1 response = self.client.post(self.get_change_url(), data=post_data) self.assertRedirects(response, self.get_change_url()) post_data.pop('_continue') # Test redirect on "Save and add new". post_data['_addanother'] = 1 response = self.client.post(self.get_change_url(), data=post_data) self.assertRedirects(response, self.get_add_url()) post_data.pop('_addanother') def test_change_view_without_preserved_filters(self): response = self.client.get(self.get_change_url(add_preserved_filters=False)) # The action attribute is omitted. self.assertContains(response, '<form method="post" id="user_form" novalidate>') def test_add_view(self): # Get the `add_view`. response = self.client.get(self.get_add_url()) self.assertEqual(response.status_code, 200) # Check the form action. form_action = re.search( '<form action="(.*?)" method="post" id="user_form" novalidate>', response.content.decode() ) self.assertURLEqual(form_action[1], '?%s' % self.get_preserved_filters_querystring()) post_data = { 'username': 'dummy', 'password1': 'test', 'password2': 'test', } # Test redirect on "Save". post_data['_save'] = 1 response = self.client.post(self.get_add_url(), data=post_data) self.assertRedirects(response, self.get_change_url(User.objects.get(username='dummy').pk)) post_data.pop('_save') # Test redirect on "Save and continue". post_data['username'] = 'dummy2' post_data['_continue'] = 1 response = self.client.post(self.get_add_url(), data=post_data) self.assertRedirects(response, self.get_change_url(User.objects.get(username='dummy2').pk)) post_data.pop('_continue') # Test redirect on "Save and add new". post_data['username'] = 'dummy3' post_data['_addanother'] = 1 response = self.client.post(self.get_add_url(), data=post_data) self.assertRedirects(response, self.get_add_url()) post_data.pop('_addanother') def test_add_view_without_preserved_filters(self): response = self.client.get(self.get_add_url(add_preserved_filters=False)) # The action attribute is omitted. self.assertContains(response, '<form method="post" id="user_form" novalidate>') def test_delete_view(self): # Test redirect on "Delete". response = self.client.post(self.get_delete_url(), {'post': 'yes'}) self.assertRedirects(response, self.get_changelist_url()) def test_url_prefix(self): context = { 'preserved_filters': self.get_preserved_filters_querystring(), 'opts': User._meta, } prefixes = ('', '/prefix/', '/後台/') for prefix in prefixes: with self.subTest(prefix=prefix), override_script_prefix(prefix): url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name) self.assertURLEqual( self.get_changelist_url(), add_preserved_filters(context, url), ) class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests): admin_site = site2 @override_settings(ROOT_URLCONF='admin_views.urls') class TestLabelVisibility(TestCase): """ #11277 -Labels of hidden fields in admin were not hidden. 
""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_all_fields_visible(self): response = self.client.get(reverse('admin:admin_views_emptymodelvisible_add')) self.assert_fieldline_visible(response) self.assert_field_visible(response, 'first') self.assert_field_visible(response, 'second') def test_all_fields_hidden(self): response = self.client.get(reverse('admin:admin_views_emptymodelhidden_add')) self.assert_fieldline_hidden(response) self.assert_field_hidden(response, 'first') self.assert_field_hidden(response, 'second') def test_mixin(self): response = self.client.get(reverse('admin:admin_views_emptymodelmixin_add')) self.assert_fieldline_visible(response) self.assert_field_hidden(response, 'first') self.assert_field_visible(response, 'second') def assert_field_visible(self, response, field_name): self.assertContains(response, '<div class="fieldBox field-%s">' % field_name) def assert_field_hidden(self, response, field_name): self.assertContains(response, '<div class="fieldBox field-%s hidden">' % field_name) def assert_fieldline_visible(self, response): self.assertContains(response, '<div class="form-row field-first field-second">') def assert_fieldline_hidden(self, response): self.assertContains(response, '<div class="form-row hidden') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewOnSiteTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = State.objects.create(name='New York') cls.s2 = State.objects.create(name='Illinois') cls.s3 = State.objects.create(name='California') cls.c1 = City.objects.create(state=cls.s1, name='New York') cls.c2 = City.objects.create(state=cls.s2, name='Chicago') cls.c3 = City.objects.create(state=cls.s3, name='San Francisco') cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza') cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard') cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner') cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels') cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away') cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant') cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi') cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi') cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe') def setUp(self): self.client.force_login(self.superuser) def test_add_view_form_and_formsets_run_validation(self): """ Issue #20522 Verifying that if the parent form fails validation, the inlines also run validation even if validation is contingent on parent form data. Also, assertFormError() and assertFormsetError() is usable for admin forms and formsets. 
""" # The form validation should fail because 'some_required_info' is # not included on the parent form, and the family_name of the parent # does not match that of the child post_data = { 'family_name': 'Test1', 'dependentchild_set-TOTAL_FORMS': '1', 'dependentchild_set-INITIAL_FORMS': '0', 'dependentchild_set-MAX_NUM_FORMS': '1', 'dependentchild_set-0-id': '', 'dependentchild_set-0-parent': '', 'dependentchild_set-0-family_name': 'Test2', } response = self.client.post(reverse('admin:admin_views_parentwithdependentchildren_add'), post_data) self.assertFormError(response, 'adminform', 'some_required_info', ['This field is required.']) msg = "The form 'adminform' in context 0 does not contain the non-field error 'Error'" with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'adminform', None, ['Error']) self.assertFormsetError( response, 'inline_admin_formset', 0, None, ['Children must share a family name with their parents in this contrived test case'] ) msg = "The formset 'inline_admin_formset' in context 12 does not contain any non-form errors." with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError(response, 'inline_admin_formset', None, None, ['Error']) def test_change_view_form_and_formsets_run_validation(self): """ Issue #20522 Verifying that if the parent form fails validation, the inlines also run validation even if validation is contingent on parent form data """ pwdc = ParentWithDependentChildren.objects.create(some_required_info=6, family_name='Test1') # The form validation should fail because 'some_required_info' is # not included on the parent form, and the family_name of the parent # does not match that of the child post_data = { 'family_name': 'Test2', 'dependentchild_set-TOTAL_FORMS': '1', 'dependentchild_set-INITIAL_FORMS': '0', 'dependentchild_set-MAX_NUM_FORMS': '1', 'dependentchild_set-0-id': '', 'dependentchild_set-0-parent': str(pwdc.id), 'dependentchild_set-0-family_name': 'Test1', } response = self.client.post( reverse('admin:admin_views_parentwithdependentchildren_change', args=(pwdc.id,)), post_data ) self.assertFormError(response, 'adminform', 'some_required_info', ['This field is required.']) self.assertFormsetError( response, 'inline_admin_formset', 0, None, ['Children must share a family name with their parents in this contrived test case'] ) def test_check(self): "The view_on_site value is either a boolean or a callable" try: admin = CityAdmin(City, AdminSite()) CityAdmin.view_on_site = True self.assertEqual(admin.check(), []) CityAdmin.view_on_site = False self.assertEqual(admin.check(), []) CityAdmin.view_on_site = lambda obj: obj.get_absolute_url() self.assertEqual(admin.check(), []) CityAdmin.view_on_site = [] self.assertEqual(admin.check(), [ Error( "The value of 'view_on_site' must be a callable or a boolean value.", obj=CityAdmin, id='admin.E025', ), ]) finally: # Restore the original values for the benefit of other tests. 
CityAdmin.view_on_site = True def test_false(self): "The 'View on site' button is not displayed if view_on_site is False" response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,))) content_type_pk = ContentType.objects.get_for_model(Restaurant).pk self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, 1))) def test_true(self): "The default behavior is followed if view_on_site is True" response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,))) content_type_pk = ContentType.objects.get_for_model(City).pk self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk))) def test_callable(self): "The right link is displayed if view_on_site is a callable" response = self.client.get(reverse('admin:admin_views_worker_change', args=(self.w1.pk,))) self.assertContains(response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name)) def test_missing_get_absolute_url(self): "None is returned if model doesn't have get_absolute_url" model_admin = ModelAdmin(Worker, None) self.assertIsNone(model_admin.get_view_on_site_url(Worker())) @override_settings(ROOT_URLCONF='admin_views.urls') class InlineAdminViewOnSiteTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = State.objects.create(name='New York') cls.s2 = State.objects.create(name='Illinois') cls.s3 = State.objects.create(name='California') cls.c1 = City.objects.create(state=cls.s1, name='New York') cls.c2 = City.objects.create(state=cls.s2, name='Chicago') cls.c3 = City.objects.create(state=cls.s3, name='San Francisco') cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza') cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard') cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner') cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels') cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away') cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant') cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi') cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi') cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe') def setUp(self): self.client.force_login(self.superuser) def test_false(self): "The 'View on site' button is not displayed if view_on_site is False" response = self.client.get(reverse('admin:admin_views_state_change', args=(self.s1.pk,))) content_type_pk = ContentType.objects.get_for_model(City).pk self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk))) def test_true(self): "The 'View on site' button is displayed if view_on_site is True" response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,))) content_type_pk = ContentType.objects.get_for_model(Restaurant).pk self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.r1.pk))) def test_callable(self): "The right link is displayed if view_on_site is a callable" response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,))) self.assertContains(response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name)) @override_settings(ROOT_URLCONF='admin_views.urls') class GetFormsetsWithInlinesArgumentTest(TestCase): """ #23934 - When adding a new model instance in the admin, 
the 'obj' argument of get_formsets_with_inlines() should be None. When changing, it should be equal to the existing model instance. The GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception if obj is not None during add_view or obj is None during change_view. """ @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_explicitly_provided_pk(self): post_data = {'name': '1'} response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_add'), post_data) self.assertEqual(response.status_code, 302) post_data = {'name': '2'} response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_change', args=(1,)), post_data) self.assertEqual(response.status_code, 302) def test_implicitly_generated_pk(self): post_data = {'name': '1'} response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_add'), post_data) self.assertEqual(response.status_code, 302) post_data = {'name': '2'} response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_change', args=(1,)), post_data) self.assertEqual(response.status_code, 302)
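

# A minimal sketch (illustrative only; not used by the tests above) of a
# ModelAdmin relying on the contract just described -- `obj` is None in
# add_view and the existing instance in change_view:
#
#   class ObjAwareAdmin(ModelAdmin):
#       def get_formsets_with_inlines(self, request, obj=None):
#           # Show inlines only on the change view, where obj is the saved
#           # instance; obj is None when adding.
#           if obj is not None:
#               yield from super().get_formsets_with_inlines(request, obj)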
b9b45737f892caa68736110a43b939d39676e6a18a068c418cb6acd6e34149dd
from django.contrib import admin
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.auth.models import User
from django.test import TestCase, override_settings
from django.urls import path, reverse


class AdminSiteWithSidebar(admin.AdminSite):
    pass


class AdminSiteWithoutSidebar(admin.AdminSite):
    enable_nav_sidebar = False


site_with_sidebar = AdminSiteWithSidebar(name='test_with_sidebar')
site_without_sidebar = AdminSiteWithoutSidebar(name='test_without_sidebar')

site_with_sidebar.register(User)

urlpatterns = [
    path('test_sidebar/admin/', site_with_sidebar.urls),
    path('test_without_sidebar/admin/', site_without_sidebar.urls),
]


@override_settings(ROOT_URLCONF='admin_views.test_nav_sidebar')
class AdminSidebarTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(
            username='super',
            password='secret',
            email='[email protected]',
        )

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_sidebar_not_on_index(self):
        response = self.client.get(reverse('test_with_sidebar:index'))
        self.assertNotContains(response, '<nav class="sticky" id="nav-sidebar">')

    def test_sidebar_disabled(self):
        response = self.client.get(reverse('test_without_sidebar:index'))
        self.assertNotContains(response, '<nav class="sticky" id="nav-sidebar">')

    def test_sidebar_unauthenticated(self):
        self.client.logout()
        response = self.client.get(reverse('test_with_sidebar:login'))
        self.assertNotContains(response, '<nav class="sticky" id="nav-sidebar">')

    def test_sidebar_aria_current_page(self):
        url = reverse('test_with_sidebar:auth_user_changelist')
        response = self.client.get(url)
        self.assertContains(response, '<nav class="sticky" id="nav-sidebar">')
        self.assertContains(response, '<a href="%s" aria-current="page">Users</a>' % url)

    @override_settings(
        TEMPLATES=[{
            'BACKEND': 'django.template.backends.django.DjangoTemplates',
            'DIRS': [],
            'APP_DIRS': True,
            'OPTIONS': {
                'context_processors': [
                    'django.contrib.auth.context_processors.auth',
                    'django.contrib.messages.context_processors.messages',
                ],
            },
        }]
    )
    def test_sidebar_aria_current_page_missing_without_request_context_processor(self):
        url = reverse('test_with_sidebar:auth_user_changelist')
        response = self.client.get(url)
        self.assertContains(response, '<nav class="sticky" id="nav-sidebar">')
        # Does not include the aria-current attribute.
self.assertContains(response, '<a href="%s">Users</a>' % url) self.assertNotContains(response, 'aria-current') @override_settings(ROOT_URLCONF='admin_views.test_nav_sidebar') class SeleniumTests(AdminSeleniumTestCase): def setUp(self): self.superuser = User.objects.create_superuser( username='super', password='secret', email='[email protected]', ) self.admin_login(username='super', password='secret', login_url=reverse('test_with_sidebar:index')) self.selenium.execute_script("localStorage.removeItem('django.admin.navSidebarIsOpen')") def test_sidebar_starts_open(self): self.selenium.get(self.live_server_url + reverse('test_with_sidebar:auth_user_changelist')) main_element = self.selenium.find_element_by_css_selector('#main') self.assertIn('shifted', main_element.get_attribute('class').split()) def test_sidebar_can_be_closed(self): self.selenium.get(self.live_server_url + reverse('test_with_sidebar:auth_user_changelist')) toggle_button = self.selenium.find_element_by_css_selector('#toggle-nav-sidebar') self.assertEqual(toggle_button.tag_name, 'button') self.assertEqual(toggle_button.get_attribute('aria-label'), 'Toggle navigation') for link in self.selenium.find_elements_by_css_selector('#nav-sidebar a'): self.assertEqual(link.get_attribute('tabIndex'), '0') toggle_button.click() # Hidden sidebar is not reachable via keyboard navigation. for link in self.selenium.find_elements_by_css_selector('#nav-sidebar a'): self.assertEqual(link.get_attribute('tabIndex'), '-1') main_element = self.selenium.find_element_by_css_selector('#main') self.assertNotIn('shifted', main_element.get_attribute('class').split()) def test_sidebar_state_persists(self): self.selenium.get(self.live_server_url + reverse('test_with_sidebar:auth_user_changelist')) self.assertIsNone(self.selenium.execute_script("return localStorage.getItem('django.admin.navSidebarIsOpen')")) toggle_button = self.selenium.find_element_by_css_selector('#toggle-nav-sidebar') toggle_button.click() self.assertEqual( self.selenium.execute_script("return localStorage.getItem('django.admin.navSidebarIsOpen')"), 'false', ) self.selenium.get(self.live_server_url + reverse('test_with_sidebar:auth_user_changelist')) main_element = self.selenium.find_element_by_css_selector('#main') self.assertNotIn('shifted', main_element.get_attribute('class').split()) toggle_button = self.selenium.find_element_by_css_selector('#toggle-nav-sidebar') # Hidden sidebar is not reachable via keyboard navigation. for link in self.selenium.find_elements_by_css_selector('#nav-sidebar a'): self.assertEqual(link.get_attribute('tabIndex'), '-1') toggle_button.click() for link in self.selenium.find_elements_by_css_selector('#nav-sidebar a'): self.assertEqual(link.get_attribute('tabIndex'), '0') self.assertEqual( self.selenium.execute_script("return localStorage.getItem('django.admin.navSidebarIsOpen')"), 'true', ) self.selenium.get(self.live_server_url + reverse('test_with_sidebar:auth_user_changelist')) main_element = self.selenium.find_element_by_css_selector('#main') self.assertIn('shifted', main_element.get_attribute('class').split())
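# The aria-current tests above hinge on whether the current request is in the
# template context. A minimal sketch, assuming a standard project settings
# module: 'django.template.context_processors.request' (included by
# startproject by default) is exactly what the
# test_sidebar_aria_current_page_missing_without_request_context_processor
# override deliberately leaves out.
TEMPLATES = [{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'DIRS': [],
    'APP_DIRS': True,
    'OPTIONS': {
        'context_processors': [
            'django.template.context_processors.request',
            'django.contrib.auth.context_processors.auth',
            'django.contrib.messages.context_processors.messages',
        ],
    },
}]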
1e42852cfb5f9cccc56cdafea23041438e6e07ee207df08243c899db3226155b
import unittest from migrations.test_base import OperationTestBase from django.db import NotSupportedError, connection from django.db.migrations.state import ProjectState from django.db.models import Index from django.test import modify_settings, override_settings from django.test.utils import CaptureQueriesContext from . import PostgreSQLTestCase try: from django.contrib.postgres.operations import ( AddIndexConcurrently, BloomExtension, CreateExtension, RemoveIndexConcurrently, ) from django.contrib.postgres.indexes import BrinIndex, BTreeIndex except ImportError: pass @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific tests.') @modify_settings(INSTALLED_APPS={'append': 'migrations'}) class AddIndexConcurrentlyTests(OperationTestBase): app_label = 'test_add_concurrently' def test_requires_atomic_false(self): project_state = self.set_up_test_model(self.app_label) new_state = project_state.clone() operation = AddIndexConcurrently( 'Pony', Index(fields=['pink'], name='pony_pink_idx'), ) msg = ( 'The AddIndexConcurrently operation cannot be executed inside ' 'a transaction (set atomic = False on the migration).' ) with self.assertRaisesMessage(NotSupportedError, msg): with connection.schema_editor(atomic=True) as editor: operation.database_forwards(self.app_label, editor, project_state, new_state) def test_add(self): project_state = self.set_up_test_model(self.app_label, index=False) table_name = '%s_pony' % self.app_label index = Index(fields=['pink'], name='pony_pink_idx') new_state = project_state.clone() operation = AddIndexConcurrently('Pony', index) self.assertEqual( operation.describe(), 'Concurrently create index pony_pink_idx on field(s) pink of ' 'model Pony' ) operation.state_forwards(self.app_label, new_state) self.assertEqual(len(new_state.models[self.app_label, 'pony'].options['indexes']), 1) self.assertIndexNotExists(table_name, ['pink']) # Add index. with connection.schema_editor(atomic=False) as editor: operation.database_forwards(self.app_label, editor, project_state, new_state) self.assertIndexExists(table_name, ['pink']) # Reversal. with connection.schema_editor(atomic=False) as editor: operation.database_backwards(self.app_label, editor, new_state, project_state) self.assertIndexNotExists(table_name, ['pink']) # Deconstruction. name, args, kwargs = operation.deconstruct() self.assertEqual(name, 'AddIndexConcurrently') self.assertEqual(args, []) self.assertEqual(kwargs, {'model_name': 'Pony', 'index': index}) def test_add_other_index_type(self): project_state = self.set_up_test_model(self.app_label, index=False) table_name = '%s_pony' % self.app_label new_state = project_state.clone() operation = AddIndexConcurrently( 'Pony', BrinIndex(fields=['pink'], name='pony_pink_brin_idx'), ) self.assertIndexNotExists(table_name, ['pink']) # Add index. with connection.schema_editor(atomic=False) as editor: operation.database_forwards(self.app_label, editor, project_state, new_state) self.assertIndexExists(table_name, ['pink'], index_type='brin') # Reversal. 
with connection.schema_editor(atomic=False) as editor:
            operation.database_backwards(self.app_label, editor, new_state, project_state)
        self.assertIndexNotExists(table_name, ['pink'])

    def test_add_with_options(self):
        project_state = self.set_up_test_model(self.app_label, index=False)
        table_name = '%s_pony' % self.app_label
        new_state = project_state.clone()
        index = BTreeIndex(fields=['pink'], name='pony_pink_btree_idx', fillfactor=70)
        operation = AddIndexConcurrently('Pony', index)
        self.assertIndexNotExists(table_name, ['pink'])
        # Add index.
        with connection.schema_editor(atomic=False) as editor:
            operation.database_forwards(self.app_label, editor, project_state, new_state)
        self.assertIndexExists(table_name, ['pink'], index_type='btree')
        # Reversal.
        with connection.schema_editor(atomic=False) as editor:
            operation.database_backwards(self.app_label, editor, new_state, project_state)
        self.assertIndexNotExists(table_name, ['pink'])


@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific tests.')
@modify_settings(INSTALLED_APPS={'append': 'migrations'})
class RemoveIndexConcurrentlyTests(OperationTestBase):
    app_label = 'test_rm_concurrently'

    def test_requires_atomic_false(self):
        project_state = self.set_up_test_model(self.app_label, index=True)
        new_state = project_state.clone()
        operation = RemoveIndexConcurrently('Pony', 'pony_pink_idx')
        msg = (
            'The RemoveIndexConcurrently operation cannot be executed inside '
            'a transaction (set atomic = False on the migration).'
        )
        with self.assertRaisesMessage(NotSupportedError, msg):
            with connection.schema_editor(atomic=True) as editor:
                operation.database_forwards(self.app_label, editor, project_state, new_state)

    def test_remove(self):
        project_state = self.set_up_test_model(self.app_label, index=True)
        table_name = '%s_pony' % self.app_label
        self.assertTableExists(table_name)
        new_state = project_state.clone()
        operation = RemoveIndexConcurrently('Pony', 'pony_pink_idx')
        self.assertEqual(
            operation.describe(),
            'Concurrently remove index pony_pink_idx from Pony',
        )
        operation.state_forwards(self.app_label, new_state)
        self.assertEqual(len(new_state.models[self.app_label, 'pony'].options['indexes']), 0)
        self.assertIndexExists(table_name, ['pink'])
        # Remove index.
        with connection.schema_editor(atomic=False) as editor:
            operation.database_forwards(self.app_label, editor, project_state, new_state)
        self.assertIndexNotExists(table_name, ['pink'])
        # Reversal.
        with connection.schema_editor(atomic=False) as editor:
            operation.database_backwards(self.app_label, editor, new_state, project_state)
        self.assertIndexExists(table_name, ['pink'])
        # Deconstruction.
        name, args, kwargs = operation.deconstruct()
        self.assertEqual(name, 'RemoveIndexConcurrently')
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {'model_name': 'Pony', 'name': 'pony_pink_idx'})


class NoExtensionRouter:
    def allow_migrate(self, db, app_label, **hints):
        return False


@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific tests.')
class CreateExtensionTests(PostgreSQLTestCase):
    app_label = 'test_allow_create_extension'

    @override_settings(DATABASE_ROUTERS=[NoExtensionRouter()])
    def test_no_allow_migrate(self):
        operation = CreateExtension('tablefunc')
        project_state = ProjectState()
        new_state = project_state.clone()
        # Don't create an extension.
with CaptureQueriesContext(connection) as captured_queries: with connection.schema_editor(atomic=False) as editor: operation.database_forwards(self.app_label, editor, project_state, new_state) self.assertEqual(len(captured_queries), 0) # Reversal. with CaptureQueriesContext(connection) as captured_queries: with connection.schema_editor(atomic=False) as editor: operation.database_backwards(self.app_label, editor, new_state, project_state) self.assertEqual(len(captured_queries), 0) def test_allow_migrate(self): operation = CreateExtension('tablefunc') self.assertEqual(operation.migration_name_fragment, 'create_extension_tablefunc') project_state = ProjectState() new_state = project_state.clone() # Create an extension. with CaptureQueriesContext(connection) as captured_queries: with connection.schema_editor(atomic=False) as editor: operation.database_forwards(self.app_label, editor, project_state, new_state) self.assertEqual(len(captured_queries), 4) self.assertIn('CREATE EXTENSION IF NOT EXISTS', captured_queries[1]['sql']) # Reversal. with CaptureQueriesContext(connection) as captured_queries: with connection.schema_editor(atomic=False) as editor: operation.database_backwards(self.app_label, editor, new_state, project_state) self.assertEqual(len(captured_queries), 2) self.assertIn('DROP EXTENSION IF EXISTS', captured_queries[1]['sql']) def test_create_existing_extension(self): operation = BloomExtension() self.assertEqual(operation.migration_name_fragment, 'create_extension_bloom') project_state = ProjectState() new_state = project_state.clone() # Don't create an existing extension. with CaptureQueriesContext(connection) as captured_queries: with connection.schema_editor(atomic=False) as editor: operation.database_forwards(self.app_label, editor, project_state, new_state) self.assertEqual(len(captured_queries), 3) self.assertIn('SELECT', captured_queries[0]['sql']) def test_drop_nonexistent_extension(self): operation = CreateExtension('tablefunc') project_state = ProjectState() new_state = project_state.clone() # Don't drop a nonexistent extension. with CaptureQueriesContext(connection) as captured_queries: with connection.schema_editor(atomic=False) as editor: operation.database_backwards(self.app_label, editor, project_state, new_state) self.assertEqual(len(captured_queries), 1) self.assertIn('SELECT', captured_queries[0]['sql'])
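# A minimal sketch of how the operations tested above are used in practice:
# concurrent index creation cannot run inside a transaction, so the migration
# itself must set atomic = False (see test_requires_atomic_false). The app,
# model, and field names below are illustrative.
from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations
from django.db.models import Index


class Migration(migrations.Migration):
    atomic = False  # required for the *Concurrently operations

    dependencies = [
        ('myapp', '0001_initial'),  # hypothetical dependency
    ]

    operations = [
        AddIndexConcurrently(
            'Pony',
            Index(fields=['pink'], name='pony_pink_idx'),
        ),
    ]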
cfef469e0140fbb06c55181ea393726232ea5dde49675d7f96a55650aff63be3
import unittest from datetime import date, datetime, time, timedelta from decimal import Decimal from operator import attrgetter, itemgetter from uuid import UUID from django.core.exceptions import FieldError from django.db.models import ( BinaryField, BooleanField, Case, CharField, Count, DurationField, F, GenericIPAddressField, IntegerField, Max, Min, Q, Sum, TextField, TimeField, UUIDField, Value, When, ) from django.test import SimpleTestCase, TestCase from .models import CaseTestModel, Client, FKCaseTestModel, O2OCaseTestModel try: from PIL import Image except ImportError: Image = None class CaseExpressionTests(TestCase): @classmethod def setUpTestData(cls): o = CaseTestModel.objects.create(integer=1, integer2=1, string='1') O2OCaseTestModel.objects.create(o2o=o, integer=1) FKCaseTestModel.objects.create(fk=o, integer=1) o = CaseTestModel.objects.create(integer=2, integer2=3, string='2') O2OCaseTestModel.objects.create(o2o=o, integer=2) FKCaseTestModel.objects.create(fk=o, integer=2) FKCaseTestModel.objects.create(fk=o, integer=3) o = CaseTestModel.objects.create(integer=3, integer2=4, string='3') O2OCaseTestModel.objects.create(o2o=o, integer=3) FKCaseTestModel.objects.create(fk=o, integer=3) FKCaseTestModel.objects.create(fk=o, integer=4) o = CaseTestModel.objects.create(integer=2, integer2=2, string='2') O2OCaseTestModel.objects.create(o2o=o, integer=2) FKCaseTestModel.objects.create(fk=o, integer=2) FKCaseTestModel.objects.create(fk=o, integer=3) o = CaseTestModel.objects.create(integer=3, integer2=4, string='3') O2OCaseTestModel.objects.create(o2o=o, integer=3) FKCaseTestModel.objects.create(fk=o, integer=3) FKCaseTestModel.objects.create(fk=o, integer=4) o = CaseTestModel.objects.create(integer=3, integer2=3, string='3') O2OCaseTestModel.objects.create(o2o=o, integer=3) FKCaseTestModel.objects.create(fk=o, integer=3) FKCaseTestModel.objects.create(fk=o, integer=4) o = CaseTestModel.objects.create(integer=4, integer2=5, string='4') O2OCaseTestModel.objects.create(o2o=o, integer=1) FKCaseTestModel.objects.create(fk=o, integer=5) # GROUP BY on Oracle fails with TextField/BinaryField; see #24096. 
cls.non_lob_fields = [ f.name for f in CaseTestModel._meta.get_fields() if not (f.is_relation and f.auto_created) and not isinstance(f, (BinaryField, TextField)) ] def test_annotate(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(test=Case( When(integer=1, then=Value('one')), When(integer=2, then=Value('two')), default=Value('other'), output_field=CharField(), )).order_by('pk'), [(1, 'one'), (2, 'two'), (3, 'other'), (2, 'two'), (3, 'other'), (3, 'other'), (4, 'other')], transform=attrgetter('integer', 'test') ) def test_annotate_without_default(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(test=Case( When(integer=1, then=1), When(integer=2, then=2), output_field=IntegerField(), )).order_by('pk'), [(1, 1), (2, 2), (3, None), (2, 2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'test') ) def test_annotate_with_expression_as_value(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(f_test=Case( When(integer=1, then=F('integer') + 1), When(integer=2, then=F('integer') + 3), default='integer', )).order_by('pk'), [(1, 2), (2, 5), (3, 3), (2, 5), (3, 3), (3, 3), (4, 4)], transform=attrgetter('integer', 'f_test') ) def test_annotate_with_expression_as_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(f_test=Case( When(integer2=F('integer'), then=Value('equal')), When(integer2=F('integer') + 1, then=Value('+1')), output_field=CharField(), )).order_by('pk'), [(1, 'equal'), (2, '+1'), (3, '+1'), (2, 'equal'), (3, '+1'), (3, 'equal'), (4, '+1')], transform=attrgetter('integer', 'f_test') ) def test_annotate_with_join_in_value(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(join_test=Case( When(integer=1, then=F('o2o_rel__integer') + 1), When(integer=2, then=F('o2o_rel__integer') + 3), default='o2o_rel__integer', )).order_by('pk'), [(1, 2), (2, 5), (3, 3), (2, 5), (3, 3), (3, 3), (4, 1)], transform=attrgetter('integer', 'join_test') ) def test_annotate_with_in_clause(self): fk_rels = FKCaseTestModel.objects.filter(integer__in=[5]) self.assertQuerysetEqual( CaseTestModel.objects.only('pk', 'integer').annotate(in_test=Sum(Case( When(fk_rel__in=fk_rels, then=F('fk_rel__integer')), default=Value(0), ))).order_by('pk'), [(1, 0), (2, 0), (3, 0), (2, 0), (3, 0), (3, 0), (4, 5)], transform=attrgetter('integer', 'in_test') ) def test_annotate_with_join_in_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(join_test=Case( When(integer2=F('o2o_rel__integer'), then=Value('equal')), When(integer2=F('o2o_rel__integer') + 1, then=Value('+1')), default=Value('other'), output_field=CharField(), )).order_by('pk'), [(1, 'equal'), (2, '+1'), (3, '+1'), (2, 'equal'), (3, '+1'), (3, 'equal'), (4, 'other')], transform=attrgetter('integer', 'join_test') ) def test_annotate_with_join_in_predicate(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(join_test=Case( When(o2o_rel__integer=1, then=Value('one')), When(o2o_rel__integer=2, then=Value('two')), When(o2o_rel__integer=3, then=Value('three')), default=Value('other'), output_field=CharField(), )).order_by('pk'), [(1, 'one'), (2, 'two'), (3, 'three'), (2, 'two'), (3, 'three'), (3, 'three'), (4, 'one')], transform=attrgetter('integer', 'join_test') ) def test_annotate_with_annotation_in_value(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( f_plus_1=F('integer') + 1, f_plus_3=F('integer') + 3, ).annotate( f_test=Case( When(integer=1, then='f_plus_1'), When(integer=2, then='f_plus_3'), default='integer', ), 
).order_by('pk'), [(1, 2), (2, 5), (3, 3), (2, 5), (3, 3), (3, 3), (4, 4)], transform=attrgetter('integer', 'f_test') ) def test_annotate_with_annotation_in_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( f_plus_1=F('integer') + 1, ).annotate( f_test=Case( When(integer2=F('integer'), then=Value('equal')), When(integer2=F('f_plus_1'), then=Value('+1')), output_field=CharField(), ), ).order_by('pk'), [(1, 'equal'), (2, '+1'), (3, '+1'), (2, 'equal'), (3, '+1'), (3, 'equal'), (4, '+1')], transform=attrgetter('integer', 'f_test') ) def test_annotate_with_annotation_in_predicate(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( f_minus_2=F('integer') - 2, ).annotate( test=Case( When(f_minus_2=-1, then=Value('negative one')), When(f_minus_2=0, then=Value('zero')), When(f_minus_2=1, then=Value('one')), default=Value('other'), output_field=CharField(), ), ).order_by('pk'), [(1, 'negative one'), (2, 'zero'), (3, 'one'), (2, 'zero'), (3, 'one'), (3, 'one'), (4, 'other')], transform=attrgetter('integer', 'test') ) def test_annotate_with_aggregation_in_value(self): self.assertQuerysetEqual( CaseTestModel.objects.values(*self.non_lob_fields).annotate( min=Min('fk_rel__integer'), max=Max('fk_rel__integer'), ).annotate( test=Case( When(integer=2, then='min'), When(integer=3, then='max'), ), ).order_by('pk'), [(1, None, 1, 1), (2, 2, 2, 3), (3, 4, 3, 4), (2, 2, 2, 3), (3, 4, 3, 4), (3, 4, 3, 4), (4, None, 5, 5)], transform=itemgetter('integer', 'test', 'min', 'max') ) def test_annotate_with_aggregation_in_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.values(*self.non_lob_fields).annotate( min=Min('fk_rel__integer'), max=Max('fk_rel__integer'), ).annotate( test=Case( When(integer2=F('min'), then=Value('min')), When(integer2=F('max'), then=Value('max')), output_field=CharField(), ), ).order_by('pk'), [(1, 1, 'min'), (2, 3, 'max'), (3, 4, 'max'), (2, 2, 'min'), (3, 4, 'max'), (3, 3, 'min'), (4, 5, 'min')], transform=itemgetter('integer', 'integer2', 'test') ) def test_annotate_with_aggregation_in_predicate(self): self.assertQuerysetEqual( CaseTestModel.objects.values(*self.non_lob_fields).annotate( max=Max('fk_rel__integer'), ).annotate( test=Case( When(max=3, then=Value('max = 3')), When(max=4, then=Value('max = 4')), default=Value(''), output_field=CharField(), ), ).order_by('pk'), [(1, 1, ''), (2, 3, 'max = 3'), (3, 4, 'max = 4'), (2, 3, 'max = 3'), (3, 4, 'max = 4'), (3, 4, 'max = 4'), (4, 5, '')], transform=itemgetter('integer', 'max', 'test') ) def test_annotate_exclude(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(test=Case( When(integer=1, then=Value('one')), When(integer=2, then=Value('two')), default=Value('other'), output_field=CharField(), )).exclude(test='other').order_by('pk'), [(1, 'one'), (2, 'two'), (2, 'two')], transform=attrgetter('integer', 'test') ) def test_annotate_values_not_in_order_by(self): self.assertEqual( list(CaseTestModel.objects.annotate(test=Case( When(integer=1, then=Value('one')), When(integer=2, then=Value('two')), When(integer=3, then=Value('three')), default=Value('other'), output_field=CharField(), )).order_by('test').values_list('integer', flat=True)), [1, 4, 3, 3, 3, 2, 2] ) def test_annotate_with_empty_when(self): objects = CaseTestModel.objects.annotate( selected=Case( When(pk__in=[], then=Value('selected')), default=Value('not selected'), output_field=CharField() ) ) self.assertEqual(len(objects), CaseTestModel.objects.count()) self.assertTrue(all(obj.selected == 'not selected' for obj 
in objects)) def test_combined_expression(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( test=Case( When(integer=1, then=2), When(integer=2, then=1), default=3, output_field=IntegerField(), ) + 1, ).order_by('pk'), [(1, 3), (2, 2), (3, 4), (2, 2), (3, 4), (3, 4), (4, 4)], transform=attrgetter('integer', 'test') ) def test_in_subquery(self): self.assertQuerysetEqual( CaseTestModel.objects.filter( pk__in=CaseTestModel.objects.annotate( test=Case( When(integer=F('integer2'), then='pk'), When(integer=4, then='pk'), output_field=IntegerField(), ), ).values('test')).order_by('pk'), [(1, 1), (2, 2), (3, 3), (4, 5)], transform=attrgetter('integer', 'integer2') ) def test_condition_with_lookups(self): qs = CaseTestModel.objects.annotate( test=Case( When(Q(integer2=1), string='2', then=Value(False)), When(Q(integer2=1), string='1', then=Value(True)), default=Value(False), output_field=BooleanField(), ), ) self.assertIs(qs.get(integer=1).test, True) def test_case_reuse(self): SOME_CASE = Case( When(pk=0, then=Value('0')), default=Value('1'), output_field=CharField(), ) self.assertQuerysetEqual( CaseTestModel.objects.annotate(somecase=SOME_CASE).order_by('pk'), CaseTestModel.objects.annotate(somecase=SOME_CASE).order_by('pk').values_list('pk', 'somecase'), lambda x: (x.pk, x.somecase) ) def test_aggregate(self): self.assertEqual( CaseTestModel.objects.aggregate( one=Sum(Case( When(integer=1, then=1), output_field=IntegerField(), )), two=Sum(Case( When(integer=2, then=1), output_field=IntegerField(), )), three=Sum(Case( When(integer=3, then=1), output_field=IntegerField(), )), four=Sum(Case( When(integer=4, then=1), output_field=IntegerField(), )), ), {'one': 1, 'two': 2, 'three': 3, 'four': 1} ) def test_aggregate_with_expression_as_value(self): self.assertEqual( CaseTestModel.objects.aggregate( one=Sum(Case(When(integer=1, then='integer'))), two=Sum(Case(When(integer=2, then=F('integer') - 1))), three=Sum(Case(When(integer=3, then=F('integer') + 1))), ), {'one': 1, 'two': 2, 'three': 12} ) def test_aggregate_with_expression_as_condition(self): self.assertEqual( CaseTestModel.objects.aggregate( equal=Sum(Case( When(integer2=F('integer'), then=1), output_field=IntegerField(), )), plus_one=Sum(Case( When(integer2=F('integer') + 1, then=1), output_field=IntegerField(), )), ), {'equal': 3, 'plus_one': 4} ) def test_filter(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer2=Case( When(integer=2, then=3), When(integer=3, then=4), default=1, output_field=IntegerField(), )).order_by('pk'), [(1, 1), (2, 3), (3, 4), (3, 4)], transform=attrgetter('integer', 'integer2') ) def test_filter_without_default(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer2=Case( When(integer=2, then=3), When(integer=3, then=4), output_field=IntegerField(), )).order_by('pk'), [(2, 3), (3, 4), (3, 4)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_expression_as_value(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer2=Case( When(integer=2, then=F('integer') + 1), When(integer=3, then=F('integer')), default='integer', )).order_by('pk'), [(1, 1), (2, 3), (3, 3)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_expression_as_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(string=Case( When(integer2=F('integer'), then=Value('2')), When(integer2=F('integer') + 1, then=Value('3')), output_field=CharField(), )).order_by('pk'), [(3, 4, '3'), (2, 2, '2'), (3, 4, '3')], transform=attrgetter('integer', 
'integer2', 'string') ) def test_filter_with_join_in_value(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer2=Case( When(integer=2, then=F('o2o_rel__integer') + 1), When(integer=3, then=F('o2o_rel__integer')), default='o2o_rel__integer', )).order_by('pk'), [(1, 1), (2, 3), (3, 3)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_join_in_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer=Case( When(integer2=F('o2o_rel__integer') + 1, then=2), When(integer2=F('o2o_rel__integer'), then=3), output_field=IntegerField(), )).order_by('pk'), [(2, 3), (3, 3)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_join_in_predicate(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer2=Case( When(o2o_rel__integer=1, then=1), When(o2o_rel__integer=2, then=3), When(o2o_rel__integer=3, then=4), output_field=IntegerField(), )).order_by('pk'), [(1, 1), (2, 3), (3, 4), (3, 4)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_annotation_in_value(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( f=F('integer'), f_plus_1=F('integer') + 1, ).filter( integer2=Case( When(integer=2, then='f_plus_1'), When(integer=3, then='f'), ), ).order_by('pk'), [(2, 3), (3, 3)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_annotation_in_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( f_plus_1=F('integer') + 1, ).filter( integer=Case( When(integer2=F('integer'), then=2), When(integer2=F('f_plus_1'), then=3), output_field=IntegerField(), ), ).order_by('pk'), [(3, 4), (2, 2), (3, 4)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_annotation_in_predicate(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( f_plus_1=F('integer') + 1, ).filter( integer2=Case( When(f_plus_1=3, then=3), When(f_plus_1=4, then=4), default=1, output_field=IntegerField(), ), ).order_by('pk'), [(1, 1), (2, 3), (3, 4), (3, 4)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_aggregation_in_value(self): self.assertQuerysetEqual( CaseTestModel.objects.values(*self.non_lob_fields).annotate( min=Min('fk_rel__integer'), max=Max('fk_rel__integer'), ).filter( integer2=Case( When(integer=2, then='min'), When(integer=3, then='max'), ), ).order_by('pk'), [(3, 4, 3, 4), (2, 2, 2, 3), (3, 4, 3, 4)], transform=itemgetter('integer', 'integer2', 'min', 'max') ) def test_filter_with_aggregation_in_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.values(*self.non_lob_fields).annotate( min=Min('fk_rel__integer'), max=Max('fk_rel__integer'), ).filter( integer=Case( When(integer2=F('min'), then=2), When(integer2=F('max'), then=3), ), ).order_by('pk'), [(3, 4, 3, 4), (2, 2, 2, 3), (3, 4, 3, 4)], transform=itemgetter('integer', 'integer2', 'min', 'max') ) def test_filter_with_aggregation_in_predicate(self): self.assertQuerysetEqual( CaseTestModel.objects.values(*self.non_lob_fields).annotate( max=Max('fk_rel__integer'), ).filter( integer=Case( When(max=3, then=2), When(max=4, then=3), ), ).order_by('pk'), [(2, 3, 3), (3, 4, 4), (2, 2, 3), (3, 4, 4), (3, 3, 4)], transform=itemgetter('integer', 'integer2', 'max') ) def test_update(self): CaseTestModel.objects.update( string=Case( When(integer=1, then=Value('one')), When(integer=2, then=Value('two')), default=Value('other'), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 'one'), (2, 'two'), (3, 'other'), (2, 'two'), (3, 'other'), (3, 'other'), (4, 
'other')], transform=attrgetter('integer', 'string') ) def test_update_without_default(self): CaseTestModel.objects.update( integer2=Case( When(integer=1, then=1), When(integer=2, then=2), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 1), (2, 2), (3, None), (2, 2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'integer2') ) def test_update_with_expression_as_value(self): CaseTestModel.objects.update( integer=Case( When(integer=1, then=F('integer') + 1), When(integer=2, then=F('integer') + 3), default='integer', ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [('1', 2), ('2', 5), ('3', 3), ('2', 5), ('3', 3), ('3', 3), ('4', 4)], transform=attrgetter('string', 'integer') ) def test_update_with_expression_as_condition(self): CaseTestModel.objects.update( string=Case( When(integer2=F('integer'), then=Value('equal')), When(integer2=F('integer') + 1, then=Value('+1')), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 'equal'), (2, '+1'), (3, '+1'), (2, 'equal'), (3, '+1'), (3, 'equal'), (4, '+1')], transform=attrgetter('integer', 'string') ) def test_update_with_join_in_condition_raise_field_error(self): with self.assertRaisesMessage(FieldError, 'Joined field references are not permitted in this query'): CaseTestModel.objects.update( integer=Case( When(integer2=F('o2o_rel__integer') + 1, then=2), When(integer2=F('o2o_rel__integer'), then=3), output_field=IntegerField(), ), ) def test_update_with_join_in_predicate_raise_field_error(self): with self.assertRaisesMessage(FieldError, 'Joined field references are not permitted in this query'): CaseTestModel.objects.update( string=Case( When(o2o_rel__integer=1, then=Value('one')), When(o2o_rel__integer=2, then=Value('two')), When(o2o_rel__integer=3, then=Value('three')), default=Value('other'), output_field=CharField(), ), ) def test_update_big_integer(self): CaseTestModel.objects.update( big_integer=Case( When(integer=1, then=1), When(integer=2, then=2), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 1), (2, 2), (3, None), (2, 2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'big_integer') ) def test_update_binary(self): CaseTestModel.objects.update( binary=Case( When(integer=1, then=Value(b'one', output_field=BinaryField())), When(integer=2, then=Value(b'two', output_field=BinaryField())), default=Value(b'', output_field=BinaryField()), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, b'one'), (2, b'two'), (3, b''), (2, b'two'), (3, b''), (3, b''), (4, b'')], transform=lambda o: (o.integer, bytes(o.binary)) ) def test_update_boolean(self): CaseTestModel.objects.update( boolean=Case( When(integer=1, then=True), When(integer=2, then=True), default=False, ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, True), (2, True), (3, False), (2, True), (3, False), (3, False), (4, False)], transform=attrgetter('integer', 'boolean') ) def test_update_date(self): CaseTestModel.objects.update( date=Case( When(integer=1, then=date(2015, 1, 1)), When(integer=2, then=date(2015, 1, 2)), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [ (1, date(2015, 1, 1)), (2, date(2015, 1, 2)), (3, None), (2, date(2015, 1, 2)), (3, None), (3, None), (4, None) ], transform=attrgetter('integer', 'date') ) def test_update_date_time(self): CaseTestModel.objects.update( date_time=Case( When(integer=1, then=datetime(2015, 1, 1)), 
When(integer=2, then=datetime(2015, 1, 2)), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [ (1, datetime(2015, 1, 1)), (2, datetime(2015, 1, 2)), (3, None), (2, datetime(2015, 1, 2)), (3, None), (3, None), (4, None) ], transform=attrgetter('integer', 'date_time') ) def test_update_decimal(self): CaseTestModel.objects.update( decimal=Case( When(integer=1, then=Decimal('1.1')), When(integer=2, then=Decimal('2.2')), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [ (1, Decimal('1.1')), (2, Decimal('2.2')), (3, None), (2, Decimal('2.2')), (3, None), (3, None), (4, None) ], transform=attrgetter('integer', 'decimal') ) def test_update_duration(self): CaseTestModel.objects.update( duration=Case( # fails on sqlite if output_field is not set explicitly on all # Values containing timedeltas When(integer=1, then=Value(timedelta(1), output_field=DurationField())), When(integer=2, then=Value(timedelta(2), output_field=DurationField())), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, timedelta(1)), (2, timedelta(2)), (3, None), (2, timedelta(2)), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'duration') ) def test_update_email(self): CaseTestModel.objects.update( email=Case( When(integer=1, then=Value('[email protected]')), When(integer=2, then=Value('[email protected]')), default=Value(''), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, '[email protected]'), (2, '[email protected]'), (3, ''), (2, '[email protected]'), (3, ''), (3, ''), (4, '')], transform=attrgetter('integer', 'email') ) def test_update_file(self): CaseTestModel.objects.update( file=Case( When(integer=1, then=Value('~/1')), When(integer=2, then=Value('~/2')), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, '~/1'), (2, '~/2'), (3, ''), (2, '~/2'), (3, ''), (3, ''), (4, '')], transform=lambda o: (o.integer, str(o.file)) ) def test_update_file_path(self): CaseTestModel.objects.update( file_path=Case( When(integer=1, then=Value('~/1')), When(integer=2, then=Value('~/2')), default=Value(''), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, '~/1'), (2, '~/2'), (3, ''), (2, '~/2'), (3, ''), (3, ''), (4, '')], transform=attrgetter('integer', 'file_path') ) def test_update_float(self): CaseTestModel.objects.update( float=Case( When(integer=1, then=1.1), When(integer=2, then=2.2), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 1.1), (2, 2.2), (3, None), (2, 2.2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'float') ) @unittest.skipUnless(Image, "Pillow not installed") def test_update_image(self): CaseTestModel.objects.update( image=Case( When(integer=1, then=Value('~/1')), When(integer=2, then=Value('~/2')), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, '~/1'), (2, '~/2'), (3, ''), (2, '~/2'), (3, ''), (3, ''), (4, '')], transform=lambda o: (o.integer, str(o.image)) ) def test_update_generic_ip_address(self): CaseTestModel.objects.update( generic_ip_address=Case( # fails on postgresql if output_field is not set explicitly When(integer=1, then=Value('1.1.1.1')), When(integer=2, then=Value('2.2.2.2')), output_field=GenericIPAddressField(), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, '1.1.1.1'), (2, '2.2.2.2'), (3, None), (2, '2.2.2.2'), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'generic_ip_address') 
) def test_update_null_boolean(self): CaseTestModel.objects.update( null_boolean=Case( When(integer=1, then=True), When(integer=2, then=False), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, True), (2, False), (3, None), (2, False), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'null_boolean') ) def test_update_null_boolean_old(self): CaseTestModel.objects.update( null_boolean_old=Case( When(integer=1, then=True), When(integer=2, then=False), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, True), (2, False), (3, None), (2, False), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'null_boolean_old') ) def test_update_positive_big_integer(self): CaseTestModel.objects.update( positive_big_integer=Case( When(integer=1, then=1), When(integer=2, then=2), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 1), (2, 2), (3, None), (2, 2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'positive_big_integer') ) def test_update_positive_integer(self): CaseTestModel.objects.update( positive_integer=Case( When(integer=1, then=1), When(integer=2, then=2), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 1), (2, 2), (3, None), (2, 2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'positive_integer') ) def test_update_positive_small_integer(self): CaseTestModel.objects.update( positive_small_integer=Case( When(integer=1, then=1), When(integer=2, then=2), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 1), (2, 2), (3, None), (2, 2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'positive_small_integer') ) def test_update_slug(self): CaseTestModel.objects.update( slug=Case( When(integer=1, then=Value('1')), When(integer=2, then=Value('2')), default=Value(''), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, '1'), (2, '2'), (3, ''), (2, '2'), (3, ''), (3, ''), (4, '')], transform=attrgetter('integer', 'slug') ) def test_update_small_integer(self): CaseTestModel.objects.update( small_integer=Case( When(integer=1, then=1), When(integer=2, then=2), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 1), (2, 2), (3, None), (2, 2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'small_integer') ) def test_update_string(self): CaseTestModel.objects.filter(string__in=['1', '2']).update( string=Case( When(integer=1, then=Value('1', output_field=CharField())), When(integer=2, then=Value('2', output_field=CharField())), ), ) self.assertQuerysetEqual( CaseTestModel.objects.filter(string__in=['1', '2']).order_by('pk'), [(1, '1'), (2, '2'), (2, '2')], transform=attrgetter('integer', 'string') ) def test_update_text(self): CaseTestModel.objects.update( text=Case( When(integer=1, then=Value('1')), When(integer=2, then=Value('2')), default=Value(''), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, '1'), (2, '2'), (3, ''), (2, '2'), (3, ''), (3, ''), (4, '')], transform=attrgetter('integer', 'text') ) def test_update_time(self): CaseTestModel.objects.update( time=Case( # fails on sqlite if output_field is not set explicitly on all # Values containing times When(integer=1, then=Value(time(1), output_field=TimeField())), When(integer=2, then=Value(time(2), output_field=TimeField())), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, time(1)), 
(2, time(2)), (3, None), (2, time(2)), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'time') ) def test_update_url(self): CaseTestModel.objects.update( url=Case( When(integer=1, then=Value('http://1.example.com/')), When(integer=2, then=Value('http://2.example.com/')), default=Value(''), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [ (1, 'http://1.example.com/'), (2, 'http://2.example.com/'), (3, ''), (2, 'http://2.example.com/'), (3, ''), (3, ''), (4, '') ], transform=attrgetter('integer', 'url') ) def test_update_uuid(self): CaseTestModel.objects.update( uuid=Case( # fails on sqlite if output_field is not set explicitly on all # Values containing UUIDs When(integer=1, then=Value( UUID('11111111111111111111111111111111'), output_field=UUIDField(), )), When(integer=2, then=Value( UUID('22222222222222222222222222222222'), output_field=UUIDField(), )), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [ (1, UUID('11111111111111111111111111111111')), (2, UUID('22222222222222222222222222222222')), (3, None), (2, UUID('22222222222222222222222222222222')), (3, None), (3, None), (4, None), ], transform=attrgetter('integer', 'uuid') ) def test_update_fk(self): obj1, obj2 = CaseTestModel.objects.all()[:2] CaseTestModel.objects.update( fk=Case( When(integer=1, then=obj1.pk), When(integer=2, then=obj2.pk), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, obj1.pk), (2, obj2.pk), (3, None), (2, obj2.pk), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'fk_id') ) def test_lookup_in_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( test=Case( When(integer__lt=2, then=Value('less than 2')), When(integer__gt=2, then=Value('greater than 2')), default=Value('equal to 2'), output_field=CharField(), ), ).order_by('pk'), [ (1, 'less than 2'), (2, 'equal to 2'), (3, 'greater than 2'), (2, 'equal to 2'), (3, 'greater than 2'), (3, 'greater than 2'), (4, 'greater than 2') ], transform=attrgetter('integer', 'test') ) def test_lookup_different_fields(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( test=Case( When(integer=2, integer2=3, then=Value('when')), default=Value('default'), output_field=CharField(), ), ).order_by('pk'), [ (1, 1, 'default'), (2, 3, 'when'), (3, 4, 'default'), (2, 2, 'default'), (3, 4, 'default'), (3, 3, 'default'), (4, 5, 'default') ], transform=attrgetter('integer', 'integer2', 'test') ) def test_combined_q_object(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( test=Case( When(Q(integer=2) | Q(integer2=3), then=Value('when')), default=Value('default'), output_field=CharField(), ), ).order_by('pk'), [ (1, 1, 'default'), (2, 3, 'when'), (3, 4, 'default'), (2, 2, 'when'), (3, 4, 'default'), (3, 3, 'when'), (4, 5, 'default') ], transform=attrgetter('integer', 'integer2', 'test') ) def test_order_by_conditional_implicit(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer__lte=2).annotate(test=Case( When(integer=1, then=2), When(integer=2, then=1), default=3, output_field=IntegerField(), )).order_by('test', 'pk'), [(2, 1), (2, 1), (1, 2)], transform=attrgetter('integer', 'test') ) def test_order_by_conditional_explicit(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer__lte=2).annotate(test=Case( When(integer=1, then=2), When(integer=2, then=1), default=3, output_field=IntegerField(), )).order_by(F('test').asc(), 'pk'), [(2, 1), (2, 1), (1, 2)], transform=attrgetter('integer', 
'test') ) def test_join_promotion(self): o = CaseTestModel.objects.create(integer=1, integer2=1, string='1') # Testing that: # 1. There isn't any object on the remote side of the fk_rel # relation. If the query used inner joins, then the join to fk_rel # would remove o from the results. So, in effect we are testing that # we are promoting the fk_rel join to a left outer join here. # 2. The default value of 3 is generated for the case expression. self.assertQuerysetEqual( CaseTestModel.objects.filter(pk=o.pk).annotate( foo=Case( When(fk_rel__pk=1, then=2), default=3, output_field=IntegerField() ), ), [(o, 3)], lambda x: (x, x.foo) ) # Now 2 should be generated, as the fk_rel is null. self.assertQuerysetEqual( CaseTestModel.objects.filter(pk=o.pk).annotate( foo=Case( When(fk_rel__isnull=True, then=2), default=3, output_field=IntegerField() ), ), [(o, 2)], lambda x: (x, x.foo) ) def test_join_promotion_multiple_annotations(self): o = CaseTestModel.objects.create(integer=1, integer2=1, string='1') # Testing that: # 1. There isn't any object on the remote side of the fk_rel # relation. If the query used inner joins, then the join to fk_rel # would remove o from the results. So, in effect we are testing that # we are promoting the fk_rel join to a left outer join here. # 2. The default value of 3 is generated for the case expression. self.assertQuerysetEqual( CaseTestModel.objects.filter(pk=o.pk).annotate( foo=Case( When(fk_rel__pk=1, then=2), default=3, output_field=IntegerField() ), bar=Case( When(fk_rel__pk=1, then=4), default=5, output_field=IntegerField() ), ), [(o, 3, 5)], lambda x: (x, x.foo, x.bar) ) # Now 2 should be generated, as the fk_rel is null. self.assertQuerysetEqual( CaseTestModel.objects.filter(pk=o.pk).annotate( foo=Case( When(fk_rel__isnull=True, then=2), default=3, output_field=IntegerField() ), bar=Case( When(fk_rel__isnull=True, then=4), default=5, output_field=IntegerField() ), ), [(o, 2, 4)], lambda x: (x, x.foo, x.bar) ) def test_m2m_exclude(self): CaseTestModel.objects.create(integer=10, integer2=1, string='1') qs = CaseTestModel.objects.values_list('id', 'integer').annotate( cnt=Sum( Case(When(~Q(fk_rel__integer=1), then=1), default=2), output_field=IntegerField() ), ).order_by('integer') # The first o has 2 as its fk_rel__integer=1, thus it hits the # default=2 case. The other ones have 2 as the result as they have 2 # fk_rel objects, except for integer=4 and integer=10 (created above). # The integer=4 case has one integer, thus the result is 1, and # integer=10 doesn't have any and this too generates 1 (instead of 0) # as ~Q() also matches nulls. self.assertQuerysetEqual( qs, [(1, 2), (2, 2), (2, 2), (3, 2), (3, 2), (3, 2), (4, 1), (10, 1)], lambda x: x[1:] ) def test_m2m_reuse(self): CaseTestModel.objects.create(integer=10, integer2=1, string='1') # Need to use values before annotate so that Oracle will not group # by fields it isn't capable of grouping by. 
qs = CaseTestModel.objects.values_list('id', 'integer').annotate( cnt=Sum( Case(When(~Q(fk_rel__integer=1), then=1), default=2), output_field=IntegerField() ), ).annotate( cnt2=Sum( Case(When(~Q(fk_rel__integer=1), then=1), default=2), output_field=IntegerField() ), ).order_by('integer') self.assertEqual(str(qs.query).count(' JOIN '), 1) self.assertQuerysetEqual( qs, [(1, 2, 2), (2, 2, 2), (2, 2, 2), (3, 2, 2), (3, 2, 2), (3, 2, 2), (4, 1, 1), (10, 1, 1)], lambda x: x[1:] ) class CaseDocumentationExamples(TestCase): @classmethod def setUpTestData(cls): Client.objects.create( name='Jane Doe', account_type=Client.REGULAR, registered_on=date.today() - timedelta(days=36), ) Client.objects.create( name='James Smith', account_type=Client.GOLD, registered_on=date.today() - timedelta(days=5), ) Client.objects.create( name='Jack Black', account_type=Client.PLATINUM, registered_on=date.today() - timedelta(days=10 * 365), ) def test_simple_example(self): self.assertQuerysetEqual( Client.objects.annotate( discount=Case( When(account_type=Client.GOLD, then=Value('5%')), When(account_type=Client.PLATINUM, then=Value('10%')), default=Value('0%'), output_field=CharField(), ), ).order_by('pk'), [('Jane Doe', '0%'), ('James Smith', '5%'), ('Jack Black', '10%')], transform=attrgetter('name', 'discount') ) def test_lookup_example(self): a_month_ago = date.today() - timedelta(days=30) a_year_ago = date.today() - timedelta(days=365) self.assertQuerysetEqual( Client.objects.annotate( discount=Case( When(registered_on__lte=a_year_ago, then=Value('10%')), When(registered_on__lte=a_month_ago, then=Value('5%')), default=Value('0%'), output_field=CharField(), ), ).order_by('pk'), [('Jane Doe', '5%'), ('James Smith', '0%'), ('Jack Black', '10%')], transform=attrgetter('name', 'discount') ) def test_conditional_update_example(self): a_month_ago = date.today() - timedelta(days=30) a_year_ago = date.today() - timedelta(days=365) Client.objects.update( account_type=Case( When(registered_on__lte=a_year_ago, then=Value(Client.PLATINUM)), When(registered_on__lte=a_month_ago, then=Value(Client.GOLD)), default=Value(Client.REGULAR), ), ) self.assertQuerysetEqual( Client.objects.all().order_by('pk'), [('Jane Doe', 'G'), ('James Smith', 'R'), ('Jack Black', 'P')], transform=attrgetter('name', 'account_type') ) def test_conditional_aggregation_example(self): Client.objects.create( name='Jean Grey', account_type=Client.REGULAR, registered_on=date.today(), ) Client.objects.create( name='James Bond', account_type=Client.PLATINUM, registered_on=date.today(), ) Client.objects.create( name='Jane Porter', account_type=Client.PLATINUM, registered_on=date.today(), ) self.assertEqual( Client.objects.aggregate( regular=Count('pk', filter=Q(account_type=Client.REGULAR)), gold=Count('pk', filter=Q(account_type=Client.GOLD)), platinum=Count('pk', filter=Q(account_type=Client.PLATINUM)), ), {'regular': 2, 'gold': 1, 'platinum': 3} ) # This was the example before the filter argument was added. 
self.assertEqual( Client.objects.aggregate( regular=Sum(Case( When(account_type=Client.REGULAR, then=1), output_field=IntegerField(), )), gold=Sum(Case( When(account_type=Client.GOLD, then=1), output_field=IntegerField(), )), platinum=Sum(Case( When(account_type=Client.PLATINUM, then=1), output_field=IntegerField(), )), ), {'regular': 2, 'gold': 1, 'platinum': 3} ) def test_filter_example(self): a_month_ago = date.today() - timedelta(days=30) a_year_ago = date.today() - timedelta(days=365) self.assertQuerysetEqual( Client.objects.filter( registered_on__lte=Case( When(account_type=Client.GOLD, then=a_month_ago), When(account_type=Client.PLATINUM, then=a_year_ago), ), ), [('Jack Black', 'P')], transform=attrgetter('name', 'account_type') ) def test_hash(self): expression_1 = Case( When(account_type__in=[Client.REGULAR, Client.GOLD], then=1), default=2, output_field=IntegerField(), ) expression_2 = Case( When(account_type__in=(Client.REGULAR, Client.GOLD), then=1), default=2, output_field=IntegerField(), ) expression_3 = Case(When(account_type__in=[Client.REGULAR, Client.GOLD], then=1), default=2) expression_4 = Case(When(account_type__in=[Client.PLATINUM, Client.GOLD], then=2), default=1) self.assertEqual(hash(expression_1), hash(expression_2)) self.assertNotEqual(hash(expression_2), hash(expression_3)) self.assertNotEqual(hash(expression_1), hash(expression_4)) self.assertNotEqual(hash(expression_3), hash(expression_4)) class CaseWhenTests(SimpleTestCase): def test_only_when_arguments(self): msg = 'Positional arguments must all be When objects.' with self.assertRaisesMessage(TypeError, msg): Case(When(Q(pk__in=[])), object()) def test_invalid_when_constructor_args(self): msg = ( 'When() supports a Q object, a boolean expression, or lookups as ' 'a condition.' ) with self.assertRaisesMessage(TypeError, msg): When(condition=object()) with self.assertRaisesMessage(TypeError, msg): When(condition=Value(1, output_field=IntegerField())) with self.assertRaisesMessage(TypeError, msg): When(Value(1, output_field=IntegerField()), string='1') with self.assertRaisesMessage(TypeError, msg): When() def test_empty_q_object(self): msg = "An empty Q() can't be used as a When() condition." with self.assertRaisesMessage(ValueError, msg): When(Q(), then=Value(True))
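# A minimal sketch of the models these tests assume; the real definitions live
# in expressions_case/models.py and carry many more typed fields (big_integer,
# binary, boolean, date, ... uuid) than shown here. The field names and the
# o2o_rel/fk_rel related names are taken from the queries above; everything
# else is illustrative.
from django.db import models


class CaseTestModel(models.Model):
    integer = models.IntegerField()
    integer2 = models.IntegerField(null=True)
    string = models.CharField(max_length=100, default='')
    fk = models.ForeignKey('self', models.SET_NULL, null=True)


class O2OCaseTestModel(models.Model):
    o2o = models.OneToOneField(CaseTestModel, models.CASCADE, related_name='o2o_rel')
    integer = models.IntegerField()


class FKCaseTestModel(models.Model):
    fk = models.ForeignKey(CaseTestModel, models.CASCADE, related_name='fk_rel')
    integer = models.IntegerField()


class Client(models.Model):
    REGULAR = 'R'
    GOLD = 'G'
    PLATINUM = 'P'
    ACCOUNT_TYPE_CHOICES = [
        (REGULAR, 'Regular'),
        (GOLD, 'Gold'),
        (PLATINUM, 'Platinum'),
    ]
    name = models.CharField(max_length=50)
    registered_on = models.DateField()
    account_type = models.CharField(
        max_length=1,
        choices=ACCOUNT_TYPE_CHOICES,
        default=REGULAR,
    )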
e07b09d88f039daf72b90d01ba3c19810cc3b2f3abd3a5c5d1beeb38023bcd7b
import json
import sys

from django.test import SimpleTestCase, ignore_warnings
from django.utils import text
from django.utils.deprecation import RemovedInDjango40Warning
from django.utils.functional import lazystr
from django.utils.text import format_lazy
from django.utils.translation import gettext_lazy, override

IS_WIDE_BUILD = (len('\U0001F4A9') == 1)


class TestUtilsText(SimpleTestCase):

    def test_get_text_list(self):
        self.assertEqual(text.get_text_list(['a', 'b', 'c', 'd']), 'a, b, c or d')
        self.assertEqual(text.get_text_list(['a', 'b', 'c'], 'and'), 'a, b and c')
        self.assertEqual(text.get_text_list(['a', 'b'], 'and'), 'a and b')
        self.assertEqual(text.get_text_list(['a']), 'a')
        self.assertEqual(text.get_text_list([]), '')
        with override('ar'):
            self.assertEqual(text.get_text_list(['a', 'b', 'c']), "a، b أو c")

    def test_smart_split(self):
        testdata = [
            ('This is "a person" test.', ['This', 'is', '"a person"', 'test.']),
            ('This is "a person\'s" test.', ['This', 'is', '"a person\'s"', 'test.']),
            ('This is "a person\\"s" test.', ['This', 'is', '"a person\\"s"', 'test.']),
            ('"a \'one', ['"a', "'one"]),
            ('all friends\' tests', ['all', 'friends\'', 'tests']),
            ('url search_page words="something else"', ['url', 'search_page', 'words="something else"']),
            ("url search_page words='something else'", ['url', 'search_page', "words='something else'"]),
            ('url search_page words "something else"', ['url', 'search_page', 'words', '"something else"']),
            ('url search_page words-"something else"', ['url', 'search_page', 'words-"something else"']),
            ('url search_page words=hello', ['url', 'search_page', 'words=hello']),
            ('url search_page words="something else', ['url', 'search_page', 'words="something', 'else']),
            ("cut:','|cut:' '", ["cut:','|cut:' '"]),
            (lazystr("a b c d"), ['a', 'b', 'c', 'd']),  # Test for #20231
        ]
        for test, expected in testdata:
            self.assertEqual(list(text.smart_split(test)), expected)

    def test_truncate_chars(self):
        truncator = text.Truncator('The quick brown fox jumped over the lazy dog.')
        self.assertEqual('The quick brown fox jumped over the lazy dog.', truncator.chars(100))
        self.assertEqual('The quick brown fox …', truncator.chars(21))
        self.assertEqual('The quick brown fo.....', truncator.chars(23, '.....'))
        self.assertEqual('.....', truncator.chars(4, '.....'))

        nfc = text.Truncator('o\xfco\xfco\xfco\xfc')
        nfd = text.Truncator('ou\u0308ou\u0308ou\u0308ou\u0308')
        self.assertEqual('oüoüoüoü', nfc.chars(8))
        self.assertEqual('oüoüoüoü', nfd.chars(8))
        self.assertEqual('oü…', nfc.chars(3))
        self.assertEqual('oü…', nfd.chars(3))

        # Ensure the final length is calculated correctly when there are
        # combining characters with no precomposed form, and that combining
        # characters are not split up.
        truncator = text.Truncator('-B\u030AB\u030A----8')
        self.assertEqual('-B\u030A…', truncator.chars(3))
        self.assertEqual('-B\u030AB\u030A-…', truncator.chars(5))
        self.assertEqual('-B\u030AB\u030A----8', truncator.chars(8))

        # Ensure the length of the end text is correctly calculated when it
        # contains combining characters with no precomposed form.
truncator = text.Truncator('-----') self.assertEqual('---B\u030A', truncator.chars(4, 'B\u030A')) self.assertEqual('-----', truncator.chars(5, 'B\u030A')) # Make a best effort to shorten to the desired length, but requesting # a length shorter than the ellipsis shouldn't break self.assertEqual('…', text.Truncator('asdf').chars(0)) # lazy strings are handled correctly self.assertEqual(text.Truncator(lazystr('The quick brown fox')).chars(10), 'The quick…') def test_truncate_chars_html(self): perf_test_values = [ (('</a' + '\t' * 50000) + '//>', None), ('&' * 50000, '&' * 9 + '…'), ('_X<<<<<<<<<<<>', None), ] for value, expected in perf_test_values: with self.subTest(value=value): truncator = text.Truncator(value) self.assertEqual(expected if expected else value, truncator.chars(10, html=True)) def test_truncate_words(self): truncator = text.Truncator('The quick brown fox jumped over the lazy dog.') self.assertEqual('The quick brown fox jumped over the lazy dog.', truncator.words(10)) self.assertEqual('The quick brown fox…', truncator.words(4)) self.assertEqual('The quick brown fox[snip]', truncator.words(4, '[snip]')) # lazy strings are handled correctly truncator = text.Truncator(lazystr('The quick brown fox jumped over the lazy dog.')) self.assertEqual('The quick brown fox…', truncator.words(4)) def test_truncate_html_words(self): truncator = text.Truncator( '<p id="par"><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>' ) self.assertEqual( '<p id="par"><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>', truncator.words(10, html=True) ) self.assertEqual( '<p id="par"><strong><em>The quick brown fox…</em></strong></p>', truncator.words(4, html=True) ) self.assertEqual( '<p id="par"><strong><em>The quick brown fox....</em></strong></p>', truncator.words(4, '....', html=True) ) self.assertEqual( '<p id="par"><strong><em>The quick brown fox</em></strong></p>', truncator.words(4, '', html=True) ) # Test with new line inside tag truncator = text.Truncator( '<p>The quick <a href="xyz.html"\n id="mylink">brown fox</a> jumped over the lazy dog.</p>' ) self.assertEqual( '<p>The quick <a href="xyz.html"\n id="mylink">brown…</a></p>', truncator.words(3, html=True) ) # Test self-closing tags truncator = text.Truncator('<br/>The <hr />quick brown fox jumped over the lazy dog.') self.assertEqual('<br/>The <hr />quick brown…', truncator.words(3, html=True)) truncator = text.Truncator('<br>The <hr/>quick <em>brown fox</em> jumped over the lazy dog.') self.assertEqual('<br>The <hr/>quick <em>brown…</em>', truncator.words(3, html=True)) # Test html entities truncator = text.Truncator('<i>Buenos d&iacute;as! &#x00bf;C&oacute;mo est&aacute;?</i>') self.assertEqual('<i>Buenos d&iacute;as! &#x00bf;C&oacute;mo…</i>', truncator.words(3, html=True)) truncator = text.Truncator('<p>I &lt;3 python, what about you?</p>') self.assertEqual('<p>I &lt;3 python,…</p>', truncator.words(3, html=True)) perf_test_values = [ ('</a' + '\t' * 50000) + '//>', '&' * 50000, '_X<<<<<<<<<<<>', ] for value in perf_test_values: with self.subTest(value=value): truncator = text.Truncator(value) self.assertEqual(value, truncator.words(50, html=True)) def test_wrap(self): digits = '1234 67 9' self.assertEqual(text.wrap(digits, 100), '1234 67 9') self.assertEqual(text.wrap(digits, 9), '1234 67 9') self.assertEqual(text.wrap(digits, 8), '1234 67\n9') self.assertEqual(text.wrap('short\na long line', 7), 'short\na long\nline') self.assertEqual(text.wrap('do-not-break-long-words please? 
ok', 8), 'do-not-break-long-words\nplease?\nok') long_word = 'l%sng' % ('o' * 20) self.assertEqual(text.wrap(long_word, 20), long_word) self.assertEqual(text.wrap('a %s word' % long_word, 10), 'a\n%s\nword' % long_word) self.assertEqual(text.wrap(lazystr(digits), 100), '1234 67 9') def test_normalize_newlines(self): self.assertEqual(text.normalize_newlines("abc\ndef\rghi\r\n"), "abc\ndef\nghi\n") self.assertEqual(text.normalize_newlines("\n\r\r\n\r"), "\n\n\n\n") self.assertEqual(text.normalize_newlines("abcdefghi"), "abcdefghi") self.assertEqual(text.normalize_newlines(""), "") self.assertEqual(text.normalize_newlines(lazystr("abc\ndef\rghi\r\n")), "abc\ndef\nghi\n") def test_phone2numeric(self): numeric = text.phone2numeric('0800 flowers') self.assertEqual(numeric, '0800 3569377') lazy_numeric = lazystr(text.phone2numeric('0800 flowers')) self.assertEqual(lazy_numeric, '0800 3569377') def test_slugify(self): items = ( # given - expected - Unicode? ('Hello, World!', 'hello-world', False), ('spam & eggs', 'spam-eggs', False), (' multiple---dash and space ', 'multiple-dash-and-space', False), ('\t whitespace-in-value \n', 'whitespace-in-value', False), ('underscore_in-value', 'underscore_in-value', False), ('__strip__underscore-value___', 'strip__underscore-value', False), ('--strip-dash-value---', 'strip-dash-value', False), ('__strip-mixed-value---', 'strip-mixed-value', False), ('_ -strip-mixed-value _-', 'strip-mixed-value', False), ('spam & ıçüş', 'spam-ıçüş', True), ('foo ıç bar', 'foo-ıç-bar', True), (' foo ıç bar', 'foo-ıç-bar', True), ('你好', '你好', True), ('İstanbul', 'istanbul', True), ) for value, output, is_unicode in items: self.assertEqual(text.slugify(value, allow_unicode=is_unicode), output) # interning the result may be useful, e.g. when fed to Path. self.assertEqual(sys.intern(text.slugify('a')), 'a') @ignore_warnings(category=RemovedInDjango40Warning) def test_unescape_entities(self): items = [ ('', ''), ('foo', 'foo'), ('&amp;', '&'), ('&am;', '&am;'), ('&#x26;', '&'), ('&#xk;', '&#xk;'), ('&#38;', '&'), ('foo &amp; bar', 'foo & bar'), ('foo & bar', 'foo & bar'), ] for value, output in items: self.assertEqual(text.unescape_entities(value), output) self.assertEqual(text.unescape_entities(lazystr(value)), output) def test_unescape_entities_deprecated(self): msg = ( 'django.utils.text.unescape_entities() is deprecated in favor of ' 'html.unescape().' 
) with self.assertWarnsMessage(RemovedInDjango40Warning, msg): text.unescape_entities('foo') def test_unescape_string_literal(self): items = [ ('"abc"', 'abc'), ("'abc'", 'abc'), ('"a \"bc\""', 'a "bc"'), ("'\'ab\' c'", "'ab' c"), ] for value, output in items: self.assertEqual(text.unescape_string_literal(value), output) self.assertEqual(text.unescape_string_literal(lazystr(value)), output) def test_get_valid_filename(self): filename = "^&'@{}[],$=!-#()%+~_123.txt" self.assertEqual(text.get_valid_filename(filename), "-_123.txt") self.assertEqual(text.get_valid_filename(lazystr(filename)), "-_123.txt") def test_compress_sequence(self): data = [{'key': i} for i in range(10)] seq = list(json.JSONEncoder().iterencode(data)) seq = [s.encode() for s in seq] actual_length = len(b''.join(seq)) out = text.compress_sequence(seq) compressed_length = len(b''.join(out)) self.assertLess(compressed_length, actual_length) def test_format_lazy(self): self.assertEqual('django/test', format_lazy('{}/{}', 'django', lazystr('test'))) self.assertEqual('django/test', format_lazy('{0}/{1}', *('django', 'test'))) self.assertEqual('django/test', format_lazy('{a}/{b}', **{'a': 'django', 'b': 'test'})) self.assertEqual('django/test', format_lazy('{a[0]}/{a[1]}', a=('django', 'test'))) t = {} s = format_lazy('{0[a]}-{p[a]}', t, p=t) t['a'] = lazystr('django') self.assertEqual('django-django', s) t['a'] = 'update' self.assertEqual('update-update', s) # The format string can be lazy. (string comes from contrib.admin) s = format_lazy( gettext_lazy('Added {name} “{object}”.'), name='article', object='My first try', ) with override('fr'): self.assertEqual('Ajout de article «\xa0My first try\xa0».', s)
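# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original suite): format_lazy() defers
# str.format() until the proxy is coerced to str, and re-evaluates on every
# coercion, which is why mutating the arguments in test_format_lazy above
# changes the result. Uses only names already imported in this module.
# ---------------------------------------------------------------------------
class FormatLazySketch(SimpleTestCase):
    def test_reevaluated_on_each_coercion(self):
        holder = {}
        proxy = format_lazy('{0[key]}', holder)  # nothing is formatted yet
        holder['key'] = lazystr('first')
        self.assertEqual(str(proxy), 'first')    # evaluated on coercion
        holder['key'] = 'second'
        self.assertEqual(str(proxy), 'second')   # evaluated again, not cached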
f77b90e92c8a3e2d81ca933fcc9fd5c04dee5680eacb3a83620ef099fa4e7323
import operator from django.db import DatabaseError, NotSupportedError, connection from django.db.models import Exists, F, IntegerField, OuterRef, Value from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature from .models import Number, ReservedName @skipUnlessDBFeature('supports_select_union') class QuerySetSetOperationTests(TestCase): @classmethod def setUpTestData(cls): Number.objects.bulk_create(Number(num=i, other_num=10 - i) for i in range(10)) def assertNumbersEqual(self, queryset, expected_numbers, ordered=True): self.assertQuerysetEqual(queryset, expected_numbers, operator.attrgetter('num'), ordered) def test_simple_union(self): qs1 = Number.objects.filter(num__lte=1) qs2 = Number.objects.filter(num__gte=8) qs3 = Number.objects.filter(num=5) self.assertNumbersEqual(qs1.union(qs2, qs3), [0, 1, 5, 8, 9], ordered=False) @skipUnlessDBFeature('supports_select_intersection') def test_simple_intersection(self): qs1 = Number.objects.filter(num__lte=5) qs2 = Number.objects.filter(num__gte=5) qs3 = Number.objects.filter(num__gte=4, num__lte=6) self.assertNumbersEqual(qs1.intersection(qs2, qs3), [5], ordered=False) @skipUnlessDBFeature('supports_select_intersection') def test_intersection_with_values(self): ReservedName.objects.create(name='a', order=2) qs1 = ReservedName.objects.all() reserved_name = qs1.intersection(qs1).values('name', 'order', 'id').get() self.assertEqual(reserved_name['name'], 'a') self.assertEqual(reserved_name['order'], 2) reserved_name = qs1.intersection(qs1).values_list('name', 'order', 'id').get() self.assertEqual(reserved_name[:2], ('a', 2)) @skipUnlessDBFeature('supports_select_difference') def test_simple_difference(self): qs1 = Number.objects.filter(num__lte=5) qs2 = Number.objects.filter(num__lte=4) self.assertNumbersEqual(qs1.difference(qs2), [5], ordered=False) def test_union_distinct(self): qs1 = Number.objects.all() qs2 = Number.objects.all() self.assertEqual(len(list(qs1.union(qs2, all=True))), 20) self.assertEqual(len(list(qs1.union(qs2))), 10) @skipUnlessDBFeature('supports_select_intersection') def test_intersection_with_empty_qs(self): qs1 = Number.objects.all() qs2 = Number.objects.none() qs3 = Number.objects.filter(pk__in=[]) self.assertEqual(len(qs1.intersection(qs2)), 0) self.assertEqual(len(qs1.intersection(qs3)), 0) self.assertEqual(len(qs2.intersection(qs1)), 0) self.assertEqual(len(qs3.intersection(qs1)), 0) self.assertEqual(len(qs2.intersection(qs2)), 0) self.assertEqual(len(qs3.intersection(qs3)), 0) @skipUnlessDBFeature('supports_select_difference') def test_difference_with_empty_qs(self): qs1 = Number.objects.all() qs2 = Number.objects.none() qs3 = Number.objects.filter(pk__in=[]) self.assertEqual(len(qs1.difference(qs2)), 10) self.assertEqual(len(qs1.difference(qs3)), 10) self.assertEqual(len(qs2.difference(qs1)), 0) self.assertEqual(len(qs3.difference(qs1)), 0) self.assertEqual(len(qs2.difference(qs2)), 0) self.assertEqual(len(qs3.difference(qs3)), 0) @skipUnlessDBFeature('supports_select_difference') def test_difference_with_values(self): ReservedName.objects.create(name='a', order=2) qs1 = ReservedName.objects.all() qs2 = ReservedName.objects.none() reserved_name = qs1.difference(qs2).values('name', 'order', 'id').get() self.assertEqual(reserved_name['name'], 'a') self.assertEqual(reserved_name['order'], 2) reserved_name = qs1.difference(qs2).values_list('name', 'order', 'id').get() self.assertEqual(reserved_name[:2], ('a', 2)) def test_union_with_empty_qs(self): qs1 = Number.objects.all() qs2 = Number.objects.none() qs3 
= Number.objects.filter(pk__in=[]) self.assertEqual(len(qs1.union(qs2)), 10) self.assertEqual(len(qs2.union(qs1)), 10) self.assertEqual(len(qs1.union(qs3)), 10) self.assertEqual(len(qs3.union(qs1)), 10) self.assertEqual(len(qs2.union(qs1, qs1, qs1)), 10) self.assertEqual(len(qs2.union(qs1, qs1, all=True)), 20) self.assertEqual(len(qs2.union(qs2)), 0) self.assertEqual(len(qs3.union(qs3)), 0) def test_limits(self): qs1 = Number.objects.all() qs2 = Number.objects.all() self.assertEqual(len(list(qs1.union(qs2)[:2])), 2) def test_ordering(self): qs1 = Number.objects.filter(num__lte=1) qs2 = Number.objects.filter(num__gte=2, num__lte=3) self.assertNumbersEqual(qs1.union(qs2).order_by('-num'), [3, 2, 1, 0]) def test_ordering_by_alias(self): qs1 = Number.objects.filter(num__lte=1).values(alias=F('num')) qs2 = Number.objects.filter(num__gte=2, num__lte=3).values(alias=F('num')) self.assertQuerysetEqual( qs1.union(qs2).order_by('-alias'), [3, 2, 1, 0], operator.itemgetter('alias'), ) def test_ordering_by_f_expression(self): qs1 = Number.objects.filter(num__lte=1) qs2 = Number.objects.filter(num__gte=2, num__lte=3) self.assertNumbersEqual(qs1.union(qs2).order_by(F('num').desc()), [3, 2, 1, 0]) def test_ordering_by_f_expression_and_alias(self): qs1 = Number.objects.filter(num__lte=1).values(alias=F('other_num')) qs2 = Number.objects.filter(num__gte=2, num__lte=3).values(alias=F('other_num')) self.assertQuerysetEqual( qs1.union(qs2).order_by(F('alias').desc()), [10, 9, 8, 7], operator.itemgetter('alias'), ) Number.objects.create(num=-1) self.assertQuerysetEqual( qs1.union(qs2).order_by(F('alias').desc(nulls_last=True)), [10, 9, 8, 7, None], operator.itemgetter('alias'), ) def test_union_with_values(self): ReservedName.objects.create(name='a', order=2) qs1 = ReservedName.objects.all() reserved_name = qs1.union(qs1).values('name', 'order', 'id').get() self.assertEqual(reserved_name['name'], 'a') self.assertEqual(reserved_name['order'], 2) reserved_name = qs1.union(qs1).values_list('name', 'order', 'id').get() self.assertEqual(reserved_name[:2], ('a', 2)) # List of columns can be changed. 
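        # (a later values_list() call narrows the combined query to just the
        # requested columns)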
reserved_name = qs1.union(qs1).values_list('order').get() self.assertEqual(reserved_name, (2,)) def test_union_with_two_annotated_values_list(self): qs1 = Number.objects.filter(num=1).annotate( count=Value(0, IntegerField()), ).values_list('num', 'count') qs2 = Number.objects.filter(num=2).values('pk').annotate( count=F('num'), ).annotate( num=Value(1, IntegerField()), ).values_list('num', 'count') self.assertCountEqual(qs1.union(qs2), [(1, 0), (2, 1)]) def test_union_with_extra_and_values_list(self): qs1 = Number.objects.filter(num=1).extra( select={'count': 0}, ).values_list('num', 'count') qs2 = Number.objects.filter(num=2).extra(select={'count': 1}) self.assertCountEqual(qs1.union(qs2), [(1, 0), (2, 1)]) def test_union_with_values_list_on_annotated_and_unannotated(self): ReservedName.objects.create(name='rn1', order=1) qs1 = Number.objects.annotate( has_reserved_name=Exists(ReservedName.objects.filter(order=OuterRef('num'))) ).filter(has_reserved_name=True) qs2 = Number.objects.filter(num=9) self.assertCountEqual(qs1.union(qs2).values_list('num', flat=True), [1, 9]) def test_union_with_values_list_and_order(self): ReservedName.objects.bulk_create([ ReservedName(name='rn1', order=7), ReservedName(name='rn2', order=5), ReservedName(name='rn0', order=6), ReservedName(name='rn9', order=-1), ]) qs1 = ReservedName.objects.filter(order__gte=6) qs2 = ReservedName.objects.filter(order__lte=5) union_qs = qs1.union(qs2) for qs, expected_result in ( # Order by a single column. (union_qs.order_by('-pk').values_list('order', flat=True), [-1, 6, 5, 7]), (union_qs.order_by('pk').values_list('order', flat=True), [7, 5, 6, -1]), (union_qs.values_list('order', flat=True).order_by('-pk'), [-1, 6, 5, 7]), (union_qs.values_list('order', flat=True).order_by('pk'), [7, 5, 6, -1]), # Order by multiple columns. 
(union_qs.order_by('-name', 'pk').values_list('order', flat=True), [-1, 5, 7, 6]), (union_qs.values_list('order', flat=True).order_by('-name', 'pk'), [-1, 5, 7, 6]), ): with self.subTest(qs=qs): self.assertEqual(list(qs), expected_result) def test_count_union(self): qs1 = Number.objects.filter(num__lte=1).values('num') qs2 = Number.objects.filter(num__gte=2, num__lte=3).values('num') self.assertEqual(qs1.union(qs2).count(), 4) def test_count_union_empty_result(self): qs = Number.objects.filter(pk__in=[]) self.assertEqual(qs.union(qs).count(), 0) @skipUnlessDBFeature('supports_select_difference') def test_count_difference(self): qs1 = Number.objects.filter(num__lt=10) qs2 = Number.objects.filter(num__lt=9) self.assertEqual(qs1.difference(qs2).count(), 1) @skipUnlessDBFeature('supports_select_intersection') def test_count_intersection(self): qs1 = Number.objects.filter(num__gte=5) qs2 = Number.objects.filter(num__lte=5) self.assertEqual(qs1.intersection(qs2).count(), 1) @skipUnlessDBFeature('supports_slicing_ordering_in_compound') def test_ordering_subqueries(self): qs1 = Number.objects.order_by('num')[:2] qs2 = Number.objects.order_by('-num')[:2] self.assertNumbersEqual(qs1.union(qs2).order_by('-num')[:4], [9, 8, 1, 0]) @skipIfDBFeature('supports_slicing_ordering_in_compound') def test_unsupported_ordering_slicing_raises_db_error(self): qs1 = Number.objects.all() qs2 = Number.objects.all() msg = 'LIMIT/OFFSET not allowed in subqueries of compound statements' with self.assertRaisesMessage(DatabaseError, msg): list(qs1.union(qs2[:10])) msg = 'ORDER BY not allowed in subqueries of compound statements' with self.assertRaisesMessage(DatabaseError, msg): list(qs1.order_by('id').union(qs2)) @skipIfDBFeature('supports_select_intersection') def test_unsupported_intersection_raises_db_error(self): qs1 = Number.objects.all() qs2 = Number.objects.all() msg = 'intersection is not supported on this database backend' with self.assertRaisesMessage(NotSupportedError, msg): list(qs1.intersection(qs2)) def test_combining_multiple_models(self): ReservedName.objects.create(name='99 little bugs', order=99) qs1 = Number.objects.filter(num=1).values_list('num', flat=True) qs2 = ReservedName.objects.values_list('order') self.assertEqual(list(qs1.union(qs2).order_by('num')), [1, 99]) def test_order_raises_on_non_selected_column(self): qs1 = Number.objects.filter().annotate( annotation=Value(1, IntegerField()), ).values('annotation', num2=F('num')) qs2 = Number.objects.filter().values('id', 'num') # Should not raise list(qs1.union(qs2).order_by('annotation')) list(qs1.union(qs2).order_by('num2')) msg = 'ORDER BY term does not match any column in the result set' # 'id' is not part of the select with self.assertRaisesMessage(DatabaseError, msg): list(qs1.union(qs2).order_by('id')) # 'num' got realiased to num2 with self.assertRaisesMessage(DatabaseError, msg): list(qs1.union(qs2).order_by('num')) with self.assertRaisesMessage(DatabaseError, msg): list(qs1.union(qs2).order_by(F('num'))) with self.assertRaisesMessage(DatabaseError, msg): list(qs1.union(qs2).order_by(F('num').desc())) # switched order, now 'exists' again: list(qs2.union(qs1).order_by('num')) @skipUnlessDBFeature('supports_select_difference', 'supports_select_intersection') def test_qs_with_subcompound_qs(self): qs1 = Number.objects.all() qs2 = Number.objects.intersection(Number.objects.filter(num__gt=1)) self.assertEqual(qs1.difference(qs2).count(), 2) def test_order_by_same_type(self): qs = Number.objects.all() union = qs.union(qs) numbers = 
list(range(10)) self.assertNumbersEqual(union.order_by('num'), numbers) self.assertNumbersEqual(union.order_by('other_num'), reversed(numbers)) def test_unsupported_operations_on_combined_qs(self): qs = Number.objects.all() msg = 'Calling QuerySet.%s() after %s() is not supported.' combinators = ['union'] if connection.features.supports_select_difference: combinators.append('difference') if connection.features.supports_select_intersection: combinators.append('intersection') for combinator in combinators: for operation in ( 'annotate', 'defer', 'delete', 'distinct', 'exclude', 'extra', 'filter', 'only', 'prefetch_related', 'select_related', 'update', ): with self.subTest(combinator=combinator, operation=operation): with self.assertRaisesMessage( NotSupportedError, msg % (operation, combinator), ): getattr(getattr(qs, combinator)(qs), operation)()
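# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original suite): the deduplication
# semantics exercised in test_union_distinct above, restated minimally.
# union() maps to SQL UNION, which drops duplicate rows; all=True maps to
# UNION ALL, which keeps them. Assumes the same Number model and backend
# features as QuerySetSetOperationTests.
# ---------------------------------------------------------------------------
@skipUnlessDBFeature('supports_select_union')
class SetOperationSemanticsSketch(TestCase):
    @classmethod
    def setUpTestData(cls):
        Number.objects.bulk_create(Number(num=i) for i in range(3))

    def test_union_deduplicates_unless_all(self):
        qs = Number.objects.all()
        self.assertEqual(qs.union(qs).count(), 3)            # UNION
        self.assertEqual(qs.union(qs, all=True).count(), 6)  # UNION ALL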
eb1e281162da237a4ead093e726e0496e7e0ba23b1ea12abaef9fc7f2f425b9a
""" Testing using the Test Client The test client is a class that can act like a simple browser for testing purposes. It allows the user to compose GET and POST requests, and obtain the response that the server gave to those requests. The server Response objects are annotated with the details of the contexts and templates that were rendered during the process of serving the request. ``Client`` objects are stateful - they will retain cookie (and thus session) details for the lifetime of the ``Client`` instance. This is not intended as a replacement for Twill, Selenium, or other browser automation frameworks - it is here to allow testing against the contexts and templates produced by a view, rather than the HTML rendered to the end-user. """ import itertools import tempfile from unittest import mock from django.contrib.auth.models import User from django.core import mail from django.http import HttpResponse, HttpResponseNotAllowed from django.test import ( AsyncRequestFactory, Client, RequestFactory, SimpleTestCase, TestCase, override_settings, ) from django.urls import reverse_lazy from .views import TwoArgException, get_view, post_view, trace_view @override_settings(ROOT_URLCONF='test_client.urls') class ClientTest(TestCase): @classmethod def setUpTestData(cls): cls.u1 = User.objects.create_user(username='testclient', password='password') cls.u2 = User.objects.create_user(username='inactive', password='password', is_active=False) def test_get_view(self): "GET a view" # The data is ignored, but let's check it doesn't crash the system # anyway. data = {'var': '\xf2'} response = self.client.get('/get_view/', data) # Check some response details self.assertContains(response, 'This is a test') self.assertEqual(response.context['var'], '\xf2') self.assertEqual(response.templates[0].name, 'GET Template') def test_query_string_encoding(self): # WSGI requires latin-1 encoded strings. response = self.client.get('/get_view/?var=1\ufffd') self.assertEqual(response.context['var'], '1\ufffd') def test_get_data_none(self): msg = ( "Cannot encode None for key 'value' in a query string. Did you " "mean to pass an empty string or omit the value?" ) with self.assertRaisesMessage(TypeError, msg): self.client.get('/get_view/', {'value': None}) def test_get_post_view(self): "GET a view that normally expects POSTs" response = self.client.get('/post_view/', {}) # Check some response details self.assertEqual(response.status_code, 200) self.assertEqual(response.templates[0].name, 'Empty GET Template') self.assertTemplateUsed(response, 'Empty GET Template') self.assertTemplateNotUsed(response, 'Empty POST Template') def test_empty_post(self): "POST an empty dictionary to a view" response = self.client.post('/post_view/', {}) # Check some response details self.assertEqual(response.status_code, 200) self.assertEqual(response.templates[0].name, 'Empty POST Template') self.assertTemplateNotUsed(response, 'Empty GET Template') self.assertTemplateUsed(response, 'Empty POST Template') def test_post(self): "POST some data to a view" post_data = { 'value': 37 } response = self.client.post('/post_view/', post_data) # Check some response details self.assertEqual(response.status_code, 200) self.assertEqual(response.context['data'], '37') self.assertEqual(response.templates[0].name, 'POST Template') self.assertContains(response, 'Data received') def test_post_data_none(self): msg = ( "Cannot encode None for key 'value' as POST data. Did you mean " "to pass an empty string or omit the value?" 
) with self.assertRaisesMessage(TypeError, msg): self.client.post('/post_view/', {'value': None}) def test_json_serialization(self): """The test client serializes JSON data.""" methods = ('post', 'put', 'patch', 'delete') tests = ( ({'value': 37}, {'value': 37}), ([37, True], [37, True]), ((37, False), [37, False]), ) for method in methods: with self.subTest(method=method): for data, expected in tests: with self.subTest(data): client_method = getattr(self.client, method) method_name = method.upper() response = client_method('/json_view/', data, content_type='application/json') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['data'], expected) self.assertContains(response, 'Viewing %s page.' % method_name) def test_json_encoder_argument(self): """The test Client accepts a json_encoder.""" mock_encoder = mock.MagicMock() mock_encoding = mock.MagicMock() mock_encoder.return_value = mock_encoding mock_encoding.encode.return_value = '{"value": 37}' client = self.client_class(json_encoder=mock_encoder) # Vendored tree JSON content types are accepted. client.post('/json_view/', {'value': 37}, content_type='application/vnd.api+json') self.assertTrue(mock_encoder.called) self.assertTrue(mock_encoding.encode.called) def test_put(self): response = self.client.put('/put_view/', {'foo': 'bar'}) self.assertEqual(response.status_code, 200) self.assertEqual(response.templates[0].name, 'PUT Template') self.assertEqual(response.context['data'], "{'foo': 'bar'}") self.assertEqual(response.context['Content-Length'], '14') def test_trace(self): """TRACE a view""" response = self.client.trace('/trace_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['method'], 'TRACE') self.assertEqual(response.templates[0].name, 'TRACE Template') def test_response_headers(self): "Check the value of HTTP headers returned in a response" response = self.client.get("/header_view/") self.assertEqual(response['X-DJANGO-TEST'], 'Slartibartfast') def test_response_attached_request(self): """ The returned response has a ``request`` attribute with the originating environ dict and a ``wsgi_request`` with the originating WSGIRequest. """ response = self.client.get("/header_view/") self.assertTrue(hasattr(response, 'request')) self.assertTrue(hasattr(response, 'wsgi_request')) for key, value in response.request.items(): self.assertIn(key, response.wsgi_request.environ) self.assertEqual(response.wsgi_request.environ[key], value) def test_response_resolver_match(self): """ The response contains a ResolverMatch instance. """ response = self.client.get('/header_view/') self.assertTrue(hasattr(response, 'resolver_match')) def test_response_resolver_match_redirect_follow(self): """ The response ResolverMatch instance contains the correct information when following redirects. """ response = self.client.get('/redirect_view/', follow=True) self.assertEqual(response.resolver_match.url_name, 'get_view') def test_response_resolver_match_regular_view(self): """ The response ResolverMatch instance contains the correct information when accessing a regular view. 
""" response = self.client.get('/get_view/') self.assertEqual(response.resolver_match.url_name, 'get_view') def test_raw_post(self): "POST raw data (with a content type) to a view" test_doc = """<?xml version="1.0" encoding="utf-8"?> <library><book><title>Blink</title><author>Malcolm Gladwell</author></book></library> """ response = self.client.post('/raw_post_view/', test_doc, content_type='text/xml') self.assertEqual(response.status_code, 200) self.assertEqual(response.templates[0].name, "Book template") self.assertEqual(response.content, b"Blink - Malcolm Gladwell") def test_insecure(self): "GET a URL through http" response = self.client.get('/secure_view/', secure=False) self.assertFalse(response.test_was_secure_request) self.assertEqual(response.test_server_port, '80') def test_secure(self): "GET a URL through https" response = self.client.get('/secure_view/', secure=True) self.assertTrue(response.test_was_secure_request) self.assertEqual(response.test_server_port, '443') def test_redirect(self): "GET a URL that redirects elsewhere" response = self.client.get('/redirect_view/') self.assertRedirects(response, '/get_view/') def test_redirect_with_query(self): "GET a URL that redirects with given GET parameters" response = self.client.get('/redirect_view/', {'var': 'value'}) self.assertRedirects(response, '/get_view/?var=value') def test_redirect_with_query_ordering(self): """assertRedirects() ignores the order of query string parameters.""" response = self.client.get('/redirect_view/', {'var': 'value', 'foo': 'bar'}) self.assertRedirects(response, '/get_view/?var=value&foo=bar') self.assertRedirects(response, '/get_view/?foo=bar&var=value') def test_permanent_redirect(self): "GET a URL that redirects permanently elsewhere" response = self.client.get('/permanent_redirect_view/') self.assertRedirects(response, '/get_view/', status_code=301) def test_temporary_redirect(self): "GET a URL that does a non-permanent redirect" response = self.client.get('/temporary_redirect_view/') self.assertRedirects(response, '/get_view/', status_code=302) def test_redirect_to_strange_location(self): "GET a URL that redirects to a non-200 page" response = self.client.get('/double_redirect_view/') # The response was a 302, and that the attempt to get the redirection # location returned 301 when retrieved self.assertRedirects(response, '/permanent_redirect_view/', target_status_code=301) def test_follow_redirect(self): "A URL that redirects can be followed to termination." response = self.client.get('/double_redirect_view/', follow=True) self.assertRedirects(response, '/get_view/', status_code=302, target_status_code=200) self.assertEqual(len(response.redirect_chain), 2) def test_follow_relative_redirect(self): "A URL with a relative redirect can be followed." response = self.client.get('/accounts/', follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(response.request['PATH_INFO'], '/accounts/login/') def test_follow_relative_redirect_no_trailing_slash(self): "A URL with a relative redirect with no trailing slash can be followed." response = self.client.get('/accounts/no_trailing_slash', follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(response.request['PATH_INFO'], '/accounts/login/') def test_follow_307_and_308_redirect(self): """ A 307 or 308 redirect preserves the request method after the redirect. 
""" methods = ('get', 'post', 'head', 'options', 'put', 'patch', 'delete', 'trace') codes = (307, 308) for method, code in itertools.product(methods, codes): with self.subTest(method=method, code=code): req_method = getattr(self.client, method) response = req_method('/redirect_view_%s/' % code, data={'value': 'test'}, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(response.request['PATH_INFO'], '/post_view/') self.assertEqual(response.request['REQUEST_METHOD'], method.upper()) def test_follow_307_and_308_preserves_query_string(self): methods = ('post', 'options', 'put', 'patch', 'delete', 'trace') codes = (307, 308) for method, code in itertools.product(methods, codes): with self.subTest(method=method, code=code): req_method = getattr(self.client, method) response = req_method( '/redirect_view_%s_query_string/' % code, data={'value': 'test'}, follow=True, ) self.assertRedirects(response, '/post_view/?hello=world', status_code=code) self.assertEqual(response.request['QUERY_STRING'], 'hello=world') def test_follow_307_and_308_get_head_query_string(self): methods = ('get', 'head') codes = (307, 308) for method, code in itertools.product(methods, codes): with self.subTest(method=method, code=code): req_method = getattr(self.client, method) response = req_method( '/redirect_view_%s_query_string/' % code, data={'value': 'test'}, follow=True, ) self.assertRedirects(response, '/post_view/?hello=world', status_code=code) self.assertEqual(response.request['QUERY_STRING'], 'value=test') def test_follow_307_and_308_preserves_post_data(self): for code in (307, 308): with self.subTest(code=code): response = self.client.post('/redirect_view_%s/' % code, data={'value': 'test'}, follow=True) self.assertContains(response, 'test is the value') def test_follow_307_and_308_preserves_put_body(self): for code in (307, 308): with self.subTest(code=code): response = self.client.put('/redirect_view_%s/?to=/put_view/' % code, data='a=b', follow=True) self.assertContains(response, 'a=b is the body') def test_follow_307_and_308_preserves_get_params(self): data = {'var': 30, 'to': '/get_view/'} for code in (307, 308): with self.subTest(code=code): response = self.client.get('/redirect_view_%s/' % code, data=data, follow=True) self.assertContains(response, '30 is the value') def test_redirect_http(self): "GET a URL that redirects to an http URI" response = self.client.get('/http_redirect_view/', follow=True) self.assertFalse(response.test_was_secure_request) def test_redirect_https(self): "GET a URL that redirects to an https URI" response = self.client.get('/https_redirect_view/', follow=True) self.assertTrue(response.test_was_secure_request) def test_notfound_response(self): "GET a URL that responds as '404:Not Found'" response = self.client.get('/bad_view/') self.assertContains(response, 'MAGIC', status_code=404) def test_valid_form(self): "POST valid data to a form" post_data = { 'text': 'Hello World', 'email': '[email protected]', 'value': 37, 'single': 'b', 'multi': ('b', 'c', 'e') } response = self.client.post('/form_view/', post_data) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "Valid POST Template") def test_valid_form_with_hints(self): "GET a form, providing hints in the GET data" hints = { 'text': 'Hello World', 'multi': ('b', 'c', 'e') } response = self.client.get('/form_view/', data=hints) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "Form GET Template") # The multi-value data has been rolled out ok 
self.assertContains(response, 'Select a valid choice.', 0) def test_incomplete_data_form(self): "POST incomplete data to a form" post_data = { 'text': 'Hello World', 'value': 37 } response = self.client.post('/form_view/', post_data) self.assertContains(response, 'This field is required.', 3) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "Invalid POST Template") self.assertFormError(response, 'form', 'email', 'This field is required.') self.assertFormError(response, 'form', 'single', 'This field is required.') self.assertFormError(response, 'form', 'multi', 'This field is required.') def test_form_error(self): "POST erroneous data to a form" post_data = { 'text': 'Hello World', 'email': 'not an email address', 'value': 37, 'single': 'b', 'multi': ('b', 'c', 'e') } response = self.client.post('/form_view/', post_data) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "Invalid POST Template") self.assertFormError(response, 'form', 'email', 'Enter a valid email address.') def test_valid_form_with_template(self): "POST valid data to a form using multiple templates" post_data = { 'text': 'Hello World', 'email': '[email protected]', 'value': 37, 'single': 'b', 'multi': ('b', 'c', 'e') } response = self.client.post('/form_view_with_template/', post_data) self.assertContains(response, 'POST data OK') self.assertTemplateUsed(response, "form_view.html") self.assertTemplateUsed(response, 'base.html') self.assertTemplateNotUsed(response, "Valid POST Template") def test_incomplete_data_form_with_template(self): "POST incomplete data to a form using multiple templates" post_data = { 'text': 'Hello World', 'value': 37 } response = self.client.post('/form_view_with_template/', post_data) self.assertContains(response, 'POST data has errors') self.assertTemplateUsed(response, 'form_view.html') self.assertTemplateUsed(response, 'base.html') self.assertTemplateNotUsed(response, "Invalid POST Template") self.assertFormError(response, 'form', 'email', 'This field is required.') self.assertFormError(response, 'form', 'single', 'This field is required.') self.assertFormError(response, 'form', 'multi', 'This field is required.') def test_form_error_with_template(self): "POST erroneous data to a form using multiple templates" post_data = { 'text': 'Hello World', 'email': 'not an email address', 'value': 37, 'single': 'b', 'multi': ('b', 'c', 'e') } response = self.client.post('/form_view_with_template/', post_data) self.assertContains(response, 'POST data has errors') self.assertTemplateUsed(response, "form_view.html") self.assertTemplateUsed(response, 'base.html') self.assertTemplateNotUsed(response, "Invalid POST Template") self.assertFormError(response, 'form', 'email', 'Enter a valid email address.') def test_unknown_page(self): "GET an invalid URL" response = self.client.get('/unknown_view/') # The response was a 404 self.assertEqual(response.status_code, 404) def test_url_parameters(self): "Make sure that URL ;-parameters are not stripped." response = self.client.get('/unknown_view/;some-parameter') # The path in the response includes it (ignore that it's a 404) self.assertEqual(response.request['PATH_INFO'], '/unknown_view/;some-parameter') def test_view_with_login(self): "Request a page that is protected with @login_required" # Get the page without logging in. Should result in 302. 
response = self.client.get('/login_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/') # Log in login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') @override_settings( INSTALLED_APPS=['django.contrib.auth'], SESSION_ENGINE='django.contrib.sessions.backends.file', ) def test_view_with_login_when_sessions_app_is_not_installed(self): self.test_view_with_login() def test_view_with_force_login(self): "Request a page that is protected with @login_required" # Get the page without logging in. Should result in 302. response = self.client.get('/login_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/') # Log in self.client.force_login(self.u1) # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') def test_view_with_method_login(self): "Request a page that is protected with a @login_required method" # Get the page without logging in. Should result in 302. response = self.client.get('/login_protected_method_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_method_view/') # Log in login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Request a page that requires a login response = self.client.get('/login_protected_method_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') def test_view_with_method_force_login(self): "Request a page that is protected with a @login_required method" # Get the page without logging in. Should result in 302. response = self.client.get('/login_protected_method_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_method_view/') # Log in self.client.force_login(self.u1) # Request a page that requires a login response = self.client.get('/login_protected_method_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') def test_view_with_login_and_custom_redirect(self): "Request a page that is protected with @login_required(redirect_field_name='redirect_to')" # Get the page without logging in. Should result in 302. response = self.client.get('/login_protected_view_custom_redirect/') self.assertRedirects(response, '/accounts/login/?redirect_to=/login_protected_view_custom_redirect/') # Log in login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Request a page that requires a login response = self.client.get('/login_protected_view_custom_redirect/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') def test_view_with_force_login_and_custom_redirect(self): """ Request a page that is protected with @login_required(redirect_field_name='redirect_to') """ # Get the page without logging in. Should result in 302. 
response = self.client.get('/login_protected_view_custom_redirect/') self.assertRedirects(response, '/accounts/login/?redirect_to=/login_protected_view_custom_redirect/') # Log in self.client.force_login(self.u1) # Request a page that requires a login response = self.client.get('/login_protected_view_custom_redirect/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') def test_view_with_bad_login(self): "Request a page that is protected with @login, but use bad credentials" login = self.client.login(username='otheruser', password='nopassword') self.assertFalse(login) def test_view_with_inactive_login(self): """ An inactive user may login if the authenticate backend allows it. """ credentials = {'username': 'inactive', 'password': 'password'} self.assertFalse(self.client.login(**credentials)) with self.settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.AllowAllUsersModelBackend']): self.assertTrue(self.client.login(**credentials)) @override_settings( AUTHENTICATION_BACKENDS=[ 'django.contrib.auth.backends.ModelBackend', 'django.contrib.auth.backends.AllowAllUsersModelBackend', ] ) def test_view_with_inactive_force_login(self): "Request a page that is protected with @login, but use an inactive login" # Get the page without logging in. Should result in 302. response = self.client.get('/login_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/') # Log in self.client.force_login(self.u2, backend='django.contrib.auth.backends.AllowAllUsersModelBackend') # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'inactive') def test_logout(self): "Request a logout after logging in" # Log in self.client.login(username='testclient', password='password') # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') # Log out self.client.logout() # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/') def test_logout_with_force_login(self): "Request a logout after logging in" # Log in self.client.force_login(self.u1) # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') # Log out self.client.logout() # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/') @override_settings( AUTHENTICATION_BACKENDS=[ 'django.contrib.auth.backends.ModelBackend', 'test_client.auth_backends.TestClientBackend', ], ) def test_force_login_with_backend(self): """ Request a page that is protected with @login_required when using force_login() and passing a backend. """ # Get the page without logging in. Should result in 302. 
response = self.client.get('/login_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/') # Log in self.client.force_login(self.u1, backend='test_client.auth_backends.TestClientBackend') self.assertEqual(self.u1.backend, 'test_client.auth_backends.TestClientBackend') # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') @override_settings( AUTHENTICATION_BACKENDS=[ 'django.contrib.auth.backends.ModelBackend', 'test_client.auth_backends.TestClientBackend', ], ) def test_force_login_without_backend(self): """ force_login() without passing a backend and with multiple backends configured should automatically use the first backend. """ self.client.force_login(self.u1) response = self.client.get('/login_protected_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') self.assertEqual(self.u1.backend, 'django.contrib.auth.backends.ModelBackend') @override_settings(AUTHENTICATION_BACKENDS=[ 'test_client.auth_backends.BackendWithoutGetUserMethod', 'django.contrib.auth.backends.ModelBackend', ]) def test_force_login_with_backend_missing_get_user(self): """ force_login() skips auth backends without a get_user() method. """ self.client.force_login(self.u1) self.assertEqual(self.u1.backend, 'django.contrib.auth.backends.ModelBackend') @override_settings(SESSION_ENGINE="django.contrib.sessions.backends.signed_cookies") def test_logout_cookie_sessions(self): self.test_logout() def test_view_with_permissions(self): "Request a page that is protected with @permission_required" # Get the page without logging in. Should result in 302. response = self.client.get('/permission_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/permission_protected_view/') # Log in login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Log in with wrong permissions. Should result in 302. response = self.client.get('/permission_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/permission_protected_view/') # TODO: Log in with right permissions and request the page again def test_view_with_permissions_exception(self): "Request a page that is protected with @permission_required but raises an exception" # Get the page without logging in. Should result in 403. response = self.client.get('/permission_protected_view_exception/') self.assertEqual(response.status_code, 403) # Log in login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Log in with wrong permissions. Should result in 403. response = self.client.get('/permission_protected_view_exception/') self.assertEqual(response.status_code, 403) def test_view_with_method_permissions(self): "Request a page that is protected with a @permission_required method" # Get the page without logging in. Should result in 302. response = self.client.get('/permission_protected_method_view/') self.assertRedirects(response, '/accounts/login/?next=/permission_protected_method_view/') # Log in login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Log in with wrong permissions. Should result in 302. 
response = self.client.get('/permission_protected_method_view/') self.assertRedirects(response, '/accounts/login/?next=/permission_protected_method_view/') # TODO: Log in with right permissions and request the page again def test_external_redirect(self): response = self.client.get('/django_project_redirect/') self.assertRedirects(response, 'https://www.djangoproject.com/', fetch_redirect_response=False) def test_external_redirect_with_fetch_error_msg(self): """ assertRedirects without fetch_redirect_response=False raises a relevant ValueError rather than a non-descript AssertionError. """ response = self.client.get('/django_project_redirect/') msg = ( "The test client is unable to fetch remote URLs (got " "https://www.djangoproject.com/). If the host is served by Django, " "add 'www.djangoproject.com' to ALLOWED_HOSTS. " "Otherwise, use assertRedirects(..., fetch_redirect_response=False)." ) with self.assertRaisesMessage(ValueError, msg): self.assertRedirects(response, 'https://www.djangoproject.com/') def test_session_modifying_view(self): "Request a page that modifies the session" # Session value isn't set initially with self.assertRaises(KeyError): self.client.session['tobacconist'] self.client.post('/session_view/') # The session was modified self.assertEqual(self.client.session['tobacconist'], 'hovercraft') @override_settings( INSTALLED_APPS=[], SESSION_ENGINE='django.contrib.sessions.backends.file', ) def test_sessions_app_is_not_installed(self): self.test_session_modifying_view() @override_settings( INSTALLED_APPS=[], SESSION_ENGINE='django.contrib.sessions.backends.nonexistent', ) def test_session_engine_is_invalid(self): with self.assertRaisesMessage(ImportError, 'nonexistent'): self.test_session_modifying_view() def test_view_with_exception(self): "Request a page that is known to throw an error" with self.assertRaises(KeyError): self.client.get("/broken_view/") def test_exc_info(self): client = Client(raise_request_exception=False) response = client.get("/broken_view/") self.assertEqual(response.status_code, 500) exc_type, exc_value, exc_traceback = response.exc_info self.assertIs(exc_type, KeyError) self.assertIsInstance(exc_value, KeyError) self.assertEqual(str(exc_value), "'Oops! 
Looks like you wrote some bad code.'") self.assertIsNotNone(exc_traceback) def test_exc_info_none(self): response = self.client.get("/get_view/") self.assertIsNone(response.exc_info) def test_mail_sending(self): "Mail is redirected to a dummy outbox during test setup" response = self.client.get('/mail_sending_view/') self.assertEqual(response.status_code, 200) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Test message') self.assertEqual(mail.outbox[0].body, 'This is a test email') self.assertEqual(mail.outbox[0].from_email, '[email protected]') self.assertEqual(mail.outbox[0].to[0], '[email protected]') self.assertEqual(mail.outbox[0].to[1], '[email protected]') def test_reverse_lazy_decodes(self): "reverse_lazy() works in the test client" data = {'var': 'data'} response = self.client.get(reverse_lazy('get_view'), data) # Check some response details self.assertContains(response, 'This is a test') def test_relative_redirect(self): response = self.client.get('/accounts/') self.assertRedirects(response, '/accounts/login/') def test_relative_redirect_no_trailing_slash(self): response = self.client.get('/accounts/no_trailing_slash') self.assertRedirects(response, '/accounts/login/') def test_mass_mail_sending(self): "Mass mail is redirected to a dummy outbox during test setup" response = self.client.get('/mass_mail_sending_view/') self.assertEqual(response.status_code, 200) self.assertEqual(len(mail.outbox), 2) self.assertEqual(mail.outbox[0].subject, 'First Test message') self.assertEqual(mail.outbox[0].body, 'This is the first test email') self.assertEqual(mail.outbox[0].from_email, '[email protected]') self.assertEqual(mail.outbox[0].to[0], '[email protected]') self.assertEqual(mail.outbox[0].to[1], '[email protected]') self.assertEqual(mail.outbox[1].subject, 'Second Test message') self.assertEqual(mail.outbox[1].body, 'This is the second test email') self.assertEqual(mail.outbox[1].from_email, '[email protected]') self.assertEqual(mail.outbox[1].to[0], '[email protected]') self.assertEqual(mail.outbox[1].to[1], '[email protected]') def test_exception_following_nested_client_request(self): """ A nested test client request shouldn't clobber exception signals from the outer client request. 
""" with self.assertRaisesMessage(Exception, 'exception message'): self.client.get('/nesting_exception_view/') def test_response_raises_multi_arg_exception(self): """A request may raise an exception with more than one required arg.""" with self.assertRaises(TwoArgException) as cm: self.client.get('/two_arg_exception/') self.assertEqual(cm.exception.args, ('one', 'two')) def test_uploading_temp_file(self): with tempfile.TemporaryFile() as test_file: response = self.client.post('/upload_view/', data={'temp_file': test_file}) self.assertEqual(response.content, b'temp_file') def test_uploading_named_temp_file(self): test_file = tempfile.NamedTemporaryFile() response = self.client.post('/upload_view/', data={'named_temp_file': test_file}) self.assertEqual(response.content, b'named_temp_file') @override_settings( MIDDLEWARE=['django.middleware.csrf.CsrfViewMiddleware'], ROOT_URLCONF='test_client.urls', ) class CSRFEnabledClientTests(SimpleTestCase): def test_csrf_enabled_client(self): "A client can be instantiated with CSRF checks enabled" csrf_client = Client(enforce_csrf_checks=True) # The normal client allows the post response = self.client.post('/post_view/', {}) self.assertEqual(response.status_code, 200) # The CSRF-enabled client rejects it response = csrf_client.post('/post_view/', {}) self.assertEqual(response.status_code, 403) class CustomTestClient(Client): i_am_customized = "Yes" class CustomTestClientTest(SimpleTestCase): client_class = CustomTestClient def test_custom_test_client(self): """A test case can specify a custom class for self.client.""" self.assertIs(hasattr(self.client, "i_am_customized"), True) def _generic_view(request): return HttpResponse(status=200) @override_settings(ROOT_URLCONF='test_client.urls') class RequestFactoryTest(SimpleTestCase): """Tests for the request factory.""" # A mapping between names of HTTP/1.1 methods and their test views. http_methods_and_views = ( ('get', get_view), ('post', post_view), ('put', _generic_view), ('patch', _generic_view), ('delete', _generic_view), ('head', _generic_view), ('options', _generic_view), ('trace', trace_view), ) request_factory = RequestFactory() def test_request_factory(self): """The request factory implements all the HTTP/1.1 methods.""" for method_name, view in self.http_methods_and_views: method = getattr(self.request_factory, method_name) request = method('/somewhere/') response = view(request) self.assertEqual(response.status_code, 200) def test_get_request_from_factory(self): """ The request factory returns a templated response for a GET request. """ request = self.request_factory.get('/somewhere/') response = get_view(request) self.assertContains(response, 'This is a test') def test_trace_request_from_factory(self): """The request factory returns an echo response for a TRACE request.""" url_path = '/somewhere/' request = self.request_factory.trace(url_path) response = trace_view(request) protocol = request.META["SERVER_PROTOCOL"] echoed_request_line = "TRACE {} {}".format(url_path, protocol) self.assertContains(response, echoed_request_line) @override_settings(ROOT_URLCONF='test_client.urls') class AsyncClientTest(TestCase): async def test_response_resolver_match(self): response = await self.async_client.get('/async_get_view/') self.assertTrue(hasattr(response, 'resolver_match')) self.assertEqual(response.resolver_match.url_name, 'async_get_view') async def test_follow_parameter_not_implemented(self): msg = 'AsyncClient request methods do not accept the follow parameter.' 
tests = ( 'get', 'post', 'put', 'patch', 'delete', 'head', 'options', 'trace', ) for method_name in tests: with self.subTest(method=method_name): method = getattr(self.async_client, method_name) with self.assertRaisesMessage(NotImplementedError, msg): await method('/redirect_view/', follow=True) @override_settings(ROOT_URLCONF='test_client.urls') class AsyncRequestFactoryTest(SimpleTestCase): request_factory = AsyncRequestFactory() async def test_request_factory(self): tests = ( 'get', 'post', 'put', 'patch', 'delete', 'head', 'options', 'trace', ) for method_name in tests: with self.subTest(method=method_name): async def async_generic_view(request): if request.method.lower() != method_name: return HttpResponseNotAllowed(method_name) return HttpResponse(status=200) method = getattr(self.request_factory, method_name) request = method('/somewhere/') response = await async_generic_view(request) self.assertEqual(response.status_code, 200)
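# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original suite): the core
# RequestFactory pattern used in RequestFactoryTest above: build a bare
# request and call the view function directly, bypassing URL resolution and
# middleware. Uses only names already imported in this module.
# ---------------------------------------------------------------------------
class RequestFactoryPatternSketch(SimpleTestCase):
    def test_view_called_without_routing(self):
        factory = RequestFactory()
        # The path is arbitrary: the view is invoked directly, not resolved.
        request = factory.get('/anywhere/', {'var': 'direct'})
        response = get_view(request)
        self.assertContains(response, 'direct is the value')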
9af324dfa1c4219a8ab1413ea90f53f01e1ae9e741bb1c8939e6a649ec4bf7e0
from django.contrib.auth import views as auth_views from django.urls import path from django.views.generic import RedirectView from . import views urlpatterns = [ path('upload_view/', views.upload_view, name='upload_view'), path('get_view/', views.get_view, name='get_view'), path('post_view/', views.post_view), path('put_view/', views.put_view), path('trace_view/', views.trace_view), path('header_view/', views.view_with_header), path('raw_post_view/', views.raw_post_view), path('redirect_view/', views.redirect_view), path('redirect_view_307/', views.method_saving_307_redirect_view), path( 'redirect_view_307_query_string/', views.method_saving_307_redirect_query_string_view, ), path('redirect_view_308/', views.method_saving_308_redirect_view), path( 'redirect_view_308_query_string/', views.method_saving_308_redirect_query_string_view, ), path('secure_view/', views.view_with_secure), path('permanent_redirect_view/', RedirectView.as_view(url='/get_view/', permanent=True)), path('temporary_redirect_view/', RedirectView.as_view(url='/get_view/', permanent=False)), path('http_redirect_view/', RedirectView.as_view(url='/secure_view/')), path('https_redirect_view/', RedirectView.as_view(url='https://testserver/secure_view/')), path('double_redirect_view/', views.double_redirect_view), path('bad_view/', views.bad_view), path('form_view/', views.form_view), path('form_view_with_template/', views.form_view_with_template), path('formset_view/', views.formset_view), path('json_view/', views.json_view), path('login_protected_view/', views.login_protected_view), path('login_protected_method_view/', views.login_protected_method_view), path('login_protected_view_custom_redirect/', views.login_protected_view_changed_redirect), path('permission_protected_view/', views.permission_protected_view), path('permission_protected_view_exception/', views.permission_protected_view_exception), path('permission_protected_method_view/', views.permission_protected_method_view), path('session_view/', views.session_view), path('broken_view/', views.broken_view), path('mail_sending_view/', views.mail_sending_view), path('mass_mail_sending_view/', views.mass_mail_sending_view), path('nesting_exception_view/', views.nesting_exception_view), path('django_project_redirect/', views.django_project_redirect), path('two_arg_exception/', views.two_arg_exception), path('accounts/', RedirectView.as_view(url='login/')), path('accounts/no_trailing_slash', RedirectView.as_view(url='login/')), path('accounts/login/', auth_views.LoginView.as_view(template_name='login.html')), path('accounts/logout/', auth_views.LogoutView.as_view()), # Async views. path('async_get_view/', views.async_get_view, name='async_get_view'), ]
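# Note (not part of the original module): the chain exercised by
# ClientTest.test_follow_redirect is wired above as
#   /double_redirect_view/ -> /permanent_redirect_view/ (302)
#   /permanent_redirect_view/ -> /get_view/ (301)
# which is why that test sees a redirect_chain of length 2.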
1560df14ae745fa227b1e793a7a4368c1c0a1cd24530ce2b911d31381281b138
import json from urllib.parse import urlencode from xml.dom.minidom import parseString from django.contrib.auth.decorators import login_required, permission_required from django.core import mail from django.core.exceptions import ValidationError from django.forms import fields from django.forms.forms import Form from django.forms.formsets import BaseFormSet, formset_factory from django.http import ( HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound, HttpResponseRedirect, ) from django.shortcuts import render from django.template import Context, Template from django.test import Client from django.utils.decorators import method_decorator def get_view(request): "A simple view that expects a GET request, and returns a rendered template" t = Template('This is a test. {{ var }} is the value.', name='GET Template') c = Context({'var': request.GET.get('var', 42)}) return HttpResponse(t.render(c)) async def async_get_view(request): return HttpResponse(b'GET content.') def trace_view(request): """ A simple view that expects a TRACE request and echoes its status line. TRACE requests should not have an entity; the view will return a 400 status response if it is present. """ if request.method.upper() != "TRACE": return HttpResponseNotAllowed("TRACE") elif request.body: return HttpResponseBadRequest("TRACE requests MUST NOT include an entity") else: protocol = request.META["SERVER_PROTOCOL"] t = Template( '{{ method }} {{ uri }} {{ version }}', name="TRACE Template", ) c = Context({ 'method': request.method, 'uri': request.path, 'version': protocol, }) return HttpResponse(t.render(c)) def put_view(request): if request.method == 'PUT': t = Template('Data received: {{ data }} is the body.', name='PUT Template') c = Context({ 'Content-Length': request.META['CONTENT_LENGTH'], 'data': request.body.decode(), }) else: t = Template('Viewing GET page.', name='Empty GET Template') c = Context() return HttpResponse(t.render(c)) def post_view(request): """A view that expects a POST, and returns a different template depending on whether any POST data is available """ if request.method == 'POST': if request.POST: t = Template('Data received: {{ data }} is the value.', name='POST Template') c = Context({'data': request.POST['value']}) else: t = Template('Viewing POST page.', name='Empty POST Template') c = Context() else: t = Template('Viewing GET page.', name='Empty GET Template') c = Context() return HttpResponse(t.render(c)) def json_view(request): """ A view that expects a request with the header 'application/json' and JSON data, which is deserialized and included in the context. """ if request.META.get('CONTENT_TYPE') != 'application/json': return HttpResponse() t = Template('Viewing {} page. 
With data {{ data }}.'.format(request.method)) data = json.loads(request.body.decode('utf-8')) c = Context({'data': data}) return HttpResponse(t.render(c)) def view_with_header(request): "A view that has a custom header" response = HttpResponse() response['X-DJANGO-TEST'] = 'Slartibartfast' return response def raw_post_view(request): """A view which expects raw XML to be posted and returns content extracted from the XML""" if request.method == 'POST': root = parseString(request.body) first_book = root.firstChild.firstChild title, author = [n.firstChild.nodeValue for n in first_book.childNodes] t = Template("{{ title }} - {{ author }}", name="Book template") c = Context({"title": title, "author": author}) else: t = Template("GET request.", name="Book GET template") c = Context() return HttpResponse(t.render(c)) def redirect_view(request): "A view that redirects all requests to the GET view" if request.GET: query = '?' + urlencode(request.GET, True) else: query = '' return HttpResponseRedirect('/get_view/' + query) def method_saving_307_redirect_query_string_view(request): return HttpResponseRedirect('/post_view/?hello=world', status=307) def method_saving_308_redirect_query_string_view(request): return HttpResponseRedirect('/post_view/?hello=world', status=308) def _post_view_redirect(request, status_code): """Redirect to /post_view/ using the status code.""" redirect_to = request.GET.get('to', '/post_view/') return HttpResponseRedirect(redirect_to, status=status_code) def method_saving_307_redirect_view(request): return _post_view_redirect(request, 307) def method_saving_308_redirect_view(request): return _post_view_redirect(request, 308) def view_with_secure(request): "A view that indicates if the request was secure" response = HttpResponse() response.test_was_secure_request = request.is_secure() response.test_server_port = request.META.get('SERVER_PORT', 80) return response def double_redirect_view(request): "A view that redirects all requests to a redirection view" return HttpResponseRedirect('/permanent_redirect_view/') def bad_view(request): "A view that returns a 404 with some error content" return HttpResponseNotFound('Not found!. This page contains some MAGIC content') TestChoices = ( ('a', 'First Choice'), ('b', 'Second Choice'), ('c', 'Third Choice'), ('d', 'Fourth Choice'), ('e', 'Fifth Choice') ) class TestForm(Form): text = fields.CharField() email = fields.EmailField() value = fields.IntegerField() single = fields.ChoiceField(choices=TestChoices) multi = fields.MultipleChoiceField(choices=TestChoices) def clean(self): cleaned_data = self.cleaned_data if cleaned_data.get("text") == "Raise non-field error": raise ValidationError("Non-field error.") return cleaned_data def form_view(request): "A view that tests a simple form" if request.method == 'POST': form = TestForm(request.POST) if form.is_valid(): t = Template('Valid POST data.', name='Valid POST Template') c = Context() else: t = Template('Invalid POST data. {{ form.errors }}', name='Invalid POST Template') c = Context({'form': form}) else: form = TestForm(request.GET) t = Template('Viewing base form. 
{{ form }}.', name='Form GET Template') c = Context({'form': form}) return HttpResponse(t.render(c)) def form_view_with_template(request): "A view that tests a simple form" if request.method == 'POST': form = TestForm(request.POST) if form.is_valid(): message = 'POST data OK' else: message = 'POST data has errors' else: form = TestForm() message = 'GET form page' return render(request, 'form_view.html', { 'form': form, 'message': message, }) class BaseTestFormSet(BaseFormSet): def clean(self): """Check that no two email addresses are the same.""" if any(self.errors): # Don't bother validating the formset unless each form is valid return emails = [] for i in range(0, self.total_form_count()): form = self.forms[i] email = form.cleaned_data['email'] if email in emails: raise ValidationError( "Forms in a set must have distinct email addresses." ) emails.append(email) TestFormSet = formset_factory(TestForm, BaseTestFormSet) def formset_view(request): "A view that tests a simple formset" if request.method == 'POST': formset = TestFormSet(request.POST) if formset.is_valid(): t = Template('Valid POST data.', name='Valid POST Template') c = Context() else: t = Template('Invalid POST data. {{ my_formset.errors }}', name='Invalid POST Template') c = Context({'my_formset': formset}) else: # On GET, render an unbound formset; binding request.GET without management form data would fail validation. formset = TestFormSet() t = Template('Viewing base formset. {{ my_formset }}.', name='Formset GET Template') c = Context({'my_formset': formset}) return HttpResponse(t.render(c)) @login_required def login_protected_view(request): "A simple view that is login protected." t = Template('This is a login protected test. Username is {{ user.username }}.', name='Login Template') c = Context({'user': request.user}) return HttpResponse(t.render(c)) @login_required(redirect_field_name='redirect_to') def login_protected_view_changed_redirect(request): "A simple view that is login protected with a custom redirect field set" t = Template('This is a login protected test. Username is {{ user.username }}.', name='Login Template') c = Context({'user': request.user}) return HttpResponse(t.render(c)) def _permission_protected_view(request): "A simple view that is permission protected." t = Template('This is a permission protected test. ' 'Username is {{ user.username }}. ' 'Permissions are {{ user.get_all_permissions }}.', name='Permissions Template') c = Context({'user': request.user}) return HttpResponse(t.render(c)) permission_protected_view = permission_required('permission_not_granted')(_permission_protected_view) permission_protected_view_exception = ( permission_required('permission_not_granted', raise_exception=True)(_permission_protected_view) ) class _ViewManager: @method_decorator(login_required) def login_protected_view(self, request): t = Template('This is a login protected test using a method. ' 'Username is {{ user.username }}.', name='Login Method Template') c = Context({'user': request.user}) return HttpResponse(t.render(c)) @method_decorator(permission_required('permission_not_granted')) def permission_protected_view(self, request): t = Template('This is a permission protected test using a method. ' 'Username is {{ user.username }}. 
' 'Permissions are {{ user.get_all_permissions }}.', name='Permissions Template') c = Context({'user': request.user}) return HttpResponse(t.render(c)) _view_manager = _ViewManager() login_protected_method_view = _view_manager.login_protected_view permission_protected_method_view = _view_manager.permission_protected_view def session_view(request): "A view that modifies the session" request.session['tobacconist'] = 'hovercraft' t = Template('This is a view that modifies the session.', name='Session Modifying View Template') c = Context() return HttpResponse(t.render(c)) def broken_view(request): """A view which just raises an exception, simulating a broken view.""" raise KeyError("Oops! Looks like you wrote some bad code.") def mail_sending_view(request): mail.EmailMessage( "Test message", "This is a test email", "[email protected]", ['[email protected]', '[email protected]']).send() return HttpResponse("Mail sent") def mass_mail_sending_view(request): m1 = mail.EmailMessage( 'First Test message', 'This is the first test email', '[email protected]', ['[email protected]', '[email protected]']) m2 = mail.EmailMessage( 'Second Test message', 'This is the second test email', '[email protected]', ['[email protected]', '[email protected]']) c = mail.get_connection() c.send_messages([m1, m2]) return HttpResponse("Mail sent") def nesting_exception_view(request): """ A view that uses a nested client to call another view and then raises an exception. """ client = Client() client.get('/get_view/') raise Exception('exception message') def django_project_redirect(request): return HttpResponseRedirect('https://www.djangoproject.com/') def upload_view(request): """Prints keys of request.FILES to the response.""" return HttpResponse(', '.join(request.FILES)) class TwoArgException(Exception): def __init__(self, one, two): pass def two_arg_exception(request): raise TwoArgException('one', 'two')
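# ---------------------------------------------------------------------------
# Illustrative usage note (not part of the original module): a sketch of how
# trace_view above enforces the RFC 7231 rule that a TRACE request carries
# no body, using the test client's generic() method. It assumes the URLconf
# mapping /trace_view/ to this view is active and that the test client's
# default SERVER_PROTOCOL is HTTP/1.1.
#
#     from django.test import Client
#
#     client = Client()
#     # Without a body, the view echoes the request line back.
#     response = client.generic('TRACE', '/trace_view/')
#     assert response.status_code == 200
#     assert response.content == b'TRACE /trace_view/ HTTP/1.1'
#     # With an entity body, the view rejects the request outright.
#     response = client.generic('TRACE', '/trace_view/', data='entity')
#     assert response.status_code == 400
# ---------------------------------------------------------------------------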
c96d14a70633aa1d42bc5f9aab9be51768b981b1c9c70292c5922fd10a279d18
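# ---------------------------------------------------------------------------
# Illustrative note (not part of the original module): the deferred-number
# form of ngettext_lazy exercised by the tests below defers the plural-form
# choice until interpolation time, keyed by the named placeholder given as
# the third argument. A minimal sketch, assuming the default English catalog:
#
#     from django.utils.translation import ngettext_lazy
#
#     msg = ngettext_lazy('Hi %(name)s, %(num)d good result',
#                         'Hi %(name)s, %(num)d good results', 'num')
#     assert msg % {'name': 'Jim', 'num': 1} == 'Hi Jim, 1 good result'
#     assert msg % {'name': 'Jim', 'num': 5} == 'Hi Jim, 5 good results'
# ---------------------------------------------------------------------------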
import datetime import decimal import gettext as gettext_module import os import pickle import re import tempfile from contextlib import contextmanager from importlib import import_module from pathlib import Path from unittest import mock from asgiref.local import Local from django import forms from django.apps import AppConfig from django.conf import settings from django.conf.locale import LANG_INFO from django.conf.urls.i18n import i18n_patterns from django.template import Context, Template from django.test import ( RequestFactory, SimpleTestCase, TestCase, override_settings, ) from django.utils import translation from django.utils.deprecation import RemovedInDjango40Warning from django.utils.formats import ( date_format, get_format, get_format_modules, iter_format_modules, localize, localize_input, reset_format_cache, sanitize_separators, time_format, ) from django.utils.numberformat import format as nformat from django.utils.safestring import SafeString, mark_safe from django.utils.translation import ( LANGUAGE_SESSION_KEY, activate, check_for_language, deactivate, get_language, get_language_bidi, get_language_from_request, get_language_info, gettext, gettext_lazy, ngettext, ngettext_lazy, npgettext, npgettext_lazy, pgettext, round_away_from_one, to_language, to_locale, trans_null, trans_real, ugettext, ugettext_lazy, ugettext_noop, ungettext, ungettext_lazy, ) from django.utils.translation.reloader import ( translation_file_changed, watch_for_translation_changes, ) from .forms import CompanyForm, I18nForm, SelectDateForm from .models import Company, TestModel here = os.path.dirname(os.path.abspath(__file__)) extended_locale_paths = settings.LOCALE_PATHS + [ os.path.join(here, 'other', 'locale'), ] class AppModuleStub: def __init__(self, **kwargs): self.__dict__.update(kwargs) @contextmanager def patch_formats(lang, **settings): from django.utils.formats import _format_cache # Populate _format_cache with temporary values for key, value in settings.items(): _format_cache[(key, lang)] = value try: yield finally: reset_format_cache() class TranslationTests(SimpleTestCase): @translation.override('de') def test_legacy_aliases(self): """ Pre-Django 2.0 aliases with u prefix are still available. """ msg = ( 'django.utils.translation.ugettext_noop() is deprecated in favor ' 'of django.utils.translation.gettext_noop().' ) with self.assertWarnsMessage(RemovedInDjango40Warning, msg): self.assertEqual(ugettext_noop("Image"), "Image") msg = ( 'django.utils.translation.ugettext() is deprecated in favor of ' 'django.utils.translation.gettext().' ) with self.assertWarnsMessage(RemovedInDjango40Warning, msg): self.assertEqual(ugettext("Image"), "Bild") msg = ( 'django.utils.translation.ugettext_lazy() is deprecated in favor ' 'of django.utils.translation.gettext_lazy().' ) with self.assertWarnsMessage(RemovedInDjango40Warning, msg): self.assertEqual(ugettext_lazy("Image"), gettext_lazy("Image")) msg = ( 'django.utils.translation.ungettext() is deprecated in favor of ' 'django.utils.translation.ngettext().' ) with self.assertWarnsMessage(RemovedInDjango40Warning, msg): self.assertEqual(ungettext("%d year", "%d years", 0) % 0, "0 Jahre") msg = ( 'django.utils.translation.ungettext_lazy() is deprecated in favor ' 'of django.utils.translation.ngettext_lazy().' 
) with self.assertWarnsMessage(RemovedInDjango40Warning, msg): self.assertEqual( ungettext_lazy("%d year", "%d years", 0) % 0, ngettext_lazy("%d year", "%d years", 0) % 0, ) @translation.override('fr') def test_plural(self): """ Test plurals with ngettext. French differs from English in that 0 is singular. """ self.assertEqual(ngettext("%d year", "%d years", 0) % 0, "0 année") self.assertEqual(ngettext("%d year", "%d years", 2) % 2, "2 années") self.assertEqual(ngettext("%(size)d byte", "%(size)d bytes", 0) % {'size': 0}, "0 octet") self.assertEqual(ngettext("%(size)d byte", "%(size)d bytes", 2) % {'size': 2}, "2 octets") def test_plural_null(self): g = trans_null.ngettext self.assertEqual(g('%d year', '%d years', 0) % 0, '0 years') self.assertEqual(g('%d year', '%d years', 1) % 1, '1 year') self.assertEqual(g('%d year', '%d years', 2) % 2, '2 years') @override_settings(LOCALE_PATHS=extended_locale_paths) @translation.override('fr') def test_multiple_plurals_per_language(self): """ Normally, French has 2 plurals. As other/locale/fr/LC_MESSAGES/django.po has a different plural equation with 3 plurals, this tests if those plural forms are honored. """ self.assertEqual(ngettext("%d singular", "%d plural", 0) % 0, "0 pluriel1") self.assertEqual(ngettext("%d singular", "%d plural", 1) % 1, "1 singulier") self.assertEqual(ngettext("%d singular", "%d plural", 2) % 2, "2 pluriel2") french = trans_real.catalog() # Internal _catalog can query subcatalogs (from different po files). self.assertEqual(french._catalog[('%d singular', 0)], '%d singulier') self.assertEqual(french._catalog[('%d hour', 0)], '%d heure') def test_override(self): activate('de') try: with translation.override('pl'): self.assertEqual(get_language(), 'pl') self.assertEqual(get_language(), 'de') with translation.override(None): self.assertIsNone(get_language()) with translation.override('pl'): pass self.assertIsNone(get_language()) self.assertEqual(get_language(), 'de') finally: deactivate() def test_override_decorator(self): @translation.override('pl') def func_pl(): self.assertEqual(get_language(), 'pl') @translation.override(None) def func_none(): self.assertIsNone(get_language()) try: activate('de') func_pl() self.assertEqual(get_language(), 'de') func_none() self.assertEqual(get_language(), 'de') finally: deactivate() def test_override_exit(self): """ The language restored is the one used when the function was called, not the one used when the decorator was initialized (#23381). """ activate('fr') @translation.override('pl') def func_pl(): pass deactivate() try: activate('en') func_pl() self.assertEqual(get_language(), 'en') finally: deactivate() def test_lazy_objects(self): """ Format string interpolation should work with *_lazy objects. """ s = gettext_lazy('Add %(name)s') d = {'name': 'Ringo'} self.assertEqual('Add Ringo', s % d) with translation.override('de', deactivate=True): self.assertEqual('Ringo hinzuf\xfcgen', s % d) with translation.override('pl'): self.assertEqual('Dodaj Ringo', s % d) # It should be possible to compare *_lazy objects. 
s1 = gettext_lazy('Add %(name)s') self.assertEqual(s, s1) s2 = gettext_lazy('Add %(name)s') s3 = gettext_lazy('Add %(name)s') self.assertEqual(s2, s3) self.assertEqual(s, s2) s4 = gettext_lazy('Some other string') self.assertNotEqual(s, s4) def test_lazy_pickle(self): s1 = gettext_lazy("test") self.assertEqual(str(s1), "test") s2 = pickle.loads(pickle.dumps(s1)) self.assertEqual(str(s2), "test") @override_settings(LOCALE_PATHS=extended_locale_paths) def test_ngettext_lazy(self): simple_with_format = ngettext_lazy('%d good result', '%d good results') simple_context_with_format = npgettext_lazy('Exclamation', '%d good result', '%d good results') simple_without_format = ngettext_lazy('good result', 'good results') with translation.override('de'): self.assertEqual(simple_with_format % 1, '1 gutes Resultat') self.assertEqual(simple_with_format % 4, '4 guten Resultate') self.assertEqual(simple_context_with_format % 1, '1 gutes Resultat!') self.assertEqual(simple_context_with_format % 4, '4 guten Resultate!') self.assertEqual(simple_without_format % 1, 'gutes Resultat') self.assertEqual(simple_without_format % 4, 'guten Resultate') complex_nonlazy = ngettext_lazy('Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 4) complex_deferred = ngettext_lazy( 'Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 'num' ) complex_context_nonlazy = npgettext_lazy( 'Greeting', 'Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 4 ) complex_context_deferred = npgettext_lazy( 'Greeting', 'Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 'num' ) with translation.override('de'): self.assertEqual(complex_nonlazy % {'num': 4, 'name': 'Jim'}, 'Hallo Jim, 4 guten Resultate') self.assertEqual(complex_deferred % {'name': 'Jim', 'num': 1}, 'Hallo Jim, 1 gutes Resultat') self.assertEqual(complex_deferred % {'name': 'Jim', 'num': 5}, 'Hallo Jim, 5 guten Resultate') with self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_deferred % {'name': 'Jim'} self.assertEqual(complex_context_nonlazy % {'num': 4, 'name': 'Jim'}, 'Willkommen Jim, 4 guten Resultate') self.assertEqual(complex_context_deferred % {'name': 'Jim', 'num': 1}, 'Willkommen Jim, 1 gutes Resultat') self.assertEqual(complex_context_deferred % {'name': 'Jim', 'num': 5}, 'Willkommen Jim, 5 guten Resultate') with self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_context_deferred % {'name': 'Jim'} @override_settings(LOCALE_PATHS=extended_locale_paths) def test_ngettext_lazy_format_style(self): simple_with_format = ngettext_lazy('{} good result', '{} good results') simple_context_with_format = npgettext_lazy('Exclamation', '{} good result', '{} good results') with translation.override('de'): self.assertEqual(simple_with_format.format(1), '1 gutes Resultat') self.assertEqual(simple_with_format.format(4), '4 guten Resultate') self.assertEqual(simple_context_with_format.format(1), '1 gutes Resultat!') self.assertEqual(simple_context_with_format.format(4), '4 guten Resultate!') complex_nonlazy = ngettext_lazy('Hi {name}, {num} good result', 'Hi {name}, {num} good results', 4) complex_deferred = ngettext_lazy( 'Hi {name}, {num} good result', 'Hi {name}, {num} good results', 'num' ) complex_context_nonlazy = npgettext_lazy( 'Greeting', 'Hi {name}, {num} good result', 'Hi {name}, {num} good results', 4 ) complex_context_deferred = npgettext_lazy( 'Greeting', 'Hi {name}, {num} good result', 'Hi {name}, {num} good results', 'num' ) with 
translation.override('de'): self.assertEqual(complex_nonlazy.format(num=4, name='Jim'), 'Hallo Jim, 4 guten Resultate') self.assertEqual(complex_deferred.format(name='Jim', num=1), 'Hallo Jim, 1 gutes Resultat') self.assertEqual(complex_deferred.format(name='Jim', num=5), 'Hallo Jim, 5 guten Resultate') with self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_deferred.format(name='Jim') self.assertEqual(complex_context_nonlazy.format(num=4, name='Jim'), 'Willkommen Jim, 4 guten Resultate') self.assertEqual(complex_context_deferred.format(name='Jim', num=1), 'Willkommen Jim, 1 gutes Resultat') self.assertEqual(complex_context_deferred.format(name='Jim', num=5), 'Willkommen Jim, 5 guten Resultate') with self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_context_deferred.format(name='Jim') def test_ngettext_lazy_bool(self): self.assertTrue(ngettext_lazy('%d good result', '%d good results')) self.assertFalse(ngettext_lazy('', '')) def test_ngettext_lazy_pickle(self): s1 = ngettext_lazy('%d good result', '%d good results') self.assertEqual(s1 % 1, '1 good result') self.assertEqual(s1 % 8, '8 good results') s2 = pickle.loads(pickle.dumps(s1)) self.assertEqual(s2 % 1, '1 good result') self.assertEqual(s2 % 8, '8 good results') @override_settings(LOCALE_PATHS=extended_locale_paths) def test_pgettext(self): trans_real._active = Local() trans_real._translations = {} with translation.override('de'): self.assertEqual(pgettext("unexisting", "May"), "May") self.assertEqual(pgettext("month name", "May"), "Mai") self.assertEqual(pgettext("verb", "May"), "Kann") self.assertEqual(npgettext("search", "%d result", "%d results", 4) % 4, "4 Resultate") def test_empty_value(self): """Empty value must stay empty after being translated (#23196).""" with translation.override('de'): self.assertEqual('', gettext('')) s = mark_safe('') self.assertEqual(s, gettext(s)) @override_settings(LOCALE_PATHS=extended_locale_paths) def test_safe_status(self): """ Translating a string requiring no auto-escaping with gettext or pgettext shouldn't change the "safe" status. """ trans_real._active = Local() trans_real._translations = {} s1 = mark_safe('Password') s2 = mark_safe('May') with translation.override('de', deactivate=True): self.assertIs(type(gettext(s1)), SafeString) self.assertIs(type(pgettext('month name', s2)), SafeString) self.assertEqual('aPassword', SafeString('a') + s1) self.assertEqual('Passworda', s1 + SafeString('a')) self.assertEqual('Passworda', s1 + mark_safe('a')) self.assertEqual('aPassword', mark_safe('a') + s1) self.assertEqual('as', mark_safe('a') + mark_safe('s')) def test_maclines(self): """ Translations in files with Mac or DOS line endings will be converted to Unix EOL in .po catalogs. """ ca_translation = trans_real.translation('ca') ca_translation._catalog['Mac\nEOF\n'] = 'Catalan Mac\nEOF\n' ca_translation._catalog['Win\nEOF\n'] = 'Catalan Win\nEOF\n' with translation.override('ca', deactivate=True): self.assertEqual('Catalan Mac\nEOF\n', gettext('Mac\rEOF\r')) self.assertEqual('Catalan Win\nEOF\n', gettext('Win\r\nEOF\r\n')) def test_to_locale(self): tests = ( ('en', 'en'), ('EN', 'en'), ('en-us', 'en_US'), ('EN-US', 'en_US'), # With > 2 characters after the dash. ('sr-latn', 'sr_Latn'), ('sr-LATN', 'sr_Latn'), # With private use subtag (x-informal). 
('nl-nl-x-informal', 'nl_NL-x-informal'), ('NL-NL-X-INFORMAL', 'nl_NL-x-informal'), ('sr-latn-x-informal', 'sr_Latn-x-informal'), ('SR-LATN-X-INFORMAL', 'sr_Latn-x-informal'), ) for lang, locale in tests: with self.subTest(lang=lang): self.assertEqual(to_locale(lang), locale) def test_to_language(self): self.assertEqual(to_language('en_US'), 'en-us') self.assertEqual(to_language('sr_Lat'), 'sr-lat') def test_language_bidi(self): self.assertIs(get_language_bidi(), False) with translation.override(None): self.assertIs(get_language_bidi(), False) def test_language_bidi_null(self): self.assertIs(trans_null.get_language_bidi(), False) with override_settings(LANGUAGE_CODE='he'): self.assertIs(get_language_bidi(), True) class TranslationLoadingTests(SimpleTestCase): def setUp(self): """Clear translation state.""" self._old_language = get_language() self._old_translations = trans_real._translations deactivate() trans_real._translations = {} def tearDown(self): trans_real._translations = self._old_translations activate(self._old_language) @override_settings( USE_I18N=True, LANGUAGE_CODE='en', LANGUAGES=[ ('en', 'English'), ('en-ca', 'English (Canada)'), ('en-nz', 'English (New Zealand)'), ('en-au', 'English (Australia)'), ], LOCALE_PATHS=[os.path.join(here, 'loading')], INSTALLED_APPS=['i18n.loading_app'], ) def test_translation_loading(self): """ "loading_app" does not have translations for all languages provided by "loading". Catalogs are merged correctly. """ tests = [ ('en', 'local country person'), ('en_AU', 'aussie'), ('en_NZ', 'kiwi'), ('en_CA', 'canuck'), ] # Load all relevant translations. for language, _ in tests: activate(language) # Catalogs are merged correctly. for language, nickname in tests: with self.subTest(language=language): activate(language) self.assertEqual(gettext('local country person'), nickname) class TranslationThreadSafetyTests(SimpleTestCase): def setUp(self): self._old_language = get_language() self._translations = trans_real._translations # here we rely on .split() being called inside the _fetch() # in trans_real.translation() class sideeffect_str(str): def split(self, *args, **kwargs): res = str.split(self, *args, **kwargs) trans_real._translations['en-YY'] = None return res trans_real._translations = {sideeffect_str('en-XX'): None} def tearDown(self): trans_real._translations = self._translations activate(self._old_language) def test_bug14894_translation_activate_thread_safety(self): translation_count = len(trans_real._translations) # May raise RuntimeError if translation.activate() isn't thread-safe. 
translation.activate('pl') # make sure sideeffect_str actually added a new translation self.assertLess(translation_count, len(trans_real._translations)) @override_settings(USE_L10N=True) class FormattingTests(SimpleTestCase): def setUp(self): super().setUp() self.n = decimal.Decimal('66666.666') self.f = 99999.999 self.d = datetime.date(2009, 12, 31) self.dt = datetime.datetime(2009, 12, 31, 20, 50) self.t = datetime.time(10, 15, 48) self.long = 10000 self.ctxt = Context({ 'n': self.n, 't': self.t, 'd': self.d, 'dt': self.dt, 'f': self.f, 'l': self.long, }) def test_all_format_strings(self): all_locales = LANG_INFO.keys() some_date = datetime.date(2017, 10, 14) some_datetime = datetime.datetime(2017, 10, 14, 10, 23) for locale in all_locales: with self.subTest(locale=locale), translation.override(locale): self.assertIn('2017', date_format(some_date)) # Uses DATE_FORMAT by default self.assertIn('23', time_format(some_datetime)) # Uses TIME_FORMAT by default self.assertIn('2017', date_format(some_datetime, format=get_format('DATETIME_FORMAT'))) self.assertIn('2017', date_format(some_date, format=get_format('YEAR_MONTH_FORMAT'))) self.assertIn('14', date_format(some_date, format=get_format('MONTH_DAY_FORMAT'))) self.assertIn('2017', date_format(some_date, format=get_format('SHORT_DATE_FORMAT'))) self.assertIn('2017', date_format(some_datetime, format=get_format('SHORT_DATETIME_FORMAT'))) def test_locale_independent(self): """ Locale-independent formatting of numbers. """ with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666.66', nformat(self.n, decimal_sep='.', decimal_pos=2, grouping=3, thousand_sep=',')) self.assertEqual('66666A6', nformat(self.n, decimal_sep='A', decimal_pos=1, grouping=1, thousand_sep='B')) self.assertEqual('66666', nformat(self.n, decimal_sep='X', decimal_pos=0, grouping=1, thousand_sep='Y')) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual( '66,666.66', nformat(self.n, decimal_sep='.', decimal_pos=2, grouping=3, thousand_sep=',') ) self.assertEqual( '6B6B6B6B6A6', nformat(self.n, decimal_sep='A', decimal_pos=1, grouping=1, thousand_sep='B') ) self.assertEqual('-66666.6', nformat(-66666.666, decimal_sep='.', decimal_pos=1)) self.assertEqual('-66666.0', nformat(int('-66666'), decimal_sep='.', decimal_pos=1)) self.assertEqual('10000.0', nformat(self.long, decimal_sep='.', decimal_pos=1)) self.assertEqual( '10,00,00,000.00', nformat(100000000.00, decimal_sep='.', decimal_pos=2, grouping=(3, 2, 0), thousand_sep=',') ) self.assertEqual( '1,0,00,000,0000.00', nformat(10000000000.00, decimal_sep='.', decimal_pos=2, grouping=(4, 3, 2, 1, 0), thousand_sep=',') ) self.assertEqual( '10000,00,000.00', nformat(1000000000.00, decimal_sep='.', decimal_pos=2, grouping=(3, 2, -1), thousand_sep=',') ) # This unusual grouping/force_grouping combination may be triggered by the intcomma filter (#17414) self.assertEqual( '10000', nformat(self.long, decimal_sep='.', decimal_pos=0, grouping=0, force_grouping=True) ) # date filter self.assertEqual('31.12.2009 в 20:50', Template('{{ dt|date:"d.m.Y в H:i" }}').render(self.ctxt)) self.assertEqual('⌚ 10:15', Template('{{ t|time:"⌚ H:i" }}').render(self.ctxt)) @override_settings(USE_L10N=False) def test_l10n_disabled(self): """ With localized formatting (USE_L10N) disabled, Catalan translations will be used, but not the Catalan formats. """ with translation.override('ca', deactivate=True): self.maxDiff = 3000 self.assertEqual('N j, Y', get_format('DATE_FORMAT')) self.assertEqual(0, get_format('FIRST_DAY_OF_WEEK')) self.assertEqual('.', 
get_format('DECIMAL_SEPARATOR')) self.assertEqual('10:15 a.m.', time_format(self.t)) self.assertEqual('des. 31, 2009', date_format(self.d)) self.assertEqual('desembre 2009', date_format(self.d, 'YEAR_MONTH_FORMAT')) self.assertEqual('12/31/2009 8:50 p.m.', date_format(self.dt, 'SHORT_DATETIME_FORMAT')) self.assertEqual('No localizable', localize('No localizable')) self.assertEqual('66666.666', localize(self.n)) self.assertEqual('99999.999', localize(self.f)) self.assertEqual('10000', localize(self.long)) self.assertEqual('des. 31, 2009', localize(self.d)) self.assertEqual('des. 31, 2009, 8:50 p.m.', localize(self.dt)) self.assertEqual('66666.666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99999.999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('des. 31, 2009', Template('{{ d }}').render(self.ctxt)) self.assertEqual('des. 31, 2009, 8:50 p.m.', Template('{{ dt }}').render(self.ctxt)) self.assertEqual('66666.67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000.0', Template('{{ f|floatformat }}').render(self.ctxt)) self.assertEqual('10:15 a.m.', Template('{{ t|time:"TIME_FORMAT" }}').render(self.ctxt)) self.assertEqual('12/31/2009', Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt)) self.assertEqual( '12/31/2009 8:50 p.m.', Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt) ) form = I18nForm({ 'decimal_field': '66666,666', 'float_field': '99999,999', 'date_field': '31/12/2009', 'datetime_field': '31/12/2009 20:50', 'time_field': '20:50', 'integer_field': '1.234', }) self.assertFalse(form.is_valid()) self.assertEqual(['Introdu\xefu un n\xfamero.'], form.errors['float_field']) self.assertEqual(['Introdu\xefu un n\xfamero.'], form.errors['decimal_field']) self.assertEqual(['Introdu\xefu una data v\xe0lida.'], form.errors['date_field']) self.assertEqual(['Introdu\xefu una data/hora v\xe0lides.'], form.errors['datetime_field']) self.assertEqual(['Introdu\xefu un n\xfamero sencer.'], form.errors['integer_field']) form2 = SelectDateForm({ 'date_field_month': '12', 'date_field_day': '31', 'date_field_year': '2009' }) self.assertTrue(form2.is_valid()) self.assertEqual(datetime.date(2009, 12, 31), form2.cleaned_data['date_field']) self.assertHTMLEqual( '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">gener</option>' '<option value="2">febrer</option>' '<option value="3">mar\xe7</option>' '<option value="4">abril</option>' '<option value="5">maig</option>' '<option value="6">juny</option>' '<option value="7">juliol</option>' '<option value="8">agost</option>' '<option value="9">setembre</option>' '<option value="10">octubre</option>' '<option value="11">novembre</option>' '<option value="12" selected>desembre</option>' '</select>' '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option 
value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31)) ) # We shouldn't change the behavior of the floatformat filter re: # thousand separator and grouping when USE_L10N is False even # if the USE_THOUSAND_SEPARATOR, NUMBER_GROUPING and # THOUSAND_SEPARATOR settings are specified with self.settings(USE_THOUSAND_SEPARATOR=True, NUMBER_GROUPING=1, THOUSAND_SEPARATOR='!'): self.assertEqual('66666.67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000.0', Template('{{ f|floatformat }}').render(self.ctxt)) def test_false_like_locale_formats(self): """ The active locale's formats take precedence over the default settings even if they would be interpreted as False in a conditional test (e.g. 0 or empty string) (#16938). """ with translation.override('fr'): with self.settings(USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR='!'): self.assertEqual('\xa0', get_format('THOUSAND_SEPARATOR')) # Even a second time (after the format has been cached)... self.assertEqual('\xa0', get_format('THOUSAND_SEPARATOR')) with self.settings(FIRST_DAY_OF_WEEK=0): self.assertEqual(1, get_format('FIRST_DAY_OF_WEEK')) # Even a second time (after the format has been cached)... 
self.assertEqual(1, get_format('FIRST_DAY_OF_WEEK')) def test_l10n_enabled(self): self.maxDiff = 3000 # Catalan locale with translation.override('ca', deactivate=True): self.assertEqual(r'j \d\e F \d\e Y', get_format('DATE_FORMAT')) self.assertEqual(1, get_format('FIRST_DAY_OF_WEEK')) self.assertEqual(',', get_format('DECIMAL_SEPARATOR')) self.assertEqual('10:15', time_format(self.t)) self.assertEqual('31 de desembre de 2009', date_format(self.d)) self.assertEqual('desembre del 2009', date_format(self.d, 'YEAR_MONTH_FORMAT')) self.assertEqual('31/12/2009 20:50', date_format(self.dt, 'SHORT_DATETIME_FORMAT')) self.assertEqual('No localizable', localize('No localizable')) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66.666,666', localize(self.n)) self.assertEqual('99.999,999', localize(self.f)) self.assertEqual('10.000', localize(self.long)) self.assertEqual('True', localize(True)) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666,666', localize(self.n)) self.assertEqual('99999,999', localize(self.f)) self.assertEqual('10000', localize(self.long)) self.assertEqual('31 de desembre de 2009', localize(self.d)) self.assertEqual('31 de desembre de 2009 a les 20:50', localize(self.dt)) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66.666,666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99.999,999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('10.000', Template('{{ l }}').render(self.ctxt)) with self.settings(USE_THOUSAND_SEPARATOR=True): form3 = I18nForm({ 'decimal_field': '66.666,666', 'float_field': '99.999,999', 'date_field': '31/12/2009', 'datetime_field': '31/12/2009 20:50', 'time_field': '20:50', 'integer_field': '1.234', }) self.assertTrue(form3.is_valid()) self.assertEqual(decimal.Decimal('66666.666'), form3.cleaned_data['decimal_field']) self.assertEqual(99999.999, form3.cleaned_data['float_field']) self.assertEqual(datetime.date(2009, 12, 31), form3.cleaned_data['date_field']) self.assertEqual(datetime.datetime(2009, 12, 31, 20, 50), form3.cleaned_data['datetime_field']) self.assertEqual(datetime.time(20, 50), form3.cleaned_data['time_field']) self.assertEqual(1234, form3.cleaned_data['integer_field']) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666,666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99999,999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('31 de desembre de 2009', Template('{{ d }}').render(self.ctxt)) self.assertEqual('31 de desembre de 2009 a les 20:50', Template('{{ dt }}').render(self.ctxt)) self.assertEqual('66666,67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000,0', Template('{{ f|floatformat }}').render(self.ctxt)) self.assertEqual('10:15', Template('{{ t|time:"TIME_FORMAT" }}').render(self.ctxt)) self.assertEqual('31/12/2009', Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt)) self.assertEqual( '31/12/2009 20:50', Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt) ) self.assertEqual(date_format(datetime.datetime.now(), "DATE_FORMAT"), Template('{% now "DATE_FORMAT" %}').render(self.ctxt)) with self.settings(USE_THOUSAND_SEPARATOR=False): form4 = I18nForm({ 'decimal_field': '66666,666', 'float_field': '99999,999', 'date_field': '31/12/2009', 'datetime_field': '31/12/2009 20:50', 'time_field': '20:50', 'integer_field': '1234', }) self.assertTrue(form4.is_valid()) self.assertEqual(decimal.Decimal('66666.666'), form4.cleaned_data['decimal_field']) 
self.assertEqual(99999.999, form4.cleaned_data['float_field']) self.assertEqual(datetime.date(2009, 12, 31), form4.cleaned_data['date_field']) self.assertEqual(datetime.datetime(2009, 12, 31, 20, 50), form4.cleaned_data['datetime_field']) self.assertEqual(datetime.time(20, 50), form4.cleaned_data['time_field']) self.assertEqual(1234, form4.cleaned_data['integer_field']) form5 = SelectDateForm({ 'date_field_month': '12', 'date_field_day': '31', 'date_field_year': '2009' }) self.assertTrue(form5.is_valid()) self.assertEqual(datetime.date(2009, 12, 31), form5.cleaned_data['date_field']) self.assertHTMLEqual( '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">gener</option>' '<option value="2">febrer</option>' '<option value="3">mar\xe7</option>' '<option value="4">abril</option>' '<option value="5">maig</option>' '<option value="6">juny</option>' '<option value="7">juliol</option>' '<option value="8">agost</option>' '<option value="9">setembre</option>' '<option value="10">octubre</option>' '<option value="11">novembre</option>' '<option value="12" selected>desembre</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31)) ) # Russian locale (with E as month) with translation.override('ru', deactivate=True): self.assertHTMLEqual( '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option 
value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">\u042f\u043d\u0432\u0430\u0440\u044c</option>' '<option value="2">\u0424\u0435\u0432\u0440\u0430\u043b\u044c</option>' '<option value="3">\u041c\u0430\u0440\u0442</option>' '<option value="4">\u0410\u043f\u0440\u0435\u043b\u044c</option>' '<option value="5">\u041c\u0430\u0439</option>' '<option value="6">\u0418\u044e\u043d\u044c</option>' '<option value="7">\u0418\u044e\u043b\u044c</option>' '<option value="8">\u0410\u0432\u0433\u0443\u0441\u0442</option>' '<option value="9">\u0421\u0435\u043d\u0442\u044f\u0431\u0440\u044c</option>' '<option value="10">\u041e\u043a\u0442\u044f\u0431\u0440\u044c</option>' '<option value="11">\u041d\u043e\u044f\u0431\u0440\u044c</option>' '<option value="12" selected>\u0414\u0435\u043a\u0430\u0431\u0440\u044c</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31)) ) # English locale with translation.override('en', deactivate=True): self.assertEqual('N j, Y', get_format('DATE_FORMAT')) self.assertEqual(0, get_format('FIRST_DAY_OF_WEEK')) self.assertEqual('.', get_format('DECIMAL_SEPARATOR')) self.assertEqual('Dec. 31, 2009', date_format(self.d)) self.assertEqual('December 2009', date_format(self.d, 'YEAR_MONTH_FORMAT')) self.assertEqual('12/31/2009 8:50 p.m.', date_format(self.dt, 'SHORT_DATETIME_FORMAT')) self.assertEqual('No localizable', localize('No localizable')) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66,666.666', localize(self.n)) self.assertEqual('99,999.999', localize(self.f)) self.assertEqual('10,000', localize(self.long)) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666.666', localize(self.n)) self.assertEqual('99999.999', localize(self.f)) self.assertEqual('10000', localize(self.long)) self.assertEqual('Dec. 31, 2009', localize(self.d)) self.assertEqual('Dec. 31, 2009, 8:50 p.m.', localize(self.dt)) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66,666.666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99,999.999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('10,000', Template('{{ l }}').render(self.ctxt)) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666.666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99999.999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('Dec. 31, 2009', Template('{{ d }}').render(self.ctxt)) self.assertEqual('Dec. 
31, 2009, 8:50 p.m.', Template('{{ dt }}').render(self.ctxt)) self.assertEqual('66666.67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000.0', Template('{{ f|floatformat }}').render(self.ctxt)) self.assertEqual('12/31/2009', Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt)) self.assertEqual( '12/31/2009 8:50 p.m.', Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt) ) form5 = I18nForm({ 'decimal_field': '66666.666', 'float_field': '99999.999', 'date_field': '12/31/2009', 'datetime_field': '12/31/2009 20:50', 'time_field': '20:50', 'integer_field': '1234', }) self.assertTrue(form5.is_valid()) self.assertEqual(decimal.Decimal('66666.666'), form5.cleaned_data['decimal_field']) self.assertEqual(99999.999, form5.cleaned_data['float_field']) self.assertEqual(datetime.date(2009, 12, 31), form5.cleaned_data['date_field']) self.assertEqual(datetime.datetime(2009, 12, 31, 20, 50), form5.cleaned_data['datetime_field']) self.assertEqual(datetime.time(20, 50), form5.cleaned_data['time_field']) self.assertEqual(1234, form5.cleaned_data['integer_field']) form6 = SelectDateForm({ 'date_field_month': '12', 'date_field_day': '31', 'date_field_year': '2009' }) self.assertTrue(form6.is_valid()) self.assertEqual(datetime.date(2009, 12, 31), form6.cleaned_data['date_field']) self.assertHTMLEqual( '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">January</option>' '<option value="2">February</option>' '<option value="3">March</option>' '<option value="4">April</option>' '<option value="5">May</option>' '<option value="6">June</option>' '<option value="7">July</option>' '<option value="8">August</option>' '<option value="9">September</option>' '<option value="10">October</option>' '<option value="11">November</option>' '<option value="12" selected>December</option>' '</select>' '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', 
datetime.date(2009, 12, 31)) ) def test_sub_locales(self): """ Check if sublocales fall back to the main locale """ with self.settings(USE_THOUSAND_SEPARATOR=True): with translation.override('de-at', deactivate=True): self.assertEqual('66.666,666', Template('{{ n }}').render(self.ctxt)) with translation.override('es-us', deactivate=True): self.assertEqual('31 de Diciembre de 2009', date_format(self.d)) def test_localized_input(self): """ Tests if form input is correctly localized """ self.maxDiff = 1200 with translation.override('de-at', deactivate=True): form6 = CompanyForm({ 'name': 'acme', 'date_added': datetime.datetime(2009, 12, 31, 6, 0, 0), 'cents_paid': decimal.Decimal('59.47'), 'products_delivered': 12000, }) self.assertTrue(form6.is_valid()) self.assertHTMLEqual( form6.as_ul(), '<li><label for="id_name">Name:</label>' '<input id="id_name" type="text" name="name" value="acme" maxlength="50" required></li>' '<li><label for="id_date_added">Date added:</label>' '<input type="text" name="date_added" value="31.12.2009 06:00:00" id="id_date_added" required></li>' '<li><label for="id_cents_paid">Cents paid:</label>' '<input type="text" name="cents_paid" value="59,47" id="id_cents_paid" required></li>' '<li><label for="id_products_delivered">Products delivered:</label>' '<input type="text" name="products_delivered" value="12000" id="id_products_delivered" required>' '</li>' ) self.assertEqual(localize_input(datetime.datetime(2009, 12, 31, 6, 0, 0)), '31.12.2009 06:00:00') self.assertEqual(datetime.datetime(2009, 12, 31, 6, 0, 0), form6.cleaned_data['date_added']) with self.settings(USE_THOUSAND_SEPARATOR=True): # Checking for the localized "products_delivered" field self.assertInHTML( '<input type="text" name="products_delivered" ' 'value="12.000" id="id_products_delivered" required>', form6.as_ul() ) def test_localized_input_func(self): tests = ( (True, 'True'), (datetime.date(1, 1, 1), '0001-01-01'), (datetime.datetime(1, 1, 1), '0001-01-01 00:00:00'), ) with self.settings(USE_THOUSAND_SEPARATOR=True): for value, expected in tests: with self.subTest(value=value): self.assertEqual(localize_input(value), expected) def test_sanitize_separators(self): """ Tests django.utils.formats.sanitize_separators. """ # Non-strings are untouched self.assertEqual(sanitize_separators(123), 123) with translation.override('ru', deactivate=True): # Russian locale has non-breaking space (\xa0) as thousand separator # Usual space is accepted too when sanitizing inputs with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual(sanitize_separators('1\xa0234\xa0567'), '1234567') self.assertEqual(sanitize_separators('77\xa0777,777'), '77777.777') self.assertEqual(sanitize_separators('12 345'), '12345') self.assertEqual(sanitize_separators('77 777,777'), '77777.777') with self.settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=False): self.assertEqual(sanitize_separators('12\xa0345'), '12\xa0345') with self.settings(USE_THOUSAND_SEPARATOR=True): with patch_formats(get_language(), THOUSAND_SEPARATOR='.', DECIMAL_SEPARATOR=','): self.assertEqual(sanitize_separators('10.234'), '10234') # Suspicion that user entered dot as decimal separator (#22171) self.assertEqual(sanitize_separators('10.10'), '10.10') with self.settings(USE_L10N=False, DECIMAL_SEPARATOR=','): self.assertEqual(sanitize_separators('1001,10'), '1001.10') self.assertEqual(sanitize_separators('1001.10'), '1001.10') with self.settings( USE_L10N=False, DECIMAL_SEPARATOR=',', USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR='.' 
): self.assertEqual(sanitize_separators('1.001,10'), '1001.10') self.assertEqual(sanitize_separators('1001,10'), '1001.10') self.assertEqual(sanitize_separators('1001.10'), '1001.10') self.assertEqual(sanitize_separators('1,001.10'), '1.001.10') # Invalid output def test_iter_format_modules(self): """ Tests the iter_format_modules function. """ # Importing some format modules so that we can compare the returned # modules with these expected modules default_mod = import_module('django.conf.locale.de.formats') test_mod = import_module('i18n.other.locale.de.formats') test_mod2 = import_module('i18n.other2.locale.de.formats') with translation.override('de-at', deactivate=True): # Should return the correct default module when no setting is set self.assertEqual(list(iter_format_modules('de')), [default_mod]) # When the setting is a string, should return the given module and # the default module self.assertEqual( list(iter_format_modules('de', 'i18n.other.locale')), [test_mod, default_mod]) # When setting is a list of strings, should return the given # modules and the default module self.assertEqual( list(iter_format_modules('de', ['i18n.other.locale', 'i18n.other2.locale'])), [test_mod, test_mod2, default_mod]) def test_iter_format_modules_stability(self): """ Tests that the iter_format_modules function always yields format modules in a stable and correct order in the presence of both base ll and ll_CC formats. """ en_format_mod = import_module('django.conf.locale.en.formats') en_gb_format_mod = import_module('django.conf.locale.en_GB.formats') self.assertEqual(list(iter_format_modules('en-gb')), [en_gb_format_mod, en_format_mod]) def test_get_format_modules_lang(self): with translation.override('de', deactivate=True): self.assertEqual('.', get_format('DECIMAL_SEPARATOR', lang='en')) def test_get_format_modules_stability(self): with self.settings(FORMAT_MODULE_PATH='i18n.other.locale'): with translation.override('de', deactivate=True): old = "%r" % get_format_modules(reverse=True) new = "%r" % get_format_modules(reverse=True) # second try self.assertEqual(new, old, 'Value returned by get_format_modules() must be preserved between calls.') def test_localize_templatetag_and_filter(self): """ Test the {% localize %} templatetag and the localize/unlocalize filters. """ context = Context({'int': 1455, 'float': 3.14, 'date': datetime.date(2016, 12, 31)}) template1 = Template( '{% load l10n %}{% localize %}{{ int }}/{{ float }}/{{ date }}{% endlocalize %}; ' '{% localize on %}{{ int }}/{{ float }}/{{ date }}{% endlocalize %}' ) template2 = Template( '{% load l10n %}{{ int }}/{{ float }}/{{ date }}; ' '{% localize off %}{{ int }}/{{ float }}/{{ date }};{% endlocalize %} ' '{{ int }}/{{ float }}/{{ date }}' ) template3 = Template( '{% load l10n %}{{ int }}/{{ float }}/{{ date }}; ' '{{ int|unlocalize }}/{{ float|unlocalize }}/{{ date|unlocalize }}' ) template4 = Template( '{% load l10n %}{{ int }}/{{ float }}/{{ date }}; ' '{{ int|localize }}/{{ float|localize }}/{{ date|localize }}' ) expected_localized = '1.455/3,14/31. Dezember 2016' expected_unlocalized = '1455/3.14/Dez. 
31, 2016' output1 = '; '.join([expected_localized, expected_localized]) output2 = '; '.join([expected_localized, expected_unlocalized, expected_localized]) output3 = '; '.join([expected_localized, expected_unlocalized]) output4 = '; '.join([expected_unlocalized, expected_localized]) with translation.override('de', deactivate=True): with self.settings(USE_L10N=False, USE_THOUSAND_SEPARATOR=True): self.assertEqual(template1.render(context), output1) self.assertEqual(template4.render(context), output4) with self.settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True): self.assertEqual(template1.render(context), output1) self.assertEqual(template2.render(context), output2) self.assertEqual(template3.render(context), output3) def test_localized_as_text_as_hidden_input(self): """ Tests if form input with 'as_hidden' or 'as_text' is correctly localized. Ticket #18777 """ self.maxDiff = 1200 with translation.override('de-at', deactivate=True): template = Template('{% load l10n %}{{ form.date_added }}; {{ form.cents_paid }}') template_as_text = Template('{% load l10n %}{{ form.date_added.as_text }}; {{ form.cents_paid.as_text }}') template_as_hidden = Template( '{% load l10n %}{{ form.date_added.as_hidden }}; {{ form.cents_paid.as_hidden }}' ) form = CompanyForm({ 'name': 'acme', 'date_added': datetime.datetime(2009, 12, 31, 6, 0, 0), 'cents_paid': decimal.Decimal('59.47'), 'products_delivered': 12000, }) context = Context({'form': form}) self.assertTrue(form.is_valid()) self.assertHTMLEqual( template.render(context), '<input id="id_date_added" name="date_added" type="text" value="31.12.2009 06:00:00" required>;' '<input id="id_cents_paid" name="cents_paid" type="text" value="59,47" required>' ) self.assertHTMLEqual( template_as_text.render(context), '<input id="id_date_added" name="date_added" type="text" value="31.12.2009 06:00:00" required>;' ' <input id="id_cents_paid" name="cents_paid" type="text" value="59,47" required>' ) self.assertHTMLEqual( template_as_hidden.render(context), '<input id="id_date_added" name="date_added" type="hidden" value="31.12.2009 06:00:00">;' '<input id="id_cents_paid" name="cents_paid" type="hidden" value="59,47">' ) def test_format_arbitrary_settings(self): self.assertEqual(get_format('DEBUG'), 'DEBUG') def test_get_custom_format(self): with self.settings(FORMAT_MODULE_PATH='i18n.other.locale'): with translation.override('fr', deactivate=True): self.assertEqual('d/m/Y CUSTOM', get_format('CUSTOM_DAY_FORMAT')) def test_admin_javascript_supported_input_formats(self): """ The first input format for DATE_INPUT_FORMATS, TIME_INPUT_FORMATS, and DATETIME_INPUT_FORMATS must not contain %f since that's unsupported by the admin's time picker widget. """ regex = re.compile('%([^BcdHImMpSwxXyY%])') for language_code, language_name in settings.LANGUAGES: for format_name in ('DATE_INPUT_FORMATS', 'TIME_INPUT_FORMATS', 'DATETIME_INPUT_FORMATS'): with self.subTest(language=language_code, format=format_name): formatter = get_format(format_name, lang=language_code)[0] self.assertEqual( regex.findall(formatter), [], "%s locale's %s uses an unsupported format code." % (language_code, format_name) ) class MiscTests(SimpleTestCase): rf = RequestFactory() @override_settings(LANGUAGE_CODE='de') def test_english_fallback(self): """ With a non-English LANGUAGE_CODE and if the active language is English or one of its variants, the untranslated string should be returned (instead of falling back to LANGUAGE_CODE) (See #24413). 
""" self.assertEqual(gettext("Image"), "Bild") with translation.override('en'): self.assertEqual(gettext("Image"), "Image") with translation.override('en-us'): self.assertEqual(gettext("Image"), "Image") with translation.override('en-ca'): self.assertEqual(gettext("Image"), "Image") def test_parse_spec_http_header(self): """ Testing HTTP header parsing. First, we test that we can parse the values according to the spec (and that we extract all the pieces in the right order). """ tests = [ # Good headers ('de', [('de', 1.0)]), ('en-AU', [('en-au', 1.0)]), ('es-419', [('es-419', 1.0)]), ('*;q=1.00', [('*', 1.0)]), ('en-AU;q=0.123', [('en-au', 0.123)]), ('en-au;q=0.5', [('en-au', 0.5)]), ('en-au;q=1.0', [('en-au', 1.0)]), ('da, en-gb;q=0.25, en;q=0.5', [('da', 1.0), ('en', 0.5), ('en-gb', 0.25)]), ('en-au-xx', [('en-au-xx', 1.0)]), ('de,en-au;q=0.75,en-us;q=0.5,en;q=0.25,es;q=0.125,fa;q=0.125', [('de', 1.0), ('en-au', 0.75), ('en-us', 0.5), ('en', 0.25), ('es', 0.125), ('fa', 0.125)]), ('*', [('*', 1.0)]), ('de;q=0.', [('de', 0.0)]), ('en; q=1,', [('en', 1.0)]), ('en; q=1.0, * ; q=0.5', [('en', 1.0), ('*', 0.5)]), # Bad headers ('en-gb;q=1.0000', []), ('en;q=0.1234', []), ('en;q=.2', []), ('abcdefghi-au', []), ('**', []), ('en,,gb', []), ('en-au;q=0.1.0', []), (('X' * 97) + 'Z,en', []), ('da, en-gb;q=0.8, en;q=0.7,#', []), ('de;q=2.0', []), ('de;q=0.a', []), ('12-345', []), ('', []), ('en;q=1e0', []), ] for value, expected in tests: with self.subTest(value=value): self.assertEqual(trans_real.parse_accept_lang_header(value), tuple(expected)) def test_parse_literal_http_header(self): """ Now test that we parse a literal HTTP header correctly. """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-br'} self.assertEqual('pt-br', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt'} self.assertEqual('pt', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'es,de'} self.assertEqual('es', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'es-ar,de'} self.assertEqual('es-ar', g(r)) # This test assumes there won't be a Django translation to a US # variation of the Spanish language, a safe assumption. When the # user sets it as the preferred language, the main 'es' # translation should be selected instead. r.META = {'HTTP_ACCEPT_LANGUAGE': 'es-us'} self.assertEqual(g(r), 'es') # This tests the following scenario: there isn't a main language (zh) # translation of Django but there is a translation to variation (zh-hans) # the user sets zh-hans as the preferred language, it should be selected # by Django without falling back nor ignoring it. r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-hans,de'} self.assertEqual(g(r), 'zh-hans') r.META = {'HTTP_ACCEPT_LANGUAGE': 'NL'} self.assertEqual('nl', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'fy'} self.assertEqual('fy', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'ia'} self.assertEqual('ia', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'sr-latn'} self.assertEqual('sr-latn', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-hans'} self.assertEqual('zh-hans', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-hant'} self.assertEqual('zh-hant', g(r)) @override_settings( LANGUAGES=[ ('en', 'English'), ('zh-hans', 'Simplified Chinese'), ('zh-hant', 'Traditional Chinese'), ] ) def test_support_for_deprecated_chinese_language_codes(self): """ Some browsers (Firefox, IE, etc.) use deprecated language codes. As these language codes will be removed in Django 1.9, these will be incorrectly matched. 
For example zh-tw (traditional) will be interpreted as zh-hans (simplified), which is wrong. So we should also accept these deprecated language codes. refs #18419 -- this is explicitly for browser compatibility """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-cn,en'} self.assertEqual(g(r), 'zh-hans') r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-tw,en'} self.assertEqual(g(r), 'zh-hant') def test_special_fallback_language(self): """ Some languages may have special fallbacks that don't follow the simple 'fr-ca' -> 'fr' logic (notably Chinese codes). """ r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-my,en'} self.assertEqual(get_language_from_request(r), 'zh-hans') def test_parse_language_cookie(self): """ Now test that we parse language preferences stored in a cookie correctly. """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'pt-br'} r.META = {} self.assertEqual('pt-br', g(r)) r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'pt'} r.META = {} self.assertEqual('pt', g(r)) r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'es'} r.META = {'HTTP_ACCEPT_LANGUAGE': 'de'} self.assertEqual('es', g(r)) # This test assumes there won't be a Django translation to a US # variation of the Spanish language, a safe assumption. When the # user sets it as the preferred language, the main 'es' # translation should be selected instead. r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'es-us'} r.META = {} self.assertEqual(g(r), 'es') # This tests the following scenario: there isn't a main language (zh) # translation of Django but there is a translation to variation (zh-hans) # the user sets zh-hans as the preferred language, it should be selected # by Django without falling back nor ignoring it. 
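# Illustrative sketch of the variant lookup behind this scenario (added for
# clarity, not part of the original test): an exactly supported code is kept
# as-is, while a code like 'zh-my' only resolves to 'zh-hans' through the
# language-info fallback chain.
#   trans_real.get_supported_language_variant('zh-hans')  # -> 'zh-hans'
#   trans_real.get_supported_language_variant('zh-my')    # -> 'zh-hans'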
r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'zh-hans'} r.META = {'HTTP_ACCEPT_LANGUAGE': 'de'} self.assertEqual(g(r), 'zh-hans') @override_settings( USE_I18N=True, LANGUAGES=[ ('en', 'English'), ('de', 'German'), ('de-at', 'Austrian German'), ('pt-br', 'Portuguese (Brazil)'), ], ) def test_get_supported_language_variant_real(self): g = trans_real.get_supported_language_variant self.assertEqual(g('en'), 'en') self.assertEqual(g('en-gb'), 'en') self.assertEqual(g('de'), 'de') self.assertEqual(g('de-at'), 'de-at') self.assertEqual(g('de-ch'), 'de') self.assertEqual(g('pt-br'), 'pt-br') self.assertEqual(g('pt'), 'pt-br') self.assertEqual(g('pt-pt'), 'pt-br') with self.assertRaises(LookupError): g('pt', strict=True) with self.assertRaises(LookupError): g('pt-pt', strict=True) with self.assertRaises(LookupError): g('xyz') with self.assertRaises(LookupError): g('xy-zz') def test_get_supported_language_variant_null(self): g = trans_null.get_supported_language_variant self.assertEqual(g(settings.LANGUAGE_CODE), settings.LANGUAGE_CODE) with self.assertRaises(LookupError): g('pt') with self.assertRaises(LookupError): g('de') with self.assertRaises(LookupError): g('de-at') with self.assertRaises(LookupError): g('de', strict=True) with self.assertRaises(LookupError): g('de-at', strict=True) with self.assertRaises(LookupError): g('xyz') @override_settings( LANGUAGES=[ ('en', 'English'), ('de', 'German'), ('de-at', 'Austrian German'), ('pl', 'Polish'), ], ) def test_get_language_from_path_real(self): g = trans_real.get_language_from_path self.assertEqual(g('/pl/'), 'pl') self.assertEqual(g('/pl'), 'pl') self.assertIsNone(g('/xyz/')) self.assertEqual(g('/en/'), 'en') self.assertEqual(g('/en-gb/'), 'en') self.assertEqual(g('/de/'), 'de') self.assertEqual(g('/de-at/'), 'de-at') self.assertEqual(g('/de-ch/'), 'de') self.assertIsNone(g('/de-simple-page/')) def test_get_language_from_path_null(self): g = trans_null.get_language_from_path self.assertIsNone(g('/pl/')) self.assertIsNone(g('/pl')) self.assertIsNone(g('/xyz/')) def test_cache_resetting(self): """ After setting LANGUAGE, the cache should be cleared and languages previously valid should not be used (#14170). """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-br'} self.assertEqual('pt-br', g(r)) with self.settings(LANGUAGES=[('en', 'English')]): self.assertNotEqual('pt-br', g(r)) def test_i18n_patterns_returns_list(self): with override_settings(USE_I18N=False): self.assertIsInstance(i18n_patterns([]), list) with override_settings(USE_I18N=True): self.assertIsInstance(i18n_patterns([]), list) class ResolutionOrderI18NTests(SimpleTestCase): def setUp(self): super().setUp() activate('de') def tearDown(self): deactivate() super().tearDown() def assertGettext(self, msgid, msgstr): result = gettext(msgid) self.assertIn( msgstr, result, "The string '%s' isn't in the translation of '%s'; the actual result is '%s'." % (msgstr, msgid, result) ) class AppResolutionOrderI18NTests(ResolutionOrderI18NTests): @override_settings(LANGUAGE_CODE='de') def test_app_translation(self): # Original translation. self.assertGettext('Date/time', 'Datum/Zeit') # Different translation. with self.modify_settings(INSTALLED_APPS={'append': 'i18n.resolution'}): # Force refreshing translations. activate('de') # Doesn't work because it's added later in the list. 
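# (Catalogs from apps listed earlier in INSTALLED_APPS take precedence when
# msgids overlap, so the appended 'i18n.resolution' catalog cannot shadow the
# admin's 'Datum/Zeit' translation until the admin app is removed below.)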
self.assertGettext('Date/time', 'Datum/Zeit') with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.admin.apps.SimpleAdminConfig'}): # Force refreshing translations. activate('de') # Unless the original is removed from the list. self.assertGettext('Date/time', 'Datum/Zeit (APP)') @override_settings(LOCALE_PATHS=extended_locale_paths) class LocalePathsResolutionOrderI18NTests(ResolutionOrderI18NTests): def test_locale_paths_translation(self): self.assertGettext('Time', 'LOCALE_PATHS') def test_locale_paths_override_app_translation(self): with self.settings(INSTALLED_APPS=['i18n.resolution']): self.assertGettext('Time', 'LOCALE_PATHS') class DjangoFallbackResolutionOrderI18NTests(ResolutionOrderI18NTests): def test_django_fallback(self): self.assertEqual(gettext('Date/time'), 'Datum/Zeit') @override_settings(INSTALLED_APPS=['i18n.territorial_fallback']) class TranslationFallbackI18NTests(ResolutionOrderI18NTests): def test_sparse_territory_catalog(self): """ Untranslated strings for territorial language variants use the translations of the generic language. In this case, the de-de translation falls back to de. """ with translation.override('de-de'): self.assertGettext('Test 1 (en)', '(de-de)') self.assertGettext('Test 2 (en)', '(de)') class TestModels(TestCase): def test_lazy(self): tm = TestModel() tm.save() def test_safestr(self): c = Company(cents_paid=12, products_delivered=1) c.name = SafeString('Iñtërnâtiônàlizætiøn1') c.save() class TestLanguageInfo(SimpleTestCase): def test_localized_language_info(self): li = get_language_info('de') self.assertEqual(li['code'], 'de') self.assertEqual(li['name_local'], 'Deutsch') self.assertEqual(li['name'], 'German') self.assertIs(li['bidi'], False) def test_unknown_language_code(self): with self.assertRaisesMessage(KeyError, "Unknown language code xx"): get_language_info('xx') with translation.override('xx'): # A language with no translation catalogs should fall back to the # untranslated string. self.assertEqual(gettext("Title"), "Title") def test_unknown_only_country_code(self): li = get_language_info('de-xx') self.assertEqual(li['code'], 'de') self.assertEqual(li['name_local'], 'Deutsch') self.assertEqual(li['name'], 'German') self.assertIs(li['bidi'], False) def test_unknown_language_code_and_country_code(self): with self.assertRaisesMessage(KeyError, "Unknown language code xx-xx and xx"): get_language_info('xx-xx') def test_fallback_language_code(self): """ get_language_info returns the first fallback language info if the lang_info struct does not contain the 'name' key. """ li = get_language_info('zh-my') self.assertEqual(li['code'], 'zh-hans') li = get_language_info('zh-hans') self.assertEqual(li['code'], 'zh-hans') @override_settings( USE_I18N=True, LANGUAGES=[ ('en', 'English'), ('fr', 'French'), ], MIDDLEWARE=[ 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ROOT_URLCONF='i18n.urls', ) class LocaleMiddlewareTests(TestCase): def test_streaming_response(self): # Regression test for #5241 response = self.client.get('/fr/streaming/') self.assertContains(response, "Oui/Non") response = self.client.get('/en/streaming/') self.assertContains(response, "Yes/No") @override_settings( MIDDLEWARE=[ 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ) def test_language_not_saved_to_session(self): """ The current language isn't automatically saved to the session on every request (#21473).
""" self.client.get('/fr/simple/') self.assertNotIn(LANGUAGE_SESSION_KEY, self.client.session) @override_settings( USE_I18N=True, LANGUAGES=[ ('en', 'English'), ('de', 'German'), ('fr', 'French'), ], MIDDLEWARE=[ 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ROOT_URLCONF='i18n.urls_default_unprefixed', LANGUAGE_CODE='en', ) class UnprefixedDefaultLanguageTests(SimpleTestCase): def test_default_lang_without_prefix(self): """ With i18n_patterns(..., prefix_default_language=False), the default language (settings.LANGUAGE_CODE) should be accessible without a prefix. """ response = self.client.get('/simple/') self.assertEqual(response.content, b'Yes') def test_other_lang_with_prefix(self): response = self.client.get('/fr/simple/') self.assertEqual(response.content, b'Oui') def test_unprefixed_language_other_than_accept_language(self): response = self.client.get('/simple/', HTTP_ACCEPT_LANGUAGE='fr') self.assertEqual(response.content, b'Yes') def test_page_with_dash(self): # A page starting with /de* shouldn't match the 'de' language code. response = self.client.get('/de-simple-page/') self.assertEqual(response.content, b'Yes') def test_no_redirect_on_404(self): """ A request for a nonexistent URL shouldn't cause a redirect to /<default_language>/<request_url> when prefix_default_language=False and /<default_language>/<request_url> has a URL match (#27402). """ # A match for /group1/group2/ must exist for this to act as a # regression test. response = self.client.get('/group1/group2/') self.assertEqual(response.status_code, 200) response = self.client.get('/nonexistent/') self.assertEqual(response.status_code, 404) @override_settings( USE_I18N=True, LANGUAGES=[ ('bg', 'Bulgarian'), ('en-us', 'English'), ('pt-br', 'Portuguese (Brazil)'), ], MIDDLEWARE=[ 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ROOT_URLCONF='i18n.urls' ) class CountrySpecificLanguageTests(SimpleTestCase): rf = RequestFactory() def test_check_for_language(self): self.assertTrue(check_for_language('en')) self.assertTrue(check_for_language('en-us')) self.assertTrue(check_for_language('en-US')) self.assertFalse(check_for_language('en_US')) self.assertTrue(check_for_language('be')) self.assertTrue(check_for_language('be@latin')) self.assertTrue(check_for_language('sr-RS@latin')) self.assertTrue(check_for_language('sr-RS@12345')) self.assertFalse(check_for_language('en-ü')) self.assertFalse(check_for_language('en\x00')) self.assertFalse(check_for_language(None)) self.assertFalse(check_for_language('be@ ')) # Specifying encoding is not supported (Django enforces UTF-8) self.assertFalse(check_for_language('tr-TR.UTF-8')) self.assertFalse(check_for_language('tr-TR.UTF8')) self.assertFalse(check_for_language('de-DE.utf-8')) def test_check_for_language_null(self): self.assertIs(trans_null.check_for_language('en'), True) def test_get_language_from_request(self): # issue 19919 r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'en-US,en;q=0.8,bg;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('en-us', lang) r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'bg-bg,en-US;q=0.8,en;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('bg', lang) def test_get_language_from_request_null(self): lang = trans_null.get_language_from_request(None) self.assertEqual(lang, 'en') with override_settings(LANGUAGE_CODE='de'): lang = trans_null.get_language_from_request(None) 
self.assertEqual(lang, 'de') def test_specific_language_codes(self): # issue 11915 r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt,en-US;q=0.8,en;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('pt-br', lang) r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-pt,en-US;q=0.8,en;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('pt-br', lang) class TranslationFilesMissing(SimpleTestCase): def setUp(self): super().setUp() self.gettext_find_builtin = gettext_module.find def tearDown(self): gettext_module.find = self.gettext_find_builtin super().tearDown() def patchGettextFind(self): gettext_module.find = lambda *args, **kw: None def test_failure_finding_default_mo_files(self): """OSError is raised if the default language is unparseable.""" self.patchGettextFind() trans_real._translations = {} with self.assertRaises(OSError): activate('en') class NonDjangoLanguageTests(SimpleTestCase): """ A language not present in default Django languages can still be installed/used by a Django project. """ @override_settings( USE_I18N=True, LANGUAGES=[ ('en-us', 'English'), ('xxx', 'Somelanguage'), ], LANGUAGE_CODE='xxx', LOCALE_PATHS=[os.path.join(here, 'commands', 'locale')], ) def test_non_django_language(self): self.assertEqual(get_language(), 'xxx') self.assertEqual(gettext("year"), "reay") @override_settings(USE_I18N=True) def test_check_for_language(self): with tempfile.TemporaryDirectory() as app_dir: os.makedirs(os.path.join(app_dir, 'locale', 'dummy_Lang', 'LC_MESSAGES')) open(os.path.join(app_dir, 'locale', 'dummy_Lang', 'LC_MESSAGES', 'django.mo'), 'w').close() app_config = AppConfig('dummy_app', AppModuleStub(__path__=[app_dir])) with mock.patch('django.apps.apps.get_app_configs', return_value=[app_config]): self.assertIs(check_for_language('dummy-lang'), True) @override_settings( USE_I18N=True, LANGUAGES=[ ('en-us', 'English'), # xyz language has no locale files ('xyz', 'XYZ'), ], ) @translation.override('xyz') def test_plural_non_django_language(self): self.assertEqual(get_language(), 'xyz') self.assertEqual(ngettext('year', 'years', 2), 'years') @override_settings(USE_I18N=True) class WatchForTranslationChangesTests(SimpleTestCase): @override_settings(USE_I18N=False) def test_i18n_disabled(self): mocked_sender = mock.MagicMock() watch_for_translation_changes(mocked_sender) mocked_sender.watch_dir.assert_not_called() def test_i18n_enabled(self): mocked_sender = mock.MagicMock() watch_for_translation_changes(mocked_sender) self.assertGreater(mocked_sender.watch_dir.call_count, 1) def test_i18n_locale_paths(self): mocked_sender = mock.MagicMock() with tempfile.TemporaryDirectory() as app_dir: with self.settings(LOCALE_PATHS=[app_dir]): watch_for_translation_changes(mocked_sender) mocked_sender.watch_dir.assert_any_call(Path(app_dir), '**/*.mo') def test_i18n_app_dirs(self): mocked_sender = mock.MagicMock() with self.settings(INSTALLED_APPS=['tests.i18n.sampleproject']): watch_for_translation_changes(mocked_sender) project_dir = Path(__file__).parent / 'sampleproject' / 'locale' mocked_sender.watch_dir.assert_any_call(project_dir, '**/*.mo') def test_i18n_app_dirs_ignore_django_apps(self): mocked_sender = mock.MagicMock() with self.settings(INSTALLED_APPS=['django.contrib.admin']): watch_for_translation_changes(mocked_sender) mocked_sender.watch_dir.assert_called_once_with(Path('locale'), '**/*.mo') def test_i18n_local_locale(self): mocked_sender = mock.MagicMock()
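# Even with default settings (no LOCALE_PATHS, no extra apps), the autoreload
# hook still watches this test package's own ./locale directory, as asserted
# below.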
watch_for_translation_changes(mocked_sender) locale_dir = Path(__file__).parent / 'locale' mocked_sender.watch_dir.assert_any_call(locale_dir, '**/*.mo') class TranslationFileChangedTests(SimpleTestCase): def setUp(self): self.gettext_translations = gettext_module._translations.copy() self.trans_real_translations = trans_real._translations.copy() def tearDown(self): gettext_module._translations = self.gettext_translations trans_real._translations = self.trans_real_translations def test_ignores_non_mo_files(self): gettext_module._translations = {'foo': 'bar'} path = Path('test.py') self.assertIsNone(translation_file_changed(None, path)) self.assertEqual(gettext_module._translations, {'foo': 'bar'}) def test_resets_cache_with_mo_files(self): gettext_module._translations = {'foo': 'bar'} trans_real._translations = {'foo': 'bar'} trans_real._default = 1 trans_real._active = False path = Path('test.mo') self.assertIs(translation_file_changed(None, path), True) self.assertEqual(gettext_module._translations, {}) self.assertEqual(trans_real._translations, {}) self.assertIsNone(trans_real._default) self.assertIsInstance(trans_real._active, Local) class UtilsTests(SimpleTestCase): def test_round_away_from_one(self): tests = [ (0, 0), (0., 0), (0.25, 0), (0.5, 0), (0.75, 0), (1, 1), (1., 1), (1.25, 2), (1.5, 2), (1.75, 2), (-0., 0), (-0.25, -1), (-0.5, -1), (-0.75, -1), (-1, -1), (-1., -1), (-1.25, -2), (-1.5, -2), (-1.75, -2), ] for value, expected in tests: with self.subTest(value=value): self.assertEqual(round_away_from_one(value), expected)
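# A minimal runnable sketch of the rounding rule the table above encodes.
# This reimplementation is an assumption based solely on the expected values,
# not on the actual source of django.utils.translation.round_away_from_one,
# and the helper name below is hypothetical and unused by the tests.
import math as _math

def _round_away_from_one_sketch(value):
    # Values below 1 round toward -inf and values at or above 1 round toward
    # +inf, i.e. always away from 1: 0.75 -> 0, 1.25 -> 2, -0.25 -> -1.
    return int(_math.floor(value)) if value < 1 else int(_math.ceil(value))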
cc52c3a0b1474fda90452baf43c40b87329b566f7da071145270b4ff3b3680d8
import gettext import os import re from datetime import datetime, timedelta from importlib import import_module import pytz from django import forms from django.conf import settings from django.contrib import admin from django.contrib.admin import widgets from django.contrib.admin.tests import AdminSeleniumTestCase from django.contrib.auth.models import User from django.core.files.storage import default_storage from django.core.files.uploadedfile import SimpleUploadedFile from django.db.models import ( CharField, DateField, DateTimeField, ManyToManyField, UUIDField, ) from django.test import SimpleTestCase, TestCase, override_settings from django.urls import reverse from django.utils import translation from .models import ( Advisor, Album, Band, Bee, Car, Company, Event, Honeycomb, Individual, Inventory, Member, MyFileField, Profile, School, Student, ) from .widgetadmin import site as widget_admin_site class TestDataMixin: @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email=None) cls.u2 = User.objects.create_user(username='testser', password='secret') Car.objects.create(owner=cls.superuser, make='Volkswagen', model='Passat') Car.objects.create(owner=cls.u2, make='BMW', model='M3') class AdminFormfieldForDBFieldTests(SimpleTestCase): """ Tests for correct behavior of ModelAdmin.formfield_for_dbfield """ def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides): """ Helper to call formfield_for_dbfield for a given model and field name and verify that the returned formfield is appropriate. """ # Override any settings on the model admin class MyModelAdmin(admin.ModelAdmin): pass for k in admin_overrides: setattr(MyModelAdmin, k, admin_overrides[k]) # Construct the admin, and ask it for a formfield ma = MyModelAdmin(model, admin.site) ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None) # "unwrap" the widget wrapper, if needed if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper): widget = ff.widget.widget else: widget = ff.widget self.assertIsInstance(widget, widgetclass) # Return the formfield so that other tests can continue return ff def test_DateField(self): self.assertFormfield(Event, 'start_date', widgets.AdminDateWidget) def test_DateTimeField(self): self.assertFormfield(Member, 'birthdate', widgets.AdminSplitDateTime) def test_TimeField(self): self.assertFormfield(Event, 'start_time', widgets.AdminTimeWidget) def test_TextField(self): self.assertFormfield(Event, 'description', widgets.AdminTextareaWidget) def test_URLField(self): self.assertFormfield(Event, 'link', widgets.AdminURLFieldWidget) def test_IntegerField(self): self.assertFormfield(Event, 'min_age', widgets.AdminIntegerFieldWidget) def test_CharField(self): self.assertFormfield(Member, 'name', widgets.AdminTextInputWidget) def test_EmailField(self): self.assertFormfield(Member, 'email', widgets.AdminEmailInputWidget) def test_FileField(self): self.assertFormfield(Album, 'cover_art', widgets.AdminFileWidget) def test_ForeignKey(self): self.assertFormfield(Event, 'main_band', forms.Select) def test_raw_id_ForeignKey(self): self.assertFormfield(Event, 'main_band', widgets.ForeignKeyRawIdWidget, raw_id_fields=['main_band']) def test_radio_fields_ForeignKey(self): ff = self.assertFormfield(Event, 'main_band', widgets.AdminRadioSelect, radio_fields={'main_band': admin.VERTICAL}) self.assertIsNone(ff.empty_label) def test_many_to_many(self): self.assertFormfield(Band, 'members', forms.SelectMultiple) def 
test_raw_id_many_to_many(self): self.assertFormfield(Band, 'members', widgets.ManyToManyRawIdWidget, raw_id_fields=['members']) def test_filtered_many_to_many(self): self.assertFormfield(Band, 'members', widgets.FilteredSelectMultiple, filter_vertical=['members']) def test_formfield_overrides(self): self.assertFormfield(Event, 'start_date', forms.TextInput, formfield_overrides={DateField: {'widget': forms.TextInput}}) def test_formfield_overrides_widget_instances(self): """ Widget instances in formfield_overrides are not shared between different fields. (#19423) """ class BandAdmin(admin.ModelAdmin): formfield_overrides = { CharField: {'widget': forms.TextInput(attrs={'size': '10'})} } ma = BandAdmin(Band, admin.site) f1 = ma.formfield_for_dbfield(Band._meta.get_field('name'), request=None) f2 = ma.formfield_for_dbfield(Band._meta.get_field('style'), request=None) self.assertNotEqual(f1.widget, f2.widget) self.assertEqual(f1.widget.attrs['maxlength'], '100') self.assertEqual(f2.widget.attrs['maxlength'], '20') self.assertEqual(f2.widget.attrs['size'], '10') def test_formfield_overrides_m2m_filter_widget(self): """ The autocomplete_fields, raw_id_fields, filter_vertical, and filter_horizontal widgets for ManyToManyFields may be overridden by specifying a widget in formfield_overrides. """ class BandAdmin(admin.ModelAdmin): filter_vertical = ['members'] formfield_overrides = { ManyToManyField: {'widget': forms.CheckboxSelectMultiple}, } ma = BandAdmin(Band, admin.site) field = ma.formfield_for_dbfield(Band._meta.get_field('members'), request=None) self.assertIsInstance(field.widget.widget, forms.CheckboxSelectMultiple) def test_formfield_overrides_for_datetime_field(self): """ Overriding the widget for DateTimeField doesn't override the default form_class for that field (#26449). """ class MemberAdmin(admin.ModelAdmin): formfield_overrides = {DateTimeField: {'widget': widgets.AdminSplitDateTime}} ma = MemberAdmin(Member, admin.site) f1 = ma.formfield_for_dbfield(Member._meta.get_field('birthdate'), request=None) self.assertIsInstance(f1.widget, widgets.AdminSplitDateTime) self.assertIsInstance(f1, forms.SplitDateTimeField) def test_formfield_overrides_for_custom_field(self): """ formfield_overrides works for a custom field class. """ class AlbumAdmin(admin.ModelAdmin): formfield_overrides = {MyFileField: {'widget': forms.TextInput()}} ma = AlbumAdmin(Album, admin.site) f1 = ma.formfield_for_dbfield(Album._meta.get_field('backside_art'), request=None) self.assertIsInstance(f1.widget, forms.TextInput) def test_field_with_choices(self): self.assertFormfield(Member, 'gender', forms.Select) def test_choices_with_radio_fields(self): self.assertFormfield(Member, 'gender', widgets.AdminRadioSelect, radio_fields={'gender': admin.VERTICAL}) def test_inheritance(self): self.assertFormfield(Album, 'backside_art', widgets.AdminFileWidget) def test_m2m_widgets(self): """m2m fields help text as it applies to admin app (#9321).""" class AdvisorAdmin(admin.ModelAdmin): filter_vertical = ['companies'] self.assertFormfield(Advisor, 'companies', widgets.FilteredSelectMultiple, filter_vertical=['companies']) ma = AdvisorAdmin(Advisor, admin.site) f = ma.formfield_for_dbfield(Advisor._meta.get_field('companies'), request=None) self.assertEqual( f.help_text, 'Hold down “Control”, or “Command” on a Mac, to select more than one.'
) @override_settings(ROOT_URLCONF='admin_widgets.urls') class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, TestCase): def test_filter_choices_by_request_user(self): """ Ensure the user can only see their own cars in the foreign key dropdown. """ self.client.force_login(self.superuser) response = self.client.get(reverse('admin:admin_widgets_cartire_add')) self.assertNotContains(response, "BMW M3") self.assertContains(response, "Volkswagen Passat") @override_settings(ROOT_URLCONF='admin_widgets.urls') class AdminForeignKeyWidgetChangeList(TestDataMixin, TestCase): def setUp(self): self.client.force_login(self.superuser) def test_changelist_ForeignKey(self): response = self.client.get(reverse('admin:admin_widgets_car_changelist')) self.assertContains(response, '/auth/user/add/') @override_settings(ROOT_URLCONF='admin_widgets.urls') class AdminForeignKeyRawIdWidget(TestDataMixin, TestCase): def setUp(self): self.client.force_login(self.superuser) def test_nonexistent_target_id(self): band = Band.objects.create(name='Bogey Blues') pk = band.pk band.delete() post_data = { "main_band": str(pk), } # Try posting with a nonexistent pk in a raw id field: this # should result in an error message, not a server exception. response = self.client.post(reverse('admin:admin_widgets_event_add'), post_data) self.assertContains(response, 'Select a valid choice. That choice is not one of the available choices.') def test_invalid_target_id(self): for test_str in ('Iñtërnâtiônàlizætiøn', "1234'", -1234): # This should result in an error message, not a server exception. response = self.client.post(reverse('admin:admin_widgets_event_add'), {"main_band": test_str}) self.assertContains(response, 'Select a valid choice. That choice is not one of the available choices.') def test_url_params_from_lookup_dict_any_iterable(self): lookup1 = widgets.url_params_from_lookup_dict({'color__in': ('red', 'blue')}) lookup2 = widgets.url_params_from_lookup_dict({'color__in': ['red', 'blue']}) self.assertEqual(lookup1, {'color__in': 'red,blue'}) self.assertEqual(lookup1, lookup2) def test_url_params_from_lookup_dict_callable(self): def my_callable(): return 'works' lookup1 = widgets.url_params_from_lookup_dict({'myfield': my_callable}) lookup2 = widgets.url_params_from_lookup_dict({'myfield': my_callable()}) self.assertEqual(lookup1, lookup2) def test_label_and_url_for_value_invalid_uuid(self): field = Bee._meta.get_field('honeycomb') self.assertIsInstance(field.target_field, UUIDField) widget = widgets.ForeignKeyRawIdWidget(field.remote_field, admin.site) self.assertEqual(widget.label_and_url_for_value('invalid-uuid'), ('', '')) class FilteredSelectMultipleWidgetTest(SimpleTestCase): def test_render(self): # Backslash in verbose_name to ensure it is JavaScript escaped. w = widgets.FilteredSelectMultiple('test\\', False) self.assertHTMLEqual( w.render('test', 'test'), '<select multiple name="test" class="selectfilter" ' 'data-field-name="test\\" data-is-stacked="0">\n</select>' ) def test_stacked_render(self): # Backslash in verbose_name to ensure it is JavaScript escaped. 
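# The boolean second argument toggles stacked mode: compare the default
# render above (class="selectfilter", data-is-stacked="0") with the stacked
# render below (class="selectfilterstacked", data-is-stacked="1").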
w = widgets.FilteredSelectMultiple('test\\', True) self.assertHTMLEqual( w.render('test', 'test'), '<select multiple name="test" class="selectfilterstacked" ' 'data-field-name="test\\" data-is-stacked="1">\n</select>' ) class AdminDateWidgetTest(SimpleTestCase): def test_attrs(self): w = widgets.AdminDateWidget() self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="2007-12-01" type="text" class="vDateField" name="test" size="10">', ) # pass attrs to widget w = widgets.AdminDateWidget(attrs={'size': 20, 'class': 'myDateField'}) self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="2007-12-01" type="text" class="myDateField" name="test" size="20">', ) class AdminTimeWidgetTest(SimpleTestCase): def test_attrs(self): w = widgets.AdminTimeWidget() self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="09:30:00" type="text" class="vTimeField" name="test" size="8">', ) # pass attrs to widget w = widgets.AdminTimeWidget(attrs={'size': 20, 'class': 'myTimeField'}) self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="09:30:00" type="text" class="myTimeField" name="test" size="20">', ) class AdminSplitDateTimeWidgetTest(SimpleTestCase): def test_render(self): w = widgets.AdminSplitDateTime() self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<p class="datetime">' 'Date: <input value="2007-12-01" type="text" class="vDateField" ' 'name="test_0" size="10"><br>' 'Time: <input value="09:30:00" type="text" class="vTimeField" ' 'name="test_1" size="8"></p>' ) def test_localization(self): w = widgets.AdminSplitDateTime() with self.settings(USE_L10N=True), translation.override('de-at'): w.is_localized = True self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<p class="datetime">' 'Datum: <input value="01.12.2007" type="text" ' 'class="vDateField" name="test_0" size="10"><br>' 'Zeit: <input value="09:30:00" type="text" class="vTimeField" ' 'name="test_1" size="8"></p>' ) class AdminURLWidgetTest(SimpleTestCase): def test_get_context_validates_url(self): w = widgets.AdminURLFieldWidget() for invalid in ['', '/not/a/full/url/', 'javascript:alert("Danger XSS!")']: with self.subTest(url=invalid): self.assertFalse(w.get_context('name', invalid, {})['url_valid']) self.assertTrue(w.get_context('name', 'http://example.com', {})['url_valid']) def test_render(self): w = widgets.AdminURLFieldWidget() self.assertHTMLEqual( w.render('test', ''), '<input class="vURLField" name="test" type="url">' ) self.assertHTMLEqual( w.render('test', 'http://example.com'), '<p class="url">Currently:<a href="http://example.com">' 'http://example.com</a><br>' 'Change:<input class="vURLField" name="test" type="url" ' 'value="http://example.com"></p>' ) def test_render_idn(self): w = widgets.AdminURLFieldWidget() self.assertHTMLEqual( w.render('test', 'http://example-äüö.com'), '<p class="url">Currently: <a href="http://xn--example--7za4pnc.com">' 'http://example-äüö.com</a><br>' 'Change:<input class="vURLField" name="test" type="url" ' 'value="http://example-äüö.com"></p>' ) def test_render_quoting(self): """ WARNING: This test doesn't use assertHTMLEqual since it will get rid of some escapes which are tested here!
""" HREF_RE = re.compile('href="([^"]+)"') VALUE_RE = re.compile('value="([^"]+)"') TEXT_RE = re.compile('<a[^>]+>([^>]+)</a>') w = widgets.AdminURLFieldWidget() output = w.render('test', 'http://example.com/<sometag>some-text</sometag>') self.assertEqual( HREF_RE.search(output)[1], 'http://example.com/%3Csometag%3Esome-text%3C/sometag%3E', ) self.assertEqual( TEXT_RE.search(output)[1], 'http://example.com/&lt;sometag&gt;some-text&lt;/sometag&gt;', ) self.assertEqual( VALUE_RE.search(output)[1], 'http://example.com/&lt;sometag&gt;some-text&lt;/sometag&gt;', ) output = w.render('test', 'http://example-äüö.com/<sometag>some-text</sometag>') self.assertEqual( HREF_RE.search(output)[1], 'http://xn--example--7za4pnc.com/%3Csometag%3Esome-text%3C/sometag%3E', ) self.assertEqual( TEXT_RE.search(output)[1], 'http://example-äüö.com/&lt;sometag&gt;some-text&lt;/sometag&gt;', ) self.assertEqual( VALUE_RE.search(output)[1], 'http://example-äüö.com/&lt;sometag&gt;some-text&lt;/sometag&gt;', ) output = w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"') self.assertEqual( HREF_RE.search(output)[1], 'http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22', ) self.assertEqual( TEXT_RE.search(output)[1], 'http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;' 'alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;' ) self.assertEqual( VALUE_RE.search(output)[1], 'http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;', ) class AdminUUIDWidgetTests(SimpleTestCase): def test_attrs(self): w = widgets.AdminUUIDInputWidget() self.assertHTMLEqual( w.render('test', '550e8400-e29b-41d4-a716-446655440000'), '<input value="550e8400-e29b-41d4-a716-446655440000" type="text" class="vUUIDField" name="test">', ) w = widgets.AdminUUIDInputWidget(attrs={'class': 'myUUIDInput'}) self.assertHTMLEqual( w.render('test', '550e8400-e29b-41d4-a716-446655440000'), '<input value="550e8400-e29b-41d4-a716-446655440000" type="text" class="myUUIDInput" name="test">', ) @override_settings(ROOT_URLCONF='admin_widgets.urls') class AdminFileWidgetTests(TestDataMixin, TestCase): @classmethod def setUpTestData(cls): super().setUpTestData() band = Band.objects.create(name='Linkin Park') cls.album = band.album_set.create( name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg' ) def test_render(self): w = widgets.AdminFileWidget() self.assertHTMLEqual( w.render('test', self.album.cover_art), '<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/' r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> ' '<span class="clearable-file-input">' '<input type="checkbox" name="test-clear" id="test-clear_id"> ' '<label for="test-clear_id">Clear</label></span><br>' 'Change: <input type="file" name="test"></p>' % { 'STORAGE_URL': default_storage.url(''), }, ) self.assertHTMLEqual( w.render('test', SimpleUploadedFile('test', b'content')), '<input type="file" name="test">', ) def test_render_required(self): widget = widgets.AdminFileWidget() widget.is_required = True self.assertHTMLEqual( widget.render('test', self.album.cover_art), '<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/' r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a><br>' 'Change: <input type="file" name="test"></p>' % { 'STORAGE_URL': default_storage.url(''), }, ) def test_render_disabled(self): widget = widgets.AdminFileWidget(attrs={'disabled': True}) self.assertHTMLEqual( widget.render('test', self.album.cover_art), '<p class="file-upload">Currently: <a 
href="%(STORAGE_URL)salbums/' r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> ' '<span class="clearable-file-input">' '<input type="checkbox" name="test-clear" id="test-clear_id" disabled>' '<label for="test-clear_id">Clear</label></span><br>' 'Change: <input type="file" name="test" disabled></p>' % { 'STORAGE_URL': default_storage.url(''), }, ) def test_readonly_fields(self): """ File widgets should render as a link when they're marked "read only." """ self.client.force_login(self.superuser) response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,))) self.assertContains( response, '<div class="readonly"><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">' r'albums\hybrid_theory.jpg</a></div>' % {'STORAGE_URL': default_storage.url('')}, html=True, ) self.assertNotContains( response, '<input type="file" name="cover_art" id="id_cover_art">', html=True, ) response = self.client.get(reverse('admin:admin_widgets_album_add')) self.assertContains( response, '<div class="readonly"></div>', html=True, ) @override_settings(ROOT_URLCONF='admin_widgets.urls') class ForeignKeyRawIdWidgetTest(TestCase): def test_render(self): band = Band.objects.create(name='Linkin Park') band.album_set.create( name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg' ) rel = Album._meta.get_field('band').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('test', band.pk, attrs={}), '<input type="text" name="test" value="%(bandpk)s" ' 'class="vForeignKeyRawIdAdminField">' '<a href="/admin_widgets/band/?_to_field=id" class="related-lookup" ' 'id="lookup_id_test" title="Lookup"></a>&nbsp;<strong>' '<a href="/admin_widgets/band/%(bandpk)s/change/">Linkin Park</a>' '</strong>' % {'bandpk': band.pk} ) def test_relations_to_non_primary_key(self): # ForeignKeyRawIdWidget works with fields which aren't related to # the model's primary key. apple = Inventory.objects.create(barcode=86, name='Apple') Inventory.objects.create(barcode=22, name='Pear') core = Inventory.objects.create( barcode=87, name='Core', parent=apple ) rel = Inventory._meta.get_field('parent').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('test', core.parent_id, attrs={}), '<input type="text" name="test" value="86" ' 'class="vForeignKeyRawIdAdminField">' '<a href="/admin_widgets/inventory/?_to_field=barcode" ' 'class="related-lookup" id="lookup_id_test" title="Lookup"></a>' '&nbsp;<strong><a href="/admin_widgets/inventory/%(pk)s/change/">' 'Apple</a></strong>' % {'pk': apple.pk} ) def test_fk_related_model_not_in_admin(self): # FK to a model not registered with admin site. Raw ID widget should # have no magnifying glass link. See #16542 big_honeycomb = Honeycomb.objects.create(location='Old tree') big_honeycomb.bee_set.create() rel = Bee._meta.get_field('honeycomb').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('honeycomb_widget', big_honeycomb.pk, attrs={}), '<input type="text" name="honeycomb_widget" value="%(hcombpk)s">' '&nbsp;<strong>%(hcomb)s</strong>' % {'hcombpk': big_honeycomb.pk, 'hcomb': big_honeycomb} ) def test_fk_to_self_model_not_in_admin(self): # FK to self, not registered with admin site. Raw ID widget should have # no magnifying glass link. 
See #16542 subject1 = Individual.objects.create(name='Subject #1') Individual.objects.create(name='Child', parent=subject1) rel = Individual._meta.get_field('parent').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('individual_widget', subject1.pk, attrs={}), '<input type="text" name="individual_widget" value="%(subj1pk)s">' '&nbsp;<strong>%(subj1)s</strong>' % {'subj1pk': subject1.pk, 'subj1': subject1} ) def test_proper_manager_for_label_lookup(self): # see #9258 rel = Inventory._meta.get_field('parent').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) hidden = Inventory.objects.create( barcode=93, name='Hidden', hidden=True ) child_of_hidden = Inventory.objects.create( barcode=94, name='Child of hidden', parent=hidden ) self.assertHTMLEqual( w.render('test', child_of_hidden.parent_id, attrs={}), '<input type="text" name="test" value="93" class="vForeignKeyRawIdAdminField">' '<a href="/admin_widgets/inventory/?_to_field=barcode" ' 'class="related-lookup" id="lookup_id_test" title="Lookup"></a>' '&nbsp;<strong><a href="/admin_widgets/inventory/%(pk)s/change/">' 'Hidden</a></strong>' % {'pk': hidden.pk} ) @override_settings(ROOT_URLCONF='admin_widgets.urls') class ManyToManyRawIdWidgetTest(TestCase): def test_render(self): band = Band.objects.create(name='Linkin Park') m1 = Member.objects.create(name='Chester') m2 = Member.objects.create(name='Mike') band.members.add(m1, m2) rel = Band._meta.get_field('members').remote_field w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('test', [m1.pk, m2.pk], attrs={}), ( '<input type="text" name="test" value="%(m1pk)s,%(m2pk)s" class="vManyToManyRawIdAdminField">' '<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>' ) % {'m1pk': m1.pk, 'm2pk': m2.pk} ) self.assertHTMLEqual( w.render('test', [m1.pk]), ( '<input type="text" name="test" value="%(m1pk)s" class="vManyToManyRawIdAdminField">' '<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>' ) % {'m1pk': m1.pk} ) def test_m2m_related_model_not_in_admin(self): # M2M relationship with model not registered with admin site. Raw ID # widget should have no magnifying glass link. See #16542 consultor1 = Advisor.objects.create(name='Rockstar Techie') c1 = Company.objects.create(name='Doodle') c2 = Company.objects.create(name='Pear') consultor1.companies.add(c1, c2) rel = Advisor._meta.get_field('companies').remote_field w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('company_widget1', [c1.pk, c2.pk], attrs={}), '<input type="text" name="company_widget1" value="%(c1pk)s,%(c2pk)s">' % {'c1pk': c1.pk, 'c2pk': c2.pk} ) self.assertHTMLEqual( w.render('company_widget2', [c1.pk]), '<input type="text" name="company_widget2" value="%(c1pk)s">' % {'c1pk': c1.pk} ) @override_settings(ROOT_URLCONF='admin_widgets.urls') class RelatedFieldWidgetWrapperTests(SimpleTestCase): def test_no_can_add_related(self): rel = Individual._meta.get_field('parent').remote_field w = widgets.AdminRadioSelect() # Used to fail with a name error. 
w = widgets.RelatedFieldWidgetWrapper(w, rel, widget_admin_site) self.assertFalse(w.can_add_related) def test_select_multiple_widget_cant_change_delete_related(self): rel = Individual._meta.get_field('parent').remote_field widget = forms.SelectMultiple() wrapper = widgets.RelatedFieldWidgetWrapper( widget, rel, widget_admin_site, can_add_related=True, can_change_related=True, can_delete_related=True, ) self.assertTrue(wrapper.can_add_related) self.assertFalse(wrapper.can_change_related) self.assertFalse(wrapper.can_delete_related) def test_on_delete_cascade_rel_cant_delete_related(self): rel = Individual._meta.get_field('soulmate').remote_field widget = forms.Select() wrapper = widgets.RelatedFieldWidgetWrapper( widget, rel, widget_admin_site, can_add_related=True, can_change_related=True, can_delete_related=True, ) self.assertTrue(wrapper.can_add_related) self.assertTrue(wrapper.can_change_related) self.assertFalse(wrapper.can_delete_related) def test_custom_widget_render(self): class CustomWidget(forms.Select): def render(self, *args, **kwargs): return 'custom render output' rel = Album._meta.get_field('band').remote_field widget = CustomWidget() wrapper = widgets.RelatedFieldWidgetWrapper( widget, rel, widget_admin_site, can_add_related=True, can_change_related=True, can_delete_related=True, ) output = wrapper.render('name', 'value') self.assertIn('custom render output', output) def test_widget_delegates_value_omitted_from_data(self): class CustomWidget(forms.Select): def value_omitted_from_data(self, data, files, name): return False rel = Album._meta.get_field('band').remote_field widget = CustomWidget() wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site) self.assertIs(wrapper.value_omitted_from_data({}, {}, 'band'), False) def test_widget_is_hidden(self): rel = Album._meta.get_field('band').remote_field widget = forms.HiddenInput() widget.choices = () wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site) self.assertIs(wrapper.is_hidden, True) context = wrapper.get_context('band', None, {}) self.assertIs(context['is_hidden'], True) output = wrapper.render('name', 'value') # Related item links are hidden. self.assertNotIn('<a ', output) def test_widget_is_not_hidden(self): rel = Album._meta.get_field('band').remote_field widget = forms.Select() wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site) self.assertIs(wrapper.is_hidden, False) context = wrapper.get_context('band', None, {}) self.assertIs(context['is_hidden'], False) output = wrapper.render('name', 'value') # Related item links are present. self.assertIn('<a ', output) @override_settings(ROOT_URLCONF='admin_widgets.urls') class AdminWidgetSeleniumTestCase(AdminSeleniumTestCase): available_apps = ['admin_widgets'] + AdminSeleniumTestCase.available_apps def setUp(self): self.u1 = User.objects.create_superuser(username='super', password='secret', email='[email protected]') class DateTimePickerSeleniumTests(AdminWidgetSeleniumTestCase): def test_show_hide_date_time_picker_widgets(self): """ Pressing the ESC key or clicking on a widget value closes the date and time picker widgets. 
""" from selenium.webdriver.common.keys import Keys self.admin_login(username='super', password='secret', login_url='/') # Open a page that has a date and time picker widgets self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add')) # First, with the date picker widget --------------------------------- cal_icon = self.selenium.find_element_by_id('calendarlink0') # The date picker is hidden self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed()) # Click the calendar icon cal_icon.click() # The date picker is visible self.assertTrue(self.selenium.find_element_by_id('calendarbox0').is_displayed()) # Press the ESC key self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE]) # The date picker is hidden again self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed()) # Click the calendar icon, then on the 15th of current month cal_icon.click() self.selenium.find_element_by_xpath("//a[contains(text(), '15')]").click() self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed()) self.assertEqual( self.selenium.find_element_by_id('id_birthdate_0').get_attribute('value'), datetime.today().strftime('%Y-%m-') + '15', ) # Then, with the time picker widget ---------------------------------- time_icon = self.selenium.find_element_by_id('clocklink0') # The time picker is hidden self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed()) # Click the time icon time_icon.click() # The time picker is visible self.assertTrue(self.selenium.find_element_by_id('clockbox0').is_displayed()) self.assertEqual( [ x.text for x in self.selenium.find_elements_by_xpath("//ul[@class='timelist']/li/a") ], ['Now', 'Midnight', '6 a.m.', 'Noon', '6 p.m.'] ) # Press the ESC key self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE]) # The time picker is hidden again self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed()) # Click the time icon, then select the 'Noon' value time_icon.click() self.selenium.find_element_by_xpath("//a[contains(text(), 'Noon')]").click() self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed()) self.assertEqual( self.selenium.find_element_by_id('id_birthdate_1').get_attribute('value'), '12:00:00', ) def test_calendar_nonday_class(self): """ Ensure cells that are not days of the month have the `nonday` CSS class. Refs #4574. """ self.admin_login(username='super', password='secret', login_url='/') # Open a page that has a date and time picker widgets self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add')) # fill in the birth date. self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01') # Click the calendar icon self.selenium.find_element_by_id('calendarlink0').click() # get all the tds within the calendar calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') # make sure the first and last 6 cells have class nonday for td in tds[:6] + tds[-6:]: self.assertEqual(td.get_attribute('class'), 'nonday') def test_calendar_selected_class(self): """ Ensure cell for the day in the input has the `selected` CSS class. Refs #4574. """ self.admin_login(username='super', password='secret', login_url='/') # Open a page that has a date and time picker widgets self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add')) # fill in the birth date. 
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01') # Click the calendar icon self.selenium.find_element_by_id('calendarlink0').click() # get all the tds within the calendar calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') # verify the selected cell selected = tds[6] self.assertEqual(selected.get_attribute('class'), 'selected') self.assertEqual(selected.text, '1') def test_calendar_no_selected_class(self): """ Ensure no cells are given the selected class when the field is empty. Refs #4574. """ self.admin_login(username='super', password='secret', login_url='/') # Open a page that has a date and time picker widgets self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add')) # Click the calendar icon self.selenium.find_element_by_id('calendarlink0').click() # get all the tds within the calendar calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') # verify there are no cells with the selected class selected = [td for td in tds if td.get_attribute('class') == 'selected'] self.assertEqual(len(selected), 0) def test_calendar_show_date_from_input(self): """ The calendar shows the date from the input field for every locale supported by Django. """ self.selenium.set_window_size(1024, 768) self.admin_login(username='super', password='secret', login_url='/') # Enter test data member = Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M') # Get month name translations for every locale month_string = 'May' path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale') for language_code, language_name in settings.LANGUAGES: try: catalog = gettext.translation('djangojs', path, [language_code]) except OSError: continue if month_string in catalog._catalog: month_name = catalog._catalog[month_string] else: month_name = month_string # Get the expected caption may_translation = month_name expected_caption = '{:s} {:d}'.format(may_translation.upper(), 1984) # Test with every locale with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True): # Open a page that has a date picker widget url = reverse('admin:admin_widgets_member_change', args=(member.pk,)) self.selenium.get(self.live_server_url + url) # Click on the calendar icon self.selenium.find_element_by_id('calendarlink0').click() # Make sure that the right month and year are displayed self.wait_for_text('#calendarin0 caption', expected_caption) @override_settings(TIME_ZONE='Asia/Singapore') class DateTimePickerShortcutsSeleniumTests(AdminWidgetSeleniumTestCase): def test_date_time_picker_shortcuts(self): """ date/time/datetime picker shortcuts work in the current time zone. Refs #20663. This test case is fairly tricky, it relies on selenium still running the browser in the default time zone "America/Chicago" despite `override_settings` changing the time zone to "Asia/Singapore". """ self.admin_login(username='super', password='secret', login_url='/') error_margin = timedelta(seconds=10) # If we are neighbouring a DST, we add an hour of error margin. 
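# (The tzname comparison below detects whether a DST transition falls within
# a day of now in the browser's zone; if it does, the shortcut values may
# legitimately differ by an extra hour.)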
tz = pytz.timezone('America/Chicago') utc_now = datetime.now(pytz.utc) tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname() tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname() if tz_yesterday != tz_tomorrow: error_margin += timedelta(hours=1) self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add')) self.selenium.find_element_by_id('id_name').send_keys('test') # Click on the "today" and "now" shortcuts. shortcuts = self.selenium.find_elements_by_css_selector('.field-birthdate .datetimeshortcuts') now = datetime.now() for shortcut in shortcuts: shortcut.find_element_by_tag_name('a').click() # There is a time zone mismatch warning. # Warning: This would effectively fail if the TIME_ZONE defined in the # settings has the same UTC offset as "Asia/Singapore" because the # mismatch warning would be rightfully missing from the page. self.selenium.find_elements_by_css_selector('.field-birthdate .timezonewarning') # Submit the form. with self.wait_page_loaded(): self.selenium.find_element_by_name('_save').click() # Make sure that "now" in javascript is within 10 seconds # from "now" on the server side. member = Member.objects.get(name='test') self.assertGreater(member.birthdate, now - error_margin) self.assertLess(member.birthdate, now + error_margin) # The above tests run with Asia/Singapore which are on the positive side of # UTC. Here we test with a timezone on the negative side. @override_settings(TIME_ZONE='US/Eastern') class DateTimePickerAltTimezoneSeleniumTests(DateTimePickerShortcutsSeleniumTests): pass class HorizontalVerticalFilterSeleniumTests(AdminWidgetSeleniumTestCase): def setUp(self): super().setUp() self.lisa = Student.objects.create(name='Lisa') self.john = Student.objects.create(name='John') self.bob = Student.objects.create(name='Bob') self.peter = Student.objects.create(name='Peter') self.jenny = Student.objects.create(name='Jenny') self.jason = Student.objects.create(name='Jason') self.cliff = Student.objects.create(name='Cliff') self.arthur = Student.objects.create(name='Arthur') self.school = School.objects.create(name='School of Awesome') def assertActiveButtons(self, mode, field_name, choose, remove, choose_all=None, remove_all=None): choose_link = '#id_%s_add_link' % field_name choose_all_link = '#id_%s_add_all_link' % field_name remove_link = '#id_%s_remove_link' % field_name remove_all_link = '#id_%s_remove_all_link' % field_name self.assertEqual(self.has_css_class(choose_link, 'active'), choose) self.assertEqual(self.has_css_class(remove_link, 'active'), remove) if mode == 'horizontal': self.assertEqual(self.has_css_class(choose_all_link, 'active'), choose_all) self.assertEqual(self.has_css_class(remove_all_link, 'active'), remove_all) def execute_basic_operations(self, mode, field_name): original_url = self.selenium.current_url from_box = '#id_%s_from' % field_name to_box = '#id_%s_to' % field_name choose_link = 'id_%s_add_link' % field_name choose_all_link = 'id_%s_add_all_link' % field_name remove_link = 'id_%s_remove_link' % field_name remove_all_link = 'id_%s_remove_all_link' % field_name # Initial positions --------------------------------------------------- self.assertSelectOptions(from_box, [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ]) self.assertSelectOptions(to_box, [str(self.lisa.id), str(self.peter.id)]) self.assertActiveButtons(mode, field_name, False, False, True, True) # Click 'Choose all' 
        if mode == 'horizontal':
            self.selenium.find_element_by_id(choose_all_link).click()
        elif mode == 'vertical':
            # There's no 'Choose all' button in vertical mode, so individually
            # select all options and click 'Choose'.
            for option in self.selenium.find_elements_by_css_selector(from_box + ' > option'):
                option.click()
            self.selenium.find_element_by_id(choose_link).click()
        self.assertSelectOptions(from_box, [])
        self.assertSelectOptions(to_box, [
            str(self.lisa.id), str(self.peter.id),
            str(self.arthur.id), str(self.bob.id),
            str(self.cliff.id), str(self.jason.id),
            str(self.jenny.id), str(self.john.id),
        ])
        self.assertActiveButtons(mode, field_name, False, False, False, True)

        # Click 'Remove all' --------------------------------------------------
        if mode == 'horizontal':
            self.selenium.find_element_by_id(remove_all_link).click()
        elif mode == 'vertical':
            # There's no 'Remove all' button in vertical mode, so individually
            # select all options and click 'Remove'.
            for option in self.selenium.find_elements_by_css_selector(to_box + ' > option'):
                option.click()
            self.selenium.find_element_by_id(remove_link).click()

        self.assertSelectOptions(from_box, [
            str(self.lisa.id), str(self.peter.id),
            str(self.arthur.id), str(self.bob.id),
            str(self.cliff.id), str(self.jason.id),
            str(self.jenny.id), str(self.john.id),
        ])
        self.assertSelectOptions(to_box, [])
        self.assertActiveButtons(mode, field_name, False, False, True, False)

        # Choose some options ------------------------------------------------
        from_lisa_select_option = self.selenium.find_element_by_css_selector(
            '{} > option[value="{}"]'.format(from_box, self.lisa.id)
        )

        # Check the title attribute is there for tool tips: ticket #20821
        self.assertEqual(from_lisa_select_option.get_attribute('title'), from_lisa_select_option.get_attribute('text'))

        self.select_option(from_box, str(self.lisa.id))
        self.select_option(from_box, str(self.jason.id))
        self.select_option(from_box, str(self.bob.id))
        self.select_option(from_box, str(self.john.id))

        self.assertActiveButtons(mode, field_name, True, False, True, False)
        self.selenium.find_element_by_id(choose_link).click()
        self.assertActiveButtons(mode, field_name, False, False, True, True)

        self.assertSelectOptions(from_box, [
            str(self.peter.id), str(self.arthur.id),
            str(self.cliff.id), str(self.jenny.id),
        ])
        self.assertSelectOptions(to_box, [
            str(self.lisa.id), str(self.bob.id),
            str(self.jason.id), str(self.john.id),
        ])

        # Check the tooltip is still there after moving: ticket #20821
        to_lisa_select_option = self.selenium.find_element_by_css_selector(
            '{} > option[value="{}"]'.format(to_box, self.lisa.id)
        )
        self.assertEqual(to_lisa_select_option.get_attribute('title'), to_lisa_select_option.get_attribute('text'))

        # Remove some options -------------------------------------------------
        self.select_option(to_box, str(self.lisa.id))
        self.select_option(to_box, str(self.bob.id))
        self.assertActiveButtons(mode, field_name, False, True, True, True)
        self.selenium.find_element_by_id(remove_link).click()
        self.assertActiveButtons(mode, field_name, False, False, True, True)

        self.assertSelectOptions(from_box, [
            str(self.peter.id), str(self.arthur.id),
            str(self.cliff.id), str(self.jenny.id),
            str(self.lisa.id), str(self.bob.id)
        ])
        self.assertSelectOptions(to_box, [str(self.jason.id), str(self.john.id)])

        # Choose some more options --------------------------------------------
        self.select_option(from_box, str(self.arthur.id))
        self.select_option(from_box, str(self.cliff.id))
self.selenium.find_element_by_id(choose_link).click() self.assertSelectOptions(from_box, [ str(self.peter.id), str(self.jenny.id), str(self.lisa.id), str(self.bob.id), ]) self.assertSelectOptions(to_box, [ str(self.jason.id), str(self.john.id), str(self.arthur.id), str(self.cliff.id), ]) # Choose some more options -------------------------------------------- self.select_option(from_box, str(self.peter.id)) self.select_option(from_box, str(self.lisa.id)) # Confirm they're selected after clicking inactive buttons: ticket #26575 self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)]) self.selenium.find_element_by_id(remove_link).click() self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)]) # Unselect the options ------------------------------------------------ self.deselect_option(from_box, str(self.peter.id)) self.deselect_option(from_box, str(self.lisa.id)) # Choose some more options -------------------------------------------- self.select_option(to_box, str(self.jason.id)) self.select_option(to_box, str(self.john.id)) # Confirm they're selected after clicking inactive buttons: ticket #26575 self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)]) self.selenium.find_element_by_id(choose_link).click() self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)]) # Unselect the options ------------------------------------------------ self.deselect_option(to_box, str(self.jason.id)) self.deselect_option(to_box, str(self.john.id)) # Pressing buttons shouldn't change the URL. self.assertEqual(self.selenium.current_url, original_url) def test_basic(self): self.selenium.set_window_size(1024, 768) self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username='super', password='secret', login_url='/') self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,))) self.wait_page_ready() self.execute_basic_operations('vertical', 'students') self.execute_basic_operations('horizontal', 'alumni') # Save and check that everything is properly stored in the database --- self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.wait_page_ready() self.school = School.objects.get(id=self.school.id) # Reload from database self.assertEqual(list(self.school.students.all()), [self.arthur, self.cliff, self.jason, self.john]) self.assertEqual(list(self.school.alumni.all()), [self.arthur, self.cliff, self.jason, self.john]) def test_filter(self): """ Typing in the search box filters out options displayed in the 'from' box. 
""" from selenium.webdriver.common.keys import Keys self.selenium.set_window_size(1024, 768) self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username='super', password='secret', login_url='/') self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,))) for field_name in ['students', 'alumni']: from_box = '#id_%s_from' % field_name to_box = '#id_%s_to' % field_name choose_link = 'id_%s_add_link' % field_name remove_link = 'id_%s_remove_link' % field_name input = self.selenium.find_element_by_id('id_%s_input' % field_name) # Initial values self.assertSelectOptions(from_box, [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ]) # Typing in some characters filters out non-matching options input.send_keys('a') self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) input.send_keys('R') self.assertSelectOptions(from_box, [str(self.arthur.id)]) # Clearing the text box makes the other options reappear input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ]) # ----------------------------------------------------------------- # Choosing a filtered option sends it properly to the 'to' box. input.send_keys('a') self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) self.select_option(from_box, str(self.jason.id)) self.selenium.find_element_by_id(choose_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id)]) self.assertSelectOptions(to_box, [ str(self.lisa.id), str(self.peter.id), str(self.jason.id), ]) self.select_option(to_box, str(self.lisa.id)) self.selenium.find_element_by_id(remove_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.lisa.id)]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE]) # Clear text box self.assertSelectOptions(from_box, [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jenny.id), str(self.john.id), str(self.lisa.id), ]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) # ----------------------------------------------------------------- # Pressing enter on a filtered option sends it properly to # the 'to' box. self.select_option(to_box, str(self.jason.id)) self.selenium.find_element_by_id(remove_link).click() input.send_keys('ja') self.assertSelectOptions(from_box, [str(self.jason.id)]) input.send_keys([Keys.ENTER]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE]) # Save and check that everything is properly stored in the database --- with self.wait_page_loaded(): self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.school = School.objects.get(id=self.school.id) # Reload from database self.assertEqual(list(self.school.students.all()), [self.jason, self.peter]) self.assertEqual(list(self.school.alumni.all()), [self.jason, self.peter]) def test_back_button_bug(self): """ Some browsers had a bug where navigating away from the change page and then clicking the browser's back button would clear the filter_horizontal/filter_vertical widgets (#13614). 
""" self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username='super', password='secret', login_url='/') change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,)) self.selenium.get(self.live_server_url + change_url) # Navigate away and go back to the change form page. self.selenium.find_element_by_link_text('Home').click() self.selenium.back() expected_unselected_values = [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ] expected_selected_values = [str(self.lisa.id), str(self.peter.id)] # Everything is still in place self.assertSelectOptions('#id_students_from', expected_unselected_values) self.assertSelectOptions('#id_students_to', expected_selected_values) self.assertSelectOptions('#id_alumni_from', expected_unselected_values) self.assertSelectOptions('#id_alumni_to', expected_selected_values) def test_refresh_page(self): """ Horizontal and vertical filter widgets keep selected options on page reload (#22955). """ self.school.students.add(self.arthur, self.jason) self.school.alumni.add(self.arthur, self.jason) self.admin_login(username='super', password='secret', login_url='/') change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,)) self.selenium.get(self.live_server_url + change_url) options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option')) self.assertEqual(options_len, 2) # self.selenium.refresh() or send_keys(Keys.F5) does hard reload and # doesn't replicate what happens when a user clicks the browser's # 'Refresh' button. with self.wait_page_loaded(): self.selenium.execute_script("location.reload()") options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option')) self.assertEqual(options_len, 2) class AdminRawIdWidgetSeleniumTests(AdminWidgetSeleniumTestCase): def setUp(self): super().setUp() Band.objects.create(id=42, name='Bogey Blues') Band.objects.create(id=98, name='Green Potatoes') def test_ForeignKey(self): self.admin_login(username='super', password='secret', login_url='/') self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_event_add')) main_window = self.selenium.current_window_handle # No value has been selected yet self.assertEqual(self.selenium.find_element_by_id('id_main_band').get_attribute('value'), '') # Open the popup window and click on a band self.selenium.find_element_by_id('lookup_id_main_band').click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element_by_link_text('Bogey Blues') self.assertIn('/band/42/', link.get_attribute('href')) link.click() # The field now contains the selected band's id self.selenium.switch_to.window(main_window) self.wait_for_value('#id_main_band', '42') # Reopen the popup window and click on another band self.selenium.find_element_by_id('lookup_id_main_band').click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element_by_link_text('Green Potatoes') self.assertIn('/band/98/', link.get_attribute('href')) link.click() # The field now contains the other selected band's id self.selenium.switch_to.window(main_window) self.wait_for_value('#id_main_band', '98') def test_many_to_many(self): self.admin_login(username='super', password='secret', login_url='/') self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_event_add')) main_window = self.selenium.current_window_handle # No value has been selected yet 
self.assertEqual(self.selenium.find_element_by_id('id_supporting_bands').get_attribute('value'), '') # Help text for the field is displayed self.assertEqual( self.selenium.find_element_by_css_selector('.field-supporting_bands div.help').text, 'Supporting Bands.' ) # Open the popup window and click on a band self.selenium.find_element_by_id('lookup_id_supporting_bands').click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element_by_link_text('Bogey Blues') self.assertIn('/band/42/', link.get_attribute('href')) link.click() # The field now contains the selected band's id self.selenium.switch_to.window(main_window) self.wait_for_value('#id_supporting_bands', '42') # Reopen the popup window and click on another band self.selenium.find_element_by_id('lookup_id_supporting_bands').click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element_by_link_text('Green Potatoes') self.assertIn('/band/98/', link.get_attribute('href')) link.click() # The field now contains the two selected bands' ids self.selenium.switch_to.window(main_window) self.wait_for_value('#id_supporting_bands', '42,98') class RelatedFieldWidgetSeleniumTests(AdminWidgetSeleniumTestCase): def test_ForeignKey_using_to_field(self): self.admin_login(username='super', password='secret', login_url='/') self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_profile_add')) main_window = self.selenium.current_window_handle # Click the Add User button to add new self.selenium.find_element_by_id('add_id_user').click() self.wait_for_and_switch_to_popup() password_field = self.selenium.find_element_by_id('id_password') password_field.send_keys('password') username_field = self.selenium.find_element_by_id('id_username') username_value = 'newuser' username_field.send_keys(username_value) save_button_css_selector = '.submit-row > input[type=submit]' self.selenium.find_element_by_css_selector(save_button_css_selector).click() self.selenium.switch_to.window(main_window) # The field now contains the new user self.selenium.find_element_by_css_selector('#id_user option[value=newuser]') # Click the Change User button to change it self.selenium.find_element_by_id('change_id_user').click() self.wait_for_and_switch_to_popup() username_field = self.selenium.find_element_by_id('id_username') username_value = 'changednewuser' username_field.clear() username_field.send_keys(username_value) save_button_css_selector = '.submit-row > input[type=submit]' self.selenium.find_element_by_css_selector(save_button_css_selector).click() self.selenium.switch_to.window(main_window) self.selenium.find_element_by_css_selector('#id_user option[value=changednewuser]') # Go ahead and submit the form to make sure it works self.selenium.find_element_by_css_selector(save_button_css_selector).click() self.wait_for_text('li.success', 'The profile “changednewuser” was added successfully.') profiles = Profile.objects.all() self.assertEqual(len(profiles), 1) self.assertEqual(profiles[0].user.username, username_value)
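

# The popup tests above rely on helpers such as wait_for_and_switch_to_popup()
# provided by the Selenium test base class. A minimal sketch of how such a
# helper can be built from public Selenium APIs (a standalone illustration
# with a hypothetical name, not Django's actual implementation; assumes
# selenium is installed):
def _wait_for_and_switch_to_popup_sketch(driver, num_windows=2, timeout=10):
    """Block until `num_windows` windows exist, then focus the newest one."""
    from selenium.webdriver.support.wait import WebDriverWait  # lazy import

    WebDriverWait(driver, timeout).until(
        lambda d: len(d.window_handles) == num_windows
    )
    driver.switch_to.window(driver.window_handles[-1])
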
from django.core.files.uploadedfile import SimpleUploadedFile from django.forms import ClearableFileInput, MultiWidget from .base import WidgetTest class FakeFieldFile: """ Quacks like a FieldFile (has a .url and string representation), but doesn't require us to care about storages etc. """ url = 'something' def __str__(self): return self.url class ClearableFileInputTest(WidgetTest): widget = ClearableFileInput() def test_clear_input_renders(self): """ A ClearableFileInput with is_required False and rendered with an initial value that is a file renders a clear checkbox. """ self.check_html(self.widget, 'myfile', FakeFieldFile(), html=( """ Currently: <a href="something">something</a> <input type="checkbox" name="myfile-clear" id="myfile-clear_id"> <label for="myfile-clear_id">Clear</label><br> Change: <input type="file" name="myfile"> """ )) def test_html_escaped(self): """ A ClearableFileInput should escape name, filename, and URL when rendering HTML (#15182). """ class StrangeFieldFile: url = "something?chapter=1&sect=2&copy=3&lang=en" def __str__(self): return '''something<div onclick="alert('oops')">.jpg''' self.check_html(ClearableFileInput(), 'my<div>file', StrangeFieldFile(), html=( """ Currently: <a href="something?chapter=1&amp;sect=2&amp;copy=3&amp;lang=en"> something&lt;div onclick=&quot;alert(&#x27;oops&#x27;)&quot;&gt;.jpg</a> <input type="checkbox" name="my&lt;div&gt;file-clear" id="my&lt;div&gt;file-clear_id"> <label for="my&lt;div&gt;file-clear_id">Clear</label><br> Change: <input type="file" name="my&lt;div&gt;file"> """ )) def test_clear_input_renders_only_if_not_required(self): """ A ClearableFileInput with is_required=True does not render a clear checkbox. """ widget = ClearableFileInput() widget.is_required = True self.check_html(widget, 'myfile', FakeFieldFile(), html=( """ Currently: <a href="something">something</a> <br> Change: <input type="file" name="myfile"> """ )) def test_clear_input_renders_only_if_initial(self): """ A ClearableFileInput instantiated with no initial value does not render a clear checkbox. """ self.check_html(self.widget, 'myfile', None, html='<input type="file" name="myfile">') def test_render_disabled(self): self.check_html( self.widget, 'myfile', FakeFieldFile(), attrs={'disabled': True}, html=( 'Currently: <a href="something">something</a>' '<input type="checkbox" name="myfile-clear" ' 'id="myfile-clear_id" disabled>' '<label for="myfile-clear_id">Clear</label><br>' 'Change: <input type="file" name="myfile" disabled>' ), ) def test_render_as_subwidget(self): """A ClearableFileInput as a subwidget of MultiWidget.""" widget = MultiWidget(widgets=(self.widget,)) self.check_html(widget, 'myfile', [FakeFieldFile()], html=( """ Currently: <a href="something">something</a> <input type="checkbox" name="myfile_0-clear" id="myfile_0-clear_id"> <label for="myfile_0-clear_id">Clear</label><br> Change: <input type="file" name="myfile_0"> """ )) def test_clear_input_checked_returns_false(self): """ ClearableFileInput.value_from_datadict returns False if the clear checkbox is checked, if not required. """ value = self.widget.value_from_datadict( data={'myfile-clear': True}, files={}, name='myfile', ) self.assertIs(value, False) def test_clear_input_checked_returns_false_only_if_not_required(self): """ ClearableFileInput.value_from_datadict never returns False if the field is required. 
""" widget = ClearableFileInput() widget.is_required = True field = SimpleUploadedFile('something.txt', b'content') value = widget.value_from_datadict( data={'myfile-clear': True}, files={'myfile': field}, name='myfile', ) self.assertEqual(value, field) def test_html_does_not_mask_exceptions(self): """ A ClearableFileInput should not mask exceptions produced while checking that it has a value. """ class FailingURLFieldFile: @property def url(self): raise ValueError('Canary') def __str__(self): return 'value' with self.assertRaisesMessage(ValueError, 'Canary'): self.widget.render('myfile', FailingURLFieldFile()) def test_url_as_property(self): class URLFieldFile: @property def url(self): return 'https://www.python.org/' def __str__(self): return 'value' html = self.widget.render('myfile', URLFieldFile()) self.assertInHTML('<a href="https://www.python.org/">value</a>', html) def test_return_false_if_url_does_not_exists(self): class NoURLFieldFile: def __str__(self): return 'value' html = self.widget.render('myfile', NoURLFieldFile()) self.assertHTMLEqual(html, '<input name="myfile" type="file">') def test_use_required_attribute(self): # False when initial data exists. The file input is left blank by the # user to keep the existing, initial value. self.assertIs(self.widget.use_required_attribute(None), True) self.assertIs(self.widget.use_required_attribute('resume.txt'), False) def test_value_omitted_from_data(self): widget = ClearableFileInput() self.assertIs(widget.value_omitted_from_data({}, {}, 'field'), True) self.assertIs(widget.value_omitted_from_data({}, {'field': 'x'}, 'field'), False) self.assertIs(widget.value_omitted_from_data({'field-clear': 'y'}, {}, 'field'), False)
from django.apps import AppConfig class LoadingAppConfig(AppConfig): name = 'loading_app'
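

# This configuration class takes effect when the application is listed in
# INSTALLED_APPS by its dotted config path, e.g. (illustrative settings
# snippet):
#
#   INSTALLED_APPS = [
#       ...,
#       'loading_app.apps.LoadingAppConfig',
#   ]
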
import datetime import decimal import unicodedata from importlib import import_module from django.conf import settings from django.utils import dateformat, datetime_safe, numberformat from django.utils.functional import lazy from django.utils.translation import ( check_for_language, get_language, to_locale, ) # format_cache is a mapping from (format_type, lang) to the format string. # By using the cache, it is possible to avoid running get_format_modules # repeatedly. _format_cache = {} _format_modules_cache = {} ISO_INPUT_FORMATS = { 'DATE_INPUT_FORMATS': ['%Y-%m-%d'], 'TIME_INPUT_FORMATS': ['%H:%M:%S', '%H:%M:%S.%f', '%H:%M'], 'DATETIME_INPUT_FORMATS': [ '%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M:%S.%f', '%Y-%m-%d %H:%M', '%Y-%m-%d' ], } FORMAT_SETTINGS = frozenset([ 'DECIMAL_SEPARATOR', 'THOUSAND_SEPARATOR', 'NUMBER_GROUPING', 'FIRST_DAY_OF_WEEK', 'MONTH_DAY_FORMAT', 'TIME_FORMAT', 'DATE_FORMAT', 'DATETIME_FORMAT', 'SHORT_DATE_FORMAT', 'SHORT_DATETIME_FORMAT', 'YEAR_MONTH_FORMAT', 'DATE_INPUT_FORMATS', 'TIME_INPUT_FORMATS', 'DATETIME_INPUT_FORMATS', ]) def reset_format_cache(): """Clear any cached formats. This method is provided primarily for testing purposes, so that the effects of cached formats can be removed. """ global _format_cache, _format_modules_cache _format_cache = {} _format_modules_cache = {} def iter_format_modules(lang, format_module_path=None): """Find format modules.""" if not check_for_language(lang): return if format_module_path is None: format_module_path = settings.FORMAT_MODULE_PATH format_locations = [] if format_module_path: if isinstance(format_module_path, str): format_module_path = [format_module_path] for path in format_module_path: format_locations.append(path + '.%s') format_locations.append('django.conf.locale.%s') locale = to_locale(lang) locales = [locale] if '_' in locale: locales.append(locale.split('_')[0]) for location in format_locations: for loc in locales: try: yield import_module('%s.formats' % (location % loc)) except ImportError: pass def get_format_modules(lang=None, reverse=False): """Return a list of the format modules found.""" if lang is None: lang = get_language() if lang not in _format_modules_cache: _format_modules_cache[lang] = list(iter_format_modules(lang, settings.FORMAT_MODULE_PATH)) modules = _format_modules_cache[lang] if reverse: return list(reversed(modules)) return modules def get_format(format_type, lang=None, use_l10n=None): """ For a specific format type, return the format for the current language (locale). Default to the format in the settings. format_type is the name of the format, e.g. 'DATE_FORMAT'. If use_l10n is provided and is not None, it forces the value to be localized (or not), overriding the value of settings.USE_L10N. """ use_l10n = use_l10n or (use_l10n is None and settings.USE_L10N) if use_l10n and lang is None: lang = get_language() cache_key = (format_type, lang) try: return _format_cache[cache_key] except KeyError: pass # The requested format_type has not been cached yet. Try to find it in any # of the format_modules for the given lang if l10n is enabled. If it's not # there or if l10n is disabled, fall back to the project settings. 
val = None if use_l10n: for module in get_format_modules(lang): val = getattr(module, format_type, None) if val is not None: break if val is None: if format_type not in FORMAT_SETTINGS: return format_type val = getattr(settings, format_type) elif format_type in ISO_INPUT_FORMATS: # If a list of input formats from one of the format_modules was # retrieved, make sure the ISO_INPUT_FORMATS are in this list. val = list(val) for iso_input in ISO_INPUT_FORMATS.get(format_type, ()): if iso_input not in val: val.append(iso_input) _format_cache[cache_key] = val return val get_format_lazy = lazy(get_format, str, list, tuple) def date_format(value, format=None, use_l10n=None): """ Format a datetime.date or datetime.datetime object using a localizable format. If use_l10n is provided and is not None, that will force the value to be localized (or not), overriding the value of settings.USE_L10N. """ return dateformat.format(value, get_format(format or 'DATE_FORMAT', use_l10n=use_l10n)) def time_format(value, format=None, use_l10n=None): """ Format a datetime.time object using a localizable format. If use_l10n is provided and is not None, it forces the value to be localized (or not), overriding the value of settings.USE_L10N. """ return dateformat.time_format(value, get_format(format or 'TIME_FORMAT', use_l10n=use_l10n)) def number_format(value, decimal_pos=None, use_l10n=None, force_grouping=False): """ Format a numeric value using localization settings. If use_l10n is provided and is not None, it forces the value to be localized (or not), overriding the value of settings.USE_L10N. """ if use_l10n or (use_l10n is None and settings.USE_L10N): lang = get_language() else: lang = None return numberformat.format( value, get_format('DECIMAL_SEPARATOR', lang, use_l10n=use_l10n), decimal_pos, get_format('NUMBER_GROUPING', lang, use_l10n=use_l10n), get_format('THOUSAND_SEPARATOR', lang, use_l10n=use_l10n), force_grouping=force_grouping, use_l10n=use_l10n, ) def localize(value, use_l10n=None): """ Check if value is a localizable type (date, number...) and return it formatted as a string using current locale format. If use_l10n is provided and is not None, it forces the value to be localized (or not), overriding the value of settings.USE_L10N. """ if isinstance(value, str): # Handle strings first for performance reasons. return value elif isinstance(value, bool): # Make sure booleans don't get treated as numbers return str(value) elif isinstance(value, (decimal.Decimal, float, int)): if use_l10n is False: return str(value) return number_format(value, use_l10n=use_l10n) elif isinstance(value, datetime.datetime): return date_format(value, 'DATETIME_FORMAT', use_l10n=use_l10n) elif isinstance(value, datetime.date): return date_format(value, use_l10n=use_l10n) elif isinstance(value, datetime.time): return time_format(value, 'TIME_FORMAT', use_l10n=use_l10n) return value def localize_input(value, default=None): """ Check if an input value is a localizable type and return it formatted with the appropriate formatting string of the current locale. """ if isinstance(value, str): # Handle strings first for performance reasons. return value elif isinstance(value, bool): # Don't treat booleans as numbers. 
return str(value) elif isinstance(value, (decimal.Decimal, float, int)): return number_format(value) elif isinstance(value, datetime.datetime): value = datetime_safe.new_datetime(value) format = default or get_format('DATETIME_INPUT_FORMATS')[0] return value.strftime(format) elif isinstance(value, datetime.date): value = datetime_safe.new_date(value) format = default or get_format('DATE_INPUT_FORMATS')[0] return value.strftime(format) elif isinstance(value, datetime.time): format = default or get_format('TIME_INPUT_FORMATS')[0] return value.strftime(format) return value def sanitize_separators(value): """ Sanitize a value according to the current decimal and thousand separator setting. Used with form field input. """ if isinstance(value, str): parts = [] decimal_separator = get_format('DECIMAL_SEPARATOR') if decimal_separator in value: value, decimals = value.split(decimal_separator, 1) parts.append(decimals) if settings.USE_THOUSAND_SEPARATOR: thousand_sep = get_format('THOUSAND_SEPARATOR') if thousand_sep == '.' and value.count('.') == 1 and len(value.split('.')[-1]) != 3: # Special case where we suspect a dot meant decimal separator (see #22171) pass else: for replacement in { thousand_sep, unicodedata.normalize('NFKD', thousand_sep)}: value = value.replace(replacement, '') parts.append(value) value = '.'.join(reversed(parts)) return value
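

# Usage sketch for sanitize_separators() (hypothetical standalone session;
# assumes settings configured with USE_L10N=True and USE_THOUSAND_SEPARATOR=True
# and an active German locale, where ',' is the decimal separator and '.' the
# thousand separator):
#
#   >>> from django.utils.formats import sanitize_separators
#   >>> sanitize_separators('1.234,56')
#   '1234.56'
#   >>> sanitize_separators('1,5')
#   '1.5'
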
import cgi import codecs import copy import warnings from io import BytesIO from itertools import chain from urllib.parse import quote, urlencode, urljoin, urlsplit from django.conf import settings from django.core import signing from django.core.exceptions import ( DisallowedHost, ImproperlyConfigured, RequestDataTooBig, ) from django.core.files import uploadhandler from django.http.multipartparser import MultiPartParser, MultiPartParserError from django.utils.datastructures import ( CaseInsensitiveMapping, ImmutableList, MultiValueDict, ) from django.utils.deprecation import RemovedInDjango40Warning from django.utils.encoding import escape_uri_path, iri_to_uri from django.utils.functional import cached_property from django.utils.http import is_same_domain, limited_parse_qsl from django.utils.regex_helper import _lazy_re_compile from .multipartparser import parse_header RAISE_ERROR = object() host_validation_re = _lazy_re_compile(r"^([a-z0-9.-]+|\[[a-f0-9]*:[a-f0-9\.:]+\])(:\d+)?$") class UnreadablePostError(OSError): pass class RawPostDataException(Exception): """ You cannot access raw_post_data from a request that has multipart/* POST data if it has been accessed via POST, FILES, etc.. """ pass class HttpRequest: """A basic HTTP request.""" # The encoding used in GET/POST dicts. None means use default setting. _encoding = None _upload_handlers = [] def __init__(self): # WARNING: The `WSGIRequest` subclass doesn't call `super`. # Any variable assignment made here should also happen in # `WSGIRequest.__init__()`. self.GET = QueryDict(mutable=True) self.POST = QueryDict(mutable=True) self.COOKIES = {} self.META = {} self.FILES = MultiValueDict() self.path = '' self.path_info = '' self.method = None self.resolver_match = None self.content_type = None self.content_params = None def __repr__(self): if self.method is None or not self.get_full_path(): return '<%s>' % self.__class__.__name__ return '<%s: %s %r>' % (self.__class__.__name__, self.method, self.get_full_path()) @cached_property def headers(self): return HttpHeaders(self.META) @cached_property def accepted_types(self): """Return a list of MediaType instances.""" return parse_accept_header(self.headers.get('Accept', '*/*')) def accepts(self, media_type): return any( accepted_type.match(media_type) for accepted_type in self.accepted_types ) def _set_content_type_params(self, meta): """Set content_type, content_params, and encoding.""" self.content_type, self.content_params = cgi.parse_header(meta.get('CONTENT_TYPE', '')) if 'charset' in self.content_params: try: codecs.lookup(self.content_params['charset']) except LookupError: pass else: self.encoding = self.content_params['charset'] def _get_raw_host(self): """ Return the HTTP host using the environment or request headers. Skip allowed hosts protection, so may return an insecure host. """ # We try three options, in order of decreasing preference. if settings.USE_X_FORWARDED_HOST and ( 'HTTP_X_FORWARDED_HOST' in self.META): host = self.META['HTTP_X_FORWARDED_HOST'] elif 'HTTP_HOST' in self.META: host = self.META['HTTP_HOST'] else: # Reconstruct the host using the algorithm from PEP 333. host = self.META['SERVER_NAME'] server_port = self.get_port() if server_port != ('443' if self.is_secure() else '80'): host = '%s:%s' % (host, server_port) return host def get_host(self): """Return the HTTP host using the environment or request headers.""" host = self._get_raw_host() # Allow variants of localhost if ALLOWED_HOSTS is empty and DEBUG=True. 
allowed_hosts = settings.ALLOWED_HOSTS if settings.DEBUG and not allowed_hosts: allowed_hosts = ['.localhost', '127.0.0.1', '[::1]'] domain, port = split_domain_port(host) if domain and validate_host(domain, allowed_hosts): return host else: msg = "Invalid HTTP_HOST header: %r." % host if domain: msg += " You may need to add %r to ALLOWED_HOSTS." % domain else: msg += " The domain name provided is not valid according to RFC 1034/1035." raise DisallowedHost(msg) def get_port(self): """Return the port number for the request as a string.""" if settings.USE_X_FORWARDED_PORT and 'HTTP_X_FORWARDED_PORT' in self.META: port = self.META['HTTP_X_FORWARDED_PORT'] else: port = self.META['SERVER_PORT'] return str(port) def get_full_path(self, force_append_slash=False): return self._get_full_path(self.path, force_append_slash) def get_full_path_info(self, force_append_slash=False): return self._get_full_path(self.path_info, force_append_slash) def _get_full_path(self, path, force_append_slash): # RFC 3986 requires query string arguments to be in the ASCII range. # Rather than crash if this doesn't happen, we encode defensively. return '%s%s%s' % ( escape_uri_path(path), '/' if force_append_slash and not path.endswith('/') else '', ('?' + iri_to_uri(self.META.get('QUERY_STRING', ''))) if self.META.get('QUERY_STRING', '') else '' ) def get_signed_cookie(self, key, default=RAISE_ERROR, salt='', max_age=None): """ Attempt to return a signed cookie. If the signature fails or the cookie has expired, raise an exception, unless the `default` argument is provided, in which case return that value. """ try: cookie_value = self.COOKIES[key] except KeyError: if default is not RAISE_ERROR: return default else: raise try: value = signing.get_cookie_signer(salt=key + salt).unsign( cookie_value, max_age=max_age) except signing.BadSignature: if default is not RAISE_ERROR: return default else: raise return value def get_raw_uri(self): """ Return an absolute URI from variables available in this request. Skip allowed hosts protection, so may return insecure URI. """ return '{scheme}://{host}{path}'.format( scheme=self.scheme, host=self._get_raw_host(), path=self.get_full_path(), ) def build_absolute_uri(self, location=None): """ Build an absolute URI from the location and the variables available in this request. If no ``location`` is specified, build the absolute URI using request.get_full_path(). If the location is absolute, convert it to an RFC 3987 compliant URI and return it. If location is relative or is scheme-relative (i.e., ``//example.com/``), urljoin() it to a base URL constructed from the request variables. """ if location is None: # Make it an absolute url (but schemeless and domainless) for the # edge case that the path starts with '//'. location = '//%s' % self.get_full_path() else: # Coerce lazy locations. location = str(location) bits = urlsplit(location) if not (bits.scheme and bits.netloc): # Handle the simple, most common case. If the location is absolute # and a scheme or host (netloc) isn't provided, skip an expensive # urljoin() as long as no path segments are '.' or '..'. if (bits.path.startswith('/') and not bits.scheme and not bits.netloc and '/./' not in bits.path and '/../' not in bits.path): # If location starts with '//' but has no netloc, reuse the # schema and netloc from the current request. Strip the double # slashes and continue as if it wasn't specified. 
if location.startswith('//'): location = location[2:] location = self._current_scheme_host + location else: # Join the constructed URL with the provided location, which # allows the provided location to apply query strings to the # base path. location = urljoin(self._current_scheme_host + self.path, location) return iri_to_uri(location) @cached_property def _current_scheme_host(self): return '{}://{}'.format(self.scheme, self.get_host()) def _get_scheme(self): """ Hook for subclasses like WSGIRequest to implement. Return 'http' by default. """ return 'http' @property def scheme(self): if settings.SECURE_PROXY_SSL_HEADER: try: header, secure_value = settings.SECURE_PROXY_SSL_HEADER except ValueError: raise ImproperlyConfigured( 'The SECURE_PROXY_SSL_HEADER setting must be a tuple containing two values.' ) header_value = self.META.get(header) if header_value is not None: return 'https' if header_value == secure_value else 'http' return self._get_scheme() def is_secure(self): return self.scheme == 'https' def is_ajax(self): warnings.warn( 'request.is_ajax() is deprecated. See Django 3.1 release notes ' 'for more details about this deprecation.', RemovedInDjango40Warning, stacklevel=2, ) return self.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest' @property def encoding(self): return self._encoding @encoding.setter def encoding(self, val): """ Set the encoding used for GET/POST accesses. If the GET or POST dictionary has already been created, remove and recreate it on the next access (so that it is decoded correctly). """ self._encoding = val if hasattr(self, 'GET'): del self.GET if hasattr(self, '_post'): del self._post def _initialize_handlers(self): self._upload_handlers = [uploadhandler.load_handler(handler, self) for handler in settings.FILE_UPLOAD_HANDLERS] @property def upload_handlers(self): if not self._upload_handlers: # If there are no upload handlers defined, initialize them from settings. self._initialize_handlers() return self._upload_handlers @upload_handlers.setter def upload_handlers(self, upload_handlers): if hasattr(self, '_files'): raise AttributeError("You cannot set the upload handlers after the upload has been processed.") self._upload_handlers = upload_handlers def parse_file_upload(self, META, post_data): """Return a tuple of (POST QueryDict, FILES MultiValueDict).""" self.upload_handlers = ImmutableList( self.upload_handlers, warning="You cannot alter upload handlers after the upload has been processed." ) parser = MultiPartParser(META, post_data, self.upload_handlers, self.encoding) return parser.parse() @property def body(self): if not hasattr(self, '_body'): if self._read_started: raise RawPostDataException("You cannot access body after reading from request's data stream") # Limit the maximum request data size that will be handled in-memory. 
if (settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None and int(self.META.get('CONTENT_LENGTH') or 0) > settings.DATA_UPLOAD_MAX_MEMORY_SIZE): raise RequestDataTooBig('Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE.') try: self._body = self.read() except OSError as e: raise UnreadablePostError(*e.args) from e self._stream = BytesIO(self._body) return self._body def _mark_post_parse_error(self): self._post = QueryDict() self._files = MultiValueDict() def _load_post_and_files(self): """Populate self._post and self._files if the content-type is a form type""" if self.method != 'POST': self._post, self._files = QueryDict(encoding=self._encoding), MultiValueDict() return if self._read_started and not hasattr(self, '_body'): self._mark_post_parse_error() return if self.content_type == 'multipart/form-data': if hasattr(self, '_body'): # Use already read data data = BytesIO(self._body) else: data = self try: self._post, self._files = self.parse_file_upload(self.META, data) except MultiPartParserError: # An error occurred while parsing POST data. Since when # formatting the error the request handler might access # self.POST, set self._post and self._file to prevent # attempts to parse POST data again. self._mark_post_parse_error() raise elif self.content_type == 'application/x-www-form-urlencoded': self._post, self._files = QueryDict(self.body, encoding=self._encoding), MultiValueDict() else: self._post, self._files = QueryDict(encoding=self._encoding), MultiValueDict() def close(self): if hasattr(self, '_files'): for f in chain.from_iterable(list_[1] for list_ in self._files.lists()): f.close() # File-like and iterator interface. # # Expects self._stream to be set to an appropriate source of bytes by # a corresponding request subclass (e.g. WSGIRequest). # Also when request data has already been read by request.POST or # request.body, self._stream points to a BytesIO instance # containing that data. def read(self, *args, **kwargs): self._read_started = True try: return self._stream.read(*args, **kwargs) except OSError as e: raise UnreadablePostError(*e.args) from e def readline(self, *args, **kwargs): self._read_started = True try: return self._stream.readline(*args, **kwargs) except OSError as e: raise UnreadablePostError(*e.args) from e def __iter__(self): return iter(self.readline, b'') def readlines(self): return list(self) class HttpHeaders(CaseInsensitiveMapping): HTTP_PREFIX = 'HTTP_' # PEP 333 gives two headers which aren't prepended with HTTP_. UNPREFIXED_HEADERS = {'CONTENT_TYPE', 'CONTENT_LENGTH'} def __init__(self, environ): headers = {} for header, value in environ.items(): name = self.parse_header_name(header) if name: headers[name] = value super().__init__(headers) def __getitem__(self, key): """Allow header lookup using underscores in place of hyphens.""" return super().__getitem__(key.replace('_', '-')) @classmethod def parse_header_name(cls, header): if header.startswith(cls.HTTP_PREFIX): header = header[len(cls.HTTP_PREFIX):] elif header not in cls.UNPREFIXED_HEADERS: return None return header.replace('_', '-').title() class QueryDict(MultiValueDict): """ A specialized MultiValueDict which represents a query string. A QueryDict can be used to represent GET or POST data. It subclasses MultiValueDict since keys in such data can be repeated, for instance in the data from a form with a <select multiple> field. By default QueryDicts are immutable, though the copy() method will always return a mutable copy. 
Both keys and values set on this class are converted from the given encoding (DEFAULT_CHARSET by default) to str. """ # These are both reset in __init__, but is specified here at the class # level so that unpickling will have valid values _mutable = True _encoding = None def __init__(self, query_string=None, mutable=False, encoding=None): super().__init__() self.encoding = encoding or settings.DEFAULT_CHARSET query_string = query_string or '' parse_qsl_kwargs = { 'keep_blank_values': True, 'fields_limit': settings.DATA_UPLOAD_MAX_NUMBER_FIELDS, 'encoding': self.encoding, } if isinstance(query_string, bytes): # query_string normally contains URL-encoded data, a subset of ASCII. try: query_string = query_string.decode(self.encoding) except UnicodeDecodeError: # ... but some user agents are misbehaving :-( query_string = query_string.decode('iso-8859-1') for key, value in limited_parse_qsl(query_string, **parse_qsl_kwargs): self.appendlist(key, value) self._mutable = mutable @classmethod def fromkeys(cls, iterable, value='', mutable=False, encoding=None): """ Return a new QueryDict with keys (may be repeated) from an iterable and values from value. """ q = cls('', mutable=True, encoding=encoding) for key in iterable: q.appendlist(key, value) if not mutable: q._mutable = False return q @property def encoding(self): if self._encoding is None: self._encoding = settings.DEFAULT_CHARSET return self._encoding @encoding.setter def encoding(self, value): self._encoding = value def _assert_mutable(self): if not self._mutable: raise AttributeError("This QueryDict instance is immutable") def __setitem__(self, key, value): self._assert_mutable() key = bytes_to_text(key, self.encoding) value = bytes_to_text(value, self.encoding) super().__setitem__(key, value) def __delitem__(self, key): self._assert_mutable() super().__delitem__(key) def __copy__(self): result = self.__class__('', mutable=True, encoding=self.encoding) for key, value in self.lists(): result.setlist(key, value) return result def __deepcopy__(self, memo): result = self.__class__('', mutable=True, encoding=self.encoding) memo[id(self)] = result for key, value in self.lists(): result.setlist(copy.deepcopy(key, memo), copy.deepcopy(value, memo)) return result def setlist(self, key, list_): self._assert_mutable() key = bytes_to_text(key, self.encoding) list_ = [bytes_to_text(elt, self.encoding) for elt in list_] super().setlist(key, list_) def setlistdefault(self, key, default_list=None): self._assert_mutable() return super().setlistdefault(key, default_list) def appendlist(self, key, value): self._assert_mutable() key = bytes_to_text(key, self.encoding) value = bytes_to_text(value, self.encoding) super().appendlist(key, value) def pop(self, key, *args): self._assert_mutable() return super().pop(key, *args) def popitem(self): self._assert_mutable() return super().popitem() def clear(self): self._assert_mutable() super().clear() def setdefault(self, key, default=None): self._assert_mutable() key = bytes_to_text(key, self.encoding) default = bytes_to_text(default, self.encoding) return super().setdefault(key, default) def copy(self): """Return a mutable copy of this object.""" return self.__deepcopy__({}) def urlencode(self, safe=None): """ Return an encoded string of all query string arguments. 
`safe` specifies characters which don't require quoting, for example:: >>> q = QueryDict(mutable=True) >>> q['next'] = '/a&b/' >>> q.urlencode() 'next=%2Fa%26b%2F' >>> q.urlencode(safe='/') 'next=/a%26b/' """ output = [] if safe: safe = safe.encode(self.encoding) def encode(k, v): return '%s=%s' % ((quote(k, safe), quote(v, safe))) else: def encode(k, v): return urlencode({k: v}) for k, list_ in self.lists(): output.extend( encode(k.encode(self.encoding), str(v).encode(self.encoding)) for v in list_ ) return '&'.join(output) class MediaType: def __init__(self, media_type_raw_line): full_type, self.params = parse_header( media_type_raw_line.encode('ascii') if media_type_raw_line else b'' ) self.main_type, _, self.sub_type = full_type.partition('/') def __str__(self): params_str = ''.join( '; %s=%s' % (k, v.decode('ascii')) for k, v in self.params.items() ) return '%s%s%s' % ( self.main_type, ('/%s' % self.sub_type) if self.sub_type else '', params_str, ) def __repr__(self): return '<%s: %s>' % (self.__class__.__qualname__, self) @property def is_all_types(self): return self.main_type == '*' and self.sub_type == '*' def match(self, other): if self.is_all_types: return True other = MediaType(other) if self.main_type == other.main_type and self.sub_type in {'*', other.sub_type}: return True return False # It's neither necessary nor appropriate to use # django.utils.encoding.force_str() for parsing URLs and form inputs. Thus, # this slightly more restricted function, used by QueryDict. def bytes_to_text(s, encoding): """ Convert bytes objects to strings, using the given encoding. Illegally encoded input characters are replaced with Unicode "unknown" codepoint (\ufffd). Return any non-bytes objects without change. """ if isinstance(s, bytes): return str(s, encoding, 'replace') else: return s def split_domain_port(host): """ Return a (domain, port) tuple from a given host. Returned domain is lowercased. If the host is invalid, the domain will be empty. """ host = host.lower() if not host_validation_re.match(host): return '', '' if host[-1] == ']': # It's an IPv6 address without a port. return host, '' bits = host.rsplit(':', 1) domain, port = bits if len(bits) == 2 else (bits[0], '') # Remove a trailing dot (if present) from the domain. domain = domain[:-1] if domain.endswith('.') else domain return domain, port def validate_host(host, allowed_hosts): """ Validate the given host for this site. Check that the host looks valid and matches a host or host pattern in the given list of ``allowed_hosts``. Any pattern beginning with a period matches a domain and all its subdomains (e.g. ``.example.com`` matches ``example.com`` and any subdomain), ``*`` matches anything, and anything else must match exactly. Note: This function assumes that the given host is lowercased and has already had the port, if any, stripped off. Return ``True`` for a valid host, ``False`` otherwise. """ return any(pattern == '*' or is_same_domain(host, pattern) for pattern in allowed_hosts) def parse_accept_header(header): return [MediaType(token) for token in header.split(',') if token.strip()]
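

# Usage sketch for the host helpers above (plain functions, no configured
# settings required; example hostnames are illustrative):
#
#   >>> split_domain_port('example.com:8000')
#   ('example.com', '8000')
#   >>> split_domain_port('[::1]')
#   ('[::1]', '')
#   >>> validate_host('www.example.com', ['.example.com'])
#   True
#   >>> validate_host('attacker.example.org', ['.example.com'])
#   False
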
from django.db.backends.utils import names_digest, split_identifier from django.db.models.query_utils import Q from django.db.models.sql import Query __all__ = ['Index'] class Index: suffix = 'idx' # The max length of the name of the index (restricted to 30 for # cross-database compatibility with Oracle) max_name_length = 30 def __init__( self, *, fields=(), name=None, db_tablespace=None, opclasses=(), condition=None, include=None, ): if opclasses and not name: raise ValueError('An index must be named to use opclasses.') if not isinstance(condition, (type(None), Q)): raise ValueError('Index.condition must be a Q instance.') if condition and not name: raise ValueError('An index must be named to use condition.') if not isinstance(fields, (list, tuple)): raise ValueError('Index.fields must be a list or tuple.') if not isinstance(opclasses, (list, tuple)): raise ValueError('Index.opclasses must be a list or tuple.') if opclasses and len(fields) != len(opclasses): raise ValueError('Index.fields and Index.opclasses must have the same number of elements.') if not fields: raise ValueError('At least one field is required to define an index.') if include and not name: raise ValueError('A covering index must be named.') if not isinstance(include, (type(None), list, tuple)): raise ValueError('Index.include must be a list or tuple.') self.fields = list(fields) # A list of 2-tuple with the field name and ordering ('' or 'DESC'). self.fields_orders = [ (field_name[1:], 'DESC') if field_name.startswith('-') else (field_name, '') for field_name in self.fields ] self.name = name or '' self.db_tablespace = db_tablespace self.opclasses = opclasses self.condition = condition self.include = tuple(include) if include else () def _get_condition_sql(self, model, schema_editor): if self.condition is None: return None query = Query(model=model, alias_cols=False) where = query.build_where(self.condition) compiler = query.get_compiler(connection=schema_editor.connection) sql, params = where.as_sql(compiler, schema_editor.connection) return sql % tuple(schema_editor.quote_value(p) for p in params) def create_sql(self, model, schema_editor, using='', **kwargs): fields = [model._meta.get_field(field_name) for field_name, _ in self.fields_orders] include = [model._meta.get_field(field_name).column for field_name in self.include] col_suffixes = [order[1] for order in self.fields_orders] condition = self._get_condition_sql(model, schema_editor) return schema_editor._create_index_sql( model, fields, name=self.name, using=using, db_tablespace=self.db_tablespace, col_suffixes=col_suffixes, opclasses=self.opclasses, condition=condition, include=include, **kwargs, ) def remove_sql(self, model, schema_editor, **kwargs): return schema_editor._delete_index_sql(model, self.name, **kwargs) def deconstruct(self): path = '%s.%s' % (self.__class__.__module__, self.__class__.__name__) path = path.replace('django.db.models.indexes', 'django.db.models') kwargs = {'fields': self.fields, 'name': self.name} if self.db_tablespace is not None: kwargs['db_tablespace'] = self.db_tablespace if self.opclasses: kwargs['opclasses'] = self.opclasses if self.condition: kwargs['condition'] = self.condition if self.include: kwargs['include'] = self.include return (path, (), kwargs) def clone(self): """Create a copy of this Index.""" _, _, kwargs = self.deconstruct() return self.__class__(**kwargs) def set_name_with_model(self, model): """ Generate a unique name for the index. 
        The name is divided into three parts: table name (12 chars), field
        name (8 chars), and a unique digest plus suffix (10 chars). Each part
        is truncated to fit its allotted length.
        """
        _, table_name = split_identifier(model._meta.db_table)
        column_names = [model._meta.get_field(field_name).column for field_name, order in self.fields_orders]
        column_names_with_order = [
            (('-%s' if order else '%s') % column_name)
            for column_name, (field_name, order) in zip(column_names, self.fields_orders)
        ]
        # The length of the parts of the name is based on the default max
        # length of 30 characters.
        hash_data = [table_name] + column_names_with_order + [self.suffix]
        self.name = '%s_%s_%s' % (
            table_name[:11],
            column_names[0][:7],
            '%s_%s' % (names_digest(*hash_data, length=6), self.suffix),
        )
        assert len(self.name) <= self.max_name_length, (
            'Index too long for multiple database support. Is self.suffix '
            'longer than 3 characters?'
        )
        if self.name[0] == '_' or self.name[0].isdigit():
            self.name = 'D%s' % self.name[1:]

    def __repr__(self):
        return "<%s: fields='%s'%s%s>" % (
            self.__class__.__name__,
            ', '.join(self.fields),
            '' if self.condition is None else ', condition=%s' % self.condition,
            '' if not self.include else ", include='%s'" % ', '.join(self.include),
        )

    def __eq__(self, other):
        if self.__class__ == other.__class__:
            return self.deconstruct() == other.deconstruct()
        return NotImplemented
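

# Usage sketch (hypothetical 'title' and 'pub_date' field names) showing how
# an Index is declared and what deconstruct() returns for serialization into
# migrations:
#
#   >>> from django.db import models
#   >>> idx = models.Index(fields=['title', '-pub_date'], name='title_pub_idx')
#   >>> idx.fields_orders
#   [('title', ''), ('pub_date', 'DESC')]
#   >>> idx.deconstruct()
#   ('django.db.models.Index', (), {'fields': ['title', '-pub_date'], 'name': 'title_pub_idx'})
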
import copy import inspect import warnings from functools import partialmethod from itertools import chain import django from django.apps import apps from django.conf import settings from django.core import checks from django.core.exceptions import ( NON_FIELD_ERRORS, FieldDoesNotExist, FieldError, MultipleObjectsReturned, ObjectDoesNotExist, ValidationError, ) from django.db import ( DEFAULT_DB_ALIAS, DJANGO_VERSION_PICKLE_KEY, DatabaseError, connection, connections, router, transaction, ) from django.db.models import ( NOT_PROVIDED, ExpressionWrapper, IntegerField, Max, Value, ) from django.db.models.constants import LOOKUP_SEP from django.db.models.constraints import CheckConstraint, UniqueConstraint from django.db.models.deletion import CASCADE, Collector from django.db.models.fields.related import ( ForeignObjectRel, OneToOneField, lazy_related_operation, resolve_relation, ) from django.db.models.functions import Coalesce from django.db.models.manager import Manager from django.db.models.options import Options from django.db.models.query import Q from django.db.models.signals import ( class_prepared, post_init, post_save, pre_init, pre_save, ) from django.db.models.utils import make_model_tuple from django.utils.encoding import force_str from django.utils.hashable import make_hashable from django.utils.text import capfirst, get_text_list from django.utils.translation import gettext_lazy as _ class Deferred: def __repr__(self): return '<Deferred field>' def __str__(self): return '<Deferred field>' DEFERRED = Deferred() def subclass_exception(name, bases, module, attached_to): """ Create exception subclass. Used by ModelBase below. The exception is created in a way that allows it to be pickled, assuming that the returned exception class will be added as an attribute to the 'attached_to' class. """ return type(name, bases, { '__module__': module, '__qualname__': '%s.%s' % (attached_to.__qualname__, name), }) def _has_contribute_to_class(value): # Only call contribute_to_class() if it's bound. return not inspect.isclass(value) and hasattr(value, 'contribute_to_class') class ModelBase(type): """Metaclass for all models.""" def __new__(cls, name, bases, attrs, **kwargs): super_new = super().__new__ # Also ensure initialization is only performed for subclasses of Model # (excluding Model class itself). parents = [b for b in bases if isinstance(b, ModelBase)] if not parents: return super_new(cls, name, bases, attrs) # Create the class. module = attrs.pop('__module__') new_attrs = {'__module__': module} classcell = attrs.pop('__classcell__', None) if classcell is not None: new_attrs['__classcell__'] = classcell attr_meta = attrs.pop('Meta', None) # Pass all attrs without a (Django-specific) contribute_to_class() # method to type.__new__() so that they're properly initialized # (i.e. __set_name__()). contributable_attrs = {} for obj_name, obj in list(attrs.items()): if _has_contribute_to_class(obj): contributable_attrs[obj_name] = obj else: new_attrs[obj_name] = obj new_class = super_new(cls, name, bases, new_attrs, **kwargs) abstract = getattr(attr_meta, 'abstract', False) meta = attr_meta or getattr(new_class, 'Meta', None) base_meta = getattr(new_class, '_meta', None) app_label = None # Look for an application configuration to attach the model to. 
app_config = apps.get_containing_app_config(module) if getattr(meta, 'app_label', None) is None: if app_config is None: if not abstract: raise RuntimeError( "Model class %s.%s doesn't declare an explicit " "app_label and isn't in an application in " "INSTALLED_APPS." % (module, name) ) else: app_label = app_config.label new_class.add_to_class('_meta', Options(meta, app_label)) if not abstract: new_class.add_to_class( 'DoesNotExist', subclass_exception( 'DoesNotExist', tuple( x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract ) or (ObjectDoesNotExist,), module, attached_to=new_class)) new_class.add_to_class( 'MultipleObjectsReturned', subclass_exception( 'MultipleObjectsReturned', tuple( x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract ) or (MultipleObjectsReturned,), module, attached_to=new_class)) if base_meta and not base_meta.abstract: # Non-abstract child classes inherit some attributes from their # non-abstract parent (unless an ABC comes before it in the # method resolution order). if not hasattr(meta, 'ordering'): new_class._meta.ordering = base_meta.ordering if not hasattr(meta, 'get_latest_by'): new_class._meta.get_latest_by = base_meta.get_latest_by is_proxy = new_class._meta.proxy # If the model is a proxy, ensure that the base class # hasn't been swapped out. if is_proxy and base_meta and base_meta.swapped: raise TypeError("%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped)) # Add remaining attributes (those with a contribute_to_class() method) # to the class. for obj_name, obj in contributable_attrs.items(): new_class.add_to_class(obj_name, obj) # All the fields of any type declared on this model new_fields = chain( new_class._meta.local_fields, new_class._meta.local_many_to_many, new_class._meta.private_fields ) field_names = {f.name for f in new_fields} # Basic setup for proxy models. if is_proxy: base = None for parent in [kls for kls in parents if hasattr(kls, '_meta')]: if parent._meta.abstract: if parent._meta.fields: raise TypeError( "Abstract base class containing model fields not " "permitted for proxy model '%s'." % name ) else: continue if base is None: base = parent elif parent._meta.concrete_model is not base._meta.concrete_model: raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name) if base is None: raise TypeError("Proxy model '%s' has no non-abstract model base class." % name) new_class._meta.setup_proxy(base) new_class._meta.concrete_model = base._meta.concrete_model else: new_class._meta.concrete_model = new_class # Collect the parent links for multi-table inheritance. parent_links = {} for base in reversed([new_class] + parents): # Conceptually equivalent to `if base is Model`. if not hasattr(base, '_meta'): continue # Skip concrete parent classes. if base != new_class and not base._meta.abstract: continue # Locate OneToOneField instances. for field in base._meta.local_fields: if isinstance(field, OneToOneField) and field.remote_field.parent_link: related = resolve_relation(new_class, field.remote_field.model) parent_links[make_model_tuple(related)] = field # Track fields inherited from base models. inherited_attributes = set() # Do the appropriate setup for any model parents. for base in new_class.mro(): if base not in parents or not hasattr(base, '_meta'): # Things without _meta aren't functional models, so they're # uninteresting parents. 
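A minimal sketch (a configured Django project and an app label `'myapp'` are assumed) of two behaviours set up above: per-model `DoesNotExist` subclasses and proxy models sharing their parent's concrete model:

```
from django.db import models

class Book(models.Model):
    title = models.CharField(max_length=100)

    class Meta:
        app_label = 'myapp'

class PublishedBook(Book):
    class Meta:
        proxy = True
        app_label = 'myapp'

# The proxy's exception subclasses the parent's, so `except Book.DoesNotExist`
# also catches lookup misses on the proxy queryset.
assert issubclass(PublishedBook.DoesNotExist, Book.DoesNotExist)
# setup_proxy() pointed the proxy at the parent's concrete model/table.
assert PublishedBook._meta.concrete_model is Book
```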
inherited_attributes.update(base.__dict__) continue parent_fields = base._meta.local_fields + base._meta.local_many_to_many if not base._meta.abstract: # Check for clashes between locally declared fields and those # on the base classes. for field in parent_fields: if field.name in field_names: raise FieldError( 'Local field %r in class %r clashes with field of ' 'the same name from base class %r.' % ( field.name, name, base.__name__, ) ) else: inherited_attributes.add(field.name) # Concrete classes... base = base._meta.concrete_model base_key = make_model_tuple(base) if base_key in parent_links: field = parent_links[base_key] elif not is_proxy: attr_name = '%s_ptr' % base._meta.model_name field = OneToOneField( base, on_delete=CASCADE, name=attr_name, auto_created=True, parent_link=True, ) if attr_name in field_names: raise FieldError( "Auto-generated field '%s' in class %r for " "parent_link to base class %r clashes with " "declared field of the same name." % ( attr_name, name, base.__name__, ) ) # Only add the ptr field if it's not already present; # e.g. migrations will already have it specified if not hasattr(new_class, attr_name): new_class.add_to_class(attr_name, field) else: field = None new_class._meta.parents[base] = field else: base_parents = base._meta.parents.copy() # Add fields from abstract base class if it wasn't overridden. for field in parent_fields: if (field.name not in field_names and field.name not in new_class.__dict__ and field.name not in inherited_attributes): new_field = copy.deepcopy(field) new_class.add_to_class(field.name, new_field) # Replace parent links defined on this base by the new # field. It will be appropriately resolved if required. if field.one_to_one: for parent, parent_link in base_parents.items(): if field == parent_link: base_parents[parent] = new_field # Pass any non-abstract parent classes onto child. new_class._meta.parents.update(base_parents) # Inherit private fields (like GenericForeignKey) from the parent # class for field in base._meta.private_fields: if field.name in field_names: if not base._meta.abstract: raise FieldError( 'Local field %r in class %r clashes with field of ' 'the same name from base class %r.' % ( field.name, name, base.__name__, ) ) else: field = copy.deepcopy(field) if not base._meta.abstract: field.mti_inherited = True new_class.add_to_class(field.name, field) # Copy indexes so that index names are unique when models extend an # abstract model. new_class._meta.indexes = [copy.deepcopy(idx) for idx in new_class._meta.indexes] if abstract: # Abstract base models can't be instantiated and don't appear in # the list of models for an app. We do the final setup for them a # little differently from normal models. attr_meta.abstract = False new_class.Meta = attr_meta return new_class new_class._prepare() new_class._meta.apps.register_model(new_class._meta.app_label, new_class) return new_class def add_to_class(cls, name, value): if _has_contribute_to_class(value): value.contribute_to_class(cls, name) else: setattr(cls, name, value) def _prepare(cls): """Create some methods once self._meta has been populated.""" opts = cls._meta opts._prepare(cls) if opts.order_with_respect_to: cls.get_next_in_order = partialmethod(cls._get_next_or_previous_in_order, is_next=True) cls.get_previous_in_order = partialmethod(cls._get_next_or_previous_in_order, is_next=False) # Defer creating accessors on the foreign class until it has been # created and registered. 
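A sketch (app label `'myapp'` assumed) of the auto-generated parent link created above for multi-table inheritance: a `OneToOneField` named `<parent>_ptr`, recorded in `_meta.parents`:

```
from django.db import models

class Place(models.Model):
    name = models.CharField(max_length=50)

    class Meta:
        app_label = 'myapp'

class Restaurant(Place):
    serves_pizza = models.BooleanField(default=False)

    class Meta:
        app_label = 'myapp'

ptr = Restaurant._meta.get_field('place_ptr')
assert ptr.one_to_one and ptr.remote_field.parent_link
assert Restaurant._meta.parents[Place] is ptr
```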
If remote_field is None, we're ordering # with respect to a GenericForeignKey and don't know what the # foreign class is - we'll add those accessors later in # contribute_to_class(). if opts.order_with_respect_to.remote_field: wrt = opts.order_with_respect_to remote = wrt.remote_field.model lazy_related_operation(make_foreign_order_accessors, cls, remote) # Give the class a docstring -- its definition. if cls.__doc__ is None: cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join(f.name for f in opts.fields)) get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get(opts.label_lower) if get_absolute_url_override: setattr(cls, 'get_absolute_url', get_absolute_url_override) if not opts.managers: if any(f.name == 'objects' for f in opts.fields): raise ValueError( "Model %s must specify a custom Manager, because it has a " "field named 'objects'." % cls.__name__ ) manager = Manager() manager.auto_created = True cls.add_to_class('objects', manager) # Set the name of _meta.indexes. This can't be done in # Options.contribute_to_class() because fields haven't been added to # the model at that point. for index in cls._meta.indexes: if not index.name: index.set_name_with_model(cls) class_prepared.send(sender=cls) @property def _base_manager(cls): return cls._meta.base_manager @property def _default_manager(cls): return cls._meta.default_manager class ModelStateFieldsCacheDescriptor: def __get__(self, instance, cls=None): if instance is None: return self res = instance.fields_cache = {} return res class ModelState: """Store model instance state.""" db = None # If true, uniqueness validation checks will consider this a new, unsaved # object. Necessary for correct validation of new instances of objects with # explicit (non-auto) PKs. This impacts validation only; it has no effect # on the actual save. adding = True fields_cache = ModelStateFieldsCacheDescriptor() class Model(metaclass=ModelBase): def __init__(self, *args, **kwargs): # Alias some things as locals to avoid repeat global lookups cls = self.__class__ opts = self._meta _setattr = setattr _DEFERRED = DEFERRED pre_init.send(sender=cls, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. if len(args) > len(opts.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(opts.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): if val is _DEFERRED: continue _setattr(self, field.attname, val) else: # Slower, kwargs-ready version. fields_iter = iter(opts.fields) for val, field in zip(args, fields_iter): if val is _DEFERRED: continue _setattr(self, field.attname, val) kwargs.pop(field.name, None) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. 
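A sketch (app `'myapp'` assumed) of `_prepare()`'s manager handling: a model that declares no manager of its own gets an auto-created default `objects`:

```
from django.db import models

class Note(models.Model):
    body = models.TextField()

    class Meta:
        app_label = 'myapp'

assert Note.objects.auto_created              # added by _prepare()
assert Note._meta.default_manager.name == 'objects'
```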
for field in fields_iter: is_related_object = False # Virtual field if field.attname not in kwargs and field.column is None: continue if kwargs: if isinstance(field.remote_field, ForeignObjectRel): try: # Assume object instance was passed in. rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. if rel_obj is not _DEFERRED: _setattr(self, field.name, rel_obj) else: if val is not _DEFERRED: _setattr(self, field.attname, val) if kwargs: property_names = opts._property_names for prop in tuple(kwargs): try: # Any remaining kwargs must correspond to properties or # virtual fields. if prop in property_names or opts.get_field(prop): if kwargs[prop] is not _DEFERRED: _setattr(self, prop, kwargs[prop]) del kwargs[prop] except (AttributeError, FieldDoesNotExist): pass for kwarg in kwargs: raise TypeError("%s() got an unexpected keyword argument '%s'" % (cls.__name__, kwarg)) super().__init__() post_init.send(sender=cls, instance=self) @classmethod def from_db(cls, db, field_names, values): if len(values) != len(cls._meta.concrete_fields): values_iter = iter(values) values = [ next(values_iter) if f.attname in field_names else DEFERRED for f in cls._meta.concrete_fields ] new = cls(*values) new._state.adding = False new._state.db = db return new def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def __str__(self): return '%s object (%s)' % (self.__class__.__name__, self.pk) def __eq__(self, other): if not isinstance(other, Model): return NotImplemented if self._meta.concrete_model != other._meta.concrete_model: return False my_pk = self.pk if my_pk is None: return self is other return my_pk == other.pk def __hash__(self): if self.pk is None: raise TypeError("Model instances without primary key value are unhashable") return hash(self.pk) def __reduce__(self): data = self.__getstate__() data[DJANGO_VERSION_PICKLE_KEY] = django.__version__ class_id = self._meta.app_label, self._meta.object_name return model_unpickle, (class_id,), data def __getstate__(self): """Hook to allow choosing the attributes to pickle.""" return self.__dict__ def __setstate__(self, state): pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY) if pickled_version: if pickled_version != django.__version__: warnings.warn( "Pickled model instance's Django version %s does not " "match the current version %s." 
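A sketch (app `'myapp'` assumed) of `from_db()` padding unfetched columns with `DEFERRED`, exactly what `.only()` queries produce; `__init__` then skips those attributes so they load lazily on access:

```
from django.db import models

class Article(models.Model):
    title = models.CharField(max_length=100)
    body = models.TextField()

    class Meta:
        app_label = 'myapp'

# Simulate a row fetched with only 'id' and 'title' selected.
obj = Article.from_db('default', ['id', 'title'], [1, 'Hello'])
assert obj.get_deferred_fields() == {'body'}
assert obj._state.adding is False and obj._state.db == 'default'
```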
% (pickled_version, django.__version__), RuntimeWarning, stacklevel=2, ) else: warnings.warn( "Pickled model instance's Django version is not specified.", RuntimeWarning, stacklevel=2, ) self.__dict__.update(state) def _get_pk_val(self, meta=None): meta = meta or self._meta return getattr(self, meta.pk.attname) def _set_pk_val(self, value): for parent_link in self._meta.parents.values(): if parent_link and parent_link != self._meta.pk: setattr(self, parent_link.target_field.attname, value) return setattr(self, self._meta.pk.attname, value) pk = property(_get_pk_val, _set_pk_val) def get_deferred_fields(self): """ Return a set containing names of deferred fields for this instance. """ return { f.attname for f in self._meta.concrete_fields if f.attname not in self.__dict__ } def refresh_from_db(self, using=None, fields=None): """ Reload field values from the database. By default, the reloading happens from the database this instance was loaded from, or by the read router if this instance wasn't loaded from any database. The using parameter will override the default. Fields can be used to specify which fields to reload. The fields should be an iterable of field attnames. If fields is None, then all non-deferred fields are reloaded. When accessing deferred fields of an instance, the deferred loading of the field will call this method. """ if fields is None: self._prefetched_objects_cache = {} else: prefetched_objects_cache = getattr(self, '_prefetched_objects_cache', ()) for field in fields: if field in prefetched_objects_cache: del prefetched_objects_cache[field] fields.remove(field) if not fields: return if any(LOOKUP_SEP in f for f in fields): raise ValueError( 'Found "%s" in fields argument. Relations and transforms ' 'are not allowed in fields.' % LOOKUP_SEP) hints = {'instance': self} db_instance_qs = self.__class__._base_manager.db_manager(using, hints=hints).filter(pk=self.pk) # Use provided fields, if not set then reload all non-deferred fields. deferred_fields = self.get_deferred_fields() if fields is not None: fields = list(fields) db_instance_qs = db_instance_qs.only(*fields) elif deferred_fields: fields = [f.attname for f in self._meta.concrete_fields if f.attname not in deferred_fields] db_instance_qs = db_instance_qs.only(*fields) db_instance = db_instance_qs.get() non_loaded_fields = db_instance.get_deferred_fields() for field in self._meta.concrete_fields: if field.attname in non_loaded_fields: # This field wasn't refreshed - skip ahead. continue setattr(self, field.attname, getattr(db_instance, field.attname)) # Clear cached foreign keys. if field.is_relation and field.is_cached(self): field.delete_cached_value(self) # Clear cached relations. for field in self._meta.related_objects: if field.is_cached(self): field.delete_cached_value(self) self._state.db = db_instance._state.db def serializable_value(self, field_name): """ Return the value of the field name for this instance. If the field is a foreign key, return the id value instead of the object. If there's no Field object with this name on the model, return the model attribute's value. Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method. """ try: field = self._meta.get_field(field_name) except FieldDoesNotExist: return getattr(self, field_name) return getattr(self, field.attname) def save(self, force_insert=False, force_update=False, using=None, update_fields=None): """ Save the current instance. 
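A sketch (app `'myapp'` assumed) of `serializable_value()`: foreign keys come back as their raw `attname` value with no related-object fetch, and names that aren't fields fall back to plain attribute access:

```
from django.db import models

class Artist(models.Model):
    class Meta:
        app_label = 'myapp'

class Song(models.Model):
    artist = models.ForeignKey(Artist, on_delete=models.CASCADE)

    class Meta:
        app_label = 'myapp'

s = Song(artist_id=7)
assert s.serializable_value('artist') == 7   # attname value, no db hit
s.liner_note = 'remaster'
assert s.serializable_value('liner_note') == 'remaster'  # plain getattr()
```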
Override this in a subclass if you want to control the saving process. The 'force_insert' and 'force_update' parameters can be used to insist that the "save" must be an SQL insert or update (or equivalent for non-SQL backends), respectively. Normally, they should not be set. """ # Ensure that a model instance without a PK hasn't been assigned to # a ForeignKey or OneToOneField on this model. If the field is # nullable, allowing the save() would result in silent data loss. for field in self._meta.concrete_fields: # If the related field isn't cached, then an instance hasn't # been assigned and there's no need to worry about this check. if field.is_relation and field.is_cached(self): obj = getattr(self, field.name, None) if not obj: continue # A pk may have been assigned manually to a model instance not # saved to the database (or auto-generated in a case like # UUIDField), but we allow the save to proceed and rely on the # database to raise an IntegrityError if applicable. If # constraints aren't supported by the database, there's the # unavoidable risk of data corruption. if obj.pk is None: # Remove the object from a related instance cache. if not field.remote_field.multiple: field.remote_field.delete_cached_value(obj) raise ValueError( "save() prohibited to prevent data loss due to " "unsaved related object '%s'." % field.name ) elif getattr(self, field.attname) is None: # Use pk from related object if it has been saved after # an assignment. setattr(self, field.attname, obj.pk) # If the relationship's pk/to_field was changed, clear the # cached relationship. if getattr(obj, field.target_field.attname) != getattr(self, field.attname): field.delete_cached_value(self) using = using or router.db_for_write(self.__class__, instance=self) if force_insert and (force_update or update_fields): raise ValueError("Cannot force both insert and updating in model saving.") deferred_fields = self.get_deferred_fields() if update_fields is not None: # If update_fields is empty, skip the save. We do also check for # no-op saves later on for inheritance cases. This bailout is # still needed for skipping signal sending. if not update_fields: return update_fields = frozenset(update_fields) field_names = set() for field in self._meta.fields: if not field.primary_key: field_names.add(field.name) if field.name != field.attname: field_names.add(field.attname) non_model_fields = update_fields.difference(field_names) if non_model_fields: raise ValueError("The following fields do not exist in this " "model or are m2m fields: %s" % ', '.join(non_model_fields)) # If saving to the same database, and this model is deferred, then # automatically do an "update_fields" save on the loaded fields. elif not force_insert and deferred_fields and using == self._state.db: field_names = set() for field in self._meta.concrete_fields: if not field.primary_key and not hasattr(field, 'through'): field_names.add(field.attname) loaded_fields = field_names.difference(deferred_fields) if loaded_fields: update_fields = frozenset(loaded_fields) self.save_base(using=using, force_insert=force_insert, force_update=force_update, update_fields=update_fields) save.alters_data = True def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handle the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. 
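Two of the guard clauses in `save()` above can be observed without touching a database (app `'myapp'` assumed): contradictory force flags, and a cached related instance that was never saved:

```
from django.db import models

class Author(models.Model):
    class Meta:
        app_label = 'myapp'

class Post(models.Model):
    author = models.ForeignKey(Author, on_delete=models.CASCADE)

    class Meta:
        app_label = 'myapp'

try:
    Post(author_id=1).save(force_insert=True, force_update=True)
except ValueError as exc:
    print(exc)  # Cannot force both insert and updating in model saving.

unsaved = Author()           # pk is None
post = Post(author=unsaved)  # assignment caches the unsaved instance
try:
    post.save()
except ValueError as exc:
    print(exc)  # save() prohibited ... unsaved related object 'author'.
```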
The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. """ using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or update_fields cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: pre_save.send( sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields, ) # A transaction isn't needed if one query is issued. if meta.parents: context_manager = transaction.atomic(using=using, savepoint=False) else: context_manager = transaction.mark_for_rollback_on_error(using=using) with context_manager: parent_inserted = False if not raw: parent_inserted = self._save_parents(cls, using, update_fields) updated = self._save_table( raw, cls, force_insert or parent_inserted, force_update, using, update_fields, ) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. self._state.adding = False # Signal that the save is complete if not meta.auto_created: post_save.send( sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using, ) save_base.alters_data = True def _save_parents(self, cls, using, update_fields): """Save all the parents of cls using values from self.""" meta = cls._meta inserted = False for parent, field in meta.parents.items(): # Make sure the link fields are synced between parent and self. if (field and getattr(self, parent._meta.pk.attname) is None and getattr(self, field.attname) is not None): setattr(self, parent._meta.pk.attname, getattr(self, field.attname)) parent_inserted = self._save_parents(cls=parent, using=using, update_fields=update_fields) updated = self._save_table( cls=parent, using=using, update_fields=update_fields, force_insert=parent_inserted, ) if not updated: inserted = True # Set the parent's PK value to self. if field: setattr(self, field.attname, self._get_pk_val(parent._meta)) # Since we didn't have an instance of the parent handy set # attname directly, bypassing the descriptor. Invalidate # the related object cache, in case it's been accidentally # populated. A fresh instance will be re-built from the # database if necessary. if field.is_cached(self): field.delete_cached_value(self) return inserted def _save_table(self, raw=False, cls=None, force_insert=False, force_update=False, using=None, update_fields=None): """ Do the heavy-lifting involved in saving. Update or insert the data for a single table. """ meta = cls._meta non_pks = [f for f in meta.local_concrete_fields if not f.primary_key] if update_fields: non_pks = [f for f in non_pks if f.name in update_fields or f.attname in update_fields] pk_val = self._get_pk_val(meta) if pk_val is None: pk_val = meta.pk.get_pk_value_on_save(self) setattr(self, meta.pk.attname, pk_val) pk_set = pk_val is not None if not pk_set and (force_update or update_fields): raise ValueError("Cannot force an update in save() with no primary key.") updated = False # Skip an UPDATE when adding an instance and primary key has a default. if ( not raw and not force_insert and self._state.adding and meta.pk.default and meta.pk.default is not NOT_PROVIDED ): force_insert = True # If possible, try an UPDATE. If that doesn't update anything, do an INSERT. 
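A sketch (app `'myapp'` assumed) of the `force_insert` promotion at the end of `_save_table()` above: a primary key with a default skips the UPDATE probe on first save:

```
import uuid
from django.db import models

class Ticket(models.Model):
    id = models.UUIDField(primary_key=True, default=uuid.uuid4)

    class Meta:
        app_label = 'myapp'

# With a database configured:
# t = Ticket()
# t.save()   # pk default + _state.adding  => straight INSERT, no UPDATE probe
# t.save()   # _state.adding is now False  => normal UPDATE path
```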
if pk_set and not force_insert: base_qs = cls._base_manager.using(using) values = [(f, None, (getattr(self, f.attname) if raw else f.pre_save(self, False))) for f in non_pks] forced_update = update_fields or force_update updated = self._do_update(base_qs, using, pk_val, values, update_fields, forced_update) if force_update and not updated: raise DatabaseError("Forced update did not affect any rows.") if update_fields and not updated: raise DatabaseError("Save with update_fields did not affect any rows.") if not updated: if meta.order_with_respect_to: # If this is a model with an order_with_respect_to # autopopulate the _order field field = meta.order_with_respect_to filter_args = field.get_filter_kwargs_for_object(self) self._order = cls._base_manager.using(using).filter(**filter_args).aggregate( _order__max=Coalesce( ExpressionWrapper(Max('_order') + Value(1), output_field=IntegerField()), Value(0), ), )['_order__max'] fields = meta.local_concrete_fields if not pk_set: fields = [f for f in fields if f is not meta.auto_field] returning_fields = meta.db_returning_fields results = self._do_insert(cls._base_manager, using, fields, returning_fields, raw) if results: for value, field in zip(results[0], returning_fields): setattr(self, field.attname, value) return updated def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update): """ Try to update the model. Return True if the model was updated (if an update query was done and a matching row was found in the DB). """ filtered = base_qs.filter(pk=pk_val) if not values: # We can end up here when saving a model in inheritance chain where # update_fields doesn't target any field in current model. In that # case we just say the update succeeded. Another case ending up here # is a model with just PK - in that case check that the PK still # exists. return update_fields is not None or filtered.exists() if self._meta.select_on_save and not forced_update: return ( filtered.exists() and # It may happen that the object is deleted from the DB right after # this check, causing the subsequent UPDATE to return zero matching # rows. The same result can occur in some rare cases when the # database returns zero despite the UPDATE being executed # successfully (a row is matched and updated). In order to # distinguish these two cases, the object's existence in the # database is again checked for if the UPDATE query returns 0. (filtered._update(values) > 0 or filtered.exists()) ) return filtered._update(values) > 0 def _do_insert(self, manager, using, fields, returning_fields, raw): """ Do an INSERT. If returning_fields is defined then this method should return the newly created data for the model. """ return manager._insert( [self], fields=fields, returning_fields=returning_fields, using=using, raw=raw, ) def delete(self, using=None, keep_parents=False): using = using or router.db_for_write(self.__class__, instance=self) assert self.pk is not None, ( "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname) ) collector = Collector(using=using) collector.collect([self], keep_parents=keep_parents) return collector.delete() delete.alters_data = True def _get_FIELD_display(self, field): value = getattr(self, field.attname) choices_dict = dict(make_hashable(field.flatchoices)) # force_str() to coerce lazy strings. 
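A sketch (app `'myapp'` assumed) of the `get_FOO_display()` accessor backed by `_get_FIELD_display()` above; values missing from `choices` pass through unchanged:

```
from django.db import models

class Shirt(models.Model):
    SIZES = [('S', 'Small'), ('L', 'Large')]
    size = models.CharField(max_length=1, choices=SIZES)

    class Meta:
        app_label = 'myapp'

assert Shirt(size='L').get_size_display() == 'Large'
assert Shirt(size='X').get_size_display() == 'X'  # unknown value passthrough
```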
return force_str(choices_dict.get(make_hashable(value), value), strings_only=True) def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs): if not self.pk: raise ValueError("get_next/get_previous cannot be used on unsaved objects.") op = 'gt' if is_next else 'lt' order = '' if is_next else '-' param = getattr(self, field.attname) q = Q(**{'%s__%s' % (field.name, op): param}) q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk}) qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by( '%s%s' % (order, field.name), '%spk' % order ) try: return qs[0] except IndexError: raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name) def _get_next_or_previous_in_order(self, is_next): cachename = "__%s_order_cache" % is_next if not hasattr(self, cachename): op = 'gt' if is_next else 'lt' order = '_order' if is_next else '-_order' order_field = self._meta.order_with_respect_to filter_args = order_field.get_filter_kwargs_for_object(self) obj = self.__class__._default_manager.filter(**filter_args).filter(**{ '_order__%s' % op: self.__class__._default_manager.values('_order').filter(**{ self._meta.pk.name: self.pk }) }).order_by(order)[:1].get() setattr(self, cachename, obj) return getattr(self, cachename) def prepare_database_save(self, field): if self.pk is None: raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self) return getattr(self, field.remote_field.get_related_field().attname) def clean(self): """ Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. """ pass def validate_unique(self, exclude=None): """ Check unique constraints on the model and raise ValidationError if any failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) errors = self._perform_unique_checks(unique_checks) date_errors = self._perform_date_checks(date_checks) for k, v in date_errors.items(): errors.setdefault(k, []).extend(v) if errors: raise ValidationError(errors) def _get_unique_checks(self, exclude=None): """ Return a list of checks to perform. Since validate_unique() could be called from a ModelForm, some fields may have been excluded; we can't perform a unique check on a model that is missing fields involved in that check. Fields that did not validate should also be excluded, but they need to be passed in via the exclude argument. """ if exclude is None: exclude = [] unique_checks = [] unique_togethers = [(self.__class__, self._meta.unique_together)] constraints = [(self.__class__, self._meta.total_unique_constraints)] for parent_class in self._meta.get_parent_list(): if parent_class._meta.unique_together: unique_togethers.append((parent_class, parent_class._meta.unique_together)) if parent_class._meta.total_unique_constraints: constraints.append( (parent_class, parent_class._meta.total_unique_constraints) ) for model_class, unique_together in unique_togethers: for check in unique_together: if not any(name in exclude for name in check): # Add the check if the field isn't excluded. 
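A sketch (app `'myapp'` assumed) of the unsaved-instance guard in `_get_next_or_previous_by_FIELD()`: date fields grow `get_next_by_<field>()` accessors, but they refuse to run before the row exists:

```
from django.db import models

class Event(models.Model):
    starts = models.DateTimeField()

    class Meta:
        app_label = 'myapp'

try:
    Event().get_next_by_starts()
except ValueError as exc:
    print(exc)  # get_next/get_previous cannot be used on unsaved objects.
```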
unique_checks.append((model_class, tuple(check))) for model_class, model_constraints in constraints: for constraint in model_constraints: if not any(name in exclude for name in constraint.fields): unique_checks.append((model_class, constraint.fields)) # These are checks for the unique_for_<date/year/month>. date_checks = [] # Gather a list of checks for fields declared as unique and add them to # the list of checks. fields_with_class = [(self.__class__, self._meta.local_fields)] for parent_class in self._meta.get_parent_list(): fields_with_class.append((parent_class, parent_class._meta.local_fields)) for model_class, fields in fields_with_class: for f in fields: name = f.name if name in exclude: continue if f.unique: unique_checks.append((model_class, (name,))) if f.unique_for_date and f.unique_for_date not in exclude: date_checks.append((model_class, 'date', name, f.unique_for_date)) if f.unique_for_year and f.unique_for_year not in exclude: date_checks.append((model_class, 'year', name, f.unique_for_year)) if f.unique_for_month and f.unique_for_month not in exclude: date_checks.append((model_class, 'month', name, f.unique_for_month)) return unique_checks, date_checks def _perform_unique_checks(self, unique_checks): errors = {} for model_class, unique_check in unique_checks: # Try to look up an existing object with the same values as this # object's values for all the unique field. lookup_kwargs = {} for field_name in unique_check: f = self._meta.get_field(field_name) lookup_value = getattr(self, f.attname) # TODO: Handle multiple backends with different feature flags. if (lookup_value is None or (lookup_value == '' and connection.features.interprets_empty_strings_as_nulls)): # no value, skip the lookup continue if f.primary_key and not self._state.adding: # no need to check for unique primary key when editing continue lookup_kwargs[str(field_name)] = lookup_value # some fields were skipped, no reason to do the check if len(unique_check) != len(lookup_kwargs): continue qs = model_class._default_manager.filter(**lookup_kwargs) # Exclude the current object from the query if we are editing an # instance (as opposed to creating a new one) # Note that we need to use the pk as defined by model_class, not # self.pk. These can be different fields because model inheritance # allows single model to have effectively multiple primary keys. # Refs #17615. 
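The check list built above can be inspected without a database; a sketch (app `'myapp'` assumed, the private `_get_unique_checks()` used purely for illustration) showing how `unique_together` and `unique=True` fields feed it:

```
from django.db import models

class Booking(models.Model):
    room = models.IntegerField()
    day = models.DateField()
    code = models.CharField(max_length=8, unique=True)

    class Meta:
        app_label = 'myapp'
        unique_together = [('room', 'day')]

unique_checks, date_checks = Booking()._get_unique_checks()
assert (Booking, ('room', 'day')) in unique_checks   # from unique_together
assert (Booking, ('code',)) in unique_checks         # from unique=True
assert date_checks == []          # no unique_for_date/year/month declared
# The auto pk is unique too, so (Booking, ('id',)) is also present.
```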
model_class_pk = self._get_pk_val(model_class._meta) if not self._state.adding and model_class_pk is not None: qs = qs.exclude(pk=model_class_pk) if qs.exists(): if len(unique_check) == 1: key = unique_check[0] else: key = NON_FIELD_ERRORS errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check)) return errors def _perform_date_checks(self, date_checks): errors = {} for model_class, lookup_type, field, unique_for in date_checks: lookup_kwargs = {} # there's a ticket to add a date lookup, we can remove this special # case if that makes it's way in date = getattr(self, unique_for) if date is None: continue if lookup_type == 'date': lookup_kwargs['%s__day' % unique_for] = date.day lookup_kwargs['%s__month' % unique_for] = date.month lookup_kwargs['%s__year' % unique_for] = date.year else: lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type) lookup_kwargs[field] = getattr(self, field) qs = model_class._default_manager.filter(**lookup_kwargs) # Exclude the current object from the query if we are editing an # instance (as opposed to creating a new one) if not self._state.adding and self.pk is not None: qs = qs.exclude(pk=self.pk) if qs.exists(): errors.setdefault(field, []).append( self.date_error_message(lookup_type, field, unique_for) ) return errors def date_error_message(self, lookup_type, field_name, unique_for): opts = self._meta field = opts.get_field(field_name) return ValidationError( message=field.error_messages['unique_for_date'], code='unique_for_date', params={ 'model': self, 'model_name': capfirst(opts.verbose_name), 'lookup_type': lookup_type, 'field': field_name, 'field_label': capfirst(field.verbose_name), 'date_field': unique_for, 'date_field_label': capfirst(opts.get_field(unique_for).verbose_name), } ) def unique_error_message(self, model_class, unique_check): opts = model_class._meta params = { 'model': self, 'model_class': model_class, 'model_name': capfirst(opts.verbose_name), 'unique_check': unique_check, } # A unique field if len(unique_check) == 1: field = opts.get_field(unique_check[0]) params['field_label'] = capfirst(field.verbose_name) return ValidationError( message=field.error_messages['unique'], code='unique', params=params, ) # unique_together else: field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check] params['field_labels'] = get_text_list(field_labels, _('and')) return ValidationError( message=_("%(model_name)s with this %(field_labels)s already exists."), code='unique_together', params=params, ) def full_clean(self, exclude=None, validate_unique=True): """ Call clean_fields(), clean(), and validate_unique() on the model. Raise a ValidationError for any errors that occur. """ errors = {} if exclude is None: exclude = [] else: exclude = list(exclude) try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. 
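A sketch (app `'myapp'` assumed) of `full_clean()` collecting per-field errors into a single `ValidationError`; passing `validate_unique=False` keeps the example database-free:

```
from django.core.exceptions import ValidationError
from django.db import models

class Coupon(models.Model):
    code = models.CharField(max_length=4)

    class Meta:
        app_label = 'myapp'

try:
    Coupon(code='TOOLONG').full_clean(validate_unique=False)
except ValidationError as exc:
    assert 'code' in exc.message_dict   # max_length validator failed
```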
if validate_unique: for name in errors: if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors) def clean_fields(self, exclude=None): """ Clean all fields and raise a ValidationError containing a dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors) @classmethod def check(cls, **kwargs): errors = [*cls._check_swappable(), *cls._check_model(), *cls._check_managers(**kwargs)] if not cls._meta.swapped: databases = kwargs.get('databases') or [] errors += [ *cls._check_fields(**kwargs), *cls._check_m2m_through_same_relationship(), *cls._check_long_column_names(databases), ] clash_errors = ( *cls._check_id_field(), *cls._check_field_name_clashes(), *cls._check_model_name_db_lookup_clashes(), *cls._check_property_name_related_field_accessor_clashes(), *cls._check_single_primary_key(), ) errors.extend(clash_errors) # If there are field name clashes, hide consequent column name # clashes. if not clash_errors: errors.extend(cls._check_column_name_clashes()) errors += [ *cls._check_index_together(), *cls._check_unique_together(), *cls._check_indexes(databases), *cls._check_ordering(), *cls._check_constraints(databases), ] return errors @classmethod def _check_swappable(cls): """Check if the swapped model exists.""" errors = [] if cls._meta.swapped: try: apps.get_model(cls._meta.swapped) except ValueError: errors.append( checks.Error( "'%s' is not of the form 'app_label.app_name'." % cls._meta.swappable, id='models.E001', ) ) except LookupError: app_label, model_name = cls._meta.swapped.split('.') errors.append( checks.Error( "'%s' references '%s.%s', which has not been " "installed, or is abstract." % ( cls._meta.swappable, app_label, model_name ), id='models.E002', ) ) return errors @classmethod def _check_model(cls): errors = [] if cls._meta.proxy: if cls._meta.local_fields or cls._meta.local_many_to_many: errors.append( checks.Error( "Proxy model '%s' contains model fields." % cls.__name__, id='models.E017', ) ) return errors @classmethod def _check_managers(cls, **kwargs): """Perform all manager checks.""" errors = [] for manager in cls._meta.managers: errors.extend(manager.check(**kwargs)) return errors @classmethod def _check_fields(cls, **kwargs): """Perform all field checks.""" errors = [] for field in cls._meta.local_fields: errors.extend(field.check(**kwargs)) for field in cls._meta.local_many_to_many: errors.extend(field.check(from_model=cls, **kwargs)) return errors @classmethod def _check_m2m_through_same_relationship(cls): """ Check if no relationship model is used by more than one m2m field. """ errors = [] seen_intermediary_signatures = [] fields = cls._meta.local_many_to_many # Skip when the target model wasn't found. fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase)) # Skip when the relationship model wasn't found. 
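A sketch (app `'myapp'` assumed) of the check machinery: `Model.check()` aggregates the `_check_*` methods below, so a non-primary-key field named `id` surfaces as `models.E004`:

```
from django.db import models

class Oddity(models.Model):
    id = models.IntegerField()   # 'id' without primary_key=True

    class Meta:
        app_label = 'myapp'

assert any(e.id == 'models.E004' for e in Oddity.check())
```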
fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase)) for f in fields: signature = (f.remote_field.model, cls, f.remote_field.through, f.remote_field.through_fields) if signature in seen_intermediary_signatures: errors.append( checks.Error( "The model has two identical many-to-many relations " "through the intermediate model '%s'." % f.remote_field.through._meta.label, obj=cls, id='models.E003', ) ) else: seen_intermediary_signatures.append(signature) return errors @classmethod def _check_id_field(cls): """Check if `id` field is a primary key.""" fields = [f for f in cls._meta.local_fields if f.name == 'id' and f != cls._meta.pk] # fields is empty or consists of the invalid "id" field if fields and not fields[0].primary_key and cls._meta.pk.name == 'id': return [ checks.Error( "'id' can only be used as a field name if the field also " "sets 'primary_key=True'.", obj=cls, id='models.E004', ) ] else: return [] @classmethod def _check_field_name_clashes(cls): """Forbid field shadowing in multi-table inheritance.""" errors = [] used_fields = {} # name or attname -> field # Check that multi-inheritance doesn't cause field name shadowing. for parent in cls._meta.get_parent_list(): for f in parent._meta.local_fields: clash = used_fields.get(f.name) or used_fields.get(f.attname) or None if clash: errors.append( checks.Error( "The field '%s' from parent model " "'%s' clashes with the field '%s' " "from parent model '%s'." % ( clash.name, clash.model._meta, f.name, f.model._meta ), obj=cls, id='models.E005', ) ) used_fields[f.name] = f used_fields[f.attname] = f # Check that fields defined in the model don't clash with fields from # parents, including auto-generated fields like multi-table inheritance # child accessors. for parent in cls._meta.get_parent_list(): for f in parent._meta.get_fields(): if f not in used_fields: used_fields[f.name] = f for f in cls._meta.local_fields: clash = used_fields.get(f.name) or used_fields.get(f.attname) or None # Note that we may detect clash between user-defined non-unique # field "id" and automatically added unique field "id", both # defined at the same model. This special case is considered in # _check_id_field and here we ignore it. id_conflict = f.name == "id" and clash and clash.name == "id" and clash.model == cls if clash and not id_conflict: errors.append( checks.Error( "The field '%s' clashes with the field '%s' " "from model '%s'." % ( f.name, clash.name, clash.model._meta ), obj=f, id='models.E006', ) ) used_fields[f.name] = f used_fields[f.attname] = f return errors @classmethod def _check_column_name_clashes(cls): # Store a list of column names which have already been used by other fields. used_column_names = [] errors = [] for f in cls._meta.local_fields: _, column_name = f.get_attname_column() # Ensure the column name is not already in use. if column_name and column_name in used_column_names: errors.append( checks.Error( "Field '%s' has column name '%s' that is used by " "another field." % (f.name, column_name), hint="Specify a 'db_column' for the field.", obj=cls, id='models.E007' ) ) else: used_column_names.append(column_name) return errors @classmethod def _check_model_name_db_lookup_clashes(cls): errors = [] model_name = cls.__name__ if model_name.startswith('_') or model_name.endswith('_'): errors.append( checks.Error( "The model name '%s' cannot start or end with an underscore " "as it collides with the query lookup syntax." 
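A sketch (app `'myapp'` assumed) triggering `_check_column_name_clashes()`: two fields mapped onto one column report `models.E007`, with the `db_column` hint:

```
from django.db import models

class Clashing(models.Model):
    first = models.IntegerField(db_column='shared')
    second = models.IntegerField(db_column='shared')

    class Meta:
        app_label = 'myapp'

assert any(e.id == 'models.E007' for e in Clashing.check())
```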
% model_name, obj=cls, id='models.E023' ) ) elif LOOKUP_SEP in model_name: errors.append( checks.Error( "The model name '%s' cannot contain double underscores as " "it collides with the query lookup syntax." % model_name, obj=cls, id='models.E024' ) ) return errors @classmethod def _check_property_name_related_field_accessor_clashes(cls): errors = [] property_names = cls._meta._property_names related_field_accessors = ( f.get_attname() for f in cls._meta._get_fields(reverse=False) if f.is_relation and f.related_model is not None ) for accessor in related_field_accessors: if accessor in property_names: errors.append( checks.Error( "The property '%s' clashes with a related field " "accessor." % accessor, obj=cls, id='models.E025', ) ) return errors @classmethod def _check_single_primary_key(cls): errors = [] if sum(1 for f in cls._meta.local_fields if f.primary_key) > 1: errors.append( checks.Error( "The model cannot have more than one field with " "'primary_key=True'.", obj=cls, id='models.E026', ) ) return errors @classmethod def _check_index_together(cls): """Check the value of "index_together" option.""" if not isinstance(cls._meta.index_together, (tuple, list)): return [ checks.Error( "'index_together' must be a list or tuple.", obj=cls, id='models.E008', ) ] elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.index_together): return [ checks.Error( "All 'index_together' elements must be lists or tuples.", obj=cls, id='models.E009', ) ] else: errors = [] for fields in cls._meta.index_together: errors.extend(cls._check_local_fields(fields, "index_together")) return errors @classmethod def _check_unique_together(cls): """Check the value of "unique_together" option.""" if not isinstance(cls._meta.unique_together, (tuple, list)): return [ checks.Error( "'unique_together' must be a list or tuple.", obj=cls, id='models.E010', ) ] elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.unique_together): return [ checks.Error( "All 'unique_together' elements must be lists or tuples.", obj=cls, id='models.E011', ) ] else: errors = [] for fields in cls._meta.unique_together: errors.extend(cls._check_local_fields(fields, "unique_together")) return errors @classmethod def _check_indexes(cls, databases): """Check fields, names, and conditions of indexes.""" errors = [] for index in cls._meta.indexes: # Index name can't start with an underscore or a number, restricted # for cross-database compatibility with Oracle. if index.name[0] == '_' or index.name[0].isdigit(): errors.append( checks.Error( "The index name '%s' cannot start with an underscore " "or a number." % index.name, obj=cls, id='models.E033', ), ) if len(index.name) > index.max_name_length: errors.append( checks.Error( "The index name '%s' cannot be longer than %d " "characters." % (index.name, index.max_name_length), obj=cls, id='models.E034', ), ) for db in databases: if not router.allow_migrate_model(db, cls): continue connection = connections[db] if not ( connection.features.supports_partial_indexes or 'supports_partial_indexes' in cls._meta.required_db_features ) and any(index.condition is not None for index in cls._meta.indexes): errors.append( checks.Warning( '%s does not support indexes with conditions.' % connection.display_name, hint=( "Conditions will be ignored. Silence this warning " "if you don't care about it." 
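A sketch (app `'myapp'` assumed) of the Oracle-motivated index-name rule checked above: a user-supplied name with a leading underscore is flagged as `models.E033` at check time rather than rejected when the `Index` is defined:

```
from django.db import models

class Indexed(models.Model):
    title = models.CharField(max_length=100)

    class Meta:
        app_label = 'myapp'
        indexes = [models.Index(fields=['title'], name='_starts_badly')]

assert any(e.id == 'models.E033' for e in Indexed.check())
```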
), obj=cls, id='models.W037', ) ) if not ( connection.features.supports_covering_indexes or 'supports_covering_indexes' in cls._meta.required_db_features ) and any(index.include for index in cls._meta.indexes): errors.append( checks.Warning( '%s does not support indexes with non-key columns.' % connection.display_name, hint=( "Non-key columns will be ignored. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W040', ) ) fields = [field for index in cls._meta.indexes for field, _ in index.fields_orders] fields += [include for index in cls._meta.indexes for include in index.include] errors.extend(cls._check_local_fields(fields, 'indexes')) return errors @classmethod def _check_local_fields(cls, fields, option): from django.db import models # In order to avoid hitting the relation tree prematurely, we use our # own fields_map instead of using get_field() forward_fields_map = {} for field in cls._meta._get_fields(reverse=False): forward_fields_map[field.name] = field if hasattr(field, 'attname'): forward_fields_map[field.attname] = field errors = [] for field_name in fields: try: field = forward_fields_map[field_name] except KeyError: errors.append( checks.Error( "'%s' refers to the nonexistent field '%s'." % ( option, field_name, ), obj=cls, id='models.E012', ) ) else: if isinstance(field.remote_field, models.ManyToManyRel): errors.append( checks.Error( "'%s' refers to a ManyToManyField '%s', but " "ManyToManyFields are not permitted in '%s'." % ( option, field_name, option, ), obj=cls, id='models.E013', ) ) elif field not in cls._meta.local_fields: errors.append( checks.Error( "'%s' refers to field '%s' which is not local to model '%s'." % (option, field_name, cls._meta.object_name), hint="This issue may be caused by multi-table inheritance.", obj=cls, id='models.E016', ) ) return errors @classmethod def _check_ordering(cls): """ Check "ordering" option -- is it a list of strings and do all fields exist? """ if cls._meta._ordering_clash: return [ checks.Error( "'ordering' and 'order_with_respect_to' cannot be used together.", obj=cls, id='models.E021', ), ] if cls._meta.order_with_respect_to or not cls._meta.ordering: return [] if not isinstance(cls._meta.ordering, (list, tuple)): return [ checks.Error( "'ordering' must be a tuple or list (even if you want to order by only one field).", obj=cls, id='models.E014', ) ] errors = [] fields = cls._meta.ordering # Skip expressions and '?' fields. fields = (f for f in fields if isinstance(f, str) and f != '?') # Convert "-field" to "field". fields = ((f[1:] if f.startswith('-') else f) for f in fields) # Separate related fields and non-related fields. _fields = [] related_fields = [] for f in fields: if LOOKUP_SEP in f: related_fields.append(f) else: _fields.append(f) fields = _fields # Check related fields. for field in related_fields: _cls = cls fld = None for part in field.split(LOOKUP_SEP): try: # pk is an alias that won't be found by opts.get_field. if part == 'pk': fld = _cls._meta.pk else: fld = _cls._meta.get_field(part) if fld.is_relation: _cls = fld.get_path_info()[-1].to_opts.model else: _cls = None except (FieldDoesNotExist, AttributeError): if fld is None or ( fld.get_transform(part) is None and fld.get_lookup(part) is None ): errors.append( checks.Error( "'ordering' refers to the nonexistent field, " "related field, or lookup '%s'." % field, obj=cls, id='models.E015', ) ) # Skip ordering on pk. This is always a valid order_by field # but is an alias and therefore won't be found by opts.get_field. 
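A sketch (app `'myapp'` assumed) of `_check_ordering()`: an `ordering` entry naming a nonexistent field is reported as `models.E015`:

```
from django.db import models

class Sorted(models.Model):
    class Meta:
        app_label = 'myapp'
        ordering = ['missing_field']

assert any(e.id == 'models.E015' for e in Sorted.check())
```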
fields = {f for f in fields if f != 'pk'} # Check for invalid or nonexistent fields in ordering. invalid_fields = [] # Any field name that is not present in field_names does not exist. # Also, ordering by m2m fields is not allowed. opts = cls._meta valid_fields = set(chain.from_iterable( (f.name, f.attname) if not (f.auto_created and not f.concrete) else (f.field.related_query_name(),) for f in chain(opts.fields, opts.related_objects) )) invalid_fields.extend(fields - valid_fields) for invalid_field in invalid_fields: errors.append( checks.Error( "'ordering' refers to the nonexistent field, related " "field, or lookup '%s'." % invalid_field, obj=cls, id='models.E015', ) ) return errors @classmethod def _check_long_column_names(cls, databases): """ Check that any auto-generated column names are shorter than the limits for each database in which the model will be created. """ if not databases: return [] errors = [] allowed_len = None db_alias = None # Find the minimum max allowed length among all specified db_aliases. for db in databases: # skip databases where the model won't be created if not router.allow_migrate_model(db, cls): continue connection = connections[db] max_name_length = connection.ops.max_name_length() if max_name_length is None or connection.features.truncates_names: continue else: if allowed_len is None: allowed_len = max_name_length db_alias = db elif max_name_length < allowed_len: allowed_len = max_name_length db_alias = db if allowed_len is None: return errors for f in cls._meta.local_fields: _, column_name = f.get_attname_column() # Check if auto-generated name for the field is too long # for the database. if f.db_column is None and column_name is not None and len(column_name) > allowed_len: errors.append( checks.Error( 'Autogenerated column name too long for field "%s". ' 'Maximum length is "%s" for database "%s".' % (column_name, allowed_len, db_alias), hint="Set the column name manually using 'db_column'.", obj=cls, id='models.E018', ) ) for f in cls._meta.local_many_to_many: # Skip nonexistent models. if isinstance(f.remote_field.through, str): continue # Check if auto-generated name for the M2M field is too long # for the database. for m2m in f.remote_field.through._meta.local_fields: _, rel_name = m2m.get_attname_column() if m2m.db_column is None and rel_name is not None and len(rel_name) > allowed_len: errors.append( checks.Error( 'Autogenerated column name too long for M2M field ' '"%s". Maximum length is "%s" for database "%s".' % (rel_name, allowed_len, db_alias), hint=( "Use 'through' to create a separate model for " "M2M and then set column_name using 'db_column'." ), obj=cls, id='models.E019', ) ) return errors @classmethod def _check_constraints(cls, databases): errors = [] for db in databases: if not router.allow_migrate_model(db, cls): continue connection = connections[db] if not ( connection.features.supports_table_check_constraints or 'supports_table_check_constraints' in cls._meta.required_db_features ) and any( isinstance(constraint, CheckConstraint) for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support check constraints.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." 
), obj=cls, id='models.W027', ) ) if not ( connection.features.supports_partial_indexes or 'supports_partial_indexes' in cls._meta.required_db_features ) and any( isinstance(constraint, UniqueConstraint) and constraint.condition is not None for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support unique constraints with ' 'conditions.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W036', ) ) if not ( connection.features.supports_deferrable_unique_constraints or 'supports_deferrable_unique_constraints' in cls._meta.required_db_features ) and any( isinstance(constraint, UniqueConstraint) and constraint.deferrable is not None for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support deferrable unique constraints.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W038', ) ) if not ( connection.features.supports_covering_indexes or 'supports_covering_indexes' in cls._meta.required_db_features ) and any( isinstance(constraint, UniqueConstraint) and constraint.include for constraint in cls._meta.constraints ): errors.append( checks.Warning( '%s does not support unique constraints with non-key ' 'columns.' % connection.display_name, hint=( "A constraint won't be created. Silence this " "warning if you don't care about it." ), obj=cls, id='models.W039', ) ) fields = chain.from_iterable( (*constraint.fields, *constraint.include) for constraint in cls._meta.constraints if isinstance(constraint, UniqueConstraint) ) errors.extend(cls._check_local_fields(fields, 'constraints')) return errors ############################################ # HELPER FUNCTIONS (CURRIED MODEL METHODS) # ############################################ # ORDERING METHODS ######################### def method_set_order(self, ordered_obj, id_list, using=None): if using is None: using = DEFAULT_DB_ALIAS order_wrt = ordered_obj._meta.order_with_respect_to filter_args = order_wrt.get_forward_related_filter(self) ordered_obj.objects.db_manager(using).filter(**filter_args).bulk_update([ ordered_obj(pk=pk, _order=order) for order, pk in enumerate(id_list) ], ['_order']) def method_get_order(self, ordered_obj): order_wrt = ordered_obj._meta.order_with_respect_to filter_args = order_wrt.get_forward_related_filter(self) pk_name = ordered_obj._meta.pk.name return ordered_obj.objects.filter(**filter_args).values_list(pk_name, flat=True) def make_foreign_order_accessors(model, related_model): setattr( related_model, 'get_%s_order' % model.__name__.lower(), partialmethod(method_get_order, model) ) setattr( related_model, 'set_%s_order' % model.__name__.lower(), partialmethod(method_set_order, model) ) ######## # MISC # ######## def model_unpickle(model_id): """Used to unpickle Model subclasses with deferred fields.""" if isinstance(model_id, tuple): model = apps.get_model(*model_id) else: # Backwards compat - the model was cached directly in earlier versions. model = model_id return model.__new__(model) model_unpickle.__safe_for_unpickle__ = True
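A sketch (app `'myapp'` assumed) of the ordering helpers above: `order_with_respect_to` installs `get_<model>_order()` / `set_<model>_order()` on the *related* model via `make_foreign_order_accessors()` (actually running them requires a database):

```
from django.db import models

class Question(models.Model):
    class Meta:
        app_label = 'myapp'

class Answer(models.Model):
    question = models.ForeignKey(Question, on_delete=models.CASCADE)

    class Meta:
        app_label = 'myapp'
        order_with_respect_to = 'question'

q = Question()
assert hasattr(q, 'get_answer_order') and hasattr(q, 'set_answer_order')
# q.set_answer_order([3, 1, 2]) would bulk_update the hidden _order column.
```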
import copy import datetime import inspect from decimal import Decimal from django.core.exceptions import EmptyResultSet, FieldError from django.db import NotSupportedError, connection from django.db.models import fields from django.db.models.constants import LOOKUP_SEP from django.db.models.query_utils import Q from django.utils.deconstruct import deconstructible from django.utils.functional import cached_property from django.utils.hashable import make_hashable class SQLiteNumericMixin: """ Some expressions with output_field=DecimalField() must be cast to numeric to be properly filtered. """ def as_sqlite(self, compiler, connection, **extra_context): sql, params = self.as_sql(compiler, connection, **extra_context) try: if self.output_field.get_internal_type() == 'DecimalField': sql = 'CAST(%s AS NUMERIC)' % sql except FieldError: pass return sql, params class Combinable: """ Provide the ability to combine one or two objects with some connector. For example F('foo') + F('bar'). """ # Arithmetic connectors ADD = '+' SUB = '-' MUL = '*' DIV = '/' POW = '^' # The following is a quoted % operator - it is quoted because it can be # used in strings that also have parameter substitution. MOD = '%%' # Bitwise operators - note that these are generated by .bitand() # and .bitor(), the '&' and '|' are reserved for boolean operator # usage. BITAND = '&' BITOR = '|' BITLEFTSHIFT = '<<' BITRIGHTSHIFT = '>>' BITXOR = '#' def _combine(self, other, connector, reversed): if not hasattr(other, 'resolve_expression'): # everything must be resolvable to an expression if isinstance(other, datetime.timedelta): other = DurationValue(other, output_field=fields.DurationField()) else: other = Value(other) if reversed: return CombinedExpression(other, connector, self) return CombinedExpression(self, connector, other) ############# # OPERATORS # ############# def __neg__(self): return self._combine(-1, self.MUL, False) def __add__(self, other): return self._combine(other, self.ADD, False) def __sub__(self, other): return self._combine(other, self.SUB, False) def __mul__(self, other): return self._combine(other, self.MUL, False) def __truediv__(self, other): return self._combine(other, self.DIV, False) def __mod__(self, other): return self._combine(other, self.MOD, False) def __pow__(self, other): return self._combine(other, self.POW, False) def __and__(self, other): if getattr(self, 'conditional', False) and getattr(other, 'conditional', False): return Q(self) & Q(other) raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) def bitand(self, other): return self._combine(other, self.BITAND, False) def bitleftshift(self, other): return self._combine(other, self.BITLEFTSHIFT, False) def bitrightshift(self, other): return self._combine(other, self.BITRIGHTSHIFT, False) def bitxor(self, other): return self._combine(other, self.BITXOR, False) def __or__(self, other): if getattr(self, 'conditional', False) and getattr(other, 'conditional', False): return Q(self) | Q(other) raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." 
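A sketch of how `Combinable`'s operator overloads stack up into `CombinedExpression` trees; note that `&`/`|` are reserved for `Q`-style boolean logic, so bitwise math goes through `.bitand()`/`.bitor()`:

```
from django.db.models import F
from django.db.models.expressions import CombinedExpression

expr = F('price') * 2 - F('discount')   # resolved later against a query
assert isinstance(expr, CombinedExpression)

masked = F('flags').bitand(0xFF)        # '&' itself raises NotImplementedError
assert isinstance(masked, CombinedExpression)

# Typical use, with a database configured:
# Product.objects.update(price=F('price') * 2 - F('discount'))
```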
) def bitor(self, other): return self._combine(other, self.BITOR, False) def __radd__(self, other): return self._combine(other, self.ADD, True) def __rsub__(self, other): return self._combine(other, self.SUB, True) def __rmul__(self, other): return self._combine(other, self.MUL, True) def __rtruediv__(self, other): return self._combine(other, self.DIV, True) def __rmod__(self, other): return self._combine(other, self.MOD, True) def __rpow__(self, other): return self._combine(other, self.POW, True) def __rand__(self, other): raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) def __ror__(self, other): raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) @deconstructible class BaseExpression: """Base class for all query expressions.""" # aggregate specific fields is_summary = False _output_field_resolved_to_none = False # Can the expression be used in a WHERE clause? filterable = True # Can the expression can be used as a source expression in Window? window_compatible = False def __init__(self, output_field=None): if output_field is not None: self.output_field = output_field def __getstate__(self): state = self.__dict__.copy() state.pop('convert_value', None) return state def get_db_converters(self, connection): return ( [] if self.convert_value is self._convert_value_noop else [self.convert_value] ) + self.output_field.get_db_converters(connection) def get_source_expressions(self): return [] def set_source_expressions(self, exprs): assert not exprs def _parse_expressions(self, *expressions): return [ arg if hasattr(arg, 'resolve_expression') else ( F(arg) if isinstance(arg, str) else Value(arg) ) for arg in expressions ] def as_sql(self, compiler, connection): """ Responsible for returning a (sql, [params]) tuple to be included in the current query. Different backends can provide their own implementation, by providing an `as_{vendor}` method and patching the Expression: ``` def override_as_sql(self, compiler, connection): # custom logic return super().as_sql(compiler, connection) setattr(Expression, 'as_' + connection.vendor, override_as_sql) ``` Arguments: * compiler: the query compiler responsible for generating the query. Must have a compile method, returning a (sql, [params]) tuple. Calling compiler(value) will return a quoted `value`. * connection: the database connection used for the current query. Return: (sql, params) Where `sql` is a string containing ordered sql parameters to be replaced with the elements of the list `params`. """ raise NotImplementedError("Subclasses must implement as_sql()") @cached_property def contains_aggregate(self): return any(expr and expr.contains_aggregate for expr in self.get_source_expressions()) @cached_property def contains_over_clause(self): return any(expr and expr.contains_over_clause for expr in self.get_source_expressions()) @cached_property def contains_column_references(self): return any(expr and expr.contains_column_references for expr in self.get_source_expressions()) def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): """ Provide the chance to do any preprocessing or validation before being added to the query. 
Arguments: * query: the backend query implementation * allow_joins: boolean allowing or denying use of joins in this query * reuse: a set of reusable joins for multijoins * summarize: a terminal aggregate clause * for_save: whether this expression about to be used in a save or update Return: an Expression to be added to the query. """ c = self.copy() c.is_summary = summarize c.set_source_expressions([ expr.resolve_expression(query, allow_joins, reuse, summarize) if expr else None for expr in c.get_source_expressions() ]) return c @property def conditional(self): return isinstance(self.output_field, fields.BooleanField) @property def field(self): return self.output_field @cached_property def output_field(self): """Return the output type of this expressions.""" output_field = self._resolve_output_field() if output_field is None: self._output_field_resolved_to_none = True raise FieldError('Cannot resolve expression type, unknown output_field') return output_field @cached_property def _output_field_or_none(self): """ Return the output field of this expression, or None if _resolve_output_field() didn't return an output type. """ try: return self.output_field except FieldError: if not self._output_field_resolved_to_none: raise def _resolve_output_field(self): """ Attempt to infer the output type of the expression. If the output fields of all source fields match then, simply infer the same type here. This isn't always correct, but it makes sense most of the time. Consider the difference between `2 + 2` and `2 / 3`. Inferring the type here is a convenience for the common case. The user should supply their own output_field with more complex computations. If a source's output field resolves to None, exclude it from this check. If all sources are None, then an error is raised higher up the stack in the output_field property. """ sources_iter = (source for source in self.get_source_fields() if source is not None) for output_field in sources_iter: for source in sources_iter: if not isinstance(output_field, source.__class__): raise FieldError( 'Expression contains mixed types: %s, %s. You must ' 'set output_field.' % ( output_field.__class__.__name__, source.__class__.__name__, ) ) return output_field @staticmethod def _convert_value_noop(value, expression, connection): return value @cached_property def convert_value(self): """ Expressions provide their own converters because users have the option of manually specifying the output_field which may be a different type from the one the database returns. 
""" field = self.output_field internal_type = field.get_internal_type() if internal_type == 'FloatField': return lambda value, expression, connection: None if value is None else float(value) elif internal_type.endswith('IntegerField'): return lambda value, expression, connection: None if value is None else int(value) elif internal_type == 'DecimalField': return lambda value, expression, connection: None if value is None else Decimal(value) return self._convert_value_noop def get_lookup(self, lookup): return self.output_field.get_lookup(lookup) def get_transform(self, name): return self.output_field.get_transform(name) def relabeled_clone(self, change_map): clone = self.copy() clone.set_source_expressions([ e.relabeled_clone(change_map) if e is not None else None for e in self.get_source_expressions() ]) return clone def copy(self): return copy.copy(self) def get_group_by_cols(self, alias=None): if not self.contains_aggregate: return [self] cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols def get_source_fields(self): """Return the underlying field types used by this aggregate.""" return [e._output_field_or_none for e in self.get_source_expressions()] def asc(self, **kwargs): return OrderBy(self, **kwargs) def desc(self, **kwargs): return OrderBy(self, descending=True, **kwargs) def reverse_ordering(self): return self def flatten(self): """ Recursively yield this expression and all subexpressions, in depth-first order. """ yield self for expr in self.get_source_expressions(): if expr: yield from expr.flatten() def select_format(self, compiler, sql, params): """ Custom format for select clauses. For example, EXISTS expressions need to be wrapped in CASE WHEN on Oracle. """ return self.output_field.select_format(compiler, sql, params) @cached_property def identity(self): constructor_signature = inspect.signature(self.__init__) args, kwargs = self._constructor_args signature = constructor_signature.bind_partial(*args, **kwargs) signature.apply_defaults() arguments = signature.arguments.items() identity = [self.__class__] for arg, value in arguments: if isinstance(value, fields.Field): if value.name and value.model: value = (value.model._meta.label, value.name) else: value = type(value) else: value = make_hashable(value) identity.append((arg, value)) return tuple(identity) def __eq__(self, other): if not isinstance(other, BaseExpression): return NotImplemented return other.identity == self.identity def __hash__(self): return hash(self.identity) class Expression(BaseExpression, Combinable): """An expression that can be combined with other expressions.""" pass class CombinedExpression(SQLiteNumericMixin, Expression): def __init__(self, lhs, connector, rhs, output_field=None): super().__init__(output_field=output_field) self.connector = connector self.lhs = lhs self.rhs = rhs def __repr__(self): return "<{}: {}>".format(self.__class__.__name__, self) def __str__(self): return "{} {} {}".format(self.lhs, self.connector, self.rhs) def get_source_expressions(self): return [self.lhs, self.rhs] def set_source_expressions(self, exprs): self.lhs, self.rhs = exprs def as_sql(self, compiler, connection): try: lhs_output = self.lhs.output_field except FieldError: lhs_output = None try: rhs_output = self.rhs.output_field except FieldError: rhs_output = None if (not connection.features.has_native_duration_field and ((lhs_output and lhs_output.get_internal_type() == 'DurationField') or (rhs_output and rhs_output.get_internal_type() == 'DurationField'))): 
return DurationExpression(self.lhs, self.connector, self.rhs).as_sql(compiler, connection) if (lhs_output and rhs_output and self.connector == self.SUB and lhs_output.get_internal_type() in {'DateField', 'DateTimeField', 'TimeField'} and lhs_output.get_internal_type() == rhs_output.get_internal_type()): return TemporalSubtraction(self.lhs, self.rhs).as_sql(compiler, connection) expressions = [] expression_params = [] sql, params = compiler.compile(self.lhs) expressions.append(sql) expression_params.extend(params) sql, params = compiler.compile(self.rhs) expressions.append(sql) expression_params.extend(params) # order of precedence expression_wrapper = '(%s)' sql = connection.ops.combine_expression(self.connector, expressions) return expression_wrapper % sql, expression_params def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize c.lhs = c.lhs.resolve_expression(query, allow_joins, reuse, summarize, for_save) c.rhs = c.rhs.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c class DurationExpression(CombinedExpression): def compile(self, side, compiler, connection): if not isinstance(side, DurationValue): try: output = side.output_field except FieldError: pass else: if output.get_internal_type() == 'DurationField': sql, params = compiler.compile(side) return connection.ops.format_for_duration_arithmetic(sql), params return compiler.compile(side) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) expressions = [] expression_params = [] sql, params = self.compile(self.lhs, compiler, connection) expressions.append(sql) expression_params.extend(params) sql, params = self.compile(self.rhs, compiler, connection) expressions.append(sql) expression_params.extend(params) # order of precedence expression_wrapper = '(%s)' sql = connection.ops.combine_duration_expression(self.connector, expressions) return expression_wrapper % sql, expression_params class TemporalSubtraction(CombinedExpression): output_field = fields.DurationField() def __init__(self, lhs, rhs): super().__init__(lhs, self.SUB, rhs) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) lhs = compiler.compile(self.lhs) rhs = compiler.compile(self.rhs) return connection.ops.subtract_temporals(self.lhs.output_field.get_internal_type(), lhs, rhs) @deconstructible class F(Combinable): """An object capable of resolving references to existing query objects.""" def __init__(self, name): """ Arguments: * name: the name of the field this expression references """ self.name = name def __repr__(self): return "{}({})".format(self.__class__.__name__, self.name) def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): return query.resolve_ref(self.name, allow_joins, reuse, summarize) def asc(self, **kwargs): return OrderBy(self, **kwargs) def desc(self, **kwargs): return OrderBy(self, descending=True, **kwargs) def __eq__(self, other): return self.__class__ == other.__class__ and self.name == other.name def __hash__(self): return hash(self.name) class ResolvedOuterRef(F): """ An object that contains a reference to an outer query. In this case, the reference to the outer query has been resolved because the inner query has been used as a subquery. """ contains_aggregate = False def as_sql(self, *args, **kwargs): raise ValueError( 'This queryset contains a reference to an outer query and may ' 'only be used in a subquery.' 
) def resolve_expression(self, *args, **kwargs): col = super().resolve_expression(*args, **kwargs) # FIXME: Rename possibly_multivalued to multivalued and fix detection # for non-multivalued JOINs (e.g. foreign key fields). This should take # into account only many-to-many and one-to-many relationships. col.possibly_multivalued = LOOKUP_SEP in self.name return col def relabeled_clone(self, relabels): return self def get_group_by_cols(self, alias=None): return [] class OuterRef(F): def resolve_expression(self, *args, **kwargs): if isinstance(self.name, self.__class__): return self.name return ResolvedOuterRef(self.name) def relabeled_clone(self, relabels): return self class Func(SQLiteNumericMixin, Expression): """An SQL function call.""" function = None template = '%(function)s(%(expressions)s)' arg_joiner = ', ' arity = None # The number of arguments the function accepts. def __init__(self, *expressions, output_field=None, **extra): if self.arity is not None and len(expressions) != self.arity: raise TypeError( "'%s' takes exactly %s %s (%s given)" % ( self.__class__.__name__, self.arity, "argument" if self.arity == 1 else "arguments", len(expressions), ) ) super().__init__(output_field=output_field) self.source_expressions = self._parse_expressions(*expressions) self.extra = extra def __repr__(self): args = self.arg_joiner.join(str(arg) for arg in self.source_expressions) extra = {**self.extra, **self._get_repr_options()} if extra: extra = ', '.join(str(key) + '=' + str(val) for key, val in sorted(extra.items())) return "{}({}, {})".format(self.__class__.__name__, args, extra) return "{}({})".format(self.__class__.__name__, args) def _get_repr_options(self): """Return a dict of extra __init__() options to include in the repr.""" return {} def get_source_expressions(self): return self.source_expressions def set_source_expressions(self, exprs): self.source_expressions = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize for pos, arg in enumerate(c.source_expressions): c.source_expressions[pos] = arg.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def as_sql(self, compiler, connection, function=None, template=None, arg_joiner=None, **extra_context): connection.ops.check_expression_support(self) sql_parts = [] params = [] for arg in self.source_expressions: arg_sql, arg_params = compiler.compile(arg) sql_parts.append(arg_sql) params.extend(arg_params) data = {**self.extra, **extra_context} # Use the first supplied value in this order: the parameter to this # method, a value supplied in __init__()'s **extra (the value in # `data`), or the value defined on the class. if function is not None: data['function'] = function else: data.setdefault('function', self.function) template = template or data.get('template', self.template) arg_joiner = arg_joiner or data.get('arg_joiner', self.arg_joiner) data['expressions'] = data['field'] = arg_joiner.join(sql_parts) return template % data, params def copy(self): copy = super().copy() copy.source_expressions = self.source_expressions[:] copy.extra = self.extra.copy() return copy class Value(Expression): """Represent a wrapped value as a node within an expression.""" def __init__(self, value, output_field=None): """ Arguments: * value: the value this expression represents. The value will be added into the sql parameter list and properly quoted. 
* output_field: an instance of the model field type that this expression will return, such as IntegerField() or CharField(). """ super().__init__(output_field=output_field) self.value = value def __repr__(self): return "{}({})".format(self.__class__.__name__, self.value) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) val = self.value output_field = self._output_field_or_none if output_field is not None: if self.for_save: val = output_field.get_db_prep_save(val, connection=connection) else: val = output_field.get_db_prep_value(val, connection=connection) if hasattr(output_field, 'get_placeholder'): return output_field.get_placeholder(val, compiler, connection), [val] if val is None: # cx_Oracle does not always convert None to the appropriate # NULL type (like in case expressions using numbers), so we # use a literal SQL NULL return 'NULL', [] return '%s', [val] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = super().resolve_expression(query, allow_joins, reuse, summarize, for_save) c.for_save = for_save return c def get_group_by_cols(self, alias=None): return [] class DurationValue(Value): def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) if connection.features.has_native_duration_field: return super().as_sql(compiler, connection) return connection.ops.date_interval_sql(self.value), [] class RawSQL(Expression): def __init__(self, sql, params, output_field=None): if output_field is None: output_field = fields.Field() self.sql, self.params = sql, params super().__init__(output_field=output_field) def __repr__(self): return "{}({}, {})".format(self.__class__.__name__, self.sql, self.params) def as_sql(self, compiler, connection): return '(%s)' % self.sql, self.params def get_group_by_cols(self, alias=None): return [self] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): # Resolve parents fields used in raw SQL. 
for parent in query.model._meta.get_parent_list(): for parent_field in parent._meta.local_fields: _, column_name = parent_field.get_attname_column() if column_name.lower() in self.sql.lower(): query.resolve_ref(parent_field.name, allow_joins, reuse, summarize) break return super().resolve_expression(query, allow_joins, reuse, summarize, for_save) class Star(Expression): def __repr__(self): return "'*'" def as_sql(self, compiler, connection): return '*', [] class Random(Expression): output_field = fields.FloatField() def __repr__(self): return "Random()" def as_sql(self, compiler, connection): return connection.ops.random_function_sql(), [] class Col(Expression): contains_column_references = True possibly_multivalued = False def __init__(self, alias, target, output_field=None): if output_field is None: output_field = target super().__init__(output_field=output_field) self.alias, self.target = alias, target def __repr__(self): alias, target = self.alias, self.target identifiers = (alias, str(target)) if alias else (str(target),) return '{}({})'.format(self.__class__.__name__, ', '.join(identifiers)) def as_sql(self, compiler, connection): alias, column = self.alias, self.target.column identifiers = (alias, column) if alias else (column,) sql = '.'.join(map(compiler.quote_name_unless_alias, identifiers)) return sql, [] def relabeled_clone(self, relabels): if self.alias is None: return self return self.__class__(relabels.get(self.alias, self.alias), self.target, self.output_field) def get_group_by_cols(self, alias=None): return [self] def get_db_converters(self, connection): if self.target == self.output_field: return self.output_field.get_db_converters(connection) return (self.output_field.get_db_converters(connection) + self.target.get_db_converters(connection)) class Ref(Expression): """ Reference to column alias of the query. For example, Ref('sum_cost') in qs.annotate(sum_cost=Sum('cost')) query. """ def __init__(self, refs, source): super().__init__() self.refs, self.source = refs, source def __repr__(self): return "{}({}, {})".format(self.__class__.__name__, self.refs, self.source) def get_source_expressions(self): return [self.source] def set_source_expressions(self, exprs): self.source, = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): # The sub-expression `source` has already been resolved, as this is # just a reference to the name of `source`. return self def relabeled_clone(self, relabels): return self def as_sql(self, compiler, connection): return connection.ops.quote_name(self.refs), [] def get_group_by_cols(self, alias=None): return [self] class ExpressionList(Func): """ An expression containing multiple expressions. Can be used to provide a list of expressions as an argument to another expression, like an ordering clause. """ template = '%(expressions)s' def __init__(self, *expressions, **extra): if not expressions: raise ValueError('%s requires at least one expression.' % self.__class__.__name__) super().__init__(*expressions, **extra) def __str__(self): return self.arg_joiner.join(str(arg) for arg in self.source_expressions) class ExpressionWrapper(Expression): """ An expression that can wrap another expression so that it can provide extra context to the inner expression, such as the output_field. 
""" def __init__(self, expression, output_field): super().__init__(output_field=output_field) self.expression = expression def set_source_expressions(self, exprs): self.expression = exprs[0] def get_source_expressions(self): return [self.expression] def get_group_by_cols(self, alias=None): return self.expression.get_group_by_cols(alias=alias) def as_sql(self, compiler, connection): return self.expression.as_sql(compiler, connection) def __repr__(self): return "{}({})".format(self.__class__.__name__, self.expression) class When(Expression): template = 'WHEN %(condition)s THEN %(result)s' # This isn't a complete conditional expression, must be used in Case(). conditional = False def __init__(self, condition=None, then=None, **lookups): if lookups: if condition is None: condition, lookups = Q(**lookups), None elif getattr(condition, 'conditional', False): condition, lookups = Q(condition, **lookups), None if condition is None or not getattr(condition, 'conditional', False) or lookups: raise TypeError( 'When() supports a Q object, a boolean expression, or lookups ' 'as a condition.' ) if isinstance(condition, Q) and not condition: raise ValueError("An empty Q() can't be used as a When() condition.") super().__init__(output_field=None) self.condition = condition self.result = self._parse_expressions(then)[0] def __str__(self): return "WHEN %r THEN %r" % (self.condition, self.result) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) def get_source_expressions(self): return [self.condition, self.result] def set_source_expressions(self, exprs): self.condition, self.result = exprs def get_source_fields(self): # We're only interested in the fields of the result expressions. return [self.result._output_field_or_none] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize if hasattr(c.condition, 'resolve_expression'): c.condition = c.condition.resolve_expression(query, allow_joins, reuse, summarize, False) c.result = c.result.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def as_sql(self, compiler, connection, template=None, **extra_context): connection.ops.check_expression_support(self) template_params = extra_context sql_params = [] condition_sql, condition_params = compiler.compile(self.condition) template_params['condition'] = condition_sql sql_params.extend(condition_params) result_sql, result_params = compiler.compile(self.result) template_params['result'] = result_sql sql_params.extend(result_params) template = template or self.template return template % template_params, sql_params def get_group_by_cols(self, alias=None): # This is not a complete expression and cannot be used in GROUP BY. 
cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols class Case(Expression): """ An SQL searched CASE expression: CASE WHEN n > 0 THEN 'positive' WHEN n < 0 THEN 'negative' ELSE 'zero' END """ template = 'CASE %(cases)s ELSE %(default)s END' case_joiner = ' ' def __init__(self, *cases, default=None, output_field=None, **extra): if not all(isinstance(case, When) for case in cases): raise TypeError("Positional arguments must all be When objects.") super().__init__(output_field) self.cases = list(cases) self.default = self._parse_expressions(default)[0] self.extra = extra def __str__(self): return "CASE %s, ELSE %r" % (', '.join(str(c) for c in self.cases), self.default) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) def get_source_expressions(self): return self.cases + [self.default] def set_source_expressions(self, exprs): *self.cases, self.default = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize for pos, case in enumerate(c.cases): c.cases[pos] = case.resolve_expression(query, allow_joins, reuse, summarize, for_save) c.default = c.default.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def copy(self): c = super().copy() c.cases = c.cases[:] return c def as_sql(self, compiler, connection, template=None, case_joiner=None, **extra_context): connection.ops.check_expression_support(self) if not self.cases: return compiler.compile(self.default) template_params = {**self.extra, **extra_context} case_parts = [] sql_params = [] for case in self.cases: try: case_sql, case_params = compiler.compile(case) except EmptyResultSet: continue case_parts.append(case_sql) sql_params.extend(case_params) default_sql, default_params = compiler.compile(self.default) if not case_parts: return default_sql, default_params case_joiner = case_joiner or self.case_joiner template_params['cases'] = case_joiner.join(case_parts) template_params['default'] = default_sql sql_params.extend(default_params) template = template or template_params.get('template', self.template) sql = template % template_params if self._output_field_or_none is not None: sql = connection.ops.unification_cast_sql(self.output_field) % sql return sql, sql_params class Subquery(Expression): """ An explicit subquery. It may contain OuterRef() references to the outer query which will be resolved when it is applied to that query. """ template = '(%(subquery)s)' contains_aggregate = False def __init__(self, queryset, output_field=None, **extra): self.query = queryset.query self.extra = extra # Prevent the QuerySet from being evaluated. 
self.queryset = queryset._chain(_result_cache=[], prefetch_done=True) super().__init__(output_field) def __getstate__(self): state = super().__getstate__() args, kwargs = state['_constructor_args'] if args: args = (self.queryset, *args[1:]) else: kwargs['queryset'] = self.queryset state['_constructor_args'] = args, kwargs return state def get_source_expressions(self): return [self.query] def set_source_expressions(self, exprs): self.query = exprs[0] def _resolve_output_field(self): return self.query.output_field def copy(self): clone = super().copy() clone.query = clone.query.clone() return clone @property def external_aliases(self): return self.query.external_aliases def as_sql(self, compiler, connection, template=None, **extra_context): connection.ops.check_expression_support(self) template_params = {**self.extra, **extra_context} subquery_sql, sql_params = self.query.as_sql(compiler, connection) template_params['subquery'] = subquery_sql[1:-1] template = template or template_params.get('template', self.template) sql = template % template_params return sql, sql_params def get_group_by_cols(self, alias=None): if alias: return [Ref(alias, self)] external_cols = self.query.get_external_cols() if any(col.possibly_multivalued for col in external_cols): return [self] return external_cols class Exists(Subquery): template = 'EXISTS(%(subquery)s)' output_field = fields.BooleanField() def __init__(self, queryset, negated=False, **kwargs): # As a performance optimization, remove ordering since EXISTS doesn't # care about it, just whether or not a row matches. queryset = queryset.order_by() self.negated = negated super().__init__(queryset, **kwargs) def __invert__(self): clone = self.copy() clone.negated = not self.negated return clone def as_sql(self, compiler, connection, template=None, **extra_context): sql, params = super().as_sql(compiler, connection, template, **extra_context) if self.negated: sql = 'NOT {}'.format(sql) return sql, params def select_format(self, compiler, sql, params): # Wrap EXISTS() with a CASE WHEN expression if a database backend # (e.g. Oracle) doesn't support boolean expression in SELECT or GROUP # BY list. 
if not compiler.connection.features.supports_boolean_expr_in_select_clause: sql = 'CASE WHEN {} THEN 1 ELSE 0 END'.format(sql) return sql, params class OrderBy(BaseExpression): template = '%(expression)s %(ordering)s' conditional = False def __init__(self, expression, descending=False, nulls_first=False, nulls_last=False): if nulls_first and nulls_last: raise ValueError('nulls_first and nulls_last are mutually exclusive') self.nulls_first = nulls_first self.nulls_last = nulls_last self.descending = descending if not hasattr(expression, 'resolve_expression'): raise ValueError('expression must be an expression type') self.expression = expression def __repr__(self): return "{}({}, descending={})".format( self.__class__.__name__, self.expression, self.descending) def set_source_expressions(self, exprs): self.expression = exprs[0] def get_source_expressions(self): return [self.expression] def as_sql(self, compiler, connection, template=None, **extra_context): template = template or self.template if connection.features.supports_order_by_nulls_modifier: if self.nulls_last: template = '%s NULLS LAST' % template elif self.nulls_first: template = '%s NULLS FIRST' % template else: if self.nulls_last and not ( self.descending and connection.features.order_by_nulls_first ): template = '%%(expression)s IS NULL, %s' % template elif self.nulls_first and not ( not self.descending and connection.features.order_by_nulls_first ): template = '%%(expression)s IS NOT NULL, %s' % template connection.ops.check_expression_support(self) expression_sql, params = compiler.compile(self.expression) placeholders = { 'expression': expression_sql, 'ordering': 'DESC' if self.descending else 'ASC', **extra_context, } template = template or self.template params *= template.count('%(expression)s') return (template % placeholders).rstrip(), params def as_oracle(self, compiler, connection): # Oracle doesn't allow ORDER BY EXISTS() unless it's wrapped in # a CASE WHEN. if isinstance(self.expression, Exists): copy = self.copy() copy.expression = Case( When(self.expression, then=True), default=False, output_field=fields.BooleanField(), ) return copy.as_sql(compiler, connection) return self.as_sql(compiler, connection) def get_group_by_cols(self, alias=None): cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols def reverse_ordering(self): self.descending = not self.descending if self.nulls_first or self.nulls_last: self.nulls_first = not self.nulls_first self.nulls_last = not self.nulls_last return self def asc(self): self.descending = False def desc(self): self.descending = True class Window(Expression): template = '%(expression)s OVER (%(window)s)' # Although the main expression may either be an aggregate or an # expression with an aggregate function, the GROUP BY that will # be introduced in the query as a result is not desired. contains_aggregate = False contains_over_clause = True filterable = False def __init__(self, expression, partition_by=None, order_by=None, frame=None, output_field=None): self.partition_by = partition_by self.order_by = order_by self.frame = frame if not getattr(expression, 'window_compatible', False): raise ValueError( "Expression '%s' isn't compatible with OVER clauses." 
% expression.__class__.__name__ ) if self.partition_by is not None: if not isinstance(self.partition_by, (tuple, list)): self.partition_by = (self.partition_by,) self.partition_by = ExpressionList(*self.partition_by) if self.order_by is not None: if isinstance(self.order_by, (list, tuple)): self.order_by = ExpressionList(*self.order_by) elif not isinstance(self.order_by, BaseExpression): raise ValueError( 'order_by must be either an Expression or a sequence of ' 'expressions.' ) super().__init__(output_field=output_field) self.source_expression = self._parse_expressions(expression)[0] def _resolve_output_field(self): return self.source_expression.output_field def get_source_expressions(self): return [self.source_expression, self.partition_by, self.order_by, self.frame] def set_source_expressions(self, exprs): self.source_expression, self.partition_by, self.order_by, self.frame = exprs def as_sql(self, compiler, connection, template=None): connection.ops.check_expression_support(self) if not connection.features.supports_over_clause: raise NotSupportedError('This backend does not support window expressions.') expr_sql, params = compiler.compile(self.source_expression) window_sql, window_params = [], [] if self.partition_by is not None: sql_expr, sql_params = self.partition_by.as_sql( compiler=compiler, connection=connection, template='PARTITION BY %(expressions)s', ) window_sql.extend(sql_expr) window_params.extend(sql_params) if self.order_by is not None: window_sql.append(' ORDER BY ') order_sql, order_params = compiler.compile(self.order_by) window_sql.extend(order_sql) window_params.extend(order_params) if self.frame: frame_sql, frame_params = compiler.compile(self.frame) window_sql.append(' ' + frame_sql) window_params.extend(frame_params) params.extend(window_params) template = template or self.template return template % { 'expression': expr_sql, 'window': ''.join(window_sql).strip() }, params def __str__(self): return '{} OVER ({}{}{})'.format( str(self.source_expression), 'PARTITION BY ' + str(self.partition_by) if self.partition_by else '', 'ORDER BY ' + str(self.order_by) if self.order_by else '', str(self.frame or ''), ) def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def get_group_by_cols(self, alias=None): return [] class WindowFrame(Expression): """ Model the frame clause in window expressions. There are two types of frame clauses which are subclasses, however, all processing and validation (by no means intended to be complete) is done here. Thus, providing an end for a frame is optional (the default is UNBOUNDED FOLLOWING, which is the last row in the frame). 
""" template = '%(frame_type)s BETWEEN %(start)s AND %(end)s' def __init__(self, start=None, end=None): self.start = Value(start) self.end = Value(end) def set_source_expressions(self, exprs): self.start, self.end = exprs def get_source_expressions(self): return [self.start, self.end] def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) start, end = self.window_frame_start_end(connection, self.start.value, self.end.value) return self.template % { 'frame_type': self.frame_type, 'start': start, 'end': end, }, [] def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def get_group_by_cols(self, alias=None): return [] def __str__(self): if self.start.value is not None and self.start.value < 0: start = '%d %s' % (abs(self.start.value), connection.ops.PRECEDING) elif self.start.value is not None and self.start.value == 0: start = connection.ops.CURRENT_ROW else: start = connection.ops.UNBOUNDED_PRECEDING if self.end.value is not None and self.end.value > 0: end = '%d %s' % (self.end.value, connection.ops.FOLLOWING) elif self.end.value is not None and self.end.value == 0: end = connection.ops.CURRENT_ROW else: end = connection.ops.UNBOUNDED_FOLLOWING return self.template % { 'frame_type': self.frame_type, 'start': start, 'end': end, } def window_frame_start_end(self, connection, start, end): raise NotImplementedError('Subclasses must implement window_frame_start_end().') class RowRange(WindowFrame): frame_type = 'ROWS' def window_frame_start_end(self, connection, start, end): return connection.ops.window_frame_rows_start_end(start, end) class ValueRange(WindowFrame): frame_type = 'RANGE' def window_frame_start_end(self, connection, start, end): return connection.ops.window_frame_range_start_end(start, end)
from enum import Enum from django.db.models.query_utils import Q from django.db.models.sql.query import Query __all__ = ['CheckConstraint', 'Deferrable', 'UniqueConstraint'] class BaseConstraint: def __init__(self, name): self.name = name def constraint_sql(self, model, schema_editor): raise NotImplementedError('This method must be implemented by a subclass.') def create_sql(self, model, schema_editor): raise NotImplementedError('This method must be implemented by a subclass.') def remove_sql(self, model, schema_editor): raise NotImplementedError('This method must be implemented by a subclass.') def deconstruct(self): path = '%s.%s' % (self.__class__.__module__, self.__class__.__name__) path = path.replace('django.db.models.constraints', 'django.db.models') return (path, (), {'name': self.name}) def clone(self): _, args, kwargs = self.deconstruct() return self.__class__(*args, **kwargs) class CheckConstraint(BaseConstraint): def __init__(self, *, check, name): self.check = check if not getattr(check, 'conditional', False): raise TypeError( 'CheckConstraint.check must be a Q instance or boolean ' 'expression.' ) super().__init__(name) def _get_check_sql(self, model, schema_editor): query = Query(model=model, alias_cols=False) where = query.build_where(self.check) compiler = query.get_compiler(connection=schema_editor.connection) sql, params = where.as_sql(compiler, schema_editor.connection) return sql % tuple(schema_editor.quote_value(p) for p in params) def constraint_sql(self, model, schema_editor): check = self._get_check_sql(model, schema_editor) return schema_editor._check_sql(self.name, check) def create_sql(self, model, schema_editor): check = self._get_check_sql(model, schema_editor) return schema_editor._create_check_sql(model, self.name, check) def remove_sql(self, model, schema_editor): return schema_editor._delete_check_sql(model, self.name) def __repr__(self): return "<%s: check='%s' name=%r>" % (self.__class__.__name__, self.check, self.name) def __eq__(self, other): if isinstance(other, CheckConstraint): return self.name == other.name and self.check == other.check return super().__eq__(other) def deconstruct(self): path, args, kwargs = super().deconstruct() kwargs['check'] = self.check return path, args, kwargs class Deferrable(Enum): DEFERRED = 'deferred' IMMEDIATE = 'immediate' class UniqueConstraint(BaseConstraint): def __init__(self, *, fields, name, condition=None, deferrable=None, include=None): if not fields: raise ValueError('At least one field is required to define a unique constraint.') if not isinstance(condition, (type(None), Q)): raise ValueError('UniqueConstraint.condition must be a Q instance.') if condition and deferrable: raise ValueError( 'UniqueConstraint with conditions cannot be deferred.' ) if not isinstance(deferrable, (type(None), Deferrable)): raise ValueError( 'UniqueConstraint.deferrable must be a Deferrable instance.' 
) if not isinstance(include, (type(None), list, tuple)): raise ValueError('UniqueConstraint.include must be a list or tuple.') self.fields = tuple(fields) self.condition = condition self.deferrable = deferrable self.include = tuple(include) if include else () super().__init__(name) def _get_condition_sql(self, model, schema_editor): if self.condition is None: return None query = Query(model=model, alias_cols=False) where = query.build_where(self.condition) compiler = query.get_compiler(connection=schema_editor.connection) sql, params = where.as_sql(compiler, schema_editor.connection) return sql % tuple(schema_editor.quote_value(p) for p in params) def constraint_sql(self, model, schema_editor): fields = [model._meta.get_field(field_name).column for field_name in self.fields] include = [model._meta.get_field(field_name).column for field_name in self.include] condition = self._get_condition_sql(model, schema_editor) return schema_editor._unique_sql( model, fields, self.name, condition=condition, deferrable=self.deferrable, include=include, ) def create_sql(self, model, schema_editor): fields = [model._meta.get_field(field_name).column for field_name in self.fields] include = [model._meta.get_field(field_name).column for field_name in self.include] condition = self._get_condition_sql(model, schema_editor) return schema_editor._create_unique_sql( model, fields, self.name, condition=condition, deferrable=self.deferrable, include=include, ) def remove_sql(self, model, schema_editor): condition = self._get_condition_sql(model, schema_editor) include = [model._meta.get_field(field_name).column for field_name in self.include] return schema_editor._delete_unique_sql( model, self.name, condition=condition, deferrable=self.deferrable, include=include, ) def __repr__(self): return '<%s: fields=%r name=%r%s%s%s>' % ( self.__class__.__name__, self.fields, self.name, '' if self.condition is None else ' condition=%s' % self.condition, '' if self.deferrable is None else ' deferrable=%s' % self.deferrable, '' if not self.include else ' include=%s' % repr(self.include), ) def __eq__(self, other): if isinstance(other, UniqueConstraint): return ( self.name == other.name and self.fields == other.fields and self.condition == other.condition and self.deferrable == other.deferrable and self.include == other.include ) return super().__eq__(other) def deconstruct(self): path, args, kwargs = super().deconstruct() kwargs['fields'] = self.fields if self.condition: kwargs['condition'] = self.condition if self.deferrable: kwargs['deferrable'] = self.deferrable if self.include: kwargs['include'] = self.include return path, args, kwargs
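
# ---------------------------------------------------------------------------
# Illustrative sketch, not Django source: constructing the constraint classes
# above and round-tripping one through deconstruct(), which is what the
# migration writer serializes. Field names are hypothetical.
from django.db.models import CheckConstraint, Deferrable, Q, UniqueConstraint

price_positive = CheckConstraint(check=Q(price__gt=0), name='price_gt_zero')

# condition and deferrable are mutually exclusive (see __init__ above), so
# they go on separate constraints.
active_name = UniqueConstraint(
    fields=['name'],
    condition=Q(is_active=True),
    name='unique_active_name',
)
code_deferred = UniqueConstraint(
    fields=['code'],
    deferrable=Deferrable.DEFERRED,
    name='unique_code_deferred',
)

path, args, kwargs = active_name.deconstruct()
assert path == 'django.db.models.UniqueConstraint'  # rewritten from .constraints
assert kwargs['fields'] == ('name',)
assert kwargs['condition'] == Q(is_active=True)
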
from django.db import InterfaceError from django.db.backends.base.features import BaseDatabaseFeatures from django.utils.functional import cached_property class DatabaseFeatures(BaseDatabaseFeatures): interprets_empty_strings_as_nulls = True has_select_for_update = True has_select_for_update_nowait = True has_select_for_update_skip_locked = True has_select_for_update_of = True select_for_update_of_column = True can_return_columns_from_insert = True supports_subqueries_in_group_by = False supports_transactions = True supports_timezones = False has_native_duration_field = True can_defer_constraint_checks = True supports_partially_nullable_unique_constraints = False supports_deferrable_unique_constraints = True truncates_names = True supports_tablespaces = True supports_sequence_reset = False can_introspect_materialized_views = True atomic_transactions = False supports_combined_alters = False nulls_order_largest = True requires_literal_defaults = True closed_cursor_error_class = InterfaceError bare_select_suffix = " FROM DUAL" # select for update with limit can be achieved on Oracle, but not with the current backend. supports_select_for_update_with_limit = False supports_temporal_subtraction = True # Oracle doesn't ignore quoted identifiers case but the current backend # does by uppercasing all identifiers. ignores_table_name_case = True supports_index_on_text_field = False has_case_insensitive_like = False create_test_procedure_without_params_sql = """ CREATE PROCEDURE "TEST_PROCEDURE" AS V_I INTEGER; BEGIN V_I := 1; END; """ create_test_procedure_with_int_param_sql = """ CREATE PROCEDURE "TEST_PROCEDURE" (P_I INTEGER) AS V_I INTEGER; BEGIN V_I := P_I; END; """ supports_callproc_kwargs = True supports_over_clause = True supports_frame_range_fixed_distance = True supports_ignore_conflicts = False max_query_params = 2**16 - 1 supports_partial_indexes = False supports_slicing_ordering_in_compound = True allows_multiple_constraints_on_same_fields = False supports_boolean_expr_in_select_clause = False supports_primitives_in_json_field = False @cached_property def introspected_field_types(self): return { **super().introspected_field_types, 'GenericIPAddressField': 'CharField', 'PositiveBigIntegerField': 'BigIntegerField', 'PositiveIntegerField': 'IntegerField', 'PositiveSmallIntegerField': 'IntegerField', 'SmallIntegerField': 'IntegerField', 'TimeField': 'DateTimeField', }
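
# ---------------------------------------------------------------------------
# Illustrative sketch, not Django source: feature flags like those above are
# read at runtime from connection.features, and the test suite uses them to
# skip backend-specific tests. The test class below is hypothetical.
from django.test import TestCase, skipUnlessDBFeature


class ConstraintTests(TestCase):
    @skipUnlessDBFeature('supports_partial_indexes')
    def test_conditional_unique_constraint(self):
        # Skipped on Oracle, whose DatabaseFeatures sets
        # supports_partial_indexes = False (see above).
        ...

    @skipUnlessDBFeature('supports_deferrable_unique_constraints')
    def test_deferrable_unique_constraint(self):
        # Runs on Oracle: its DatabaseFeatures opts in (see above).
        ...
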
from django.db import ProgrammingError from django.utils.functional import cached_property class BaseDatabaseFeatures: gis_enabled = False allows_group_by_pk = False allows_group_by_selected_pks = False empty_fetchmany_value = [] update_can_self_select = True # Does the backend distinguish between '' and None? interprets_empty_strings_as_nulls = False # Does the backend allow inserting duplicate NULL rows in a nullable # unique field? All core backends implement this correctly, but other # databases such as SQL Server do not. supports_nullable_unique_constraints = True # Does the backend allow inserting duplicate rows when a unique_together # constraint exists and some fields are nullable but not all of them? supports_partially_nullable_unique_constraints = True # Does the backend support initially deferrable unique constraints? supports_deferrable_unique_constraints = False can_use_chunked_reads = True can_return_columns_from_insert = False can_return_rows_from_bulk_insert = False has_bulk_insert = True uses_savepoints = True can_release_savepoints = False # If True, don't use integer foreign keys referring to, e.g., positive # integer primary keys. related_fields_match_type = False allow_sliced_subqueries_with_in = True has_select_for_update = False has_select_for_update_nowait = False has_select_for_update_skip_locked = False has_select_for_update_of = False has_select_for_no_key_update = False # Does the database's SELECT FOR UPDATE OF syntax require a column rather # than a table? select_for_update_of_column = False # Does the default test database allow multiple connections? # Usually an indication that the test database is in-memory test_db_allows_multiple_connections = True # Can an object be saved without an explicit primary key? supports_unspecified_pk = False # Can a fixture contain forward references? i.e., are # FK constraints checked at the end of transaction, or # at the end of each save operation? supports_forward_references = True # Does the backend truncate names properly when they are too long? truncates_names = False # Is there a REAL datatype in addition to floats/doubles? has_real_datatype = False supports_subqueries_in_group_by = True # Is there a true datatype for uuid? has_native_uuid_field = False # Is there a true datatype for timedeltas? has_native_duration_field = False # Does the database driver supports same type temporal data subtraction # by returning the type used to store duration field? supports_temporal_subtraction = False # Does the __regex lookup support backreferencing and grouping? supports_regex_backreferencing = True # Can date/datetime lookups be performed using a string? supports_date_lookup_using_string = True # Can datetimes with timezones be used? supports_timezones = True # Does the database have a copy of the zoneinfo database? has_zoneinfo_database = True # When performing a GROUP BY, is an ORDER BY NULL required # to remove any ordering? requires_explicit_null_ordering_when_grouping = False # Does the backend order NULL values as largest or smallest? nulls_order_largest = False # Does the backend support NULLS FIRST and NULLS LAST in ORDER BY? supports_order_by_nulls_modifier = True # Does the backend orders NULLS FIRST by default? order_by_nulls_first = False # The database's limit on the number of query parameters. max_query_params = None # Can an object have an autoincrement primary key of 0? MySQL says No. 
allows_auto_pk_0 = True # Do we need to NULL a ForeignKey out, or can the constraint check be # deferred can_defer_constraint_checks = False # date_interval_sql can properly handle mixed Date/DateTime fields and timedeltas supports_mixed_date_datetime_comparisons = True # Does the backend support tablespaces? Default to False because it isn't # in the SQL standard. supports_tablespaces = False # Does the backend reset sequences between tests? supports_sequence_reset = True # Can the backend introspect the default value of a column? can_introspect_default = True # Confirm support for introspected foreign keys # Every database can do this reliably, except MySQL, # which can't do it for MyISAM tables can_introspect_foreign_keys = True # Map fields which some backends may not be able to differentiate to the # field it's introspected as. introspected_field_types = { 'AutoField': 'AutoField', 'BigAutoField': 'BigAutoField', 'BigIntegerField': 'BigIntegerField', 'BinaryField': 'BinaryField', 'BooleanField': 'BooleanField', 'CharField': 'CharField', 'DurationField': 'DurationField', 'GenericIPAddressField': 'GenericIPAddressField', 'IntegerField': 'IntegerField', 'PositiveBigIntegerField': 'PositiveBigIntegerField', 'PositiveIntegerField': 'PositiveIntegerField', 'PositiveSmallIntegerField': 'PositiveSmallIntegerField', 'SmallAutoField': 'SmallAutoField', 'SmallIntegerField': 'SmallIntegerField', 'TimeField': 'TimeField', } # Can the backend introspect the column order (ASC/DESC) for indexes? supports_index_column_ordering = True # Does the backend support introspection of materialized views? can_introspect_materialized_views = False # Support for the DISTINCT ON clause can_distinct_on_fields = False # Does the backend prevent running SQL queries in broken transactions? atomic_transactions = True # Can we roll back DDL in a transaction? can_rollback_ddl = False # Does it support operations requiring references rename in a transaction? supports_atomic_references_rename = True # Can we issue more than one ALTER COLUMN clause in an ALTER TABLE? supports_combined_alters = False # Does it support foreign keys? supports_foreign_keys = True # Can it create foreign key constraints inline when adding columns? can_create_inline_fk = True # Does it support CHECK constraints? supports_column_check_constraints = True supports_table_check_constraints = True # Does the backend support introspection of CHECK constraints? can_introspect_check_constraints = True # Does the backend support 'pyformat' style ("... %(name)s ...", {'name': value}) # parameter passing? Note this can be provided by the backend even if not # supported by the Python driver supports_paramstyle_pyformat = True # Does the backend require literal defaults, rather than parameterized ones? requires_literal_defaults = False # Does the backend require a connection reset after each material schema change? connection_persists_old_columns = False # What kind of error does the backend throw when accessing closed cursor? closed_cursor_error_class = ProgrammingError # Does 'a' LIKE 'A' match? has_case_insensitive_like = True # Suffix for backends that don't support "SELECT xxx;" queries. bare_select_suffix = '' # If NULL is implied on columns without needing to be explicitly specified implied_column_null = False # Does the backend support "select for update" queries with limit (and offset)? supports_select_for_update_with_limit = True # Does the backend ignore null expressions in GREATEST and LEAST queries unless # every expression is null? 
greatest_least_ignores_nulls = False # Can the backend clone databases for parallel test execution? # Defaults to False to allow third-party backends to opt-in. can_clone_databases = False # Does the backend consider table names with different casing to # be equal? ignores_table_name_case = False # Place FOR UPDATE right after FROM clause. Used on MSSQL. for_update_after_from = False # Combinatorial flags supports_select_union = True supports_select_intersection = True supports_select_difference = True supports_slicing_ordering_in_compound = False supports_parentheses_in_compound = True # Does the database support SQL 2003 FILTER (WHERE ...) in aggregate # expressions? supports_aggregate_filter_clause = False # Does the backend support indexing a TextField? supports_index_on_text_field = True # Does the backend support window expressions (expression OVER (...))? supports_over_clause = False supports_frame_range_fixed_distance = False only_supports_unbounded_with_preceding_and_following = False # Does the backend support CAST with precision? supports_cast_with_precision = True # How many second decimals does the database return when casting a value to # a type with time? time_cast_precision = 6 # SQL to create a procedure for use by the Django test suite. The # functionality of the procedure isn't important. create_test_procedure_without_params_sql = None create_test_procedure_with_int_param_sql = None # Does the backend support keyword parameters for cursor.callproc()? supports_callproc_kwargs = False # Convert CharField results from bytes to str in database functions. db_functions_convert_bytes_to_str = False # What formats does the backend EXPLAIN syntax support? supported_explain_formats = set() # Does DatabaseOperations.explain_query_prefix() raise ValueError if # unknown kwargs are passed to QuerySet.explain()? validates_explain_options = True # Does the backend support the default parameter in lead() and lag()? supports_default_in_lead_lag = True # Does the backend support ignoring constraint or uniqueness errors during # INSERT? supports_ignore_conflicts = True # Does this backend require casting the results of CASE expressions used # in UPDATE statements to ensure the expression has the correct type? requires_casted_case_in_updates = False # Does the backend support partial indexes (CREATE INDEX ... WHERE ...)? supports_partial_indexes = True supports_functions_in_partial_indexes = True # Does the backend support covering indexes (CREATE INDEX ... INCLUDE ...)? supports_covering_indexes = False # Does the database allow more than one constraint or index on the same # field(s)? allows_multiple_constraints_on_same_fields = True # Does the backend support boolean expressions in SELECT and GROUP BY # clauses? supports_boolean_expr_in_select_clause = True # Does the backend support JSONField? supports_json_field = True # Can the backend introspect a JSONField? can_introspect_json_field = True # Does the backend support primitives in JSONField? supports_primitives_in_json_field = True # Is there a true datatype for JSON? has_native_json_field = False # Does the backend use PostgreSQL-style JSON operators like '->'? 
has_json_operators = False def __init__(self, connection): self.connection = connection @cached_property def supports_explaining_query_execution(self): """Does this backend support explaining query execution?""" return self.connection.ops.explain_prefix is not None @cached_property def supports_transactions(self): """Confirm support for transactions.""" with self.connection.cursor() as cursor: cursor.execute('CREATE TABLE ROLLBACK_TEST (X INT)') self.connection.set_autocommit(False) cursor.execute('INSERT INTO ROLLBACK_TEST (X) VALUES (8)') self.connection.rollback() self.connection.set_autocommit(True) cursor.execute('SELECT COUNT(X) FROM ROLLBACK_TEST') count, = cursor.fetchone() cursor.execute('DROP TABLE ROLLBACK_TEST') return count == 0 def allows_group_by_selected_pks_on_model(self, model): if not self.allows_group_by_selected_pks: return False return model._meta.managed
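
# ---------------------------------------------------------------------------
# Illustrative sketch, not Django source: supports_transactions above is a
# cached_property that probes the live connection once (CREATE TABLE /
# INSERT / ROLLBACK / SELECT) and caches the answer per connection. A backend
# that knows the answer statically can shadow the descriptor with a plain
# class attribute, exactly as the Oracle DatabaseFeatures above does with
# supports_transactions = True. The subclass below is hypothetical.
from django.db.backends.base.features import BaseDatabaseFeatures


class StaticFeatures(BaseDatabaseFeatures):
    # A subclass class attribute is found before the base class's
    # cached_property in the MRO, so no probe SQL is ever executed.
    supports_transactions = False
    max_query_params = 999  # e.g. a driver-imposed placeholder limit
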
import logging from datetime import datetime from django.db.backends.ddl_references import ( Columns, ForeignKeyName, IndexName, Statement, Table, ) from django.db.backends.utils import names_digest, split_identifier from django.db.models import Deferrable, Index from django.db.transaction import TransactionManagementError, atomic from django.utils import timezone logger = logging.getLogger('django.db.backends.schema') def _is_relevant_relation(relation, altered_field): """ When altering the given field, must constraints on its model from the given relation be temporarily dropped? """ field = relation.field if field.many_to_many: # M2M reverse field return False if altered_field.primary_key and field.to_fields == [None]: # Foreign key constraint on the primary key, which is being altered. return True # Is the constraint targeting the field being altered? return altered_field.name in field.to_fields def _all_related_fields(model): return model._meta._get_fields(forward=False, reverse=True, include_hidden=True) def _related_non_m2m_objects(old_field, new_field): # Filter out m2m objects from reverse relations. # Return (old_relation, new_relation) tuples. return zip( (obj for obj in _all_related_fields(old_field.model) if _is_relevant_relation(obj, old_field)), (obj for obj in _all_related_fields(new_field.model) if _is_relevant_relation(obj, new_field)), ) class BaseDatabaseSchemaEditor: """ This class and its subclasses are responsible for emitting schema-changing statements to the databases - model creation/removal/alteration, field renaming, index fiddling, and so on. """ # Overrideable SQL templates sql_create_table = "CREATE TABLE %(table)s (%(definition)s)" sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s" sql_retablespace_table = "ALTER TABLE %(table)s SET TABLESPACE %(new_tablespace)s" sql_delete_table = "DROP TABLE %(table)s CASCADE" sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s" sql_alter_column = "ALTER TABLE %(table)s %(changes)s" sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s" sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL" sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL" sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s" sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT" sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE" sql_rename_column = "ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s" sql_update_with_default = "UPDATE %(table)s SET %(column)s = %(default)s WHERE %(column)s IS NULL" sql_unique_constraint = "UNIQUE (%(columns)s)%(deferrable)s" sql_check_constraint = "CHECK (%(check)s)" sql_delete_constraint = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" sql_constraint = "CONSTRAINT %(name)s %(constraint)s" sql_create_check = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s)" sql_delete_check = sql_delete_constraint sql_create_unique = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s UNIQUE (%(columns)s)%(deferrable)s" sql_delete_unique = sql_delete_constraint sql_create_fk = ( "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) " "REFERENCES %(to_table)s (%(to_column)s)%(deferrable)s" ) sql_create_inline_fk = None sql_create_column_inline_fk = None sql_delete_fk = sql_delete_constraint sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(include)s%(extra)s%(condition)s" sql_create_unique_index = "CREATE UNIQUE INDEX %(name)s ON 
%(table)s (%(columns)s)%(include)s%(condition)s" sql_delete_index = "DROP INDEX %(name)s" sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)" sql_delete_pk = sql_delete_constraint sql_delete_procedure = 'DROP PROCEDURE %(procedure)s' def __init__(self, connection, collect_sql=False, atomic=True): self.connection = connection self.collect_sql = collect_sql if self.collect_sql: self.collected_sql = [] self.atomic_migration = self.connection.features.can_rollback_ddl and atomic # State-managing methods def __enter__(self): self.deferred_sql = [] if self.atomic_migration: self.atomic = atomic(self.connection.alias) self.atomic.__enter__() return self def __exit__(self, exc_type, exc_value, traceback): if exc_type is None: for sql in self.deferred_sql: self.execute(sql) if self.atomic_migration: self.atomic.__exit__(exc_type, exc_value, traceback) # Core utility functions def execute(self, sql, params=()): """Execute the given SQL statement, with optional parameters.""" # Don't perform the transactional DDL check if SQL is being collected # as it's not going to be executed anyway. if not self.collect_sql and self.connection.in_atomic_block and not self.connection.features.can_rollback_ddl: raise TransactionManagementError( "Executing DDL statements while in a transaction on databases " "that can't perform a rollback is prohibited." ) # Account for non-string statement objects. sql = str(sql) # Log the command we're running, then run it logger.debug("%s; (params %r)", sql, params, extra={'params': params, 'sql': sql}) if self.collect_sql: ending = "" if sql.endswith(";") else ";" if params is not None: self.collected_sql.append((sql % tuple(map(self.quote_value, params))) + ending) else: self.collected_sql.append(sql + ending) else: with self.connection.cursor() as cursor: cursor.execute(sql, params) def quote_name(self, name): return self.connection.ops.quote_name(name) def table_sql(self, model): """Take a model and return its table definition.""" # Add any unique_togethers (always deferred, as some fields might be # created afterwards, like geometry fields with some backends). for fields in model._meta.unique_together: columns = [model._meta.get_field(field).column for field in fields] self.deferred_sql.append(self._create_unique_sql(model, columns)) # Create column SQL, add FK deferreds if needed. column_sqls = [] params = [] for field in model._meta.local_fields: # SQL. definition, extra_params = self.column_sql(model, field) if definition is None: continue # Check constraints can go on the column SQL here. db_params = field.db_parameters(connection=self.connection) if db_params['check']: definition += ' ' + self.sql_check_constraint % db_params # Autoincrement SQL (for backends with inline variant). col_type_suffix = field.db_type_suffix(connection=self.connection) if col_type_suffix: definition += ' %s' % col_type_suffix params.extend(extra_params) # FK. if field.remote_field and field.db_constraint: to_table = field.remote_field.model._meta.db_table to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column if self.sql_create_inline_fk: definition += ' ' + self.sql_create_inline_fk % { 'to_table': self.quote_name(to_table), 'to_column': self.quote_name(to_column), } elif self.connection.features.supports_foreign_keys: self.deferred_sql.append(self._create_fk_sql(model, field, '_fk_%(to_table)s_%(to_column)s')) # Add the SQL to our big list. 
column_sqls.append('%s %s' % ( self.quote_name(field.column), definition, )) # Autoincrement SQL (for backends with post table definition # variant). if field.get_internal_type() in ('AutoField', 'BigAutoField', 'SmallAutoField'): autoinc_sql = self.connection.ops.autoinc_sql(model._meta.db_table, field.column) if autoinc_sql: self.deferred_sql.extend(autoinc_sql) constraints = [constraint.constraint_sql(model, self) for constraint in model._meta.constraints] sql = self.sql_create_table % { 'table': self.quote_name(model._meta.db_table), 'definition': ', '.join(constraint for constraint in (*column_sqls, *constraints) if constraint), } if model._meta.db_tablespace: tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace) if tablespace_sql: sql += ' ' + tablespace_sql return sql, params # Field <-> database mapping functions def column_sql(self, model, field, include_default=False): """ Take a field and return its column definition. The field must already have had set_attributes_from_name() called. """ # Get the column's type and use that as the basis of the SQL db_params = field.db_parameters(connection=self.connection) sql = db_params['type'] params = [] # Check for fields that aren't actually columns (e.g. M2M) if sql is None: return None, None # Work out nullability null = field.null # If we were told to include a default value, do so include_default = include_default and not self.skip_default(field) if include_default: default_value = self.effective_default(field) column_default = ' DEFAULT ' + self._column_default_sql(field) if default_value is not None: if self.connection.features.requires_literal_defaults: # Some databases can't take defaults as a parameter (oracle) # If this is the case, the individual schema backend should # implement prepare_default sql += column_default % self.prepare_default(default_value) else: sql += column_default params += [default_value] # Oracle treats the empty string ('') as null, so coerce the null # option whenever '' is a possible value. if (field.empty_strings_allowed and not field.primary_key and self.connection.features.interprets_empty_strings_as_nulls): null = True if null and not self.connection.features.implied_column_null: sql += " NULL" elif not null: sql += " NOT NULL" # Primary key/unique outputs if field.primary_key: sql += " PRIMARY KEY" elif field.unique: sql += " UNIQUE" # Optionally add the tablespace if it's an implicitly indexed column tablespace = field.db_tablespace or model._meta.db_tablespace if tablespace and self.connection.features.supports_tablespaces and field.unique: sql += " %s" % self.connection.ops.tablespace_sql(tablespace, inline=True) # Return the sql return sql, params def skip_default(self, field): """ Some backends don't accept default values for certain columns types (i.e. MySQL longtext and longblob). """ return False def prepare_default(self, value): """ Only used for backends which have requires_literal_defaults feature """ raise NotImplementedError( 'subclasses of BaseDatabaseSchemaEditor for backends which have ' 'requires_literal_defaults must provide a prepare_default() method' ) def _column_default_sql(self, field): """ Return the SQL to use in a DEFAULT clause. The resulting string should contain a '%s' placeholder for a default value. """ return '%s' @staticmethod def _effective_default(field): # This method allows testing its logic without a connection. 
if field.has_default(): default = field.get_default() elif not field.null and field.blank and field.empty_strings_allowed: if field.get_internal_type() == "BinaryField": default = b'' else: default = '' elif getattr(field, 'auto_now', False) or getattr(field, 'auto_now_add', False): default = datetime.now() internal_type = field.get_internal_type() if internal_type == 'DateField': default = default.date() elif internal_type == 'TimeField': default = default.time() elif internal_type == 'DateTimeField': default = timezone.now() else: default = None return default def effective_default(self, field): """Return a field's effective database default value.""" return field.get_db_prep_save(self._effective_default(field), self.connection) def quote_value(self, value): """ Return a quoted version of the value so it's safe to use in an SQL string. This is not safe against injection from user code; it is intended only for use in making SQL scripts or preparing default values for particularly tricky backends (defaults are not user-defined, though, so this is safe). """ raise NotImplementedError() # Actions def create_model(self, model): """ Create a table and any accompanying indexes or unique constraints for the given `model`. """ sql, params = self.table_sql(model) # Prevent using [] as params, in the case a literal '%' is used in the definition self.execute(sql, params or None) # Add any field index and index_together's (deferred as SQLite _remake_table needs it) self.deferred_sql.extend(self._model_indexes_sql(model)) # Make M2M tables for field in model._meta.local_many_to_many: if field.remote_field.through._meta.auto_created: self.create_model(field.remote_field.through) def delete_model(self, model): """Delete a model from the database.""" # Handle auto-created intermediary models for field in model._meta.local_many_to_many: if field.remote_field.through._meta.auto_created: self.delete_model(field.remote_field.through) # Delete the table self.execute(self.sql_delete_table % { "table": self.quote_name(model._meta.db_table), }) # Remove all deferred statements referencing the deleted table. for sql in list(self.deferred_sql): if isinstance(sql, Statement) and sql.references_table(model._meta.db_table): self.deferred_sql.remove(sql) def add_index(self, model, index): """Add an index on a model.""" self.execute(index.create_sql(model, self), params=None) def remove_index(self, model, index): """Remove an index from a model.""" self.execute(index.remove_sql(model, self)) def add_constraint(self, model, constraint): """Add a constraint to a model.""" sql = constraint.create_sql(model, self) if sql: self.execute(sql) def remove_constraint(self, model, constraint): """Remove a constraint from a model.""" sql = constraint.remove_sql(model, self) if sql: self.execute(sql) def alter_unique_together(self, model, old_unique_together, new_unique_together): """ Deal with a model changing its unique_together. The input unique_togethers must be doubly-nested, not the single-nested ["foo", "bar"] format. 
""" olds = {tuple(fields) for fields in old_unique_together} news = {tuple(fields) for fields in new_unique_together} # Deleted uniques for fields in olds.difference(news): self._delete_composed_index(model, fields, {'unique': True}, self.sql_delete_unique) # Created uniques for fields in news.difference(olds): columns = [model._meta.get_field(field).column for field in fields] self.execute(self._create_unique_sql(model, columns)) def alter_index_together(self, model, old_index_together, new_index_together): """ Deal with a model changing its index_together. The input index_togethers must be doubly-nested, not the single-nested ["foo", "bar"] format. """ olds = {tuple(fields) for fields in old_index_together} news = {tuple(fields) for fields in new_index_together} # Deleted indexes for fields in olds.difference(news): self._delete_composed_index( model, fields, {'index': True, 'unique': False}, self.sql_delete_index, ) # Created indexes for field_names in news.difference(olds): fields = [model._meta.get_field(field) for field in field_names] self.execute(self._create_index_sql(model, fields, suffix="_idx")) def _delete_composed_index(self, model, fields, constraint_kwargs, sql): meta_constraint_names = {constraint.name for constraint in model._meta.constraints} meta_index_names = {constraint.name for constraint in model._meta.indexes} columns = [model._meta.get_field(field).column for field in fields] constraint_names = self._constraint_names( model, columns, exclude=meta_constraint_names | meta_index_names, **constraint_kwargs ) if len(constraint_names) != 1: raise ValueError("Found wrong number (%s) of constraints for %s(%s)" % ( len(constraint_names), model._meta.db_table, ", ".join(columns), )) self.execute(self._delete_constraint_sql(sql, model, constraint_names[0])) def alter_db_table(self, model, old_db_table, new_db_table): """Rename the table a model points to.""" if (old_db_table == new_db_table or (self.connection.features.ignores_table_name_case and old_db_table.lower() == new_db_table.lower())): return self.execute(self.sql_rename_table % { "old_table": self.quote_name(old_db_table), "new_table": self.quote_name(new_db_table), }) # Rename all references to the old table name. for sql in self.deferred_sql: if isinstance(sql, Statement): sql.rename_table_references(old_db_table, new_db_table) def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace): """Move a model's table between tablespaces.""" self.execute(self.sql_retablespace_table % { "table": self.quote_name(model._meta.db_table), "old_tablespace": self.quote_name(old_db_tablespace), "new_tablespace": self.quote_name(new_db_tablespace), }) def add_field(self, model, field): """ Create a field on a model. Usually involves adding a column, but may involve adding a table instead (for M2M fields). 
""" # Special-case implicit M2M tables if field.many_to_many and field.remote_field.through._meta.auto_created: return self.create_model(field.remote_field.through) # Get the column's definition definition, params = self.column_sql(model, field, include_default=True) # It might not actually have a column behind it if definition is None: return # Check constraints can go on the column SQL here db_params = field.db_parameters(connection=self.connection) if db_params['check']: definition += " " + self.sql_check_constraint % db_params if field.remote_field and self.connection.features.supports_foreign_keys and field.db_constraint: constraint_suffix = '_fk_%(to_table)s_%(to_column)s' # Add FK constraint inline, if supported. if self.sql_create_column_inline_fk: to_table = field.remote_field.model._meta.db_table to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column definition += " " + self.sql_create_column_inline_fk % { 'name': self._fk_constraint_name(model, field, constraint_suffix), 'column': self.quote_name(field.column), 'to_table': self.quote_name(to_table), 'to_column': self.quote_name(to_column), 'deferrable': self.connection.ops.deferrable_sql() } # Otherwise, add FK constraints later. else: self.deferred_sql.append(self._create_fk_sql(model, field, constraint_suffix)) # Build the SQL and run it sql = self.sql_create_column % { "table": self.quote_name(model._meta.db_table), "column": self.quote_name(field.column), "definition": definition, } self.execute(sql, params) # Drop the default if we need to # (Django usually does not use in-database defaults) if not self.skip_default(field) and self.effective_default(field) is not None: changes_sql, params = self._alter_column_default_sql(model, None, field, drop=True) sql = self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": changes_sql, } self.execute(sql, params) # Add an index, if required self.deferred_sql.extend(self._field_indexes_sql(model, field)) # Reset connection if required if self.connection.features.connection_persists_old_columns: self.connection.close() def remove_field(self, model, field): """ Remove a field from a model. Usually involves deleting a column, but for M2Ms may involve deleting a table. """ # Special-case implicit M2M tables if field.many_to_many and field.remote_field.through._meta.auto_created: return self.delete_model(field.remote_field.through) # It might not actually have a column behind it if field.db_parameters(connection=self.connection)['type'] is None: return # Drop any FK constraints, MySQL requires explicit deletion if field.remote_field: fk_names = self._constraint_names(model, [field.column], foreign_key=True) for fk_name in fk_names: self.execute(self._delete_fk_sql(model, fk_name)) # Delete the column sql = self.sql_delete_column % { "table": self.quote_name(model._meta.db_table), "column": self.quote_name(field.column), } self.execute(sql) # Reset connection if required if self.connection.features.connection_persists_old_columns: self.connection.close() # Remove all deferred statements referencing the deleted column. for sql in list(self.deferred_sql): if isinstance(sql, Statement) and sql.references_column(model._meta.db_table, field.column): self.deferred_sql.remove(sql) def alter_field(self, model, old_field, new_field, strict=False): """ Allow a field's type, uniqueness, nullability, default, column, constraints, etc. to be modified. `old_field` is required to compute the necessary changes. 
If `strict` is True, raise errors if the old column does not match `old_field` precisely. """ # Ensure this field is even column-based old_db_params = old_field.db_parameters(connection=self.connection) old_type = old_db_params['type'] new_db_params = new_field.db_parameters(connection=self.connection) new_type = new_db_params['type'] if ((old_type is None and old_field.remote_field is None) or (new_type is None and new_field.remote_field is None)): raise ValueError( "Cannot alter field %s into %s - they do not properly define " "db_type (are you using a badly-written custom field?)" % (old_field, new_field), ) elif old_type is None and new_type is None and ( old_field.remote_field.through and new_field.remote_field.through and old_field.remote_field.through._meta.auto_created and new_field.remote_field.through._meta.auto_created): return self._alter_many_to_many(model, old_field, new_field, strict) elif old_type is None and new_type is None and ( old_field.remote_field.through and new_field.remote_field.through and not old_field.remote_field.through._meta.auto_created and not new_field.remote_field.through._meta.auto_created): # Both sides have through models; this is a no-op. return elif old_type is None or new_type is None: raise ValueError( "Cannot alter field %s into %s - they are not compatible types " "(you cannot alter to or from M2M fields, or add or remove " "through= on M2M fields)" % (old_field, new_field) ) self._alter_field(model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict) def _alter_field(self, model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict=False): """Perform a "physical" (non-ManyToMany) field update.""" # Drop any FK constraints, we'll remake them later fks_dropped = set() if ( self.connection.features.supports_foreign_keys and old_field.remote_field and old_field.db_constraint ): fk_names = self._constraint_names(model, [old_field.column], foreign_key=True) if strict and len(fk_names) != 1: raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % ( len(fk_names), model._meta.db_table, old_field.column, )) for fk_name in fk_names: fks_dropped.add((old_field.column,)) self.execute(self._delete_fk_sql(model, fk_name)) # Has unique been removed? if old_field.unique and (not new_field.unique or self._field_became_primary_key(old_field, new_field)): # Find the unique constraint for this field meta_constraint_names = {constraint.name for constraint in model._meta.constraints} constraint_names = self._constraint_names( model, [old_field.column], unique=True, primary_key=False, exclude=meta_constraint_names, ) if strict and len(constraint_names) != 1: raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % ( len(constraint_names), model._meta.db_table, old_field.column, )) for constraint_name in constraint_names: self.execute(self._delete_unique_sql(model, constraint_name)) # Drop incoming FK constraints if the field is a primary key or unique, # which might be a to_field target, and things are going to change. 
drop_foreign_keys = ( self.connection.features.supports_foreign_keys and ( (old_field.primary_key and new_field.primary_key) or (old_field.unique and new_field.unique) ) and old_type != new_type ) if drop_foreign_keys: # '_meta.related_field' also contains M2M reverse fields, these # will be filtered out for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field): rel_fk_names = self._constraint_names( new_rel.related_model, [new_rel.field.column], foreign_key=True ) for fk_name in rel_fk_names: self.execute(self._delete_fk_sql(new_rel.related_model, fk_name)) # Removed an index? (no strict check, as multiple indexes are possible) # Remove indexes if db_index switched to False or a unique constraint # will now be used in lieu of an index. The following lines from the # truth table show all True cases; the rest are False: # # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique # ------------------------------------------------------------------------------ # True | False | False | False # True | False | False | True # True | False | True | True if old_field.db_index and not old_field.unique and (not new_field.db_index or new_field.unique): # Find the index for this field meta_index_names = {index.name for index in model._meta.indexes} # Retrieve only BTREE indexes since this is what's created with # db_index=True. index_names = self._constraint_names( model, [old_field.column], index=True, type_=Index.suffix, exclude=meta_index_names, ) for index_name in index_names: # The only way to check if an index was created with # db_index=True or with Index(['field'], name='foo') # is to look at its name (refs #28053). self.execute(self._delete_index_sql(model, index_name)) # Change check constraints? if old_db_params['check'] != new_db_params['check'] and old_db_params['check']: meta_constraint_names = {constraint.name for constraint in model._meta.constraints} constraint_names = self._constraint_names( model, [old_field.column], check=True, exclude=meta_constraint_names, ) if strict and len(constraint_names) != 1: raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % ( len(constraint_names), model._meta.db_table, old_field.column, )) for constraint_name in constraint_names: self.execute(self._delete_check_sql(model, constraint_name)) # Have they renamed the column? if old_field.column != new_field.column: self.execute(self._rename_field_sql(model._meta.db_table, old_field, new_field, new_type)) # Rename all references to the renamed column. for sql in self.deferred_sql: if isinstance(sql, Statement): sql.rename_column_references(model._meta.db_table, old_field.column, new_field.column) # Next, start accumulating actions to do actions = [] null_actions = [] post_actions = [] # Type change? if old_type != new_type: fragment, other_actions = self._alter_column_type_sql(model, old_field, new_field, new_type) actions.append(fragment) post_actions.extend(other_actions) # When changing a column NULL constraint to NOT NULL with a given # default value, we need to perform 4 steps: # 1. Add a default for new incoming writes # 2. Update existing NULL rows with new default # 3. Replace NULL constraint with NOT NULL # 4. Drop the default again. # Default change? 
needs_database_default = False if old_field.null and not new_field.null: old_default = self.effective_default(old_field) new_default = self.effective_default(new_field) if ( not self.skip_default(new_field) and old_default != new_default and new_default is not None ): needs_database_default = True actions.append(self._alter_column_default_sql(model, old_field, new_field)) # Nullability change? if old_field.null != new_field.null: fragment = self._alter_column_null_sql(model, old_field, new_field) if fragment: null_actions.append(fragment) # Only if we have a default and there is a change from NULL to NOT NULL four_way_default_alteration = ( new_field.has_default() and (old_field.null and not new_field.null) ) if actions or null_actions: if not four_way_default_alteration: # If we don't have to do a 4-way default alteration we can # directly run a (NOT) NULL alteration actions = actions + null_actions # Combine actions together if we can (e.g. postgres) if self.connection.features.supports_combined_alters and actions: sql, params = tuple(zip(*actions)) actions = [(", ".join(sql), sum(params, []))] # Apply those actions for sql, params in actions: self.execute( self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": sql, }, params, ) if four_way_default_alteration: # Update existing rows with default value self.execute( self.sql_update_with_default % { "table": self.quote_name(model._meta.db_table), "column": self.quote_name(new_field.column), "default": "%s", }, [new_default], ) # Since we didn't run a NOT NULL change before we need to do it # now for sql, params in null_actions: self.execute( self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": sql, }, params, ) if post_actions: for sql, params in post_actions: self.execute(sql, params) # If primary_key changed to False, delete the primary key constraint. if old_field.primary_key and not new_field.primary_key: self._delete_primary_key(model, strict) # Added a unique? if self._unique_should_be_added(old_field, new_field): self.execute(self._create_unique_sql(model, [new_field.column])) # Added an index? Add an index if db_index switched to True or a unique # constraint will no longer be used in lieu of an index. The following # lines from the truth table show all True cases; the rest are False: # # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique # ------------------------------------------------------------------------------ # False | False | True | False # False | True | True | False # True | True | True | False if (not old_field.db_index or old_field.unique) and new_field.db_index and not new_field.unique: self.execute(self._create_index_sql(model, [new_field])) # Type alteration on primary key? Then we need to alter the column # referring to us. rels_to_update = [] if drop_foreign_keys: rels_to_update.extend(_related_non_m2m_objects(old_field, new_field)) # Changed to become primary key? 
if self._field_became_primary_key(old_field, new_field): # Make the new one self.execute(self._create_primary_key_sql(model, new_field)) # Update all referencing columns rels_to_update.extend(_related_non_m2m_objects(old_field, new_field)) # Handle our type alters on the other end of rels from the PK stuff above for old_rel, new_rel in rels_to_update: rel_db_params = new_rel.field.db_parameters(connection=self.connection) rel_type = rel_db_params['type'] fragment, other_actions = self._alter_column_type_sql( new_rel.related_model, old_rel.field, new_rel.field, rel_type ) self.execute( self.sql_alter_column % { "table": self.quote_name(new_rel.related_model._meta.db_table), "changes": fragment[0], }, fragment[1], ) for sql, params in other_actions: self.execute(sql, params) # Does it have a foreign key? if (self.connection.features.supports_foreign_keys and new_field.remote_field and (fks_dropped or not old_field.remote_field or not old_field.db_constraint) and new_field.db_constraint): self.execute(self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s")) # Rebuild FKs that pointed to us if we previously had to drop them if drop_foreign_keys: for rel in new_field.model._meta.related_objects: if _is_relevant_relation(rel, new_field) and rel.field.db_constraint: self.execute(self._create_fk_sql(rel.related_model, rel.field, "_fk")) # Does it have check constraints we need to add? if old_db_params['check'] != new_db_params['check'] and new_db_params['check']: constraint_name = self._create_index_name(model._meta.db_table, [new_field.column], suffix='_check') self.execute(self._create_check_sql(model, constraint_name, new_db_params['check'])) # Drop the default if we need to # (Django usually does not use in-database defaults) if needs_database_default: changes_sql, params = self._alter_column_default_sql(model, old_field, new_field, drop=True) sql = self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": changes_sql, } self.execute(sql, params) # Reset connection if required if self.connection.features.connection_persists_old_columns: self.connection.close() def _alter_column_null_sql(self, model, old_field, new_field): """ Hook to specialize column null alteration. Return a (sql, params) fragment to set a column to null or non-null as required by new_field, or None if no changes are required. """ if (self.connection.features.interprets_empty_strings_as_nulls and new_field.get_internal_type() in ("CharField", "TextField")): # The field is nullable in the database anyway, leave it alone. return else: new_db_params = new_field.db_parameters(connection=self.connection) sql = self.sql_alter_column_null if new_field.null else self.sql_alter_column_not_null return ( sql % { 'column': self.quote_name(new_field.column), 'type': new_db_params['type'], }, [], ) def _alter_column_default_sql(self, model, old_field, new_field, drop=False): """ Hook to specialize column default alteration. Return a (sql, params) fragment to add or drop (depending on the drop argument) a default to new_field's column. """ new_default = self.effective_default(new_field) default = self._column_default_sql(new_field) params = [new_default] if drop: params = [] elif self.connection.features.requires_literal_defaults: # Some databases (Oracle) can't take defaults as a parameter # If this is the case, the SchemaEditor for that database should # implement prepare_default(). 
default = self.prepare_default(new_default) params = [] new_db_params = new_field.db_parameters(connection=self.connection) sql = self.sql_alter_column_no_default if drop else self.sql_alter_column_default return ( sql % { 'column': self.quote_name(new_field.column), 'type': new_db_params['type'], 'default': default, }, params, ) def _alter_column_type_sql(self, model, old_field, new_field, new_type): """ Hook to specialize column type alteration for different backends, for cases when a creation type is different to an alteration type (e.g. SERIAL in PostgreSQL, PostGIS fields). Return a two-tuple of: an SQL fragment of (sql, params) to insert into an ALTER TABLE statement and a list of extra (sql, params) tuples to run once the field is altered. """ return ( ( self.sql_alter_column_type % { "column": self.quote_name(new_field.column), "type": new_type, }, [], ), [], ) def _alter_many_to_many(self, model, old_field, new_field, strict): """Alter M2Ms to repoint their to= endpoints.""" # Rename the through table if old_field.remote_field.through._meta.db_table != new_field.remote_field.through._meta.db_table: self.alter_db_table(old_field.remote_field.through, old_field.remote_field.through._meta.db_table, new_field.remote_field.through._meta.db_table) # Repoint the FK to the other side self.alter_field( new_field.remote_field.through, # We need the field that points to the target model, so we can tell alter_field to change it - # this is m2m_reverse_field_name() (as opposed to m2m_field_name, which points to our model) old_field.remote_field.through._meta.get_field(old_field.m2m_reverse_field_name()), new_field.remote_field.through._meta.get_field(new_field.m2m_reverse_field_name()), ) self.alter_field( new_field.remote_field.through, # for self-referential models we need to alter field from the other end too old_field.remote_field.through._meta.get_field(old_field.m2m_field_name()), new_field.remote_field.through._meta.get_field(new_field.m2m_field_name()), ) def _create_index_name(self, table_name, column_names, suffix=""): """ Generate a unique name for an index/unique constraint. The name is divided into 3 parts: the table name, the column names, and a unique digest and suffix. """ _, table_name = split_identifier(table_name) hash_suffix_part = '%s%s' % (names_digest(table_name, *column_names, length=8), suffix) max_length = self.connection.ops.max_name_length() or 200 # If everything fits into max_length, use that name. index_name = '%s_%s_%s' % (table_name, '_'.join(column_names), hash_suffix_part) if len(index_name) <= max_length: return index_name # Shorten a long suffix. if len(hash_suffix_part) > max_length / 3: hash_suffix_part = hash_suffix_part[:max_length // 3] other_length = (max_length - len(hash_suffix_part)) // 2 - 1 index_name = '%s_%s_%s' % ( table_name[:other_length], '_'.join(column_names)[:other_length], hash_suffix_part, ) # Prepend D if needed to prevent the name from starting with an # underscore or a number (not permitted on Oracle). 
if index_name[0] == "_" or index_name[0].isdigit(): index_name = "D%s" % index_name[:-1] return index_name def _get_index_tablespace_sql(self, model, fields, db_tablespace=None): if db_tablespace is None: if len(fields) == 1 and fields[0].db_tablespace: db_tablespace = fields[0].db_tablespace elif model._meta.db_tablespace: db_tablespace = model._meta.db_tablespace if db_tablespace is not None: return ' ' + self.connection.ops.tablespace_sql(db_tablespace) return '' def _index_condition_sql(self, condition): if condition: return ' WHERE ' + condition return '' def _index_include_sql(self, model, columns): if not columns or not self.connection.features.supports_covering_indexes: return '' return Statement( ' INCLUDE (%(columns)s)', columns=Columns(model._meta.db_table, columns, self.quote_name), ) def _create_index_sql(self, model, fields, *, name=None, suffix='', using='', db_tablespace=None, col_suffixes=(), sql=None, opclasses=(), condition=None, include=None): """ Return the SQL statement to create the index for one or several fields. `sql` can be specified if the syntax differs from the standard (GIS indexes, ...). """ tablespace_sql = self._get_index_tablespace_sql(model, fields, db_tablespace=db_tablespace) columns = [field.column for field in fields] sql_create_index = sql or self.sql_create_index table = model._meta.db_table def create_index_name(*args, **kwargs): nonlocal name if name is None: name = self._create_index_name(*args, **kwargs) return self.quote_name(name) return Statement( sql_create_index, table=Table(table, self.quote_name), name=IndexName(table, columns, suffix, create_index_name), using=using, columns=self._index_columns(table, columns, col_suffixes, opclasses), extra=tablespace_sql, condition=self._index_condition_sql(condition), include=self._index_include_sql(model, include), ) def _delete_index_sql(self, model, name, sql=None): return Statement( sql or self.sql_delete_index, table=Table(model._meta.db_table, self.quote_name), name=self.quote_name(name), ) def _index_columns(self, table, columns, col_suffixes, opclasses): return Columns(table, columns, self.quote_name, col_suffixes=col_suffixes) def _model_indexes_sql(self, model): """ Return a list of all index SQL statements (field indexes, index_together, Meta.indexes) for the specified model. """ if not model._meta.managed or model._meta.proxy or model._meta.swapped: return [] output = [] for field in model._meta.local_fields: output.extend(self._field_indexes_sql(model, field)) for field_names in model._meta.index_together: fields = [model._meta.get_field(field) for field in field_names] output.append(self._create_index_sql(model, fields, suffix="_idx")) for index in model._meta.indexes: output.append(index.create_sql(model, self)) return output def _field_indexes_sql(self, model, field): """ Return a list of all index SQL statements for the specified field. 
""" output = [] if self._field_should_be_indexed(model, field): output.append(self._create_index_sql(model, [field])) return output def _field_should_be_indexed(self, model, field): return field.db_index and not field.unique def _field_became_primary_key(self, old_field, new_field): return not old_field.primary_key and new_field.primary_key def _unique_should_be_added(self, old_field, new_field): return (not old_field.unique and new_field.unique) or ( old_field.primary_key and not new_field.primary_key and new_field.unique ) def _rename_field_sql(self, table, old_field, new_field, new_type): return self.sql_rename_column % { "table": self.quote_name(table), "old_column": self.quote_name(old_field.column), "new_column": self.quote_name(new_field.column), "type": new_type, } def _create_fk_sql(self, model, field, suffix): table = Table(model._meta.db_table, self.quote_name) name = self._fk_constraint_name(model, field, suffix) column = Columns(model._meta.db_table, [field.column], self.quote_name) to_table = Table(field.target_field.model._meta.db_table, self.quote_name) to_column = Columns(field.target_field.model._meta.db_table, [field.target_field.column], self.quote_name) deferrable = self.connection.ops.deferrable_sql() return Statement( self.sql_create_fk, table=table, name=name, column=column, to_table=to_table, to_column=to_column, deferrable=deferrable, ) def _fk_constraint_name(self, model, field, suffix): def create_fk_name(*args, **kwargs): return self.quote_name(self._create_index_name(*args, **kwargs)) return ForeignKeyName( model._meta.db_table, [field.column], split_identifier(field.target_field.model._meta.db_table)[1], [field.target_field.column], suffix, create_fk_name, ) def _delete_fk_sql(self, model, name): return self._delete_constraint_sql(self.sql_delete_fk, model, name) def _deferrable_constraint_sql(self, deferrable): if deferrable is None: return '' if deferrable == Deferrable.DEFERRED: return ' DEFERRABLE INITIALLY DEFERRED' if deferrable == Deferrable.IMMEDIATE: return ' DEFERRABLE INITIALLY IMMEDIATE' def _unique_sql(self, model, fields, name, condition=None, deferrable=None, include=None): if ( deferrable and not self.connection.features.supports_deferrable_unique_constraints ): return None if condition or include: # Databases support conditional and covering unique constraints via # a unique index. 
sql = self._create_unique_sql( model, fields, name=name, condition=condition, include=include, ) if sql: self.deferred_sql.append(sql) return None constraint = self.sql_unique_constraint % { 'columns': ', '.join(map(self.quote_name, fields)), 'deferrable': self._deferrable_constraint_sql(deferrable), } return self.sql_constraint % { 'name': self.quote_name(name), 'constraint': constraint, } def _create_unique_sql(self, model, columns, name=None, condition=None, deferrable=None, include=None): if ( ( deferrable and not self.connection.features.supports_deferrable_unique_constraints ) or (condition and not self.connection.features.supports_partial_indexes) or (include and not self.connection.features.supports_covering_indexes) ): return None def create_unique_name(*args, **kwargs): return self.quote_name(self._create_index_name(*args, **kwargs)) table = Table(model._meta.db_table, self.quote_name) if name is None: name = IndexName(model._meta.db_table, columns, '_uniq', create_unique_name) else: name = self.quote_name(name) columns = Columns(table, columns, self.quote_name) if condition or include: sql = self.sql_create_unique_index else: sql = self.sql_create_unique return Statement( sql, table=table, name=name, columns=columns, condition=self._index_condition_sql(condition), deferrable=self._deferrable_constraint_sql(deferrable), include=self._index_include_sql(model, include), ) def _delete_unique_sql(self, model, name, condition=None, deferrable=None, include=None): if ( ( deferrable and not self.connection.features.supports_deferrable_unique_constraints ) or (condition and not self.connection.features.supports_partial_indexes) or (include and not self.connection.features.supports_covering_indexes) ): return None if condition or include: sql = self.sql_delete_index else: sql = self.sql_delete_unique return self._delete_constraint_sql(sql, model, name) def _check_sql(self, name, check): return self.sql_constraint % { 'name': self.quote_name(name), 'constraint': self.sql_check_constraint % {'check': check}, } def _create_check_sql(self, model, name, check): return Statement( self.sql_create_check, table=Table(model._meta.db_table, self.quote_name), name=self.quote_name(name), check=check, ) def _delete_check_sql(self, model, name): return self._delete_constraint_sql(self.sql_delete_check, model, name) def _delete_constraint_sql(self, template, model, name): return Statement( template, table=Table(model._meta.db_table, self.quote_name), name=self.quote_name(name), ) def _constraint_names(self, model, column_names=None, unique=None, primary_key=None, index=None, foreign_key=None, check=None, type_=None, exclude=None): """Return all constraint names matching the columns and conditions.""" if column_names is not None: column_names = [ self.connection.introspection.identifier_converter(name) for name in column_names ] with self.connection.cursor() as cursor: constraints = self.connection.introspection.get_constraints(cursor, model._meta.db_table) result = [] for name, infodict in constraints.items(): if column_names is None or column_names == infodict['columns']: if unique is not None and infodict['unique'] != unique: continue if primary_key is not None and infodict['primary_key'] != primary_key: continue if index is not None and infodict['index'] != index: continue if check is not None and infodict['check'] != check: continue if foreign_key is not None and not infodict['foreign_key']: continue if type_ is not None and infodict['type'] != type_: continue if not exclude or name not in exclude: 
result.append(name) return result def _delete_primary_key(self, model, strict=False): constraint_names = self._constraint_names(model, primary_key=True) if strict and len(constraint_names) != 1: raise ValueError('Found wrong number (%s) of PK constraints for %s' % ( len(constraint_names), model._meta.db_table, )) for constraint_name in constraint_names: self.execute(self._delete_primary_key_sql(model, constraint_name)) def _create_primary_key_sql(self, model, field): return Statement( self.sql_create_pk, table=Table(model._meta.db_table, self.quote_name), name=self.quote_name( self._create_index_name(model._meta.db_table, [field.column], suffix="_pk") ), columns=Columns(model._meta.db_table, [field.column], self.quote_name), ) def _delete_primary_key_sql(self, model, name): return self._delete_constraint_sql(self.sql_delete_pk, model, name) def remove_procedure(self, procedure_name, param_types=()): sql = self.sql_delete_procedure % { 'procedure': self.quote_name(procedure_name), 'param_types': ','.join(param_types), } self.execute(sql)
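
# Illustrative sketch, not part of this module: schema editors are normally
# obtained from a connection and used as context managers, so deferred SQL
# (indexes, FK constraints) is flushed on __exit__ and, when the backend
# supports transactional DDL, the whole block runs atomically. The `Author`
# model and the 'schema_demo' app label below are hypothetical stand-ins.

def _example_schema_editor_usage():
    from django.db import connection, models

    class Author(models.Model):
        name = models.CharField(max_length=100, db_index=True)

        class Meta:
            app_label = 'schema_demo'

    # collect_sql=True records the statements instead of executing them;
    # this is the same mode the sqlmigrate command uses to print SQL.
    with connection.schema_editor(collect_sql=True) as editor:
        editor.create_model(Author)
    return editor.collected_sql
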
901e76b44fc6563993e91d15bb058e734a81ec2cf9b779d47b8a9745f1bc02d1
import operator from django.db.backends.base.features import BaseDatabaseFeatures from django.utils.functional import cached_property class DatabaseFeatures(BaseDatabaseFeatures): empty_fetchmany_value = () allows_group_by_pk = True related_fields_match_type = True # MySQL doesn't support sliced subqueries with IN/ALL/ANY/SOME. allow_sliced_subqueries_with_in = False has_select_for_update = True supports_forward_references = False supports_regex_backreferencing = False supports_date_lookup_using_string = False supports_index_column_ordering = False supports_timezones = False requires_explicit_null_ordering_when_grouping = True allows_auto_pk_0 = False can_release_savepoints = True atomic_transactions = False can_clone_databases = True supports_temporal_subtraction = True supports_select_intersection = False supports_select_difference = False supports_slicing_ordering_in_compound = True supports_index_on_text_field = False has_case_insensitive_like = False create_test_procedure_without_params_sql = """ CREATE PROCEDURE test_procedure () BEGIN DECLARE V_I INTEGER; SET V_I = 1; END; """ create_test_procedure_with_int_param_sql = """ CREATE PROCEDURE test_procedure (P_I INTEGER) BEGIN DECLARE V_I INTEGER; SET V_I = P_I; END; """ db_functions_convert_bytes_to_str = True # Neither MySQL nor MariaDB support partial indexes. supports_partial_indexes = False supports_order_by_nulls_modifier = False order_by_nulls_first = True @cached_property def _mysql_storage_engine(self): "Internal method used in Django tests. Don't rely on this from your code" with self.connection.cursor() as cursor: cursor.execute("SELECT ENGINE FROM INFORMATION_SCHEMA.ENGINES WHERE SUPPORT = 'DEFAULT'") result = cursor.fetchone() return result[0] @cached_property def update_can_self_select(self): return self.connection.mysql_is_mariadb and self.connection.mysql_version >= (10, 3, 2) @cached_property def can_introspect_foreign_keys(self): "Confirm support for introspected foreign keys" return self._mysql_storage_engine != 'MyISAM' @cached_property def introspected_field_types(self): return { **super().introspected_field_types, 'BinaryField': 'TextField', 'BooleanField': 'IntegerField', 'DurationField': 'BigIntegerField', 'GenericIPAddressField': 'CharField', } @cached_property def can_return_columns_from_insert(self): return self.connection.mysql_is_mariadb and self.connection.mysql_version >= (10, 5, 0) can_return_rows_from_bulk_insert = property(operator.attrgetter('can_return_columns_from_insert')) @cached_property def has_zoneinfo_database(self): # Test if the time zone definitions are installed. CONVERT_TZ returns # NULL if 'UTC' timezone isn't loaded into the mysql.time_zone. 
with self.connection.cursor() as cursor: cursor.execute("SELECT CONVERT_TZ('2001-01-01 01:00:00', 'UTC', 'UTC')") return cursor.fetchone()[0] is not None @cached_property def is_sql_auto_is_null_enabled(self): with self.connection.cursor() as cursor: cursor.execute('SELECT @@SQL_AUTO_IS_NULL') result = cursor.fetchone() return result and result[0] == 1 @cached_property def supports_over_clause(self): if self.connection.mysql_is_mariadb: return True return self.connection.mysql_version >= (8, 0, 2) supports_frame_range_fixed_distance = property(operator.attrgetter('supports_over_clause')) @cached_property def supports_column_check_constraints(self): if self.connection.mysql_is_mariadb: return self.connection.mysql_version >= (10, 2, 1) return self.connection.mysql_version >= (8, 0, 16) supports_table_check_constraints = property(operator.attrgetter('supports_column_check_constraints')) @cached_property def can_introspect_check_constraints(self): if self.connection.mysql_is_mariadb: version = self.connection.mysql_version return (version >= (10, 2, 22) and version < (10, 3)) or version >= (10, 3, 10) return self.connection.mysql_version >= (8, 0, 16) @cached_property def has_select_for_update_skip_locked(self): return not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (8, 0, 1) @cached_property def has_select_for_update_nowait(self): if self.connection.mysql_is_mariadb: return self.connection.mysql_version >= (10, 3, 0) return self.connection.mysql_version >= (8, 0, 1) @cached_property def supports_explain_analyze(self): return self.connection.mysql_is_mariadb or self.connection.mysql_version >= (8, 0, 18) @cached_property def supported_explain_formats(self): # Alias MySQL's TRADITIONAL to TEXT for consistency with other # backends. formats = {'JSON', 'TEXT', 'TRADITIONAL'} if not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (8, 0, 16): formats.add('TREE') return formats @cached_property def supports_transactions(self): """ All storage engines except MyISAM support transactions. """ return self._mysql_storage_engine != 'MyISAM' @cached_property def ignores_table_name_case(self): with self.connection.cursor() as cursor: cursor.execute('SELECT @@LOWER_CASE_TABLE_NAMES') result = cursor.fetchone() return result and result[0] != 0 @cached_property def supports_default_in_lead_lag(self): # To be added in https://jira.mariadb.org/browse/MDEV-12981. return not self.connection.mysql_is_mariadb @cached_property def supports_json_field(self): if self.connection.mysql_is_mariadb: return self.connection.mysql_version >= (10, 2, 7) return self.connection.mysql_version >= (5, 7, 8) @cached_property def can_introspect_json_field(self): if self.connection.mysql_is_mariadb: return self.supports_json_field and self.can_introspect_check_constraints return self.supports_json_field
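
# Illustrative sketch, not part of the backend: flags such as the ones above
# are consumed through `connection.features`, which lets calling code stay
# portable across MySQL/MariaDB versions instead of parsing version strings
# itself. The fallback branch here is a deliberately trivial stand-in.

def _example_feature_gate(queryset):
    from django.db import connection

    if connection.features.supports_select_intersection:
        # Safe to emit INTERSECT on this backend/version.
        return queryset.intersection(queryset)
    # On backends without INTERSECT (such as this one), a real caller
    # would emulate the operation, e.g. with an id__in filter.
    return queryset
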
73594e763190c673ec940fb454d14a92230529350e0b3f90fbf1cb5d508e64bd
import operator from django.db import InterfaceError from django.db.backends.base.features import BaseDatabaseFeatures from django.utils.functional import cached_property class DatabaseFeatures(BaseDatabaseFeatures): allows_group_by_selected_pks = True can_return_columns_from_insert = True can_return_rows_from_bulk_insert = True has_real_datatype = True has_native_uuid_field = True has_native_duration_field = True has_native_json_field = True can_defer_constraint_checks = True has_select_for_update = True has_select_for_update_nowait = True has_select_for_update_of = True has_select_for_update_skip_locked = True has_select_for_no_key_update = True can_release_savepoints = True supports_tablespaces = True supports_transactions = True can_introspect_materialized_views = True can_distinct_on_fields = True can_rollback_ddl = True supports_combined_alters = True nulls_order_largest = True closed_cursor_error_class = InterfaceError has_case_insensitive_like = False greatest_least_ignores_nulls = True can_clone_databases = True supports_temporal_subtraction = True supports_slicing_ordering_in_compound = True create_test_procedure_without_params_sql = """ CREATE FUNCTION test_procedure () RETURNS void AS $$ DECLARE V_I INTEGER; BEGIN V_I := 1; END; $$ LANGUAGE plpgsql;""" create_test_procedure_with_int_param_sql = """ CREATE FUNCTION test_procedure (P_I INTEGER) RETURNS void AS $$ DECLARE V_I INTEGER; BEGIN V_I := P_I; END; $$ LANGUAGE plpgsql;""" requires_casted_case_in_updates = True supports_over_clause = True only_supports_unbounded_with_preceding_and_following = True supports_aggregate_filter_clause = True supported_explain_formats = {'JSON', 'TEXT', 'XML', 'YAML'} validates_explain_options = False # A query will error on invalid options. supports_deferrable_unique_constraints = True has_json_operators = True @cached_property def introspected_field_types(self): return { **super().introspected_field_types, 'PositiveBigIntegerField': 'BigIntegerField', 'PositiveIntegerField': 'IntegerField', 'PositiveSmallIntegerField': 'SmallIntegerField', } @cached_property def is_postgresql_10(self): return self.connection.pg_version >= 100000 @cached_property def is_postgresql_11(self): return self.connection.pg_version >= 110000 @cached_property def is_postgresql_12(self): return self.connection.pg_version >= 120000 has_brin_autosummarize = property(operator.attrgetter('is_postgresql_10')) has_websearch_to_tsquery = property(operator.attrgetter('is_postgresql_11')) supports_table_partitions = property(operator.attrgetter('is_postgresql_10')) supports_covering_indexes = property(operator.attrgetter('is_postgresql_11')) supports_covering_gist_indexes = property(operator.attrgetter('is_postgresql_12'))
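
# A minimal sketch of the version arithmetic behind the cutoffs above:
# self.connection.pg_version is the integer server version reported by the
# driver, e.g. 110005 for PostgreSQL 11.5, which is why the properties
# compare against 100000/110000/120000. This decomposition assumes
# PostgreSQL 10+ versioning (major.minor).

def _example_split_pg_version(pg_version=110005):
    major, minor = divmod(pg_version, 10000)
    return major, minor  # (11, 5) for the default above
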
4f85c0a17aa238c9677cac5032316ed8cf99bc67dc2fdd92e5e2330a3704813b
import psycopg2 from django.db.backends.base.schema import BaseDatabaseSchemaEditor from django.db.backends.ddl_references import IndexColumns from django.db.backends.utils import strip_quotes class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): sql_create_sequence = "CREATE SEQUENCE %(sequence)s" sql_delete_sequence = "DROP SEQUENCE IF EXISTS %(sequence)s CASCADE" sql_set_sequence_max = "SELECT setval('%(sequence)s', MAX(%(column)s)) FROM %(table)s" sql_set_sequence_owner = 'ALTER SEQUENCE %(sequence)s OWNED BY %(table)s.%(column)s' sql_create_index = ( 'CREATE INDEX %(name)s ON %(table)s%(using)s ' '(%(columns)s)%(include)s%(extra)s%(condition)s' ) sql_create_index_concurrently = ( 'CREATE INDEX CONCURRENTLY %(name)s ON %(table)s%(using)s ' '(%(columns)s)%(include)s%(extra)s%(condition)s' ) sql_delete_index = "DROP INDEX IF EXISTS %(name)s" sql_delete_index_concurrently = "DROP INDEX CONCURRENTLY IF EXISTS %(name)s" # Setting the constraint to IMMEDIATE to allow changing data in the same # transaction. sql_create_column_inline_fk = ( 'CONSTRAINT %(name)s REFERENCES %(to_table)s(%(to_column)s)%(deferrable)s' '; SET CONSTRAINTS %(name)s IMMEDIATE' ) # Setting the constraint to IMMEDIATE runs any deferred checks to allow # dropping it in the same transaction. sql_delete_fk = "SET CONSTRAINTS %(name)s IMMEDIATE; ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" sql_delete_procedure = 'DROP FUNCTION %(procedure)s(%(param_types)s)' def quote_value(self, value): if isinstance(value, str): value = value.replace('%', '%%') # getquoted() returns a quoted bytestring of the adapted value. return psycopg2.extensions.adapt(value).getquoted().decode() def _field_indexes_sql(self, model, field): output = super()._field_indexes_sql(model, field) like_index_statement = self._create_like_index_sql(model, field) if like_index_statement is not None: output.append(like_index_statement) return output def _field_data_type(self, field): if field.is_relation: return field.rel_db_type(self.connection) return self.connection.data_types.get( field.get_internal_type(), field.db_type(self.connection), ) def _field_base_data_types(self, field): # Yield base data types for array fields. if field.base_field.get_internal_type() == 'ArrayField': yield from self._field_base_data_types(field.base_field) else: yield self._field_data_type(field.base_field) def _create_like_index_sql(self, model, field): """ Return the statement to create an index with varchar operator pattern when the column type is 'varchar' or 'text', otherwise return None. """ db_type = field.db_type(connection=self.connection) if db_type is not None and (field.db_index or field.unique): # Fields with database column types of `varchar` and `text` need # a second index that specifies their operator class, which is # needed when performing correct LIKE queries outside the # C locale. See #12234. # # The same doesn't apply to array fields such as varchar[size] # and text[size], so skip them. if '[' in db_type: return None if db_type.startswith('varchar'): return self._create_index_sql(model, [field], suffix='_like', opclasses=['varchar_pattern_ops']) elif db_type.startswith('text'): return self._create_index_sql(model, [field], suffix='_like', opclasses=['text_pattern_ops']) return None def _alter_column_type_sql(self, model, old_field, new_field, new_type): self.sql_alter_column_type = 'ALTER COLUMN %(column)s TYPE %(type)s' # Cast when data type changed. 
using_sql = ' USING %(column)s::%(type)s' new_internal_type = new_field.get_internal_type() old_internal_type = old_field.get_internal_type() if new_internal_type == 'ArrayField' and new_internal_type == old_internal_type: # Compare base data types for array fields. if list(self._field_base_data_types(old_field)) != list(self._field_base_data_types(new_field)): self.sql_alter_column_type += using_sql elif self._field_data_type(old_field) != self._field_data_type(new_field): self.sql_alter_column_type += using_sql # Make ALTER TYPE with SERIAL make sense. table = strip_quotes(model._meta.db_table) serial_fields_map = {'bigserial': 'bigint', 'serial': 'integer', 'smallserial': 'smallint'} if new_type.lower() in serial_fields_map: column = strip_quotes(new_field.column) sequence_name = "%s_%s_seq" % (table, column) return ( ( self.sql_alter_column_type % { "column": self.quote_name(column), "type": serial_fields_map[new_type.lower()], }, [], ), [ ( self.sql_delete_sequence % { "sequence": self.quote_name(sequence_name), }, [], ), ( self.sql_create_sequence % { "sequence": self.quote_name(sequence_name), }, [], ), ( self.sql_alter_column % { "table": self.quote_name(table), "changes": self.sql_alter_column_default % { "column": self.quote_name(column), "default": "nextval('%s')" % self.quote_name(sequence_name), } }, [], ), ( self.sql_set_sequence_max % { "table": self.quote_name(table), "column": self.quote_name(column), "sequence": self.quote_name(sequence_name), }, [], ), ( self.sql_set_sequence_owner % { 'table': self.quote_name(table), 'column': self.quote_name(column), 'sequence': self.quote_name(sequence_name), }, [], ), ], ) else: return super()._alter_column_type_sql(model, old_field, new_field, new_type) def _alter_field(self, model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict=False): # Drop indexes on varchar/text/citext columns that are changing to a # different type. if (old_field.db_index or old_field.unique) and ( (old_type.startswith('varchar') and not new_type.startswith('varchar')) or (old_type.startswith('text') and not new_type.startswith('text')) or (old_type.startswith('citext') and not new_type.startswith('citext')) ): index_name = self._create_index_name(model._meta.db_table, [old_field.column], suffix='_like') self.execute(self._delete_index_sql(model, index_name)) super()._alter_field( model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict, ) # Added an index? Create any PostgreSQL-specific indexes. if ((not (old_field.db_index or old_field.unique) and new_field.db_index) or (not old_field.unique and new_field.unique)): like_index_statement = self._create_like_index_sql(model, new_field) if like_index_statement is not None: self.execute(like_index_statement) # Removed an index? Drop any PostgreSQL-specific indexes. 
if old_field.unique and not (new_field.db_index or new_field.unique): index_to_remove = self._create_index_name(model._meta.db_table, [old_field.column], suffix='_like') self.execute(self._delete_index_sql(model, index_to_remove)) def _index_columns(self, table, columns, col_suffixes, opclasses): if opclasses: return IndexColumns(table, columns, self.quote_name, col_suffixes=col_suffixes, opclasses=opclasses) return super()._index_columns(table, columns, col_suffixes, opclasses) def add_index(self, model, index, concurrently=False): self.execute(index.create_sql(model, self, concurrently=concurrently), params=None) def remove_index(self, model, index, concurrently=False): self.execute(index.remove_sql(model, self, concurrently=concurrently)) def _delete_index_sql(self, model, name, sql=None, concurrently=False): sql = self.sql_delete_index_concurrently if concurrently else self.sql_delete_index return super()._delete_index_sql(model, name, sql) def _create_index_sql( self, model, fields, *, name=None, suffix='', using='', db_tablespace=None, col_suffixes=(), sql=None, opclasses=(), condition=None, concurrently=False, include=None, ): sql = self.sql_create_index if not concurrently else self.sql_create_index_concurrently return super()._create_index_sql( model, fields, name=name, suffix=suffix, using=using, db_tablespace=db_tablespace, col_suffixes=col_suffixes, sql=sql, opclasses=opclasses, condition=condition, include=include, )
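
# Illustrative sketch, not part of this module: the concurrently=... hooks
# above build an index without blocking writes. PostgreSQL requires CREATE
# INDEX CONCURRENTLY to run outside a transaction block, hence
# schema_editor(atomic=False). `ExampleModel` and its 'name' field are
# hypothetical stand-ins.

def _example_concurrent_index(ExampleModel):
    from django.db import connection
    from django.db.models import Index

    index = Index(fields=['name'], name='example_name_idx')
    with connection.schema_editor(atomic=False) as editor:
        editor.add_index(ExampleModel, index, concurrently=True)
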
fe9ff0ec11e72b27584c9f5d02b706fb8fc95582ec3db6f25d954b8d86678f73
import operator

from django.db import transaction
from django.db.backends.base.features import BaseDatabaseFeatures
from django.db.utils import OperationalError
from django.utils.functional import cached_property

from .base import Database


class DatabaseFeatures(BaseDatabaseFeatures):
    # SQLite can read from a cursor since SQLite 3.6.5, subject to the caveat
    # that statements within a connection aren't isolated from each other. See
    # https://sqlite.org/isolation.html.
    can_use_chunked_reads = True
    test_db_allows_multiple_connections = False
    supports_unspecified_pk = True
    supports_timezones = False
    max_query_params = 999
    supports_mixed_date_datetime_comparisons = False
    supports_transactions = True
    atomic_transactions = False
    can_rollback_ddl = True
    supports_atomic_references_rename = Database.sqlite_version_info >= (3, 26, 0)
    can_create_inline_fk = False
    supports_paramstyle_pyformat = False
    can_clone_databases = True
    supports_temporal_subtraction = True
    ignores_table_name_case = True
    supports_cast_with_precision = False
    time_cast_precision = 3
    can_release_savepoints = True
    # Is "ALTER TABLE ... RENAME COLUMN" supported?
    can_alter_table_rename_column = Database.sqlite_version_info >= (3, 25, 0)
    supports_parentheses_in_compound = False
    # Deferred constraint checks can be emulated on SQLite < 3.20 but not in a
    # reasonably performant way.
    supports_pragma_foreign_key_check = Database.sqlite_version_info >= (3, 20, 0)
    can_defer_constraint_checks = supports_pragma_foreign_key_check
    supports_functions_in_partial_indexes = Database.sqlite_version_info >= (3, 15, 0)
    supports_over_clause = Database.sqlite_version_info >= (3, 25, 0)
    supports_frame_range_fixed_distance = Database.sqlite_version_info >= (3, 28, 0)
    supports_aggregate_filter_clause = Database.sqlite_version_info >= (3, 30, 1)
    supports_order_by_nulls_modifier = Database.sqlite_version_info >= (3, 30, 0)
    order_by_nulls_first = True

    @cached_property
    def introspected_field_types(self):
        return {
            **super().introspected_field_types,
            'BigAutoField': 'AutoField',
            'DurationField': 'BigIntegerField',
            'GenericIPAddressField': 'CharField',
            'SmallAutoField': 'AutoField',
        }

    @cached_property
    def supports_json_field(self):
        try:
            with self.connection.cursor() as cursor, transaction.atomic():
                cursor.execute('SELECT JSON(\'{"a": "b"}\')')
        except OperationalError:
            return False
        return True

    can_introspect_json_field = property(operator.attrgetter('supports_json_field'))
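

# Illustrative sketch (hypothetical helper, not part of Django): the flags
# above are plain attributes on connection.features, so calling code can
# branch on database capabilities at runtime. Assumes a configured SQLite
# connection.
def _demo_feature_probe():
    from django.db import connection

    if connection.features.supports_over_clause:
        # Window functions are available (SQLite >= 3.25).
        return 'use window functions'
    return 'fall back to a subquery'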
f23f7b1574369934cd84e41c16196fc459283178809f87b3aaa378b78e18dc6e
import inspect import types from collections import defaultdict from itertools import chain from django.apps import apps from django.conf import settings from django.core.checks import Error, Tags, Warning, register @register(Tags.models) def check_all_models(app_configs=None, **kwargs): db_table_models = defaultdict(list) indexes = defaultdict(list) constraints = defaultdict(list) errors = [] if app_configs is None: models = apps.get_models() else: models = chain.from_iterable(app_config.get_models() for app_config in app_configs) for model in models: if model._meta.managed and not model._meta.proxy: db_table_models[model._meta.db_table].append(model._meta.label) if not inspect.ismethod(model.check): errors.append( Error( "The '%s.check()' class method is currently overridden by %r." % (model.__name__, model.check), obj=model, id='models.E020' ) ) else: errors.extend(model.check(**kwargs)) for model_index in model._meta.indexes: indexes[model_index.name].append(model._meta.label) for model_constraint in model._meta.constraints: constraints[model_constraint.name].append(model._meta.label) if settings.DATABASE_ROUTERS: error_class, error_id = Warning, 'models.W035' error_hint = ( 'You have configured settings.DATABASE_ROUTERS. Verify that %s ' 'are correctly routed to separate databases.' ) else: error_class, error_id = Error, 'models.E028' error_hint = None for db_table, model_labels in db_table_models.items(): if len(model_labels) != 1: model_labels_str = ', '.join(model_labels) errors.append( error_class( "db_table '%s' is used by multiple models: %s." % (db_table, model_labels_str), obj=db_table, hint=(error_hint % model_labels_str) if error_hint else None, id=error_id, ) ) for index_name, model_labels in indexes.items(): if len(model_labels) > 1: model_labels = set(model_labels) errors.append( Error( "index name '%s' is not unique %s %s." % ( index_name, 'for model' if len(model_labels) == 1 else 'among models:', ', '.join(sorted(model_labels)), ), id='models.E029' if len(model_labels) == 1 else 'models.E030', ), ) for constraint_name, model_labels in constraints.items(): if len(model_labels) > 1: model_labels = set(model_labels) errors.append( Error( "constraint name '%s' is not unique %s %s." % ( constraint_name, 'for model' if len(model_labels) == 1 else 'among models:', ', '.join(sorted(model_labels)), ), id='models.E031' if len(model_labels) == 1 else 'models.E032', ), ) return errors def _check_lazy_references(apps, ignore=None): """ Ensure all lazy (i.e. string) model references have been resolved. Lazy references are used in various places throughout Django, primarily in related fields and model signals. Identify those common cases and provide more helpful error messages for them. The ignore parameter is used by StateApps to exclude swappable models from this check. """ pending_models = set(apps._pending_operations) - (ignore or set()) # Short circuit if there aren't any errors. if not pending_models: return [] from django.db.models import signals model_signals = { signal: name for name, signal in vars(signals).items() if isinstance(signal, signals.ModelSignal) } def extract_operation(obj): """ Take a callable found in Apps._pending_operations and identify the original callable passed to Apps.lazy_model_operation(). If that callable was a partial, return the inner, non-partial function and any arguments and keyword arguments that were supplied with it. 
obj is a callback defined locally in Apps.lazy_model_operation() and annotated there with a `func` attribute so as to imitate a partial. """ operation, args, keywords = obj, [], {} while hasattr(operation, 'func'): args.extend(getattr(operation, 'args', [])) keywords.update(getattr(operation, 'keywords', {})) operation = operation.func return operation, args, keywords def app_model_error(model_key): try: apps.get_app_config(model_key[0]) model_error = "app '%s' doesn't provide model '%s'" % model_key except LookupError: model_error = "app '%s' isn't installed" % model_key[0] return model_error # Here are several functions which return CheckMessage instances for the # most common usages of lazy operations throughout Django. These functions # take the model that was being waited on as an (app_label, modelname) # pair, the original lazy function, and its positional and keyword args as # determined by extract_operation(). def field_error(model_key, func, args, keywords): error_msg = ( "The field %(field)s was declared with a lazy reference " "to '%(model)s', but %(model_error)s." ) params = { 'model': '.'.join(model_key), 'field': keywords['field'], 'model_error': app_model_error(model_key), } return Error(error_msg % params, obj=keywords['field'], id='fields.E307') def signal_connect_error(model_key, func, args, keywords): error_msg = ( "%(receiver)s was connected to the '%(signal)s' signal with a " "lazy reference to the sender '%(model)s', but %(model_error)s." ) receiver = args[0] # The receiver is either a function or an instance of class # defining a `__call__` method. if isinstance(receiver, types.FunctionType): description = "The function '%s'" % receiver.__name__ elif isinstance(receiver, types.MethodType): description = "Bound method '%s.%s'" % (receiver.__self__.__class__.__name__, receiver.__name__) else: description = "An instance of class '%s'" % receiver.__class__.__name__ signal_name = model_signals.get(func.__self__, 'unknown') params = { 'model': '.'.join(model_key), 'receiver': description, 'signal': signal_name, 'model_error': app_model_error(model_key), } return Error(error_msg % params, obj=receiver.__module__, id='signals.E001') def default_error(model_key, func, args, keywords): error_msg = "%(op)s contains a lazy reference to %(model)s, but %(model_error)s." params = { 'op': func, 'model': '.'.join(model_key), 'model_error': app_model_error(model_key), } return Error(error_msg % params, obj=func, id='models.E022') # Maps common uses of lazy operations to corresponding error functions # defined above. If a key maps to None, no error will be produced. # default_error() will be used for usages that don't appear in this dict. known_lazy = { ('django.db.models.fields.related', 'resolve_related_class'): field_error, ('django.db.models.fields.related', 'set_managed'): None, ('django.dispatch.dispatcher', 'connect'): signal_connect_error, } def build_error(model_key, func, args, keywords): key = (func.__module__, func.__name__) error_fn = known_lazy.get(key, default_error) return error_fn(model_key, func, args, keywords) if error_fn else None return sorted(filter(None, ( build_error(model_key, *extract_operation(func)) for model_key in pending_models for func in apps._pending_operations[model_key] )), key=lambda error: error.msg) @register(Tags.models) def check_lazy_references(app_configs=None, **kwargs): return _check_lazy_references(apps)
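

# Illustrative sketch (hypothetical helper, not part of Django): the checks
# registered above normally run via `manage.py check`, but they can also be
# invoked programmatically. Assumes a configured project.
def _demo_run_model_checks():
    from django.core import checks

    # Returns a list of CheckMessage instances (e.g. models.E028/E030).
    return checks.run_checks(tags=[checks.Tags.models])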
bb0bdd7f460348689300928b3a60fd6ca5c7dc3503fecf797e6cbd05cde7a2a6
from django.apps import apps from django.db import models def sql_flush(style, connection, reset_sequences=True, allow_cascade=False): """ Return a list of the SQL statements used to flush the database. """ tables = connection.introspection.django_table_names(only_existing=True, include_views=False) return connection.ops.sql_flush( style, tables, reset_sequences=reset_sequences, allow_cascade=allow_cascade, ) def emit_pre_migrate_signal(verbosity, interactive, db, **kwargs): # Emit the pre_migrate signal for every application. for app_config in apps.get_app_configs(): if app_config.models_module is None: continue if verbosity >= 2: print("Running pre-migrate handlers for application %s" % app_config.label) models.signals.pre_migrate.send( sender=app_config, app_config=app_config, verbosity=verbosity, interactive=interactive, using=db, **kwargs ) def emit_post_migrate_signal(verbosity, interactive, db, **kwargs): # Emit the post_migrate signal for every application. for app_config in apps.get_app_configs(): if app_config.models_module is None: continue if verbosity >= 2: print("Running post-migrate handlers for application %s" % app_config.label) models.signals.post_migrate.send( sender=app_config, app_config=app_config, verbosity=verbosity, interactive=interactive, using=db, **kwargs )
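

# Illustrative sketch (hypothetical helper, not part of Django): obtaining
# the flush statements for the default connection, mirroring what the flush
# and sqlflush commands do with this module. Assumes a configured project.
def _demo_flush_sql():
    from django.core.management.color import no_style
    from django.db import connection

    return sql_flush(no_style(), connection, reset_sequences=False)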
7636c4ef5f8fdcf7b95b40b5ab8c0170a78a893c195ef3b40410005590d7b179
""" Caching framework. This package defines set of cache backends that all conform to a simple API. In a nutshell, a cache is a set of values -- which can be any object that may be pickled -- identified by string keys. For the complete API, see the abstract BaseCache class in django.core.cache.backends.base. Client code should use the `cache` variable defined here to access the default cache backend and look up non-default cache backends in the `caches` dict-like object. See docs/topics/cache.txt for information on the public API. """ from asgiref.local import Local from django.conf import settings from django.core import signals from django.core.cache.backends.base import ( BaseCache, CacheKeyWarning, InvalidCacheBackendError, InvalidCacheKey, ) from django.utils.module_loading import import_string __all__ = [ 'cache', 'caches', 'DEFAULT_CACHE_ALIAS', 'InvalidCacheBackendError', 'CacheKeyWarning', 'BaseCache', 'InvalidCacheKey', ] DEFAULT_CACHE_ALIAS = 'default' def _create_cache(backend, **kwargs): try: # Try to get the CACHES entry for the given backend name first try: conf = settings.CACHES[backend] except KeyError: try: # Trying to import the given backend, in case it's a dotted path import_string(backend) except ImportError as e: raise InvalidCacheBackendError("Could not find backend '%s': %s" % ( backend, e)) location = kwargs.pop('LOCATION', '') params = kwargs else: params = {**conf, **kwargs} backend = params.pop('BACKEND') location = params.pop('LOCATION', '') backend_cls = import_string(backend) except ImportError as e: raise InvalidCacheBackendError( "Could not find backend '%s': %s" % (backend, e)) return backend_cls(location, params) class CacheHandler: """ A Cache Handler to manage access to Cache instances. Ensure only one instance of each alias exists per thread. """ def __init__(self): self._caches = Local() def __getitem__(self, alias): try: return self._caches.caches[alias] except AttributeError: self._caches.caches = {} except KeyError: pass if alias not in settings.CACHES: raise InvalidCacheBackendError( "Could not find config for '%s' in settings.CACHES" % alias ) cache = _create_cache(alias) self._caches.caches[alias] = cache return cache def all(self): return getattr(self._caches, 'caches', {}).values() caches = CacheHandler() class DefaultCacheProxy: """ Proxy access to the default Cache object's attributes. This allows the legacy `cache` object to be thread-safe using the new ``caches`` API. """ def __getattr__(self, name): return getattr(caches[DEFAULT_CACHE_ALIAS], name) def __setattr__(self, name, value): return setattr(caches[DEFAULT_CACHE_ALIAS], name, value) def __delattr__(self, name): return delattr(caches[DEFAULT_CACHE_ALIAS], name) def __contains__(self, key): return key in caches[DEFAULT_CACHE_ALIAS] def __eq__(self, other): return caches[DEFAULT_CACHE_ALIAS] == other cache = DefaultCacheProxy() def close_caches(**kwargs): # Some caches -- python-memcached in particular -- need to do a cleanup at the # end of a request cycle. If not implemented in a particular backend # cache.close is a no-op for cache in caches.all(): cache.close() signals.request_finished.connect(close_caches)
00bb9b643cf33a5c517f3b3106dfb76032e40c11181e57dfec304b884a4dd06a
from importlib import import_module

from django.apps import apps
from django.core.management.base import BaseCommand, CommandError
from django.core.management.color import no_style
from django.core.management.sql import emit_post_migrate_signal, sql_flush
from django.db import DEFAULT_DB_ALIAS, connections


class Command(BaseCommand):
    help = (
        'Removes ALL DATA from the database, including data added during '
        'migrations. Does not achieve a "fresh install" state.'
    )
    stealth_options = ('reset_sequences', 'allow_cascade', 'inhibit_post_migrate')

    def add_arguments(self, parser):
        parser.add_argument(
            '--noinput', '--no-input', action='store_false', dest='interactive',
            help='Tells Django to NOT prompt the user for input of any kind.',
        )
        parser.add_argument(
            '--database', default=DEFAULT_DB_ALIAS,
            help='Nominates a database to flush. Defaults to the "default" database.',
        )

    def handle(self, **options):
        database = options['database']
        connection = connections[database]
        verbosity = options['verbosity']
        interactive = options['interactive']
        # The following are stealth options used by Django's internals.
        reset_sequences = options.get('reset_sequences', True)
        allow_cascade = options.get('allow_cascade', False)
        inhibit_post_migrate = options.get('inhibit_post_migrate', False)

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_config in apps.get_app_configs():
            try:
                import_module('.management', app_config.name)
            except ImportError:
                pass

        sql_list = sql_flush(self.style, connection,
                             reset_sequences=reset_sequences,
                             allow_cascade=allow_cascade)

        if interactive:
            confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the "%s" database,
and return each table to an empty state.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                connection.ops.execute_sql_flush(sql_list)
            except Exception as exc:
                raise CommandError(
                    "Database %s couldn't be flushed. Possible reasons:\n"
                    "  * The database isn't running or isn't configured correctly.\n"
                    "  * At least one of the expected database tables doesn't exist.\n"
                    "  * The SQL was invalid.\n"
                    "Hint: Look at the output of 'django-admin sqlflush'. "
                    "That's the SQL this command wasn't able to run." % (
                        connection.settings_dict['NAME'],
                    )
                ) from exc

            # An empty sql_list may signify an empty database, in which case
            # emitting post_migrate would crash.
            if sql_list and not inhibit_post_migrate:
                # Emit the post migrate signal. This allows individual applications to
                # respond as if the database had been migrated from scratch.
                emit_post_migrate_signal(verbosity, interactive, database)
        else:
            self.stdout.write('Flush cancelled.')
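

# Illustrative sketch (hypothetical helper, not part of Django): the stealth
# options above are reachable when the command is invoked from Python. Only
# a sketch -- this destroys data, so it assumes a disposable test database.
def _demo_flush_quietly():
    from django.core.management import call_command

    call_command('flush', interactive=False, reset_sequences=False)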
48a464937317ba8ef373ca285a1c4164b74b252843f95cda4c9886067e4053a4
from django.core.management.base import BaseCommand from django.core.management.sql import sql_flush from django.db import DEFAULT_DB_ALIAS, connections class Command(BaseCommand): help = ( "Returns a list of the SQL statements required to return all tables in " "the database to the state they were in just after they were installed." ) output_transaction = True def add_arguments(self, parser): super().add_arguments(parser) parser.add_argument( '--database', default=DEFAULT_DB_ALIAS, help='Nominates a database to print the SQL for. Defaults to the "default" database.', ) def handle(self, **options): sql_statements = sql_flush(self.style, connections[options['database']]) if not sql_statements and options['verbosity'] >= 1: self.stderr.write('No tables found.') return '\n'.join(sql_statements)
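

# Illustrative sketch (hypothetical helper, not part of Django): capturing
# the command's output from Python instead of the command line.
def _demo_sqlflush_output():
    from io import StringIO

    from django.core.management import call_command

    out = StringIO()
    call_command('sqlflush', database=DEFAULT_DB_ALIAS, stdout=out)
    return out.getvalue()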
7e45da34a5e1fc0d2a32a59f8b82b5515eb048d688a7c149716b374771602fbe
import keyword import re from django.core.management.base import BaseCommand, CommandError from django.db import DEFAULT_DB_ALIAS, connections from django.db.models.constants import LOOKUP_SEP class Command(BaseCommand): help = "Introspects the database tables in the given database and outputs a Django model module." requires_system_checks = [] stealth_options = ('table_name_filter',) db_module = 'django.db' def add_arguments(self, parser): parser.add_argument( 'table', nargs='*', type=str, help='Selects what tables or views should be introspected.', ) parser.add_argument( '--database', default=DEFAULT_DB_ALIAS, help='Nominates a database to introspect. Defaults to using the "default" database.', ) parser.add_argument( '--include-partitions', action='store_true', help='Also output models for partition tables.', ) parser.add_argument( '--include-views', action='store_true', help='Also output models for database views.', ) def handle(self, **options): try: for line in self.handle_inspection(options): self.stdout.write(line) except NotImplementedError: raise CommandError("Database inspection isn't supported for the currently selected database backend.") def handle_inspection(self, options): connection = connections[options['database']] # 'table_name_filter' is a stealth option table_name_filter = options.get('table_name_filter') def table2model(table_name): return re.sub(r'[^a-zA-Z0-9]', '', table_name.title()) with connection.cursor() as cursor: yield "# This is an auto-generated Django model module." yield "# You'll have to do the following manually to clean this up:" yield "# * Rearrange models' order" yield "# * Make sure each model has one field with primary_key=True" yield "# * Make sure each ForeignKey and OneToOneField has `on_delete` set to the desired behavior" yield ( "# * Remove `managed = False` lines if you wish to allow " "Django to create, modify, and delete the table" ) yield "# Feel free to rename the models, but don't rename db_table values or field names." yield 'from %s import models' % self.db_module known_models = [] table_info = connection.introspection.get_table_list(cursor) # Determine types of tables and/or views to be introspected. types = {'t'} if options['include_partitions']: types.add('p') if options['include_views']: types.add('v') for table_name in (options['table'] or sorted(info.name for info in table_info if info.type in types)): if table_name_filter is not None and callable(table_name_filter): if not table_name_filter(table_name): continue try: try: relations = connection.introspection.get_relations(cursor, table_name) except NotImplementedError: relations = {} try: constraints = connection.introspection.get_constraints(cursor, table_name) except NotImplementedError: constraints = {} primary_key_column = connection.introspection.get_primary_key_column(cursor, table_name) unique_columns = [ c['columns'][0] for c in constraints.values() if c['unique'] and len(c['columns']) == 1 ] table_description = connection.introspection.get_table_description(cursor, table_name) except Exception as e: yield "# Unable to inspect table '%s'" % table_name yield "# The error was: %s" % e continue yield '' yield '' yield 'class %s(models.Model):' % table2model(table_name) known_models.append(table2model(table_name)) used_column_names = [] # Holds column names used in the table so far column_to_field_name = {} # Maps column names to names of model fields for row in table_description: comment_notes = [] # Holds Field notes, to be displayed in a Python comment. 
extra_params = {} # Holds Field parameters such as 'db_column'. column_name = row.name is_relation = column_name in relations att_name, params, notes = self.normalize_col_name( column_name, used_column_names, is_relation) extra_params.update(params) comment_notes.extend(notes) used_column_names.append(att_name) column_to_field_name[column_name] = att_name # Add primary_key and unique, if necessary. if column_name == primary_key_column: extra_params['primary_key'] = True elif column_name in unique_columns: extra_params['unique'] = True if is_relation: if extra_params.pop('unique', False) or extra_params.get('primary_key'): rel_type = 'OneToOneField' else: rel_type = 'ForeignKey' rel_to = ( "self" if relations[column_name][1] == table_name else table2model(relations[column_name][1]) ) if rel_to in known_models: field_type = '%s(%s' % (rel_type, rel_to) else: field_type = "%s('%s'" % (rel_type, rel_to) else: # Calling `get_field_type` to get the field type string and any # additional parameters and notes. field_type, field_params, field_notes = self.get_field_type(connection, table_name, row) extra_params.update(field_params) comment_notes.extend(field_notes) field_type += '(' # Don't output 'id = meta.AutoField(primary_key=True)', because # that's assumed if it doesn't exist. if att_name == 'id' and extra_params == {'primary_key': True}: if field_type == 'AutoField(': continue elif field_type == connection.features.introspected_field_types['AutoField'] + '(': comment_notes.append('AutoField?') # Add 'null' and 'blank', if the 'null_ok' flag was present in the # table description. if row.null_ok: # If it's NULL... extra_params['blank'] = True extra_params['null'] = True field_desc = '%s = %s%s' % ( att_name, # Custom fields will have a dotted path '' if '.' 
in field_type else 'models.', field_type, ) if field_type.startswith(('ForeignKey(', 'OneToOneField(')): field_desc += ', models.DO_NOTHING' if extra_params: if not field_desc.endswith('('): field_desc += ', ' field_desc += ', '.join('%s=%r' % (k, v) for k, v in extra_params.items()) field_desc += ')' if comment_notes: field_desc += ' # ' + ' '.join(comment_notes) yield ' %s' % field_desc is_view = any(info.name == table_name and info.type == 'v' for info in table_info) is_partition = any(info.name == table_name and info.type == 'p' for info in table_info) yield from self.get_meta(table_name, constraints, column_to_field_name, is_view, is_partition) def normalize_col_name(self, col_name, used_column_names, is_relation): """ Modify the column name to make it Python-compatible as a field name """ field_params = {} field_notes = [] new_name = col_name.lower() if new_name != col_name: field_notes.append('Field name made lowercase.') if is_relation: if new_name.endswith('_id'): new_name = new_name[:-3] else: field_params['db_column'] = col_name new_name, num_repl = re.subn(r'\W', '_', new_name) if num_repl > 0: field_notes.append('Field renamed to remove unsuitable characters.') if new_name.find(LOOKUP_SEP) >= 0: while new_name.find(LOOKUP_SEP) >= 0: new_name = new_name.replace(LOOKUP_SEP, '_') if col_name.lower().find(LOOKUP_SEP) >= 0: # Only add the comment if the double underscore was in the original name field_notes.append("Field renamed because it contained more than one '_' in a row.") if new_name.startswith('_'): new_name = 'field%s' % new_name field_notes.append("Field renamed because it started with '_'.") if new_name.endswith('_'): new_name = '%sfield' % new_name field_notes.append("Field renamed because it ended with '_'.") if keyword.iskeyword(new_name): new_name += '_field' field_notes.append('Field renamed because it was a Python reserved word.') if new_name[0].isdigit(): new_name = 'number_%s' % new_name field_notes.append("Field renamed because it wasn't a valid Python identifier.") if new_name in used_column_names: num = 0 while '%s_%d' % (new_name, num) in used_column_names: num += 1 new_name = '%s_%d' % (new_name, num) field_notes.append('Field renamed because of name conflict.') if col_name != new_name and field_notes: field_params['db_column'] = col_name return new_name, field_params, field_notes def get_field_type(self, connection, table_name, row): """ Given the database connection, the table name, and the cursor row description, this routine will return the given field type name, as well as any additional keyword parameters and notes for the field. """ field_params = {} field_notes = [] try: field_type = connection.introspection.get_field_type(row.type_code, row) except KeyError: field_type = 'TextField' field_notes.append('This field type is a guess.') # Add max_length for all CharFields. 
if field_type == 'CharField' and row.internal_size: field_params['max_length'] = int(row.internal_size) if field_type == 'DecimalField': if row.precision is None or row.scale is None: field_notes.append( 'max_digits and decimal_places have been guessed, as this ' 'database handles decimal fields as float') field_params['max_digits'] = row.precision if row.precision is not None else 10 field_params['decimal_places'] = row.scale if row.scale is not None else 5 else: field_params['max_digits'] = row.precision field_params['decimal_places'] = row.scale return field_type, field_params, field_notes def get_meta(self, table_name, constraints, column_to_field_name, is_view, is_partition): """ Return a sequence comprising the lines of code necessary to construct the inner Meta class for the model corresponding to the given database table name. """ unique_together = [] has_unsupported_constraint = False for params in constraints.values(): if params['unique']: columns = params['columns'] if None in columns: has_unsupported_constraint = True columns = [x for x in columns if x is not None] if len(columns) > 1: unique_together.append(str(tuple(column_to_field_name[c] for c in columns))) if is_view: managed_comment = " # Created from a view. Don't remove." elif is_partition: managed_comment = " # Created from a partition. Don't remove." else: managed_comment = '' meta = [''] if has_unsupported_constraint: meta.append(' # A unique constraint could not be introspected.') meta += [ ' class Meta:', ' managed = False%s' % managed_comment, ' db_table = %r' % table_name ] if unique_together: tup = '(' + ', '.join(unique_together) + ',)' meta += [" unique_together = %s" % tup] return meta
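

# Illustrative sketch (hypothetical helper, not part of Django): generating
# model source for a single table programmatically. The table name is a
# placeholder; assumes a configured project.
def _demo_inspect_table(table_name='demo_widget'):
    from io import StringIO

    from django.core.management import call_command

    out = StringIO()
    call_command('inspectdb', table_name, stdout=out)
    return out.getvalue()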
0168547a42e5932f86030e210b9cc604a93dce85dc91fb8bcdbf85bc505aa922
"Memcached cache backend" import pickle import re import time from django.core.cache.backends.base import ( DEFAULT_TIMEOUT, BaseCache, InvalidCacheKey, memcache_key_warnings, ) from django.utils.functional import cached_property class BaseMemcachedCache(BaseCache): def __init__(self, server, params, library, value_not_found_exception): super().__init__(params) if isinstance(server, str): self._servers = re.split('[;,]', server) else: self._servers = server # The exception type to catch from the underlying library for a key # that was not found. This is a ValueError for python-memcache, # pylibmc.NotFound for pylibmc, and cmemcache will return None without # raising an exception. self.LibraryValueNotFoundException = value_not_found_exception self._lib = library self._options = params.get('OPTIONS') or {} @property def _cache(self): """ Implement transparent thread-safe access to a memcached client. """ if getattr(self, '_client', None) is None: self._client = self._lib.Client(self._servers, **self._options) return self._client def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT): """ Memcached deals with long (> 30 days) timeouts in a special way. Call this function to obtain a safe value for your timeout. """ if timeout == DEFAULT_TIMEOUT: timeout = self.default_timeout if timeout is None: # Using 0 in memcache sets a non-expiring timeout. return 0 elif int(timeout) == 0: # Other cache backends treat 0 as set-and-expire. To achieve this # in memcache backends, a negative timeout must be passed. timeout = -1 if timeout > 2592000: # 60*60*24*30, 30 days # See https://github.com/memcached/memcached/wiki/Programming#expiration # "Expiration times can be set from 0, meaning "never expire", to # 30 days. Any time higher than 30 days is interpreted as a Unix # timestamp date. If you want to expire an object on January 1st of # next year, this is how you do that." # # This means that we have to switch to absolute timestamps. timeout += int(time.time()) return int(timeout) def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): key = self.make_key(key, version=version) self.validate_key(key) return self._cache.add(key, value, self.get_backend_timeout(timeout)) def get(self, key, default=None, version=None): key = self.make_key(key, version=version) self.validate_key(key) return self._cache.get(key, default) def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): key = self.make_key(key, version=version) self.validate_key(key) if not self._cache.set(key, value, self.get_backend_timeout(timeout)): # make sure the key doesn't keep its old value in case of failure to set (memcached's 1MB limit) self._cache.delete(key) def delete(self, key, version=None): key = self.make_key(key, version=version) self.validate_key(key) return bool(self._cache.delete(key)) def get_many(self, keys, version=None): key_map = {self.make_key(key, version=version): key for key in keys} for key in key_map: self.validate_key(key) ret = self._cache.get_multi(key_map.keys()) return {key_map[k]: v for k, v in ret.items()} def close(self, **kwargs): # Many clients don't clean up connections properly. 
        self._cache.disconnect_all()

    def incr(self, key, delta=1, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)
        # memcached doesn't support a negative delta
        if delta < 0:
            return self._cache.decr(key, -delta)
        try:
            val = self._cache.incr(key, delta)

        # python-memcache responds to incr on nonexistent keys by raising a
        # ValueError, pylibmc by raising pylibmc.NotFound, and cmemcache by
        # returning None. In all cases, a ValueError should be raised.
        except self.LibraryValueNotFoundException:
            val = None
        if val is None:
            raise ValueError("Key '%s' not found" % key)
        return val

    def decr(self, key, delta=1, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)
        # memcached doesn't support a negative delta
        if delta < 0:
            return self._cache.incr(key, -delta)
        try:
            val = self._cache.decr(key, delta)

        # python-memcache responds to decr on nonexistent keys by raising a
        # ValueError, pylibmc by raising pylibmc.NotFound, and cmemcache by
        # returning None. In all cases, a ValueError should be raised.
        except self.LibraryValueNotFoundException:
            val = None
        if val is None:
            raise ValueError("Key '%s' not found" % key)
        return val

    def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
        safe_data = {}
        original_keys = {}
        for key, value in data.items():
            safe_key = self.make_key(key, version=version)
            self.validate_key(safe_key)
            safe_data[safe_key] = value
            original_keys[safe_key] = key
        failed_keys = self._cache.set_multi(safe_data, self.get_backend_timeout(timeout))
        return [original_keys[k] for k in failed_keys]

    def delete_many(self, keys, version=None):
        self._cache.delete_multi(self.make_key(key, version=version) for key in keys)

    def clear(self):
        self._cache.flush_all()

    def validate_key(self, key):
        for warning in memcache_key_warnings(key):
            raise InvalidCacheKey(warning)


class MemcachedCache(BaseMemcachedCache):
    "An implementation of a cache binding using python-memcached"

    def __init__(self, server, params):
        import memcache
        super().__init__(server, params, library=memcache, value_not_found_exception=ValueError)

    @property
    def _cache(self):
        if getattr(self, '_client', None) is None:
            client_kwargs = {'pickleProtocol': pickle.HIGHEST_PROTOCOL}
            client_kwargs.update(self._options)
            self._client = self._lib.Client(self._servers, **client_kwargs)
        return self._client

    def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
        key = self.make_key(key, version=version)
        return self._cache.touch(key, self.get_backend_timeout(timeout)) != 0

    def get(self, key, default=None, version=None):
        key = self.make_key(key, version=version)
        val = self._cache.get(key)
        # python-memcached doesn't support default values in get().
        # https://github.com/linsomniac/python-memcached/issues/159
        # Remove this method if that issue is fixed.
        if val is None:
            return default
        return val

    def delete(self, key, version=None):
        # python-memcached's delete() returns True when key doesn't exist.
        # https://github.com/linsomniac/python-memcached/issues/170
        # Call _deletetouch() without the NOT_FOUND in expected results.
key = self.make_key(key, version=version) return bool(self._cache._deletetouch([b'DELETED'], 'delete', key)) class PyLibMCCache(BaseMemcachedCache): "An implementation of a cache binding using pylibmc" def __init__(self, server, params): import pylibmc super().__init__(server, params, library=pylibmc, value_not_found_exception=pylibmc.NotFound) @cached_property def _cache(self): return self._lib.Client(self._servers, **self._options) def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None): key = self.make_key(key, version=version) if timeout == 0: return self._cache.delete(key) return self._cache.touch(key, self.get_backend_timeout(timeout)) def close(self, **kwargs): # libmemcached manages its own connections. Don't call disconnect_all() # as it resets the failover state and creates unnecessary reconnects. pass
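

# Illustrative sketch (not part of Django): a settings.CACHES entry wiring
# up one of the backends above. The server address and OPTIONS values are
# placeholders; OPTIONS is forwarded to pylibmc.Client as keyword arguments.
_EXAMPLE_CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache',
        'LOCATION': '127.0.0.1:11211',
        'OPTIONS': {'binary': True},
    },
}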
9c22c151885f3018d062e553d907d4db5b581117be2f405f1d25009d703a93c5
"Base Cache class." import time import warnings from django.core.exceptions import ImproperlyConfigured from django.utils.module_loading import import_string class InvalidCacheBackendError(ImproperlyConfigured): pass class CacheKeyWarning(RuntimeWarning): pass class InvalidCacheKey(ValueError): pass # Stub class to ensure not passing in a `timeout` argument results in # the default timeout DEFAULT_TIMEOUT = object() # Memcached does not accept keys longer than this. MEMCACHE_MAX_KEY_LENGTH = 250 def default_key_func(key, key_prefix, version): """ Default function to generate keys. Construct the key used by all other methods. By default, prepend the `key_prefix'. KEY_FUNCTION can be used to specify an alternate function with custom key making behavior. """ return '%s:%s:%s' % (key_prefix, version, key) def get_key_func(key_func): """ Function to decide which key function to use. Default to ``default_key_func``. """ if key_func is not None: if callable(key_func): return key_func else: return import_string(key_func) return default_key_func class BaseCache: def __init__(self, params): timeout = params.get('timeout', params.get('TIMEOUT', 300)) if timeout is not None: try: timeout = int(timeout) except (ValueError, TypeError): timeout = 300 self.default_timeout = timeout options = params.get('OPTIONS', {}) max_entries = params.get('max_entries', options.get('MAX_ENTRIES', 300)) try: self._max_entries = int(max_entries) except (ValueError, TypeError): self._max_entries = 300 cull_frequency = params.get('cull_frequency', options.get('CULL_FREQUENCY', 3)) try: self._cull_frequency = int(cull_frequency) except (ValueError, TypeError): self._cull_frequency = 3 self.key_prefix = params.get('KEY_PREFIX', '') self.version = params.get('VERSION', 1) self.key_func = get_key_func(params.get('KEY_FUNCTION')) def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT): """ Return the timeout value usable by this backend based upon the provided timeout. """ if timeout == DEFAULT_TIMEOUT: timeout = self.default_timeout elif timeout == 0: # ticket 21147 - avoid time.time() related precision issues timeout = -1 return None if timeout is None else time.time() + timeout def make_key(self, key, version=None): """ Construct the key used by all other methods. By default, use the key_func to generate a key (which, by default, prepends the `key_prefix' and 'version'). A different key function can be provided at the time of cache construction; alternatively, you can subclass the cache backend to provide custom key making behavior. """ if version is None: version = self.version return self.key_func(key, self.key_prefix, version) def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): """ Set a value in the cache if the key does not already exist. If timeout is given, use that timeout for the key; otherwise use the default cache timeout. Return True if the value was stored, False otherwise. """ raise NotImplementedError('subclasses of BaseCache must provide an add() method') def get(self, key, default=None, version=None): """ Fetch a given key from the cache. If the key does not exist, return default, which itself defaults to None. """ raise NotImplementedError('subclasses of BaseCache must provide a get() method') def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): """ Set a value in the cache. If timeout is given, use that timeout for the key; otherwise use the default cache timeout. 
""" raise NotImplementedError('subclasses of BaseCache must provide a set() method') def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None): """ Update the key's expiry time using timeout. Return True if successful or False if the key does not exist. """ raise NotImplementedError('subclasses of BaseCache must provide a touch() method') def delete(self, key, version=None): """ Delete a key from the cache and return whether it succeeded, failing silently. """ raise NotImplementedError('subclasses of BaseCache must provide a delete() method') def get_many(self, keys, version=None): """ Fetch a bunch of keys from the cache. For certain backends (memcached, pgsql) this can be *much* faster when fetching multiple values. Return a dict mapping each key in keys to its value. If the given key is missing, it will be missing from the response dict. """ d = {} for k in keys: val = self.get(k, version=version) if val is not None: d[k] = val return d def get_or_set(self, key, default, timeout=DEFAULT_TIMEOUT, version=None): """ Fetch a given key from the cache. If the key does not exist, add the key and set it to the default value. The default value can also be any callable. If timeout is given, use that timeout for the key; otherwise use the default cache timeout. Return the value of the key stored or retrieved. """ val = self.get(key, version=version) if val is None: if callable(default): default = default() if default is not None: self.add(key, default, timeout=timeout, version=version) # Fetch the value again to avoid a race condition if another # caller added a value between the first get() and the add() # above. return self.get(key, default, version=version) return val def has_key(self, key, version=None): """ Return True if the key is in the cache and has not expired. """ return self.get(key, version=version) is not None def incr(self, key, delta=1, version=None): """ Add delta to value in the cache. If the key does not exist, raise a ValueError exception. """ value = self.get(key, version=version) if value is None: raise ValueError("Key '%s' not found" % key) new_value = value + delta self.set(key, new_value, version=version) return new_value def decr(self, key, delta=1, version=None): """ Subtract delta from value in the cache. If the key does not exist, raise a ValueError exception. """ return self.incr(key, -delta, version=version) def __contains__(self, key): """ Return True if the key is in the cache and has not expired. """ # This is a separate method, rather than just a copy of has_key(), # so that it always has the same functionality as has_key(), even # if a subclass overrides it. return self.has_key(key) def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None): """ Set a bunch of values in the cache at once from a dict of key/value pairs. For certain backends (memcached), this is much more efficient than calling set() multiple times. If timeout is given, use that timeout for the key; otherwise use the default cache timeout. On backends that support it, return a list of keys that failed insertion, or an empty list if all keys were inserted successfully. """ for key, value in data.items(): self.set(key, value, timeout=timeout, version=version) return [] def delete_many(self, keys, version=None): """ Delete a bunch of values in the cache at once. For certain backends (memcached), this is much more efficient than calling delete() multiple times. 
""" for key in keys: self.delete(key, version=version) def clear(self): """Remove *all* values from the cache at once.""" raise NotImplementedError('subclasses of BaseCache must provide a clear() method') def validate_key(self, key): """ Warn about keys that would not be portable to the memcached backend. This encourages (but does not force) writing backend-portable cache code. """ for warning in memcache_key_warnings(key): warnings.warn(warning, CacheKeyWarning) def incr_version(self, key, delta=1, version=None): """ Add delta to the cache version for the supplied key. Return the new version. """ if version is None: version = self.version value = self.get(key, version=version) if value is None: raise ValueError("Key '%s' not found" % key) self.set(key, value, version=version + delta) self.delete(key, version=version) return version + delta def decr_version(self, key, delta=1, version=None): """ Subtract delta from the cache version for the supplied key. Return the new version. """ return self.incr_version(key, -delta, version) def close(self, **kwargs): """Close the cache connection""" pass def memcache_key_warnings(key): if len(key) > MEMCACHE_MAX_KEY_LENGTH: yield ( 'Cache key will cause errors if used with memcached: %r ' '(longer than %s)' % (key, MEMCACHE_MAX_KEY_LENGTH) ) for char in key: if ord(char) < 33 or ord(char) == 127: yield ( 'Cache key contains characters that will cause errors if ' 'used with memcached: %r' % key, CacheKeyWarning ) break
dc7d770813f5a88431ad477ec841dad6cd4fa8198bce3f36b6580aad2cbf0d3a
from django.db import NotSupportedError from django.db.models import Index from django.utils.functional import cached_property __all__ = [ 'BloomIndex', 'BrinIndex', 'BTreeIndex', 'GinIndex', 'GistIndex', 'HashIndex', 'SpGistIndex', ] class PostgresIndex(Index): @cached_property def max_name_length(self): # Allow an index name longer than 30 characters when the suffix is # longer than the usual 3 character limit. The 30 character limit for # cross-database compatibility isn't applicable to PostgreSQL-specific # indexes. return Index.max_name_length - len(Index.suffix) + len(self.suffix) def create_sql(self, model, schema_editor, using='', **kwargs): self.check_supported(schema_editor) statement = super().create_sql(model, schema_editor, using=' USING %s' % self.suffix, **kwargs) with_params = self.get_with_params() if with_params: statement.parts['extra'] = 'WITH (%s) %s' % ( ', '.join(with_params), statement.parts['extra'], ) return statement def check_supported(self, schema_editor): pass def get_with_params(self): return [] class BloomIndex(PostgresIndex): suffix = 'bloom' def __init__(self, *, length=None, columns=(), **kwargs): super().__init__(**kwargs) if len(self.fields) > 32: raise ValueError('Bloom indexes support a maximum of 32 fields.') if not isinstance(columns, (list, tuple)): raise ValueError('BloomIndex.columns must be a list or tuple.') if len(columns) > len(self.fields): raise ValueError( 'BloomIndex.columns cannot have more values than fields.' ) if not all(0 < col <= 4095 for col in columns): raise ValueError( 'BloomIndex.columns must contain integers from 1 to 4095.', ) if length is not None and not 0 < length <= 4096: raise ValueError( 'BloomIndex.length must be None or an integer from 1 to 4096.', ) self.length = length self.columns = columns def deconstruct(self): path, args, kwargs = super().deconstruct() if self.length is not None: kwargs['length'] = self.length if self.columns: kwargs['columns'] = self.columns return path, args, kwargs def get_with_params(self): with_params = [] if self.length is not None: with_params.append('length = %d' % self.length) if self.columns: with_params.extend( 'col%d = %d' % (i, v) for i, v in enumerate(self.columns, start=1) ) return with_params class BrinIndex(PostgresIndex): suffix = 'brin' def __init__(self, *, autosummarize=None, pages_per_range=None, **kwargs): if pages_per_range is not None and pages_per_range <= 0: raise ValueError('pages_per_range must be None or a positive integer') self.autosummarize = autosummarize self.pages_per_range = pages_per_range super().__init__(**kwargs) def deconstruct(self): path, args, kwargs = super().deconstruct() if self.autosummarize is not None: kwargs['autosummarize'] = self.autosummarize if self.pages_per_range is not None: kwargs['pages_per_range'] = self.pages_per_range return path, args, kwargs def check_supported(self, schema_editor): if self.autosummarize and not schema_editor.connection.features.has_brin_autosummarize: raise NotSupportedError('BRIN option autosummarize requires PostgreSQL 10+.') def get_with_params(self): with_params = [] if self.autosummarize is not None: with_params.append('autosummarize = %s' % ('on' if self.autosummarize else 'off')) if self.pages_per_range is not None: with_params.append('pages_per_range = %d' % self.pages_per_range) return with_params class BTreeIndex(PostgresIndex): suffix = 'btree' def __init__(self, *, fillfactor=None, **kwargs): self.fillfactor = fillfactor super().__init__(**kwargs) def deconstruct(self): path, args, kwargs = 
super().deconstruct() if self.fillfactor is not None: kwargs['fillfactor'] = self.fillfactor return path, args, kwargs def get_with_params(self): with_params = [] if self.fillfactor is not None: with_params.append('fillfactor = %d' % self.fillfactor) return with_params class GinIndex(PostgresIndex): suffix = 'gin' def __init__(self, *, fastupdate=None, gin_pending_list_limit=None, **kwargs): self.fastupdate = fastupdate self.gin_pending_list_limit = gin_pending_list_limit super().__init__(**kwargs) def deconstruct(self): path, args, kwargs = super().deconstruct() if self.fastupdate is not None: kwargs['fastupdate'] = self.fastupdate if self.gin_pending_list_limit is not None: kwargs['gin_pending_list_limit'] = self.gin_pending_list_limit return path, args, kwargs def get_with_params(self): with_params = [] if self.gin_pending_list_limit is not None: with_params.append('gin_pending_list_limit = %d' % self.gin_pending_list_limit) if self.fastupdate is not None: with_params.append('fastupdate = %s' % ('on' if self.fastupdate else 'off')) return with_params class GistIndex(PostgresIndex): suffix = 'gist' def __init__(self, *, buffering=None, fillfactor=None, **kwargs): self.buffering = buffering self.fillfactor = fillfactor super().__init__(**kwargs) def deconstruct(self): path, args, kwargs = super().deconstruct() if self.buffering is not None: kwargs['buffering'] = self.buffering if self.fillfactor is not None: kwargs['fillfactor'] = self.fillfactor return path, args, kwargs def get_with_params(self): with_params = [] if self.buffering is not None: with_params.append('buffering = %s' % ('on' if self.buffering else 'off')) if self.fillfactor is not None: with_params.append('fillfactor = %d' % self.fillfactor) return with_params def check_supported(self, schema_editor): if self.include and not schema_editor.connection.features.supports_covering_gist_indexes: raise NotSupportedError('Covering GiST indexes requires PostgreSQL 12+.') class HashIndex(PostgresIndex): suffix = 'hash' def __init__(self, *, fillfactor=None, **kwargs): self.fillfactor = fillfactor super().__init__(**kwargs) def deconstruct(self): path, args, kwargs = super().deconstruct() if self.fillfactor is not None: kwargs['fillfactor'] = self.fillfactor return path, args, kwargs def get_with_params(self): with_params = [] if self.fillfactor is not None: with_params.append('fillfactor = %d' % self.fillfactor) return with_params class SpGistIndex(PostgresIndex): suffix = 'spgist' def __init__(self, *, fillfactor=None, **kwargs): self.fillfactor = fillfactor super().__init__(**kwargs) def deconstruct(self): path, args, kwargs = super().deconstruct() if self.fillfactor is not None: kwargs['fillfactor'] = self.fillfactor return path, args, kwargs def get_with_params(self): with_params = [] if self.fillfactor is not None: with_params.append('fillfactor = %d' % self.fillfactor) return with_params
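

# Illustrative sketch (hypothetical helper, not part of Django): the WITH
# clause parameters are computed without touching the database, so they can
# be inspected directly. The field, index name, and option values are
# placeholders.
def _demo_with_params():
    index = GinIndex(
        fields=['tags'],
        name='demo_tags_gin',
        fastupdate=False,
        gin_pending_list_limit=2048,
    )
    # -> ['gin_pending_list_limit = 2048', 'fastupdate = off']
    return index.get_with_params()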
fe90cd2f334791a117629869cbea7cc63cc30c76f4694e405c158141da8f01d1
""" Form Widget classes specific to the Django admin site. """ import copy import json from django import forms from django.conf import settings from django.core.exceptions import ValidationError from django.core.validators import URLValidator from django.db.models import CASCADE from django.urls import reverse from django.urls.exceptions import NoReverseMatch from django.utils.html import smart_urlquote from django.utils.http import urlencode from django.utils.text import Truncator from django.utils.translation import get_language, gettext as _ class FilteredSelectMultiple(forms.SelectMultiple): """ A SelectMultiple with a JavaScript filter interface. Note that the resulting JavaScript assumes that the jsi18n catalog has been loaded in the page """ class Media: js = [ 'admin/js/core.js', 'admin/js/SelectBox.js', 'admin/js/SelectFilter2.js', ] def __init__(self, verbose_name, is_stacked, attrs=None, choices=()): self.verbose_name = verbose_name self.is_stacked = is_stacked super().__init__(attrs, choices) def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) context['widget']['attrs']['class'] = 'selectfilter' if self.is_stacked: context['widget']['attrs']['class'] += 'stacked' context['widget']['attrs']['data-field-name'] = self.verbose_name context['widget']['attrs']['data-is-stacked'] = int(self.is_stacked) return context class AdminDateWidget(forms.DateInput): class Media: js = [ 'admin/js/calendar.js', 'admin/js/admin/DateTimeShortcuts.js', ] def __init__(self, attrs=None, format=None): attrs = {'class': 'vDateField', 'size': '10', **(attrs or {})} super().__init__(attrs=attrs, format=format) class AdminTimeWidget(forms.TimeInput): class Media: js = [ 'admin/js/calendar.js', 'admin/js/admin/DateTimeShortcuts.js', ] def __init__(self, attrs=None, format=None): attrs = {'class': 'vTimeField', 'size': '8', **(attrs or {})} super().__init__(attrs=attrs, format=format) class AdminSplitDateTime(forms.SplitDateTimeWidget): """ A SplitDateTime Widget that has some admin-specific styling. """ template_name = 'admin/widgets/split_datetime.html' def __init__(self, attrs=None): widgets = [AdminDateWidget, AdminTimeWidget] # Note that we're calling MultiWidget, not SplitDateTimeWidget, because # we want to define widgets. forms.MultiWidget.__init__(self, widgets, attrs) def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) context['date_label'] = _('Date:') context['time_label'] = _('Time:') return context class AdminRadioSelect(forms.RadioSelect): template_name = 'admin/widgets/radio.html' class AdminFileWidget(forms.ClearableFileInput): template_name = 'admin/widgets/clearable_file_input.html' def url_params_from_lookup_dict(lookups): """ Convert the type of lookups specified in a ForeignKey limit_choices_to attribute to a dictionary of query parameters """ params = {} if lookups and hasattr(lookups, 'items'): for k, v in lookups.items(): if callable(v): v = v() if isinstance(v, (tuple, list)): v = ','.join(str(x) for x in v) elif isinstance(v, bool): v = ('0', '1')[v] else: v = str(v) params[k] = v return params class ForeignKeyRawIdWidget(forms.TextInput): """ A Widget for displaying ForeignKeys in the "raw_id" interface rather than in a <select> box. 
""" template_name = 'admin/widgets/foreign_key_raw_id.html' def __init__(self, rel, admin_site, attrs=None, using=None): self.rel = rel self.admin_site = admin_site self.db = using super().__init__(attrs) def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) rel_to = self.rel.model if rel_to in self.admin_site._registry: # The related object is registered with the same AdminSite related_url = reverse( 'admin:%s_%s_changelist' % ( rel_to._meta.app_label, rel_to._meta.model_name, ), current_app=self.admin_site.name, ) params = self.url_parameters() if params: related_url += '?' + urlencode(params) context['related_url'] = related_url context['link_title'] = _('Lookup') # The JavaScript code looks for this class. context['widget']['attrs'].setdefault('class', 'vForeignKeyRawIdAdminField') else: context['related_url'] = None if context['widget']['value']: context['link_label'], context['link_url'] = self.label_and_url_for_value(value) else: context['link_label'] = None return context def base_url_parameters(self): limit_choices_to = self.rel.limit_choices_to if callable(limit_choices_to): limit_choices_to = limit_choices_to() return url_params_from_lookup_dict(limit_choices_to) def url_parameters(self): from django.contrib.admin.views.main import TO_FIELD_VAR params = self.base_url_parameters() params.update({TO_FIELD_VAR: self.rel.get_related_field().name}) return params def label_and_url_for_value(self, value): key = self.rel.get_related_field().name try: obj = self.rel.model._default_manager.using(self.db).get(**{key: value}) except (ValueError, self.rel.model.DoesNotExist, ValidationError): return '', '' try: url = reverse( '%s:%s_%s_change' % ( self.admin_site.name, obj._meta.app_label, obj._meta.object_name.lower(), ), args=(obj.pk,) ) except NoReverseMatch: url = '' # Admin not registered for target model. return Truncator(obj).words(14), url class ManyToManyRawIdWidget(ForeignKeyRawIdWidget): """ A Widget for displaying ManyToMany ids in the "raw_id" interface rather than in a <select multiple> box. """ template_name = 'admin/widgets/many_to_many_raw_id.html' def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) if self.rel.model in self.admin_site._registry: # The related object is registered with the same AdminSite context['widget']['attrs']['class'] = 'vManyToManyRawIdAdminField' return context def url_parameters(self): return self.base_url_parameters() def label_and_url_for_value(self, value): return '', '' def value_from_datadict(self, data, files, name): value = data.get(name) if value: return value.split(',') def format_value(self, value): return ','.join(str(v) for v in value) if value else '' class RelatedFieldWidgetWrapper(forms.Widget): """ This class is a wrapper to a given widget to add the add icon for the admin interface. """ template_name = 'admin/widgets/related_widget_wrapper.html' def __init__(self, widget, rel, admin_site, can_add_related=None, can_change_related=False, can_delete_related=False, can_view_related=False): self.needs_multipart_form = widget.needs_multipart_form self.attrs = widget.attrs self.choices = widget.choices self.widget = widget self.rel = rel # Backwards compatible check for whether a user can add related # objects. if can_add_related is None: can_add_related = rel.model in admin_site._registry self.can_add_related = can_add_related # XXX: The UX does not support multiple selected values. 
multiple = getattr(widget, 'allow_multiple_selected', False) self.can_change_related = not multiple and can_change_related # XXX: The deletion UX can be confusing when dealing with cascading deletion. cascade = getattr(rel, 'on_delete', None) is CASCADE self.can_delete_related = not multiple and not cascade and can_delete_related self.can_view_related = not multiple and can_view_related # so we can check if the related object is registered with this AdminSite self.admin_site = admin_site def __deepcopy__(self, memo): obj = copy.copy(self) obj.widget = copy.deepcopy(self.widget, memo) obj.attrs = self.widget.attrs memo[id(self)] = obj return obj @property def is_hidden(self): return self.widget.is_hidden @property def media(self): return self.widget.media def get_related_url(self, info, action, *args): return reverse("admin:%s_%s_%s" % (info + (action,)), current_app=self.admin_site.name, args=args) def get_context(self, name, value, attrs): from django.contrib.admin.views.main import IS_POPUP_VAR, TO_FIELD_VAR rel_opts = self.rel.model._meta info = (rel_opts.app_label, rel_opts.model_name) self.widget.choices = self.choices url_params = '&'.join("%s=%s" % param for param in [ (TO_FIELD_VAR, self.rel.get_related_field().name), (IS_POPUP_VAR, 1), ]) context = { 'rendered_widget': self.widget.render(name, value, attrs), 'is_hidden': self.is_hidden, 'name': name, 'url_params': url_params, 'model': rel_opts.verbose_name, 'can_add_related': self.can_add_related, 'can_change_related': self.can_change_related, 'can_delete_related': self.can_delete_related, 'can_view_related': self.can_view_related, } if self.can_add_related: context['add_related_url'] = self.get_related_url(info, 'add') if self.can_delete_related: context['delete_related_template_url'] = self.get_related_url(info, 'delete', '__fk__') if self.can_view_related or self.can_change_related: context['change_related_template_url'] = self.get_related_url(info, 'change', '__fk__') return context def value_from_datadict(self, data, files, name): return self.widget.value_from_datadict(data, files, name) def value_omitted_from_data(self, data, files, name): return self.widget.value_omitted_from_data(data, files, name) def id_for_label(self, id_): return self.widget.id_for_label(id_) class AdminTextareaWidget(forms.Textarea): def __init__(self, attrs=None): super().__init__(attrs={'class': 'vLargeTextField', **(attrs or {})}) class AdminTextInputWidget(forms.TextInput): def __init__(self, attrs=None): super().__init__(attrs={'class': 'vTextField', **(attrs or {})}) class AdminEmailInputWidget(forms.EmailInput): def __init__(self, attrs=None): super().__init__(attrs={'class': 'vTextField', **(attrs or {})}) class AdminURLFieldWidget(forms.URLInput): template_name = 'admin/widgets/url.html' def __init__(self, attrs=None, validator_class=URLValidator): super().__init__(attrs={'class': 'vURLField', **(attrs or {})}) self.validator = validator_class() def get_context(self, name, value, attrs): try: self.validator(value if value else '') url_valid = True except ValidationError: url_valid = False context = super().get_context(name, value, attrs) context['current_label'] = _('Currently:') context['change_label'] = _('Change:') context['widget']['href'] = smart_urlquote(context['widget']['value']) if value else '' context['url_valid'] = url_valid return context class AdminIntegerFieldWidget(forms.NumberInput): class_name = 'vIntegerField' def __init__(self, attrs=None): super().__init__(attrs={'class': self.class_name, **(attrs or {})}) class 
AdminBigIntegerFieldWidget(AdminIntegerFieldWidget): class_name = 'vBigIntegerField' class AdminUUIDInputWidget(forms.TextInput): def __init__(self, attrs=None): super().__init__(attrs={'class': 'vUUIDField', **(attrs or {})}) # Mapping of lowercase language codes [returned by Django's get_language()] to # language codes supported by select2. # See django/contrib/admin/static/admin/js/vendor/select2/i18n/* SELECT2_TRANSLATIONS = {x.lower(): x for x in [ 'ar', 'az', 'bg', 'ca', 'cs', 'da', 'de', 'el', 'en', 'es', 'et', 'eu', 'fa', 'fi', 'fr', 'gl', 'he', 'hi', 'hr', 'hu', 'id', 'is', 'it', 'ja', 'km', 'ko', 'lt', 'lv', 'mk', 'ms', 'nb', 'nl', 'pl', 'pt-BR', 'pt', 'ro', 'ru', 'sk', 'sr-Cyrl', 'sr', 'sv', 'th', 'tr', 'uk', 'vi', ]} SELECT2_TRANSLATIONS.update({'zh-hans': 'zh-CN', 'zh-hant': 'zh-TW'}) class AutocompleteMixin: """ Select widget mixin that loads options from AutocompleteJsonView via AJAX. Renders the necessary data attributes for select2 and adds the static form media. """ url_name = '%s:%s_%s_autocomplete' def __init__(self, rel, admin_site, attrs=None, choices=(), using=None): self.rel = rel self.admin_site = admin_site self.db = using self.choices = choices self.attrs = {} if attrs is None else attrs.copy() def get_url(self): model = self.rel.model return reverse(self.url_name % (self.admin_site.name, model._meta.app_label, model._meta.model_name)) def build_attrs(self, base_attrs, extra_attrs=None): """ Set select2's AJAX attributes. Attributes can be set using the html5 data attribute. Nested attributes require a double dash as per https://select2.org/configuration/data-attributes#nested-subkey-options """ attrs = super().build_attrs(base_attrs, extra_attrs=extra_attrs) attrs.setdefault('class', '') attrs.update({ 'data-ajax--cache': 'true', 'data-ajax--delay': 250, 'data-ajax--type': 'GET', 'data-ajax--url': self.get_url(), 'data-theme': 'admin-autocomplete', 'data-allow-clear': json.dumps(not self.is_required), 'data-placeholder': '', # Allows clearing of the input. 
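            # Append rather than overwrite so caller-provided classes survive.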
'class': attrs['class'] + (' ' if attrs['class'] else '') + 'admin-autocomplete', }) return attrs def optgroups(self, name, value, attr=None): """Return selected options based on the ModelChoiceIterator.""" default = (None, [], 0) groups = [default] has_selected = False selected_choices = { str(v) for v in value if str(v) not in self.choices.field.empty_values } if not self.is_required and not self.allow_multiple_selected: default[1].append(self.create_option(name, '', '', False, 0)) choices = ( (obj.pk, self.choices.field.label_from_instance(obj)) for obj in self.choices.queryset.using(self.db).filter(pk__in=selected_choices) ) for option_value, option_label in choices: selected = ( str(option_value) in value and (has_selected is False or self.allow_multiple_selected) ) has_selected |= selected index = len(default[1]) subgroup = default[1] subgroup.append(self.create_option(name, option_value, option_label, selected_choices, index)) return groups @property def media(self): extra = '' if settings.DEBUG else '.min' i18n_name = SELECT2_TRANSLATIONS.get(get_language()) i18n_file = ('admin/js/vendor/select2/i18n/%s.js' % i18n_name,) if i18n_name else () return forms.Media( js=( 'admin/js/vendor/jquery/jquery%s.js' % extra, 'admin/js/vendor/select2/select2.full%s.js' % extra, ) + i18n_file + ( 'admin/js/jquery.init.js', 'admin/js/autocomplete.js', ), css={ 'screen': ( 'admin/css/vendor/select2/select2%s.css' % extra, 'admin/css/autocomplete.css', ), }, ) class AutocompleteSelect(AutocompleteMixin, forms.Select): pass class AutocompleteSelectMultiple(AutocompleteMixin, forms.SelectMultiple): pass
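
# Illustrative sketch (not part of Django's source): AutocompleteSelect and
# AutocompleteSelectMultiple are normally instantiated for you by ModelAdmin
# when ``autocomplete_fields`` is set, rather than by hand. "Question" and
# "Choice" below are hypothetical models used only for this example.
#
#     from django.contrib import admin
#     from polls.models import Choice, Question
#
#     @admin.register(Question)
#     class QuestionAdmin(admin.ModelAdmin):
#         # The target admin must define search_fields; the autocomplete
#         # endpoint uses them to filter the AJAX results.
#         search_fields = ['question_text']
#
#     @admin.register(Choice)
#     class ChoiceAdmin(admin.ModelAdmin):
#         # Renders the "question" ForeignKey with AutocompleteSelect.
#         autocomplete_fields = ['question']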
from django.db.backends.ddl_references import Statement
from django.db.backends.postgresql.schema import DatabaseSchemaEditor


class PostGISSchemaEditor(DatabaseSchemaEditor):
    geom_index_type = 'GIST'
    geom_index_ops_nd = 'GIST_GEOMETRY_OPS_ND'
    rast_index_wrapper = 'ST_ConvexHull(%s)'

    sql_alter_column_to_3d = "ALTER COLUMN %(column)s TYPE %(type)s USING ST_Force3D(%(column)s)::%(type)s"
    sql_alter_column_to_2d = "ALTER COLUMN %(column)s TYPE %(type)s USING ST_Force2D(%(column)s)::%(type)s"

    def geo_quote_name(self, name):
        return self.connection.ops.geo_quote_name(name)

    def _field_should_be_indexed(self, model, field):
        if getattr(field, 'spatial_index', False):
            return True
        return super()._field_should_be_indexed(model, field)

    def _create_index_sql(self, model, fields, **kwargs):
        if len(fields) != 1 or not hasattr(fields[0], 'geodetic'):
            return super()._create_index_sql(model, fields, **kwargs)

        field = fields[0]
        field_column = self.quote_name(field.column)

        if field.geom_type == 'RASTER':
            # For raster fields, wrap index creation SQL statement with ST_ConvexHull.
            # Indexes on raster columns are based on the convex hull of the raster.
            field_column = self.rast_index_wrapper % field_column
        elif field.dim > 2 and not field.geography:
            # Use "nd" ops which are fast on multidimensional cases
            field_column = "%s %s" % (field_column, self.geom_index_ops_nd)
        if kwargs.get('name') is None:
            index_name = '%s_%s_id' % (model._meta.db_table, field.column)
        else:
            index_name = kwargs['name']

        return Statement(
            self.sql_create_index,
            name=self.quote_name(index_name),
            table=self.quote_name(model._meta.db_table),
            using=' USING %s' % self.geom_index_type,
            columns=field_column,
            extra='',
            condition='',
            include='',
        )

    def _alter_column_type_sql(self, table, old_field, new_field, new_type):
        """
        Special-case geometry columns whose dimension changed, converting
        existing values with ST_Force3D/ST_Force2D.
        """
        if not hasattr(old_field, 'dim') or not hasattr(new_field, 'dim'):
            return super()._alter_column_type_sql(table, old_field, new_field, new_type)

        if old_field.dim == 2 and new_field.dim == 3:
            sql_alter = self.sql_alter_column_to_3d
        elif old_field.dim == 3 and new_field.dim == 2:
            sql_alter = self.sql_alter_column_to_2d
        else:
            sql_alter = self.sql_alter_column_type
        return (
            (
                sql_alter % {
                    "column": self.quote_name(new_field.column),
                    "type": new_type,
                },
                [],
            ),
            [],
        )
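
# Illustrative sketch (not part of Django's source): roughly the statement
# this editor produces when a geometry column's dim changes from 2 to 3. The
# table, column, and geometry type below are hypothetical; the ALTER TABLE
# prefix comes from the base editor's sql_alter_column template.
#
#     ALTER TABLE "myapp_track"
#         ALTER COLUMN "line" TYPE geometry(LineStringZ, 4326)
#         USING ST_Force3D("line")::geometry(LineStringZ, 4326);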
from django.core.exceptions import FieldDoesNotExist from django.db import ( IntegrityError, connection, migrations, models, transaction, ) from django.db.migrations.migration import Migration from django.db.migrations.operations.fields import FieldOperation from django.db.migrations.state import ModelState, ProjectState from django.db.transaction import atomic from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature from .models import FoodManager, FoodQuerySet, UnicodeModel from .test_base import OperationTestBase class Mixin: pass class OperationTests(OperationTestBase): """ Tests running the operations and making sure they do what they say they do. Each test looks at their state changing, and then their database operation - both forwards and backwards. """ def test_create_model(self): """ Tests the CreateModel operation. Most other tests use this operation as part of setup, so check failures here first. """ operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) self.assertEqual(operation.describe(), "Create model Pony") self.assertEqual(operation.migration_name_fragment, 'pony') # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crmo", editor, project_state, new_state) self.assertTableExists("test_crmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["fields", "name"]) # And default manager not in set operation = migrations.CreateModel("Foo", fields=[], managers=[("objects", models.Manager())]) definition = operation.deconstruct() self.assertNotIn('managers', definition[2]) def test_create_model_with_duplicate_field_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value pink in CreateModel fields argument.'): migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.TextField()), ("pink", models.IntegerField(default=1)), ], ) def test_create_model_with_duplicate_base(self): message = 'Found duplicate value test_crmo.pony in CreateModel bases argument.' with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.Pony",), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.pony",), ) message = 'Found duplicate value migrations.unicodemodel in CreateModel bases argument.' 
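        # Base references are normalized (lowercased dotted paths) before
        # comparison, so a model class and its "app_label.ModelName" string
        # count as the same value, whatever the casing.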
with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, UnicodeModel,), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.unicodemodel',), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.UnicodeModel',), ) message = "Found duplicate value <class 'django.db.models.base.Model'> in CreateModel bases argument." with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(models.Model, models.Model,), ) message = "Found duplicate value <class 'migrations.test_operations.Mixin'> in CreateModel bases argument." with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(Mixin, Mixin,), ) def test_create_model_with_duplicate_manager_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value objects in CreateModel managers argument.'): migrations.CreateModel( "Pony", fields=[], managers=[ ("objects", models.Manager()), ("objects", models.Manager()), ], ) def test_create_model_with_unique_after(self): """ Tests the CreateModel operation directly followed by an AlterUniqueTogether (bug #22844 - sqlite remake issues) """ operation1 = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) operation2 = migrations.CreateModel( "Rider", [ ("id", models.AutoField(primary_key=True)), ("number", models.IntegerField(default=1)), ("pony", models.ForeignKey("test_crmoua.Pony", models.CASCADE)), ], ) operation3 = migrations.AlterUniqueTogether( "Rider", [ ("number", "pony"), ], ) # Test the database alteration project_state = ProjectState() self.assertTableNotExists("test_crmoua_pony") self.assertTableNotExists("test_crmoua_rider") with connection.schema_editor() as editor: new_state = project_state.clone() operation1.state_forwards("test_crmoua", new_state) operation1.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation2.state_forwards("test_crmoua", new_state) operation2.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation3.state_forwards("test_crmoua", new_state) operation3.database_forwards("test_crmoua", editor, project_state, new_state) self.assertTableExists("test_crmoua_pony") self.assertTableExists("test_crmoua_rider") def test_create_model_m2m(self): """ Test the creation of a model with a ManyToMany field and the auto-created "through" model. 
""" project_state = self.set_up_test_model("test_crmomm") operation = migrations.CreateModel( "Stable", [ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("Pony", related_name="stables")) ] ) # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_crmomm", new_state) # Test the database alteration self.assertTableNotExists("test_crmomm_stable_ponies") with connection.schema_editor() as editor: operation.database_forwards("test_crmomm", editor, project_state, new_state) self.assertTableExists("test_crmomm_stable") self.assertTableExists("test_crmomm_stable_ponies") self.assertColumnNotExists("test_crmomm_stable", "ponies") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_crmomm", "Pony") Stable = new_state.apps.get_model("test_crmomm", "Stable") stable = Stable.objects.create() p1 = Pony.objects.create(pink=False, weight=4.55) p2 = Pony.objects.create(pink=True, weight=5.43) stable.ponies.add(p1, p2) self.assertEqual(stable.ponies.count(), 2) stable.ponies.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmomm", editor, new_state, project_state) self.assertTableNotExists("test_crmomm_stable") self.assertTableNotExists("test_crmomm_stable_ponies") def test_create_model_inheritance(self): """ Tests the CreateModel operation on a multi-table inheritance setup. """ project_state = self.set_up_test_model("test_crmoih") # Test the state alteration operation = migrations.CreateModel( "ShetlandPony", [ ('pony_ptr', models.OneToOneField( 'test_crmoih.Pony', models.CASCADE, auto_created=True, primary_key=True, to_field='id', serialize=False, )), ("cuteness", models.IntegerField(default=1)), ], ) new_state = project_state.clone() operation.state_forwards("test_crmoih", new_state) self.assertIn(("test_crmoih", "shetlandpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crmoih_shetlandpony") with connection.schema_editor() as editor: operation.database_forwards("test_crmoih", editor, project_state, new_state) self.assertTableExists("test_crmoih_shetlandpony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmoih", editor, new_state, project_state) self.assertTableNotExists("test_crmoih_shetlandpony") def test_create_proxy_model(self): """ CreateModel ignores proxy models. 
""" project_state = self.set_up_test_model("test_crprmo") # Test the state alteration operation = migrations.CreateModel( "ProxyPony", [], options={"proxy": True}, bases=("test_crprmo.Pony",), ) self.assertEqual(operation.describe(), "Create proxy model ProxyPony") new_state = project_state.clone() operation.state_forwards("test_crprmo", new_state) self.assertIn(("test_crprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crprmo", editor, project_state, new_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crprmo", editor, new_state, project_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["bases", "fields", "name", "options"]) def test_create_unmanaged_model(self): """ CreateModel ignores unmanaged models. """ project_state = self.set_up_test_model("test_crummo") # Test the state alteration operation = migrations.CreateModel( "UnmanagedPony", [], options={"proxy": True}, bases=("test_crummo.Pony",), ) self.assertEqual(operation.describe(), "Create proxy model UnmanagedPony") new_state = project_state.clone() operation.state_forwards("test_crummo", new_state) self.assertIn(("test_crummo", "unmanagedpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crummo", editor, project_state, new_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crummo", editor, new_state, project_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") @skipUnlessDBFeature('supports_table_check_constraints') def test_create_model_with_constraint(self): where = models.Q(pink__gt=2) check_constraint = models.CheckConstraint(check=where, name='test_constraint_pony_pink_gt_2') operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=3)), ], options={'constraints': [check_constraint]}, ) # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) # Test database alteration self.assertTableNotExists("test_crmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crmo", editor, project_state, new_state) self.assertTableExists("test_crmo_pony") with connection.cursor() as cursor: with self.assertRaises(IntegrityError): cursor.execute("INSERT INTO test_crmo_pony (id, pink) VALUES (1, 1)") # Test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") # Test deconstruction definition = operation.deconstruct() 
self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(definition[2]['options']['constraints'], [check_constraint]) def test_create_model_with_partial_unique_constraint(self): partial_unique_constraint = models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ('weight', models.FloatField()), ], options={'constraints': [partial_unique_constraint]}, ) # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) # Test database alteration self.assertTableNotExists('test_crmo_pony') with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') # Test constraint works Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test reversal with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Test deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual(definition[2]['options']['constraints'], [partial_unique_constraint]) def test_create_model_with_deferred_unique_constraint(self): deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferrable_pink_constraint', deferrable=models.Deferrable.DEFERRED, ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ], options={'constraints': [deferred_unique_constraint]}, ) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) self.assertTableNotExists('test_crmo_pony') # Create table. with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1) obj.pink = 3 obj.save() else: Pony.objects.create(pink=1) # Reversal. with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Deconstruction. 
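        # The deferrable flag must survive deconstruct() so the constraint
        # can be re-serialized faithfully by makemigrations.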
definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual( definition[2]['options']['constraints'], [deferred_unique_constraint], ) @skipUnlessDBFeature('supports_covering_indexes') def test_create_model_with_covering_unique_constraint(self): covering_unique_constraint = models.UniqueConstraint( fields=['pink'], include=['weight'], name='test_constraint_pony_pink_covering_weight', ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ('weight', models.FloatField()), ], options={'constraints': [covering_unique_constraint]}, ) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) self.assertTableNotExists('test_crmo_pony') # Create table. with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1, weight=4.0) with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=7.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual( definition[2]['options']['constraints'], [covering_unique_constraint], ) def test_create_model_managers(self): """ The managers on a model are set. """ project_state = self.set_up_test_model("test_cmoma") # Test the state alteration operation = migrations.CreateModel( "Food", fields=[ ("id", models.AutoField(primary_key=True)), ], managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Create model Food") new_state = project_state.clone() operation.state_forwards("test_cmoma", new_state) self.assertIn(("test_cmoma", "food"), new_state.models) managers = new_state.models["test_cmoma", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) def test_delete_model(self): """ Tests the DeleteModel operation. 
""" project_state = self.set_up_test_model("test_dlmo") # Test the state alteration operation = migrations.DeleteModel("Pony") self.assertEqual(operation.describe(), "Delete model Pony") self.assertEqual(operation.migration_name_fragment, 'delete_pony') new_state = project_state.clone() operation.state_forwards("test_dlmo", new_state) self.assertNotIn(("test_dlmo", "pony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dlmo", editor, project_state, new_state) self.assertTableNotExists("test_dlmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlmo", editor, new_state, project_state) self.assertTableExists("test_dlmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "DeleteModel") self.assertEqual(definition[1], []) self.assertEqual(list(definition[2]), ["name"]) def test_delete_proxy_model(self): """ Tests the DeleteModel operation ignores proxy models. """ project_state = self.set_up_test_model("test_dlprmo", proxy_model=True) # Test the state alteration operation = migrations.DeleteModel("ProxyPony") new_state = project_state.clone() operation.state_forwards("test_dlprmo", new_state) self.assertIn(("test_dlprmo", "proxypony"), project_state.models) self.assertNotIn(("test_dlprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") with connection.schema_editor() as editor: operation.database_forwards("test_dlprmo", editor, project_state, new_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlprmo", editor, new_state, project_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") def test_delete_mti_model(self): project_state = self.set_up_test_model('test_dlmtimo', mti_model=True) # Test the state alteration operation = migrations.DeleteModel('ShetlandPony') new_state = project_state.clone() operation.state_forwards('test_dlmtimo', new_state) self.assertIn(('test_dlmtimo', 'shetlandpony'), project_state.models) self.assertNotIn(('test_dlmtimo', 'shetlandpony'), new_state.models) # Test the database alteration self.assertTableExists('test_dlmtimo_pony') self.assertTableExists('test_dlmtimo_shetlandpony') self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id') with connection.schema_editor() as editor: operation.database_forwards('test_dlmtimo', editor, project_state, new_state) self.assertTableExists('test_dlmtimo_pony') self.assertTableNotExists('test_dlmtimo_shetlandpony') # And test reversal with connection.schema_editor() as editor: operation.database_backwards('test_dlmtimo', editor, new_state, project_state) self.assertTableExists('test_dlmtimo_pony') self.assertTableExists('test_dlmtimo_shetlandpony') self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id') def test_rename_model(self): """ Tests the RenameModel operation. 
""" project_state = self.set_up_test_model("test_rnmo", related_model=True) # Test the state alteration operation = migrations.RenameModel("Pony", "Horse") self.assertEqual(operation.describe(), "Rename model Pony to Horse") self.assertEqual(operation.migration_name_fragment, 'rename_pony_horse') # Test initial state and database self.assertIn(("test_rnmo", "pony"), project_state.models) self.assertNotIn(("test_rnmo", "horse"), project_state.models) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate forwards new_state = project_state.clone() atomic_rename = connection.features.supports_atomic_references_rename new_state = self.apply_operations("test_rnmo", new_state, [operation], atomic=atomic_rename) # Test new state and database self.assertNotIn(("test_rnmo", "pony"), new_state.models) self.assertIn(("test_rnmo", "horse"), new_state.models) # RenameModel also repoints all incoming FKs and M2Ms self.assertEqual( new_state.models['test_rnmo', 'rider'].fields['pony'].remote_field.model, 'test_rnmo.Horse', ) self.assertTableNotExists("test_rnmo_pony") self.assertTableExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate backwards original_state = self.unapply_operations("test_rnmo", project_state, [operation], atomic=atomic_rename) # Test original state and database self.assertIn(("test_rnmo", "pony"), original_state.models) self.assertNotIn(("test_rnmo", "horse"), original_state.models) self.assertEqual( original_state.models['test_rnmo', 'rider'].fields['pony'].remote_field.model, 'Pony', ) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameModel") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'old_name': "Pony", 'new_name': "Horse"}) def test_rename_model_state_forwards(self): """ RenameModel operations shouldn't trigger the caching of rendered apps on state without prior apps. """ state = ProjectState() state.add_model(ModelState('migrations', 'Foo', [])) operation = migrations.RenameModel('Foo', 'Bar') operation.state_forwards('migrations', state) self.assertNotIn('apps', state.__dict__) self.assertNotIn(('migrations', 'foo'), state.models) self.assertIn(('migrations', 'bar'), state.models) # Now with apps cached. apps = state.apps operation = migrations.RenameModel('Bar', 'Foo') operation.state_forwards('migrations', state) self.assertIs(state.apps, apps) self.assertNotIn(('migrations', 'bar'), state.models) self.assertIn(('migrations', 'foo'), state.models) def test_rename_model_with_self_referential_fk(self): """ Tests the RenameModel operation on model with self referential FK. 
""" project_state = self.set_up_test_model("test_rmwsrf", related_model=True) # Test the state alteration operation = migrations.RenameModel("Rider", "HorseRider") self.assertEqual(operation.describe(), "Rename model Rider to HorseRider") new_state = project_state.clone() operation.state_forwards("test_rmwsrf", new_state) self.assertNotIn(("test_rmwsrf", "rider"), new_state.models) self.assertIn(("test_rmwsrf", "horserider"), new_state.models) # Remember, RenameModel also repoints all incoming FKs and M2Ms self.assertEqual( 'self', new_state.models["test_rmwsrf", "horserider"].fields['friend'].remote_field.model ) HorseRider = new_state.apps.get_model('test_rmwsrf', 'horserider') self.assertIs(HorseRider._meta.get_field('horserider').remote_field.model, HorseRider) # Test the database alteration self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_forwards("test_rmwsrf", editor, project_state, new_state) self.assertTableNotExists("test_rmwsrf_rider") self.assertTableExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_horserider", "id")) # And test reversal with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_backwards("test_rmwsrf", editor, new_state, project_state) self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) def test_rename_model_with_superclass_fk(self): """ Tests the RenameModel operation on a model which has a superclass that has a foreign key. 
""" project_state = self.set_up_test_model("test_rmwsc", related_model=True, mti_model=True) # Test the state alteration operation = migrations.RenameModel("ShetlandPony", "LittleHorse") self.assertEqual(operation.describe(), "Rename model ShetlandPony to LittleHorse") new_state = project_state.clone() operation.state_forwards("test_rmwsc", new_state) self.assertNotIn(("test_rmwsc", "shetlandpony"), new_state.models) self.assertIn(("test_rmwsc", "littlehorse"), new_state.models) # RenameModel shouldn't repoint the superclass's relations, only local ones self.assertEqual( project_state.models['test_rmwsc', 'rider'].fields['pony'].remote_field.model, new_state.models['test_rmwsc', 'rider'].fields['pony'].remote_field.model, ) # Before running the migration we have a table for Shetland Pony, not Little Horse self.assertTableExists("test_rmwsc_shetlandpony") self.assertTableNotExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # and the foreign key on rider points to pony, not shetland pony self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_shetlandpony", "id")) with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: operation.database_forwards("test_rmwsc", editor, project_state, new_state) # Now we have a little horse table, not shetland pony self.assertTableNotExists("test_rmwsc_shetlandpony") self.assertTableExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # but the Foreign keys still point at pony, not little horse self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_littlehorse", "id")) def test_rename_model_with_self_referential_m2m(self): app_label = "test_rename_model_with_self_referential_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("ReflexivePony", fields=[ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("self")), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("ReflexivePony", "ReflexivePony2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "ReflexivePony2") pony = Pony.objects.create() pony.ponies.add(pony) def test_rename_model_with_m2m(self): app_label = "test_rename_model_with_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ("riders", models.ManyToManyField("Rider")), ]), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("Pony", "Pony2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "Pony2") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) self.assertEqual(Pony.objects.count(), 2) self.assertEqual(Rider.objects.count(), 2) 
self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2) def test_rename_m2m_target_model(self): app_label = "test_rename_m2m_target_model" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ("riders", models.ManyToManyField("Rider")), ]), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("Rider", "Rider2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider2") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) self.assertEqual(Pony.objects.count(), 2) self.assertEqual(Rider.objects.count(), 2) self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2) def test_rename_m2m_through_model(self): app_label = "test_rename_through" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("PonyRider", fields=[ ("id", models.AutoField(primary_key=True)), ("rider", models.ForeignKey("test_rename_through.Rider", models.CASCADE)), ("pony", models.ForeignKey("test_rename_through.Pony", models.CASCADE)), ]), migrations.AddField( "Pony", "riders", models.ManyToManyField("test_rename_through.Rider", through="test_rename_through.PonyRider"), ), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") PonyRider = project_state.apps.get_model(app_label, "PonyRider") pony = Pony.objects.create() rider = Rider.objects.create() PonyRider.objects.create(pony=pony, rider=rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("PonyRider", "PonyRider2"), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") PonyRider = project_state.apps.get_model(app_label, "PonyRider2") pony = Pony.objects.first() rider = Rider.objects.create() PonyRider.objects.create(pony=pony, rider=rider) self.assertEqual(Pony.objects.count(), 1) self.assertEqual(Rider.objects.count(), 2) self.assertEqual(PonyRider.objects.count(), 2) self.assertEqual(pony.riders.count(), 2) def test_rename_m2m_model_after_rename_field(self): """RenameModel renames a many-to-many column after a RenameField.""" app_label = 'test_rename_multiple' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=20)), ]), migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('test_rename_multiple.Pony', models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('riders', models.ManyToManyField('Rider')), ]), migrations.RenameField(model_name='pony', old_name='name', 
                                   new_name='fancy_name'),
            migrations.RenameModel(old_name='Rider', new_name='Jockey'),
        ], atomic=connection.features.supports_atomic_references_rename)
        Pony = project_state.apps.get_model(app_label, 'Pony')
        Jockey = project_state.apps.get_model(app_label, 'Jockey')
        PonyRider = project_state.apps.get_model(app_label, 'PonyRider')
        # No "no such column" error means the column was renamed correctly.
        pony = Pony.objects.create(fancy_name='a good name')
        jockey = Jockey.objects.create(pony=pony)
        ponyrider = PonyRider.objects.create()
        ponyrider.riders.add(jockey)

    def test_add_field(self):
        """
        Tests the AddField operation.
        """
        # Test the state alteration
        operation = migrations.AddField(
            "Pony",
            "height",
            models.FloatField(null=True, default=5),
        )
        self.assertEqual(operation.describe(), "Add field height to Pony")
        self.assertEqual(operation.migration_name_fragment, 'pony_height')
        project_state, new_state = self.make_test_state("test_adfl", operation)
        self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4)
        field = new_state.models['test_adfl', 'pony'].fields['height']
        self.assertEqual(field.default, 5)
        # Test the database alteration
        self.assertColumnNotExists("test_adfl_pony", "height")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_adfl", editor, project_state, new_state)
        self.assertColumnExists("test_adfl_pony", "height")
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_adfl", editor, new_state, project_state)
        self.assertColumnNotExists("test_adfl_pony", "height")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddField")
        self.assertEqual(definition[1], [])
        self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])

    def test_add_charfield(self):
        """
        Tests the AddField operation on CharField.
        """
        project_state = self.set_up_test_model("test_adchfl")

        Pony = project_state.apps.get_model("test_adchfl", "Pony")
        pony = Pony.objects.create(weight=42)

        new_state = self.apply_operations("test_adchfl", project_state, [
            migrations.AddField(
                "Pony",
                "text",
                models.CharField(max_length=10, default="some text"),
            ),
            migrations.AddField(
                "Pony",
                "empty",
                models.CharField(max_length=10, default=""),
            ),
            # If not properly quoted digits would be interpreted as an int.
            migrations.AddField(
                "Pony",
                "digits",
                models.CharField(max_length=10, default="42"),
            ),
            # Manual quoting is fragile and could trip on quotes. Refs #xyz.
            migrations.AddField(
                "Pony",
                "quotes",
                models.CharField(max_length=10, default='"\'"'),
            ),
        ])

        Pony = new_state.apps.get_model("test_adchfl", "Pony")
        pony = Pony.objects.get(pk=pony.pk)
        self.assertEqual(pony.text, "some text")
        self.assertEqual(pony.empty, "")
        self.assertEqual(pony.digits, "42")
        self.assertEqual(pony.quotes, '"\'"')

    def test_add_textfield(self):
        """
        Tests the AddField operation on TextField.
        """
        project_state = self.set_up_test_model("test_adtxtfl")

        Pony = project_state.apps.get_model("test_adtxtfl", "Pony")
        pony = Pony.objects.create(weight=42)

        new_state = self.apply_operations("test_adtxtfl", project_state, [
            migrations.AddField(
                "Pony",
                "text",
                models.TextField(default="some text"),
            ),
            migrations.AddField(
                "Pony",
                "empty",
                models.TextField(default=""),
            ),
            # If not properly quoted digits would be interpreted as an int.
            migrations.AddField(
                "Pony",
                "digits",
                models.TextField(default="42"),
            ),
            # Manual quoting is fragile and could trip on quotes. Refs #xyz.
            migrations.AddField(
                "Pony",
                "quotes",
                models.TextField(default='"\'"'),
            ),
        ])

        Pony = new_state.apps.get_model("test_adtxtfl", "Pony")
        pony = Pony.objects.get(pk=pony.pk)
        self.assertEqual(pony.text, "some text")
        self.assertEqual(pony.empty, "")
        self.assertEqual(pony.digits, "42")
        self.assertEqual(pony.quotes, '"\'"')

    def test_add_binaryfield(self):
        """
        Tests the AddField operation on BinaryField.
        """
        project_state = self.set_up_test_model("test_adbinfl")

        Pony = project_state.apps.get_model("test_adbinfl", "Pony")
        pony = Pony.objects.create(weight=42)

        new_state = self.apply_operations("test_adbinfl", project_state, [
            migrations.AddField(
                "Pony",
                "blob",
                models.BinaryField(default=b"some text"),
            ),
            migrations.AddField(
                "Pony",
                "empty",
                models.BinaryField(default=b""),
            ),
            # If not properly quoted digits would be interpreted as an int.
            migrations.AddField(
                "Pony",
                "digits",
                models.BinaryField(default=b"42"),
            ),
            # Manual quoting is fragile and could trip on quotes. Refs #xyz.
            migrations.AddField(
                "Pony",
                "quotes",
                models.BinaryField(default=b'"\'"'),
            ),
        ])

        Pony = new_state.apps.get_model("test_adbinfl", "Pony")
        pony = Pony.objects.get(pk=pony.pk)
        # SQLite returns buffer/memoryview, cast to bytes for checking.
        self.assertEqual(bytes(pony.blob), b"some text")
        self.assertEqual(bytes(pony.empty), b"")
        self.assertEqual(bytes(pony.digits), b"42")
        self.assertEqual(bytes(pony.quotes), b'"\'"')

    def test_column_name_quoting(self):
        """
        Column names that are SQL keywords shouldn't cause problems when used
        in migrations (#22168).
        """
        project_state = self.set_up_test_model("test_regr22168")
        operation = migrations.AddField(
            "Pony",
            "order",
            models.IntegerField(default=0),
        )
        new_state = project_state.clone()
        operation.state_forwards("test_regr22168", new_state)
        with connection.schema_editor() as editor:
            operation.database_forwards("test_regr22168", editor, project_state, new_state)
        self.assertColumnExists("test_regr22168_pony", "order")

    def test_add_field_preserve_default(self):
        """
        Tests the AddField operation's state alteration
        when preserve_default = False.
        """
        project_state = self.set_up_test_model("test_adflpd")
        # Test the state alteration
        operation = migrations.AddField(
            "Pony",
            "height",
            models.FloatField(null=True, default=4),
            preserve_default=False,
        )
        new_state = project_state.clone()
        operation.state_forwards("test_adflpd", new_state)
        self.assertEqual(len(new_state.models["test_adflpd", "pony"].fields), 4)
        field = new_state.models['test_adflpd', 'pony'].fields['height']
        self.assertEqual(field.default, models.NOT_PROVIDED)
        # Test the database alteration
        project_state.apps.get_model("test_adflpd", "pony").objects.create(
            weight=4,
        )
        self.assertColumnNotExists("test_adflpd_pony", "height")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_adflpd", editor, project_state, new_state)
        self.assertColumnExists("test_adflpd_pony", "height")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddField")
        self.assertEqual(definition[1], [])
        self.assertEqual(sorted(definition[2]), ["field", "model_name", "name", "preserve_default"])

    def test_add_field_m2m(self):
        """
        Tests the AddField operation with a ManyToManyField.
""" project_state = self.set_up_test_model("test_adflmm", second_model=True) # Test the state alteration operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) new_state = project_state.clone() operation.state_forwards("test_adflmm", new_state) self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4) # Test the database alteration self.assertTableNotExists("test_adflmm_pony_stables") with connection.schema_editor() as editor: operation.database_forwards("test_adflmm", editor, project_state, new_state) self.assertTableExists("test_adflmm_pony_stables") self.assertColumnNotExists("test_adflmm_pony", "stables") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_adflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.stables.create() self.assertEqual(p.stables.count(), 1) p.stables.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adflmm", editor, new_state, project_state) self.assertTableNotExists("test_adflmm_pony_stables") def test_alter_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertFalse(Pony._meta.get_field('stables').blank) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField( "Pony", "stables", models.ManyToManyField(to="Stable", related_name="ponies", blank=True) ) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertTrue(Pony._meta.get_field('stables').blank) def test_repoint_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True, third_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "places", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField("Pony", "places", models.ManyToManyField(to="Van", related_name="ponies")) ]) # Ensure the new field actually works Pony = project_state.apps.get_model("test_alflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.places.create() self.assertEqual(p.places.count(), 1) p.places.all().delete() def test_remove_field_m2m(self): project_state = self.set_up_test_model("test_rmflmm", second_model=True) project_state = self.apply_operations("test_rmflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) self.assertTableExists("test_rmflmm_pony_stables") with_field_state = project_state.clone() operations = [migrations.RemoveField("Pony", "stables")] project_state = self.apply_operations("test_rmflmm", project_state, operations=operations) self.assertTableNotExists("test_rmflmm_pony_stables") # And test reversal self.unapply_operations("test_rmflmm", with_field_state, operations=operations) self.assertTableExists("test_rmflmm_pony_stables") def test_remove_field_m2m_with_through(self): project_state = self.set_up_test_model("test_rmflmmwt", second_model=True) self.assertTableNotExists("test_rmflmmwt_ponystables") project_state = 
self.apply_operations("test_rmflmmwt", project_state, operations=[ migrations.CreateModel("PonyStables", fields=[ ("pony", models.ForeignKey('test_rmflmmwt.Pony', models.CASCADE)), ("stable", models.ForeignKey('test_rmflmmwt.Stable', models.CASCADE)), ]), migrations.AddField( "Pony", "stables", models.ManyToManyField("Stable", related_name="ponies", through='test_rmflmmwt.PonyStables') ) ]) self.assertTableExists("test_rmflmmwt_ponystables") operations = [migrations.RemoveField("Pony", "stables"), migrations.DeleteModel("PonyStables")] self.apply_operations("test_rmflmmwt", project_state, operations=operations) def test_remove_field(self): """ Tests the RemoveField operation. """ project_state = self.set_up_test_model("test_rmfl") # Test the state alteration operation = migrations.RemoveField("Pony", "pink") self.assertEqual(operation.describe(), "Remove field pink from Pony") self.assertEqual(operation.migration_name_fragment, 'remove_pony_pink') new_state = project_state.clone() operation.state_forwards("test_rmfl", new_state) self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2) # Test the database alteration self.assertColumnExists("test_rmfl_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_rmfl", editor, project_state, new_state) self.assertColumnNotExists("test_rmfl_pony", "pink") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rmfl", editor, new_state, project_state) self.assertColumnExists("test_rmfl_pony", "pink") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RemoveField") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': 'pink'}) def test_remove_fk(self): """ Tests the RemoveField operation on a foreign key. """ project_state = self.set_up_test_model("test_rfk", related_model=True) self.assertColumnExists("test_rfk_rider", "pony_id") operation = migrations.RemoveField("Rider", "pony") new_state = project_state.clone() operation.state_forwards("test_rfk", new_state) with connection.schema_editor() as editor: operation.database_forwards("test_rfk", editor, project_state, new_state) self.assertColumnNotExists("test_rfk_rider", "pony_id") with connection.schema_editor() as editor: operation.database_backwards("test_rfk", editor, new_state, project_state) self.assertColumnExists("test_rfk_rider", "pony_id") def test_alter_model_table(self): """ Tests the AlterModelTable operation. 
""" project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony_2") self.assertEqual(operation.describe(), "Rename table for Pony to test_almota_pony_2") self.assertEqual(operation.migration_name_fragment, 'alter_pony_table') new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony_2") # Test the database alteration self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableNotExists("test_almota_pony") self.assertTableExists("test_almota_pony_2") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelTable") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'table': "test_almota_pony_2"}) def test_alter_model_table_none(self): """ Tests the AlterModelTable operation if the table name is set to None. """ operation = migrations.AlterModelTable("Pony", None) self.assertEqual(operation.describe(), "Rename table for Pony to (default)") def test_alter_model_table_noop(self): """ Tests the AlterModelTable operation if the table name is not changed. """ project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony") new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony") # Test the database alteration self.assertTableExists("test_almota_pony") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableExists("test_almota_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") def test_alter_model_table_m2m(self): """ AlterModelTable should rename auto-generated M2M tables. """ app_label = "test_talflmltlm2m" pony_db_table = 'pony_foo' project_state = self.set_up_test_model(app_label, second_model=True, db_table=pony_db_table) # Add the M2M field first_state = project_state.clone() operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable")) operation.state_forwards(app_label, first_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, first_state) original_m2m_table = "%s_%s" % (pony_db_table, "stables") new_m2m_table = "%s_%s" % (app_label, "pony_stables") self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) # Rename the Pony db_table which should also rename the m2m table. 
second_state = first_state.clone() operation = migrations.AlterModelTable(name='pony', table=None) operation.state_forwards(app_label, second_state) atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_forwards(app_label, editor, first_state, second_state) self.assertTableExists(new_m2m_table) self.assertTableNotExists(original_m2m_table) # And test reversal with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_backwards(app_label, editor, second_state, first_state) self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) def test_alter_field(self): """ Tests the AlterField operation. """ project_state = self.set_up_test_model("test_alfl") # Test the state alteration operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) self.assertEqual(operation.describe(), "Alter field pink on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_pink') new_state = project_state.clone() operation.state_forwards("test_alfl", new_state) self.assertIs(project_state.models['test_alfl', 'pony'].fields['pink'].null, False) self.assertIs(new_state.models['test_alfl', 'pony'].fields['pink'].null, True) # Test the database alteration self.assertColumnNotNull("test_alfl_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_alfl", editor, project_state, new_state) self.assertColumnNull("test_alfl_pony", "pink") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alfl", editor, new_state, project_state) self.assertColumnNotNull("test_alfl_pony", "pink") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"]) def test_alter_field_pk(self): """ Tests the AlterField operation on primary keys (for things like PostgreSQL's SERIAL weirdness) """ project_state = self.set_up_test_model("test_alflpk") # Test the state alteration operation = migrations.AlterField("Pony", "id", models.IntegerField(primary_key=True)) new_state = project_state.clone() operation.state_forwards("test_alflpk", new_state) self.assertIsInstance( project_state.models['test_alflpk', 'pony'].fields['id'], models.AutoField, ) self.assertIsInstance( new_state.models['test_alflpk', 'pony'].fields['id'], models.IntegerField, ) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alflpk", editor, project_state, new_state) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflpk", editor, new_state, project_state) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_pk_fk(self): """ Tests the AlterField operation on primary keys changes any FKs pointing to it. 
""" project_state = self.set_up_test_model("test_alflpkfk", related_model=True) project_state = self.apply_operations('test_alflpkfk', project_state, [ migrations.CreateModel('Stable', fields=[ ('ponies', models.ManyToManyField('Pony')), ]), migrations.AddField( 'Pony', 'stables', models.ManyToManyField('Stable'), ), ]) # Test the state alteration operation = migrations.AlterField("Pony", "id", models.FloatField(primary_key=True)) new_state = project_state.clone() operation.state_forwards("test_alflpkfk", new_state) self.assertIsInstance( project_state.models['test_alflpkfk', 'pony'].fields['id'], models.AutoField, ) self.assertIsInstance( new_state.models['test_alflpkfk', 'pony'].fields['id'], models.FloatField, ) def assertIdTypeEqualsFkType(): with connection.cursor() as cursor: id_type, id_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_pony") if c.name == "id" ][0] fk_type, fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_rider") if c.name == "pony_id" ][0] m2m_fk_type, m2m_fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description( cursor, 'test_alflpkfk_pony_stables', ) if c.name == 'pony_id' ][0] remote_m2m_fk_type, remote_m2m_fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description( cursor, 'test_alflpkfk_stable_ponies', ) if c.name == 'pony_id' ][0] self.assertEqual(id_type, fk_type) self.assertEqual(id_type, m2m_fk_type) self.assertEqual(id_type, remote_m2m_fk_type) self.assertEqual(id_null, fk_null) self.assertEqual(id_null, m2m_fk_null) self.assertEqual(id_null, remote_m2m_fk_null) assertIdTypeEqualsFkType() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alflpkfk", editor, project_state, new_state) assertIdTypeEqualsFkType() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflpkfk", editor, new_state, project_state) assertIdTypeEqualsFkType() @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_reloads_state_on_fk_with_to_field_target_type_change(self): app_label = 'test_alflrsfkwtflttc' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('code', models.IntegerField(unique=True)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='code')), ]), ]) operation = migrations.AlterField( 'Rider', 'code', models.CharField(max_length=100, unique=True), ) self.apply_operations(app_label, project_state, operations=[operation]) id_type, id_null = [ (c.type_code, c.null_ok) for c in self.get_table_description('%s_rider' % app_label) if c.name == 'code' ][0] fk_type, fk_null = [ (c.type_code, c.null_ok) for c in self.get_table_description('%s_pony' % app_label) if c.name == 'rider_id' ][0] self.assertEqual(id_type, fk_type) self.assertEqual(id_null, fk_null) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_reloads_state_on_fk_with_to_field_related_name_target_type_change(self): app_label = 'test_alflrsfkwtflrnttc' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('code', models.PositiveIntegerField(unique=True)), ]), 
            migrations.CreateModel('Pony', fields=[
                ('id', models.AutoField(primary_key=True)),
                ('rider', models.ForeignKey(
                    '%s.Rider' % app_label,
                    models.CASCADE,
                    to_field='code',
                    related_name='+',
                )),
            ]),
        ])
        operation = migrations.AlterField(
            'Rider',
            'code',
            models.CharField(max_length=100, unique=True),
        )
        self.apply_operations(app_label, project_state, operations=[operation])

    def test_alter_field_reloads_state_on_fk_target_changes(self):
        """
        If AlterField doesn't reload state appropriately, the second AlterField
        crashes on MySQL due to not dropping the PonyRider.pony foreign key
        constraint before modifying the column.
        """
        app_label = 'alter_alter_field_reloads_state_on_fk_target_changes'
        project_state = self.apply_operations(app_label, ProjectState(), operations=[
            migrations.CreateModel('Rider', fields=[
                ('id', models.CharField(primary_key=True, max_length=100)),
            ]),
            migrations.CreateModel('Pony', fields=[
                ('id', models.CharField(primary_key=True, max_length=100)),
                ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)),
            ]),
            migrations.CreateModel('PonyRider', fields=[
                ('id', models.AutoField(primary_key=True)),
                ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)),
            ]),
        ])
        project_state = self.apply_operations(app_label, project_state, operations=[
            migrations.AlterField('Rider', 'id', models.CharField(primary_key=True, max_length=99)),
            migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)),
        ])

    def test_alter_field_reloads_state_on_fk_with_to_field_target_changes(self):
        """
        If AlterField doesn't reload state appropriately, the second AlterField
        crashes on MySQL due to not dropping the PonyRider.pony foreign key
        constraint before modifying the column.
        """
        app_label = 'alter_alter_field_reloads_state_on_fk_with_to_field_target_changes'
        project_state = self.apply_operations(app_label, ProjectState(), operations=[
            migrations.CreateModel('Rider', fields=[
                ('id', models.CharField(primary_key=True, max_length=100)),
                ('slug', models.CharField(unique=True, max_length=100)),
            ]),
            migrations.CreateModel('Pony', fields=[
                ('id', models.CharField(primary_key=True, max_length=100)),
                ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='slug')),
                ('slug', models.CharField(unique=True, max_length=100)),
            ]),
            migrations.CreateModel('PonyRider', fields=[
                ('id', models.AutoField(primary_key=True)),
                ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE, to_field='slug')),
            ]),
        ])
        project_state = self.apply_operations(app_label, project_state, operations=[
            migrations.AlterField('Rider', 'slug', models.CharField(unique=True, max_length=99)),
            migrations.AlterField('Pony', 'slug', models.CharField(unique=True, max_length=99)),
        ])

    def test_rename_field_reloads_state_on_fk_target_changes(self):
        """
        If RenameField doesn't reload state appropriately, the AlterField
        crashes on MySQL due to not dropping the PonyRider.pony foreign key
        constraint before modifying the column.
""" app_label = 'alter_rename_field_reloads_state_on_fk_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameField('Rider', 'id', 'id2'), migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)), ], atomic=connection.features.supports_atomic_references_rename) def test_rename_field(self): """ Tests the RenameField operation. """ project_state = self.set_up_test_model("test_rnfl", unique_together=True, index_together=True) # Test the state alteration operation = migrations.RenameField("Pony", "pink", "blue") self.assertEqual(operation.describe(), "Rename field pink on Pony to blue") self.assertEqual(operation.migration_name_fragment, 'rename_pink_pony_blue') new_state = project_state.clone() operation.state_forwards("test_rnfl", new_state) self.assertIn("blue", new_state.models["test_rnfl", "pony"].fields) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].fields) # Make sure the unique_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) # Make sure the index_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['index_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['index_together'][0]) # Test the database alteration self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") with connection.schema_editor() as editor: operation.database_forwards("test_rnfl", editor, project_state, new_state) self.assertColumnExists("test_rnfl_pony", "blue") self.assertColumnNotExists("test_rnfl_pony", "pink") # Ensure the unique constraint has been ported over with connection.cursor() as cursor: cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") with self.assertRaises(IntegrityError): with atomic(): cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_rnfl_pony") # Ensure the index constraint has been ported over self.assertIndexExists("test_rnfl_pony", ["weight", "blue"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rnfl", editor, new_state, project_state) self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") # Ensure the index constraint has been reset self.assertIndexExists("test_rnfl_pony", ["weight", "pink"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameField") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'old_name': "pink", 'new_name': "blue"}) def test_rename_missing_field(self): state = ProjectState() state.add_model(ModelState('app', 'model', [])) with self.assertRaisesMessage(FieldDoesNotExist, "app.model has no field 
named 'field'"): migrations.RenameField('model', 'field', 'new_field').state_forwards('app', state) def test_rename_referenced_field_state_forward(self): state = ProjectState() state.add_model(ModelState('app', 'Model', [ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField(unique=True)), ])) state.add_model(ModelState('app', 'OtherModel', [ ('id', models.AutoField(primary_key=True)), ('fk', models.ForeignKey('Model', models.CASCADE, to_field='field')), ('fo', models.ForeignObject('Model', models.CASCADE, from_fields=('fk',), to_fields=('field',))), ])) operation = migrations.RenameField('Model', 'field', 'renamed') new_state = state.clone() operation.state_forwards('app', new_state) self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].remote_field.field_name, 'renamed') self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].from_fields, ['self']) self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].to_fields, ('renamed',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].from_fields, ('fk',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].to_fields, ('renamed',)) operation = migrations.RenameField('OtherModel', 'fk', 'renamed_fk') new_state = state.clone() operation.state_forwards('app', new_state) self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].remote_field.field_name, 'renamed') self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].from_fields, ('self',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].to_fields, ('renamed',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].from_fields, ('renamed_fk',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].to_fields, ('renamed',)) def test_alter_unique_together(self): """ Tests the AlterUniqueTogether operation. 
""" project_state = self.set_up_test_model("test_alunto") # Test the state alteration operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")]) self.assertEqual(operation.describe(), "Alter unique_together for Pony (1 constraint(s))") self.assertEqual( operation.migration_name_fragment, 'alter_pony_unique_together', ) new_state = project_state.clone() operation.state_forwards("test_alunto", new_state) self.assertEqual(len(project_state.models["test_alunto", "pony"].options.get("unique_together", set())), 0) self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) # Make sure we can insert duplicate rows with connection.cursor() as cursor: cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alunto", editor, project_state, new_state) cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") with self.assertRaises(IntegrityError): with atomic(): cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alunto", editor, new_state, project_state) cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # Test flat unique_together operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight")) operation.state_forwards("test_alunto", new_state) self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterUniqueTogether") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'unique_together': {("pink", "weight")}}) def test_alter_unique_together_remove(self): operation = migrations.AlterUniqueTogether("Pony", None) self.assertEqual(operation.describe(), "Alter unique_together for Pony (0 constraint(s))") def test_add_index(self): """ Test the AddIndex operation. """ project_state = self.set_up_test_model("test_adin") msg = ( "Indexes passed to AddIndex operations require a name argument. " "<Index: fields='pink'> doesn't have one." 
        )
        with self.assertRaisesMessage(ValueError, msg):
            migrations.AddIndex("Pony", models.Index(fields=["pink"]))
        index = models.Index(fields=["pink"], name="test_adin_pony_pink_idx")
        operation = migrations.AddIndex("Pony", index)
        self.assertEqual(operation.describe(), "Create index test_adin_pony_pink_idx on field(s) pink of model Pony")
        self.assertEqual(
            operation.migration_name_fragment,
            'pony_test_adin_pony_pink_idx',
        )
        new_state = project_state.clone()
        operation.state_forwards("test_adin", new_state)
        # Test the database alteration
        self.assertEqual(len(new_state.models["test_adin", "pony"].options['indexes']), 1)
        self.assertIndexNotExists("test_adin_pony", ["pink"])
        with connection.schema_editor() as editor:
            operation.database_forwards("test_adin", editor, project_state, new_state)
        self.assertIndexExists("test_adin_pony", ["pink"])
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_adin", editor, new_state, project_state)
        self.assertIndexNotExists("test_adin_pony", ["pink"])
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddIndex")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'model_name': "Pony", 'index': index})

    def test_remove_index(self):
        """
        Test the RemoveIndex operation.
        """
        project_state = self.set_up_test_model("test_rmin", multicol_index=True)
        self.assertTableExists("test_rmin_pony")
        self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
        operation = migrations.RemoveIndex("Pony", "pony_test_idx")
        self.assertEqual(operation.describe(), "Remove index pony_test_idx from Pony")
        self.assertEqual(
            operation.migration_name_fragment,
            'remove_pony_pony_test_idx',
        )
        new_state = project_state.clone()
        operation.state_forwards("test_rmin", new_state)
        # Test the state alteration
        self.assertEqual(len(new_state.models["test_rmin", "pony"].options['indexes']), 0)
        self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
        # Test the database alteration
        with connection.schema_editor() as editor:
            operation.database_forwards("test_rmin", editor, project_state, new_state)
        self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_rmin", editor, new_state, project_state)
        self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "RemoveIndex")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'model_name': "Pony", 'name': "pony_test_idx"})
        # Also test a field dropped with index - sqlite remake issue
        operations = [
            migrations.RemoveIndex("Pony", "pony_test_idx"),
            migrations.RemoveField("Pony", "pink"),
        ]
        self.assertColumnExists("test_rmin_pony", "pink")
        self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
        # Test database alteration
        new_state = project_state.clone()
        self.apply_operations('test_rmin', new_state, operations=operations)
        self.assertColumnNotExists("test_rmin_pony", "pink")
        self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
        # And test reversal
        self.unapply_operations("test_rmin", project_state, operations=operations)
        self.assertIndexExists("test_rmin_pony", ["pink", "weight"])

    def test_add_index_state_forwards(self):
        project_state = self.set_up_test_model('test_adinsf')
        index = models.Index(fields=['pink'], name='test_adinsf_pony_pink_idx')
        old_model = project_state.apps.get_model('test_adinsf', 'Pony')
        new_state = project_state.clone()
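        # The assertIsNot below checks that state_forwards() marks the model as
        # changed, so the rendered model class is re-created rather than reused
        # from the old state.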
        operation = migrations.AddIndex('Pony', index)
        operation.state_forwards('test_adinsf', new_state)
        new_model = new_state.apps.get_model('test_adinsf', 'Pony')
        self.assertIsNot(old_model, new_model)

    def test_remove_index_state_forwards(self):
        project_state = self.set_up_test_model('test_rminsf')
        index = models.Index(fields=['pink'], name='test_rminsf_pony_pink_idx')
        migrations.AddIndex('Pony', index).state_forwards('test_rminsf', project_state)
        old_model = project_state.apps.get_model('test_rminsf', 'Pony')
        new_state = project_state.clone()
        operation = migrations.RemoveIndex('Pony', 'test_rminsf_pony_pink_idx')
        operation.state_forwards('test_rminsf', new_state)
        new_model = new_state.apps.get_model('test_rminsf', 'Pony')
        self.assertIsNot(old_model, new_model)

    def test_alter_field_with_index(self):
        """
        Test AlterField operation with an index to ensure indexes created via
        Meta.indexes don't get dropped with sqlite3 remake.
        """
        project_state = self.set_up_test_model("test_alflin", index=True)
        operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True))
        new_state = project_state.clone()
        operation.state_forwards("test_alflin", new_state)
        # Test the database alteration
        self.assertColumnNotNull("test_alflin_pony", "pink")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_alflin", editor, project_state, new_state)
        # Index hasn't been dropped
        self.assertIndexExists("test_alflin_pony", ["pink"])
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_alflin", editor, new_state, project_state)
        # Ensure the index is still there
        self.assertIndexExists("test_alflin_pony", ["pink"])

    def test_alter_index_together(self):
        """
        Tests the AlterIndexTogether operation.
        """
        project_state = self.set_up_test_model("test_alinto")
        # Test the state alteration
        operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")])
        self.assertEqual(operation.describe(), "Alter index_together for Pony (1 constraint(s))")
        self.assertEqual(
            operation.migration_name_fragment,
            'alter_pony_index_together',
        )
        new_state = project_state.clone()
        operation.state_forwards("test_alinto", new_state)
        self.assertEqual(len(project_state.models["test_alinto", "pony"].options.get("index_together", set())), 0)
        self.assertEqual(len(new_state.models["test_alinto", "pony"].options.get("index_together", set())), 1)
        # Make sure there's no matching index
        self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
        # Test the database alteration
        with connection.schema_editor() as editor:
            operation.database_forwards("test_alinto", editor, project_state, new_state)
        self.assertIndexExists("test_alinto_pony", ["pink", "weight"])
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_alinto", editor, new_state, project_state)
        self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AlterIndexTogether")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'name': "Pony", 'index_together': {("pink", "weight")}})

    def test_alter_index_together_remove(self):
        operation = migrations.AlterIndexTogether("Pony", None)
        self.assertEqual(operation.describe(), "Alter index_together for Pony (0 constraint(s))")

    @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields')
    def test_alter_index_together_remove_with_unique_together(self):
        app_label = 'test_alintoremove_wunto'
        table_name = '%s_pony' % app_label
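        # Regression-style check: removing index_together on fields that also
        # carry unique_together must drop only the index, not the unique
        # constraint that shares the same columns.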
        project_state = self.set_up_test_model(app_label, unique_together=True)
        self.assertUniqueConstraintExists(table_name, ['pink', 'weight'])
        # Add index together.
        new_state = project_state.clone()
        operation = migrations.AlterIndexTogether('Pony', [('pink', 'weight')])
        operation.state_forwards(app_label, new_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        self.assertIndexExists(table_name, ['pink', 'weight'])
        # Remove index together.
        project_state = new_state
        new_state = project_state.clone()
        operation = migrations.AlterIndexTogether('Pony', set())
        operation.state_forwards(app_label, new_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        self.assertIndexNotExists(table_name, ['pink', 'weight'])
        self.assertUniqueConstraintExists(table_name, ['pink', 'weight'])

    @skipUnlessDBFeature('supports_table_check_constraints')
    def test_add_constraint(self):
        project_state = self.set_up_test_model("test_addconstraint")
        gt_check = models.Q(pink__gt=2)
        gt_constraint = models.CheckConstraint(check=gt_check, name="test_add_constraint_pony_pink_gt_2")
        gt_operation = migrations.AddConstraint("Pony", gt_constraint)
        self.assertEqual(
            gt_operation.describe(), "Create constraint test_add_constraint_pony_pink_gt_2 on model Pony"
        )
        self.assertEqual(
            gt_operation.migration_name_fragment,
            'pony_test_add_constraint_pony_pink_gt_2',
        )
        # Test the state alteration
        new_state = project_state.clone()
        gt_operation.state_forwards("test_addconstraint", new_state)
        self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 1)
        Pony = new_state.apps.get_model("test_addconstraint", "Pony")
        self.assertEqual(len(Pony._meta.constraints), 1)
        # Test the database alteration
        with connection.schema_editor() as editor:
            gt_operation.database_forwards("test_addconstraint", editor, project_state, new_state)
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=1, weight=1.0)
        # Add another one.
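        # (The second constraint is applied on top of the same new_state, so the
        # state accumulates both constraints while project_state keeps none.)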
        lt_check = models.Q(pink__lt=100)
        lt_constraint = models.CheckConstraint(check=lt_check, name="test_add_constraint_pony_pink_lt_100")
        lt_operation = migrations.AddConstraint("Pony", lt_constraint)
        lt_operation.state_forwards("test_addconstraint", new_state)
        self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 2)
        Pony = new_state.apps.get_model("test_addconstraint", "Pony")
        self.assertEqual(len(Pony._meta.constraints), 2)
        with connection.schema_editor() as editor:
            lt_operation.database_forwards("test_addconstraint", editor, project_state, new_state)
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=100, weight=1.0)
        # Test reversal
        with connection.schema_editor() as editor:
            gt_operation.database_backwards("test_addconstraint", editor, new_state, project_state)
        Pony.objects.create(pink=1, weight=1.0)
        # Test deconstruction
        definition = gt_operation.deconstruct()
        self.assertEqual(definition[0], "AddConstraint")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'model_name': "Pony", 'constraint': gt_constraint})

    @skipUnlessDBFeature('supports_table_check_constraints')
    def test_add_constraint_percent_escaping(self):
        app_label = 'add_constraint_string_quoting'
        operations = [
            migrations.CreateModel(
                'Author',
                fields=[
                    ('id', models.AutoField(primary_key=True)),
                    ('name', models.CharField(max_length=100)),
                    ('rebate', models.CharField(max_length=100)),
                ],
            ),
        ]
        from_state = self.apply_operations(app_label, ProjectState(), operations)
        # "%" generated in startswith lookup should be escaped in a way that is
        # considered a leading wildcard.
        check = models.Q(name__startswith='Albert')
        constraint = models.CheckConstraint(check=check, name='name_constraint')
        operation = migrations.AddConstraint('Author', constraint)
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Author = to_state.apps.get_model(app_label, 'Author')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Author.objects.create(name='Artur')
        # A literal "%" should be escaped in a way that is not considered a
        # wildcard.
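        # (Unlike the startswith case above, the "%" here comes from user data
        # rather than from the lookup itself.)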
        check = models.Q(rebate__endswith='%')
        constraint = models.CheckConstraint(check=check, name='rebate_constraint')
        operation = migrations.AddConstraint('Author', constraint)
        from_state = to_state
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        Author = to_state.apps.get_model(app_label, 'Author')
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Author = to_state.apps.get_model(app_label, 'Author')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Author.objects.create(name='Albert', rebate='10$')
        author = Author.objects.create(name='Albert', rebate='10%')
        self.assertEqual(Author.objects.get(), author)

    @skipUnlessDBFeature('supports_table_check_constraints')
    def test_add_or_constraint(self):
        app_label = 'test_addorconstraint'
        constraint_name = 'add_constraint_or'
        from_state = self.set_up_test_model(app_label)
        check = models.Q(pink__gt=2, weight__gt=2) | models.Q(weight__lt=0)
        constraint = models.CheckConstraint(check=check, name=constraint_name)
        operation = migrations.AddConstraint('Pony', constraint)
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Pony = to_state.apps.get_model(app_label, 'Pony')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=2, weight=3.0)
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=3, weight=1.0)
        Pony.objects.bulk_create([
            Pony(pink=3, weight=-1.0),
            Pony(pink=1, weight=-1.0),
            Pony(pink=3, weight=3.0),
        ])

    @skipUnlessDBFeature('supports_table_check_constraints')
    def test_add_constraint_combinable(self):
        app_label = 'test_addconstraint_combinable'
        operations = [
            migrations.CreateModel(
                'Book',
                fields=[
                    ('id', models.AutoField(primary_key=True)),
                    ('read', models.PositiveIntegerField()),
                    ('unread', models.PositiveIntegerField()),
                ],
            ),
        ]
        from_state = self.apply_operations(app_label, ProjectState(), operations)
        constraint = models.CheckConstraint(
            check=models.Q(read=(100 - models.F('unread'))),
            name='test_addconstraint_combinable_sum_100',
        )
        operation = migrations.AddConstraint('Book', constraint)
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Book = to_state.apps.get_model(app_label, 'Book')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Book.objects.create(read=70, unread=10)
        Book.objects.create(read=70, unread=30)

    @skipUnlessDBFeature('supports_table_check_constraints')
    def test_remove_constraint(self):
        project_state = self.set_up_test_model("test_removeconstraint", constraints=[
            models.CheckConstraint(check=models.Q(pink__gt=2), name="test_remove_constraint_pony_pink_gt_2"),
            models.CheckConstraint(check=models.Q(pink__lt=100), name="test_remove_constraint_pony_pink_lt_100"),
        ])
        gt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_gt_2")
        self.assertEqual(
            gt_operation.describe(), "Remove constraint test_remove_constraint_pony_pink_gt_2 from model Pony"
        )
        self.assertEqual(
            gt_operation.migration_name_fragment,
            'remove_pony_test_remove_constraint_pony_pink_gt_2',
        )
        # Test state alteration
        new_state = project_state.clone()
        gt_operation.state_forwards("test_removeconstraint", new_state)
        self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 1)
        Pony = new_state.apps.get_model("test_removeconstraint", "Pony")
        self.assertEqual(len(Pony._meta.constraints), 1)
        # Test database alteration
        with connection.schema_editor() as editor:
            gt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state)
        Pony.objects.create(pink=1, weight=1.0).delete()
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=100, weight=1.0)
        # Remove the other one.
        lt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_lt_100")
        lt_operation.state_forwards("test_removeconstraint", new_state)
        self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 0)
        Pony = new_state.apps.get_model("test_removeconstraint", "Pony")
        self.assertEqual(len(Pony._meta.constraints), 0)
        with connection.schema_editor() as editor:
            lt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state)
        Pony.objects.create(pink=100, weight=1.0).delete()
        # Test reversal
        with connection.schema_editor() as editor:
            gt_operation.database_backwards("test_removeconstraint", editor, new_state, project_state)
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=1, weight=1.0)
        # Test deconstruction
        definition = gt_operation.deconstruct()
        self.assertEqual(definition[0], "RemoveConstraint")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'model_name': "Pony", 'name': "test_remove_constraint_pony_pink_gt_2"})

    def test_add_partial_unique_constraint(self):
        project_state = self.set_up_test_model('test_addpartialuniqueconstraint')
        partial_unique_constraint = models.UniqueConstraint(
            fields=['pink'],
            condition=models.Q(weight__gt=5),
            name='test_constraint_pony_pink_for_weight_gt_5_uniq',
        )
        operation = migrations.AddConstraint('Pony', partial_unique_constraint)
        self.assertEqual(
            operation.describe(),
            'Create constraint test_constraint_pony_pink_for_weight_gt_5_uniq '
            'on model Pony'
        )
        # Test the state alteration
        new_state = project_state.clone()
        operation.state_forwards('test_addpartialuniqueconstraint', new_state)
        self.assertEqual(len(new_state.models['test_addpartialuniqueconstraint', 'pony'].options['constraints']), 1)
        Pony = new_state.apps.get_model('test_addpartialuniqueconstraint', 'Pony')
        self.assertEqual(len(Pony._meta.constraints), 1)
        # Test the database alteration
        with connection.schema_editor() as editor:
            operation.database_forwards('test_addpartialuniqueconstraint', editor, project_state, new_state)
        # Test constraint works
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=6.0)
        if connection.features.supports_partial_indexes:
            with self.assertRaises(IntegrityError), transaction.atomic():
                Pony.objects.create(pink=1, weight=7.0)
        else:
            Pony.objects.create(pink=1, weight=7.0)
        # Test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards('test_addpartialuniqueconstraint', editor, new_state, project_state)
        # Test constraint doesn't work
        Pony.objects.create(pink=1, weight=7.0)
        # Test deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], 'AddConstraint')
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'model_name': 'Pony', 'constraint': partial_unique_constraint})

    def test_remove_partial_unique_constraint(self):
        project_state = self.set_up_test_model('test_removepartialuniqueconstraint', constraints=[
            models.UniqueConstraint(
                fields=['pink'],
                condition=models.Q(weight__gt=5),
                name='test_constraint_pony_pink_for_weight_gt_5_uniq',
            ),
        ])
        gt_operation = migrations.RemoveConstraint('Pony', 'test_constraint_pony_pink_for_weight_gt_5_uniq')
        self.assertEqual(
            gt_operation.describe(),
            'Remove constraint test_constraint_pony_pink_for_weight_gt_5_uniq from model Pony'
        )
        # Test state alteration
        new_state = project_state.clone()
        gt_operation.state_forwards('test_removepartialuniqueconstraint', new_state)
        self.assertEqual(len(new_state.models['test_removepartialuniqueconstraint', 'pony'].options['constraints']), 0)
        Pony = new_state.apps.get_model('test_removepartialuniqueconstraint', 'Pony')
        self.assertEqual(len(Pony._meta.constraints), 0)
        # Test database alteration
        with connection.schema_editor() as editor:
            gt_operation.database_forwards('test_removepartialuniqueconstraint', editor, project_state, new_state)
        # Test constraint doesn't work
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=6.0)
        Pony.objects.create(pink=1, weight=7.0).delete()
        # Test reversal
        with connection.schema_editor() as editor:
            gt_operation.database_backwards('test_removepartialuniqueconstraint', editor, new_state, project_state)
        # Test constraint works
        if connection.features.supports_partial_indexes:
            with self.assertRaises(IntegrityError), transaction.atomic():
                Pony.objects.create(pink=1, weight=7.0)
        else:
            Pony.objects.create(pink=1, weight=7.0)
        # Test deconstruction
        definition = gt_operation.deconstruct()
        self.assertEqual(definition[0], 'RemoveConstraint')
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {
            'model_name': 'Pony',
            'name': 'test_constraint_pony_pink_for_weight_gt_5_uniq',
        })

    def test_add_deferred_unique_constraint(self):
        app_label = 'test_adddeferred_uc'
        project_state = self.set_up_test_model(app_label)
        deferred_unique_constraint = models.UniqueConstraint(
            fields=['pink'],
            name='deferred_pink_constraint_add',
            deferrable=models.Deferrable.DEFERRED,
        )
        operation = migrations.AddConstraint('Pony', deferred_unique_constraint)
        self.assertEqual(
            operation.describe(),
            'Create constraint deferred_pink_constraint_add on model Pony',
        )
        # Add constraint.
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 1)
        Pony = new_state.apps.get_model(app_label, 'Pony')
        self.assertEqual(len(Pony._meta.constraints), 1)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        Pony.objects.create(pink=1, weight=4.0)
        if connection.features.supports_deferrable_unique_constraints:
            # Unique constraint is deferred.
            with transaction.atomic():
                obj = Pony.objects.create(pink=1, weight=4.0)
                obj.pink = 2
                obj.save()
            # Constraint behavior can be changed with SET CONSTRAINTS.
            with self.assertRaises(IntegrityError):
                with transaction.atomic(), connection.cursor() as cursor:
                    quoted_name = connection.ops.quote_name(deferred_unique_constraint.name)
                    cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name)
                    obj = Pony.objects.create(pink=1, weight=4.0)
                    obj.pink = 3
                    obj.save()
        else:
            Pony.objects.create(pink=1, weight=4.0)
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        # Constraint doesn't work.
        Pony.objects.create(pink=1, weight=4.0)
        # Deconstruction.
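        # (deconstruct() returns a (class_name, args, kwargs) triple -- the same
        # form the migration writer serializes into migration files.)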
        definition = operation.deconstruct()
        self.assertEqual(definition[0], 'AddConstraint')
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2],
            {'model_name': 'Pony', 'constraint': deferred_unique_constraint},
        )

    def test_remove_deferred_unique_constraint(self):
        app_label = 'test_removedeferred_uc'
        deferred_unique_constraint = models.UniqueConstraint(
            fields=['pink'],
            name='deferred_pink_constraint_rm',
            deferrable=models.Deferrable.DEFERRED,
        )
        project_state = self.set_up_test_model(app_label, constraints=[deferred_unique_constraint])
        operation = migrations.RemoveConstraint('Pony', deferred_unique_constraint.name)
        self.assertEqual(
            operation.describe(),
            'Remove constraint deferred_pink_constraint_rm from model Pony',
        )
        # Remove constraint.
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 0)
        Pony = new_state.apps.get_model(app_label, 'Pony')
        self.assertEqual(len(Pony._meta.constraints), 0)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        # Constraint doesn't work.
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=4.0).delete()
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        if connection.features.supports_deferrable_unique_constraints:
            # Unique constraint is deferred.
            with transaction.atomic():
                obj = Pony.objects.create(pink=1, weight=4.0)
                obj.pink = 2
                obj.save()
            # Constraint behavior can be changed with SET CONSTRAINTS.
            with self.assertRaises(IntegrityError):
                with transaction.atomic(), connection.cursor() as cursor:
                    quoted_name = connection.ops.quote_name(deferred_unique_constraint.name)
                    cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name)
                    obj = Pony.objects.create(pink=1, weight=4.0)
                    obj.pink = 3
                    obj.save()
        else:
            Pony.objects.create(pink=1, weight=4.0)
        # Deconstruction.
        definition = operation.deconstruct()
        self.assertEqual(definition[0], 'RemoveConstraint')
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {
            'model_name': 'Pony',
            'name': 'deferred_pink_constraint_rm',
        })

    def test_add_covering_unique_constraint(self):
        app_label = 'test_addcovering_uc'
        project_state = self.set_up_test_model(app_label)
        covering_unique_constraint = models.UniqueConstraint(
            fields=['pink'],
            name='covering_pink_constraint_add',
            include=['weight'],
        )
        operation = migrations.AddConstraint('Pony', covering_unique_constraint)
        self.assertEqual(
            operation.describe(),
            'Create constraint covering_pink_constraint_add on model Pony',
        )
        # Add constraint.
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 1)
        Pony = new_state.apps.get_model(app_label, 'Pony')
        self.assertEqual(len(Pony._meta.constraints), 1)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        Pony.objects.create(pink=1, weight=4.0)
        if connection.features.supports_covering_indexes:
            with self.assertRaises(IntegrityError):
                Pony.objects.create(pink=1, weight=4.0)
        else:
            Pony.objects.create(pink=1, weight=4.0)
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        # Constraint doesn't work.
        Pony.objects.create(pink=1, weight=4.0)
        # Deconstruction.
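        # (For the covering variant, the whole UniqueConstraint object --
        # including its include=['weight'] option -- is expected back in kwargs.)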
        definition = operation.deconstruct()
        self.assertEqual(definition[0], 'AddConstraint')
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2],
            {'model_name': 'Pony', 'constraint': covering_unique_constraint},
        )

    def test_remove_covering_unique_constraint(self):
        app_label = 'test_removecovering_uc'
        covering_unique_constraint = models.UniqueConstraint(
            fields=['pink'],
            name='covering_pink_constraint_rm',
            include=['weight'],
        )
        project_state = self.set_up_test_model(app_label, constraints=[covering_unique_constraint])
        operation = migrations.RemoveConstraint('Pony', covering_unique_constraint.name)
        self.assertEqual(
            operation.describe(),
            'Remove constraint covering_pink_constraint_rm from model Pony',
        )
        # Remove constraint.
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 0)
        Pony = new_state.apps.get_model(app_label, 'Pony')
        self.assertEqual(len(Pony._meta.constraints), 0)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        # Constraint doesn't work.
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=4.0).delete()
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        if connection.features.supports_covering_indexes:
            with self.assertRaises(IntegrityError):
                Pony.objects.create(pink=1, weight=4.0)
        else:
            Pony.objects.create(pink=1, weight=4.0)
        # Deconstruction.
        definition = operation.deconstruct()
        self.assertEqual(definition[0], 'RemoveConstraint')
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {
            'model_name': 'Pony',
            'name': 'covering_pink_constraint_rm',
        })

    def test_alter_model_options(self):
        """
        Tests the AlterModelOptions operation.
""" project_state = self.set_up_test_model("test_almoop") # Test the state alteration (no DB alteration to test) operation = migrations.AlterModelOptions("Pony", {"permissions": [("can_groom", "Can groom")]}) self.assertEqual(operation.describe(), "Change Meta options on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_options') new_state = project_state.clone() operation.state_forwards("test_almoop", new_state) self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 0) self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 1) self.assertEqual(new_state.models["test_almoop", "pony"].options["permissions"][0][0], "can_groom") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelOptions") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'options': {"permissions": [("can_groom", "Can groom")]}}) def test_alter_model_options_emptying(self): """ The AlterModelOptions operation removes keys from the dict (#23121) """ project_state = self.set_up_test_model("test_almoop", options=True) # Test the state alteration (no DB alteration to test) operation = migrations.AlterModelOptions("Pony", {}) self.assertEqual(operation.describe(), "Change Meta options on Pony") new_state = project_state.clone() operation.state_forwards("test_almoop", new_state) self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 1) self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 0) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelOptions") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'options': {}}) def test_alter_order_with_respect_to(self): """ Tests the AlterOrderWithRespectTo operation. 
""" project_state = self.set_up_test_model("test_alorwrtto", related_model=True) # Test the state alteration operation = migrations.AlterOrderWithRespectTo("Rider", "pony") self.assertEqual(operation.describe(), "Set order_with_respect_to on Rider to pony") self.assertEqual( operation.migration_name_fragment, 'alter_rider_order_with_respect_to', ) new_state = project_state.clone() operation.state_forwards("test_alorwrtto", new_state) self.assertIsNone( project_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None) ) self.assertEqual( new_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None), "pony" ) # Make sure there's no matching index self.assertColumnNotExists("test_alorwrtto_rider", "_order") # Create some rows before alteration rendered_state = project_state.apps pony = rendered_state.get_model("test_alorwrtto", "Pony").objects.create(weight=50) rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=1) rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=2) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alorwrtto", editor, project_state, new_state) self.assertColumnExists("test_alorwrtto_rider", "_order") # Check for correct value in rows updated_riders = new_state.apps.get_model("test_alorwrtto", "Rider").objects.all() self.assertEqual(updated_riders[0]._order, 0) self.assertEqual(updated_riders[1]._order, 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alorwrtto", editor, new_state, project_state) self.assertColumnNotExists("test_alorwrtto_rider", "_order") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterOrderWithRespectTo") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Rider", 'order_with_respect_to': "pony"}) def test_alter_model_managers(self): """ The managers on a model are set. """ project_state = self.set_up_test_model("test_almoma") # Test the state alteration operation = migrations.AlterModelManagers( "Pony", managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Change managers on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_managers') managers = project_state.models["test_almoma", "pony"].managers self.assertEqual(managers, []) new_state = project_state.clone() operation.state_forwards("test_almoma", new_state) self.assertIn(("test_almoma", "pony"), new_state.models) managers = new_state.models["test_almoma", "pony"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) rendered_state = new_state.apps model = rendered_state.get_model('test_almoma', 'pony') self.assertIsInstance(model.food_qs, models.Manager) self.assertIsInstance(model.food_mgr, FoodManager) self.assertIsInstance(model.food_mgr_kwargs, FoodManager) def test_alter_model_managers_emptying(self): """ The managers on a model are set. 
""" project_state = self.set_up_test_model("test_almomae", manager_model=True) # Test the state alteration operation = migrations.AlterModelManagers("Food", managers=[]) self.assertEqual(operation.describe(), "Change managers on Food") self.assertIn(("test_almomae", "food"), project_state.models) managers = project_state.models["test_almomae", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) new_state = project_state.clone() operation.state_forwards("test_almomae", new_state) managers = new_state.models["test_almomae", "food"].managers self.assertEqual(managers, []) def test_alter_fk(self): """ Creating and then altering an FK works correctly and deals with the pending SQL (#23091) """ project_state = self.set_up_test_model("test_alfk") # Test adding and then altering the FK in one go create_operation = migrations.CreateModel( name="Rider", fields=[ ("id", models.AutoField(primary_key=True)), ("pony", models.ForeignKey("Pony", models.CASCADE)), ], ) create_state = project_state.clone() create_operation.state_forwards("test_alfk", create_state) alter_operation = migrations.AlterField( model_name='Rider', name='pony', field=models.ForeignKey("Pony", models.CASCADE, editable=False), ) alter_state = create_state.clone() alter_operation.state_forwards("test_alfk", alter_state) with connection.schema_editor() as editor: create_operation.database_forwards("test_alfk", editor, project_state, create_state) alter_operation.database_forwards("test_alfk", editor, create_state, alter_state) def test_alter_fk_non_fk(self): """ Altering an FK to a non-FK works (#23244) """ # Test the state alteration operation = migrations.AlterField( model_name="Rider", name="pony", field=models.FloatField(), ) project_state, new_state = self.make_test_state("test_afknfk", operation, related_model=True) # Test the database alteration self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") with connection.schema_editor() as editor: operation.database_forwards("test_afknfk", editor, project_state, new_state) self.assertColumnExists("test_afknfk_rider", "pony") self.assertColumnNotExists("test_afknfk_rider", "pony_id") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_afknfk", editor, new_state, project_state) self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") def test_run_sql(self): """ Tests the RunSQL operation. 
""" project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( # Use a multi-line string with a comment to test splitting on SQLite and MySQL respectively "CREATE TABLE i_love_ponies (id int, special_thing varchar(15));\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'i love ponies'); -- this is magic!\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (2, 'i love django');\n" "UPDATE i_love_ponies SET special_thing = 'Ponies' WHERE special_thing LIKE '%%ponies';" "UPDATE i_love_ponies SET special_thing = 'Django' WHERE special_thing LIKE '%django';", # Run delete queries to test for parameter substitution failure # reported in #23426 "DELETE FROM i_love_ponies WHERE special_thing LIKE '%Django%';" "DELETE FROM i_love_ponies WHERE special_thing LIKE '%%Ponies%%';" "DROP TABLE i_love_ponies", state_operations=[migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])], ) self.assertEqual(operation.describe(), "Raw SQL operation") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_runsql", new_state) self.assertEqual(len(new_state.models["test_runsql", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test SQL collection with connection.schema_editor(collect_sql=True) as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%ponies';", "\n".join(editor.collected_sql)) operation.database_backwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%Ponies%%';", "\n".join(editor.collected_sql)) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 2) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Django'") self.assertEqual(cursor.fetchall()[0][0], 1) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Ponies'") self.assertEqual(cursor.fetchall()[0][0], 1) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunSQL") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["reverse_sql", "sql", "state_operations"]) # And elidable reduction self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunSQL('SELECT 1 FROM void;', elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_sql_params(self): """ #23426 - RunSQL should accept parameters. 
""" project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( ["CREATE TABLE i_love_ponies (id int, special_thing varchar(15));"], ["DROP TABLE i_love_ponies"], ) param_operation = migrations.RunSQL( # forwards ( "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'Django');", ["INSERT INTO i_love_ponies (id, special_thing) VALUES (2, %s);", ['Ponies']], ("INSERT INTO i_love_ponies (id, special_thing) VALUES (%s, %s);", (3, 'Python',)), ), # backwards [ "DELETE FROM i_love_ponies WHERE special_thing = 'Django';", ["DELETE FROM i_love_ponies WHERE special_thing = 'Ponies';", None], ("DELETE FROM i_love_ponies WHERE id = %s OR special_thing = %s;", [3, 'Python']), ] ) # Make sure there's no table self.assertTableNotExists("i_love_ponies") new_state = project_state.clone() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) # Test parameter passing with connection.schema_editor() as editor: param_operation.database_forwards("test_runsql", editor, project_state, new_state) # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 3) with connection.schema_editor() as editor: param_operation.database_backwards("test_runsql", editor, new_state, project_state) with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") def test_run_sql_params_invalid(self): """ #23426 - RunSQL should fail when a list of statements with an incorrect number of tuples is given. """ project_state = self.set_up_test_model("test_runsql") new_state = project_state.clone() operation = migrations.RunSQL( # forwards [ ["INSERT INTO foo (bar) VALUES ('buz');"] ], # backwards ( ("DELETE FROM foo WHERE bar = 'buz';", 'invalid', 'parameter count'), ), ) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 1"): operation.database_forwards("test_runsql", editor, project_state, new_state) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 3"): operation.database_backwards("test_runsql", editor, new_state, project_state) def test_run_sql_noop(self): """ #24098 - Tests no-op RunSQL operations. 
""" operation = migrations.RunSQL(migrations.RunSQL.noop, migrations.RunSQL.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, None, None) operation.database_backwards("test_runsql", editor, None, None) def test_run_python(self): """ Tests the RunPython operation """ project_state = self.set_up_test_model("test_runpython", mti_model=True) # Create the operation def inner_method(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.create(pink=1, weight=3.55) Pony.objects.create(weight=5) def inner_method_reverse(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.filter(pink=1, weight=3.55).delete() Pony.objects.filter(weight=5).delete() operation = migrations.RunPython(inner_method, reverse_code=inner_method_reverse) self.assertEqual(operation.describe(), "Raw Python operation") # Test the state alteration does nothing new_state = project_state.clone() operation.state_forwards("test_runpython", new_state) self.assertEqual(new_state, project_state) # Test the database alteration self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) # Now test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) # Now test we can't use a string with self.assertRaisesMessage(ValueError, 'RunPython must be supplied with a callable'): migrations.RunPython("print 'ahahaha'") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code", "reverse_code"]) # Also test reversal fails, with an operation identical to above but without reverse_code set no_reverse_operation = migrations.RunPython(inner_method) self.assertFalse(no_reverse_operation.reversible) with connection.schema_editor() as editor: no_reverse_operation.database_forwards("test_runpython", editor, project_state, new_state) with self.assertRaises(NotImplementedError): no_reverse_operation.database_backwards("test_runpython", editor, new_state, project_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) def create_ponies(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") pony1 = Pony.objects.create(pink=1, weight=3.55) self.assertIsNot(pony1.pk, None) pony2 = Pony.objects.create(weight=5) self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = migrations.RunPython(create_ponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 4) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code"]) def create_shetlandponies(models, schema_editor): ShetlandPony = models.get_model("test_runpython", "ShetlandPony") pony1 = ShetlandPony.objects.create(weight=4.0) self.assertIsNot(pony1.pk, 
None) pony2 = ShetlandPony.objects.create(weight=5.0) self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = migrations.RunPython(create_shetlandponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 6) self.assertEqual(project_state.apps.get_model("test_runpython", "ShetlandPony").objects.count(), 2) # And elidable reduction self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunPython(inner_method, elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_python_atomic(self): """ Tests the RunPython operation correctly handles the "atomic" keyword """ project_state = self.set_up_test_model("test_runpythonatomic", mti_model=True) def inner_method(models, schema_editor): Pony = models.get_model("test_runpythonatomic", "Pony") Pony.objects.create(pink=1, weight=3.55) raise ValueError("Adrian hates ponies.") # Verify atomicity when applying. atomic_migration = Migration("test", "test_runpythonatomic") atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method)] non_atomic_migration = Migration("test", "test_runpythonatomic") non_atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method, atomic=False)] # If we're a fully-transactional database, both versions should rollback if connection.features.can_rollback_ddl: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) # Otherwise, the non-atomic operation should leave a row there else: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1) # Reset object count to zero and verify atomicity when unapplying. project_state.apps.get_model("test_runpythonatomic", "Pony").objects.all().delete() # On a fully-transactional database, both versions rollback. 
if connection.features.can_rollback_ddl: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) # Otherwise, the non-atomic operation leaves a row there. else: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1) # Verify deconstruction. definition = non_atomic_migration.operations[0].deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["atomic", "code", "reverse_code"]) def test_run_python_related_assignment(self): """ #24282 - Model changes to a FK reverse side update the model on the FK side as well. """ def inner_method(models, schema_editor): Author = models.get_model("test_authors", "Author") Book = models.get_model("test_books", "Book") author = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author) create_author = migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey("test_authors.Author", models.CASCADE)) ], options={}, ) add_hometown = migrations.AddField( "Author", "hometown", models.CharField(max_length=100), ) create_old_man = migrations.RunPython(inner_method, inner_method) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_authors", new_state) create_author.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_books", new_state) create_book.database_forwards("test_books", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: add_hometown.state_forwards("test_authors", new_state) add_hometown.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_old_man.state_forwards("test_books", new_state) create_old_man.database_forwards("test_books", editor, project_state, new_state) def test_model_with_bigautofield(self): """ A model with BigAutoField can be created. 
""" def create_data(models, schema_editor): Author = models.get_model("test_author", "Author") Book = models.get_model("test_book", "Book") author1 = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author1) Book.objects.create(id=2 ** 33, title="A farewell to arms", author=author1) author2 = Author.objects.create(id=2 ** 33, name="Remarque") Book.objects.create(title="All quiet on the western front", author=author2) Book.objects.create(title="Arc de Triomphe", author=author2) create_author = migrations.CreateModel( "Author", [ ("id", models.BigAutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.BigAutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey(to="test_author.Author", on_delete=models.CASCADE)) ], options={}, ) fill_data = migrations.RunPython(create_data) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_author", new_state) create_author.database_forwards("test_author", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_book", new_state) create_book.database_forwards("test_book", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_data.state_forwards("fill_data", new_state) fill_data.database_forwards("fill_data", editor, project_state, new_state) def _test_autofield_foreignfield_growth(self, source_field, target_field, target_value): """ A field may be migrated in the following ways: - AutoField to BigAutoField - SmallAutoField to AutoField - SmallAutoField to BigAutoField """ def create_initial_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog = Blog.objects.create(name="web development done right") Article.objects.create(name="Frameworks", blog=blog) Article.objects.create(name="Programming Languages", blog=blog) def create_big_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog2 = Blog.objects.create(name="Frameworks", id=target_value) Article.objects.create(name="Django", blog=blog2) Article.objects.create(id=target_value, name="Django2", blog=blog2) create_blog = migrations.CreateModel( "Blog", [ ("id", source_field(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_article = migrations.CreateModel( "Article", [ ("id", source_field(primary_key=True)), ("blog", models.ForeignKey(to="test_blog.Blog", on_delete=models.CASCADE)), ("name", models.CharField(max_length=100)), ("data", models.TextField(default="")), ], options={}, ) fill_initial_data = migrations.RunPython(create_initial_data, create_initial_data) fill_big_data = migrations.RunPython(create_big_data, create_big_data) grow_article_id = migrations.AlterField('Article', 'id', target_field(primary_key=True)) grow_blog_id = migrations.AlterField('Blog', 'id', target_field(primary_key=True)) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_blog.state_forwards("test_blog", new_state) create_blog.database_forwards("test_blog", editor, project_state, new_state) project_state = 
new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_article.state_forwards("test_article", new_state) create_article.database_forwards("test_article", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_initial_data.state_forwards("fill_initial_data", new_state) fill_initial_data.database_forwards("fill_initial_data", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_article_id.state_forwards("test_article", new_state) grow_article_id.database_forwards("test_article", editor, project_state, new_state) state = new_state.clone() article = state.apps.get_model("test_article.Article") self.assertIsInstance(article._meta.pk, target_field) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_blog_id.state_forwards("test_blog", new_state) grow_blog_id.database_forwards("test_blog", editor, project_state, new_state) state = new_state.clone() blog = state.apps.get_model("test_blog.Blog") self.assertIsInstance(blog._meta.pk, target_field) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_big_data.state_forwards("fill_big_data", new_state) fill_big_data.database_forwards("fill_big_data", editor, project_state, new_state) def test_autofield__bigautofield_foreignfield_growth(self): """A field may be migrated from AutoField to BigAutoField.""" self._test_autofield_foreignfield_growth( models.AutoField, models.BigAutoField, 2 ** 33, ) def test_smallfield_autofield_foreignfield_growth(self): """A field may be migrated from SmallAutoField to AutoField.""" self._test_autofield_foreignfield_growth( models.SmallAutoField, models.AutoField, 2 ** 22, ) def test_smallfield_bigautofield_foreignfield_growth(self): """A field may be migrated from SmallAutoField to BigAutoField.""" self._test_autofield_foreignfield_growth( models.SmallAutoField, models.BigAutoField, 2 ** 33, ) def test_run_python_noop(self): """ #24098 - Tests no-op RunPython operations. """ project_state = ProjectState() new_state = project_state.clone() operation = migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) operation.database_backwards("test_runpython", editor, new_state, project_state) def test_separate_database_and_state(self): """ Tests the SeparateDatabaseAndState operation. 
""" project_state = self.set_up_test_model("test_separatedatabaseandstate") # Create the operation database_operation = migrations.RunSQL( "CREATE TABLE i_love_ponies (id int, special_thing int);", "DROP TABLE i_love_ponies;" ) state_operation = migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))]) operation = migrations.SeparateDatabaseAndState( state_operations=[state_operation], database_operations=[database_operation] ) self.assertEqual(operation.describe(), "Custom state/database change combination") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_separatedatabaseandstate", new_state) self.assertEqual(len(new_state.models["test_separatedatabaseandstate", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_separatedatabaseandstate", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_separatedatabaseandstate", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "SeparateDatabaseAndState") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["database_operations", "state_operations"]) def test_separate_database_and_state2(self): """ A complex SeparateDatabaseAndState operation: Multiple operations both for state and database. Verify the state dependencies within each list and that state ops don't affect the database. """ app_label = "test_separatedatabaseandstate2" project_state = self.set_up_test_model(app_label) # Create the operation database_operations = [ migrations.CreateModel( "ILovePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveponies"}, ), migrations.CreateModel( "ILoveMorePonies", # We use IntegerField and not AutoField because # the model is going to be deleted immediately # and with an AutoField this fails on Oracle [("id", models.IntegerField(primary_key=True))], options={"db_table": "ilovemoreponies"}, ), migrations.DeleteModel("ILoveMorePonies"), migrations.CreateModel( "ILoveEvenMorePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveevenmoreponies"}, ), ] state_operations = [ migrations.CreateModel( "SomethingElse", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingelse"}, ), migrations.DeleteModel("SomethingElse"), migrations.CreateModel( "SomethingCompletelyDifferent", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingcompletelydifferent"}, ), ] operation = migrations.SeparateDatabaseAndState( state_operations=state_operations, database_operations=database_operations, ) # Test the state alteration new_state = project_state.clone() operation.state_forwards(app_label, new_state) def assertModelsAndTables(after_db): # Tables and models exist, or don't, as they should: self.assertNotIn((app_label, "somethingelse"), new_state.models) self.assertEqual(len(new_state.models[app_label, "somethingcompletelydifferent"].fields), 1) self.assertNotIn((app_label, "iloveponiesonies"), new_state.models) self.assertNotIn((app_label, "ilovemoreponies"), new_state.models) self.assertNotIn((app_label, "iloveevenmoreponies"), new_state.models) 
self.assertTableNotExists("somethingelse") self.assertTableNotExists("somethingcompletelydifferent") self.assertTableNotExists("ilovemoreponies") if after_db: self.assertTableExists("iloveponies") self.assertTableExists("iloveevenmoreponies") else: self.assertTableNotExists("iloveponies") self.assertTableNotExists("iloveevenmoreponies") assertModelsAndTables(after_db=False) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) assertModelsAndTables(after_db=True) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) assertModelsAndTables(after_db=False) class SwappableOperationTests(OperationTestBase): """ Key operations ignore swappable models (we don't want to replicate all of them here, as the functionality is in a common base class anyway) """ available_apps = ['migrations'] @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_create_ignore_swapped(self): """ The CreateTable operation ignores swapped models. """ operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], options={ "swappable": "TEST_SWAP_MODEL", }, ) # Test the state alteration (it should still be there!) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crigsw", new_state) self.assertEqual(new_state.models["test_crigsw", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crigsw", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crigsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crigsw", editor, project_state, new_state) self.assertTableNotExists("test_crigsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crigsw", editor, new_state, project_state) self.assertTableNotExists("test_crigsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_delete_ignore_swapped(self): """ Tests the DeleteModel operation ignores swapped models. """ operation = migrations.DeleteModel("Pony") project_state, new_state = self.make_test_state("test_dligsw", operation) # Test the database alteration self.assertTableNotExists("test_dligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dligsw", editor, project_state, new_state) self.assertTableNotExists("test_dligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dligsw", editor, new_state, project_state) self.assertTableNotExists("test_dligsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_add_field_ignore_swapped(self): """ Tests the AddField operation. 
""" # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=5), ) project_state, new_state = self.make_test_state("test_adfligsw", operation) # Test the database alteration self.assertTableNotExists("test_adfligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_adfligsw", editor, project_state, new_state) self.assertTableNotExists("test_adfligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adfligsw", editor, new_state, project_state) self.assertTableNotExists("test_adfligsw_pony") @override_settings(TEST_SWAP_MODEL='migrations.SomeFakeModel') def test_indexes_ignore_swapped(self): """ Add/RemoveIndex operations ignore swapped models. """ operation = migrations.AddIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state('test_adinigsw', operation) with connection.schema_editor() as editor: # No database queries should be run for swapped models operation.database_forwards('test_adinigsw', editor, project_state, new_state) operation.database_backwards('test_adinigsw', editor, new_state, project_state) operation = migrations.RemoveIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state("test_rminigsw", operation) with connection.schema_editor() as editor: operation.database_forwards('test_rminigsw', editor, project_state, new_state) operation.database_backwards('test_rminigsw', editor, new_state, project_state) class TestCreateModel(SimpleTestCase): def test_references_model_mixin(self): migrations.CreateModel( 'name', fields=[], bases=(Mixin, models.Model), ).references_model('other_model', 'migrations') class FieldOperationTests(SimpleTestCase): def test_references_model(self): operation = FieldOperation('MoDel', 'field', models.ForeignKey('Other', models.CASCADE)) # Model name match. self.assertIs(operation.references_model('mOdEl', 'migrations'), True) # Referenced field. self.assertIs(operation.references_model('oTher', 'migrations'), True) # Doesn't reference. 
self.assertIs(operation.references_model('Whatever', 'migrations'), False) def test_references_field_by_name(self): operation = FieldOperation('MoDel', 'field', models.BooleanField(default=False)) self.assertIs(operation.references_field('model', 'field', 'migrations'), True) def test_references_field_by_remote_field_model(self): operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE)) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False) def test_references_field_by_from_fields(self): operation = FieldOperation( 'Model', 'field', models.fields.related.ForeignObject('Other', models.CASCADE, ['from'], ['to']) ) self.assertIs(operation.references_field('Model', 'from', 'migrations'), True) self.assertIs(operation.references_field('Model', 'to', 'migrations'), False) self.assertIs(operation.references_field('Other', 'from', 'migrations'), False) self.assertIs(operation.references_field('Model', 'to', 'migrations'), False) def test_references_field_by_to_fields(self): operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE, to_field='field')) self.assertIs(operation.references_field('Other', 'field', 'migrations'), True) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), False) self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False) def test_references_field_by_through(self): operation = FieldOperation('Model', 'field', models.ManyToManyField('Other', through='Through')) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Through', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False) def test_reference_field_by_through_fields(self): operation = FieldOperation( 'Model', 'field', models.ManyToManyField('Other', through='Through', through_fields=('first', 'second')) ) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Through', 'whatever', 'migrations'), False) self.assertIs(operation.references_field('Through', 'first', 'migrations'), True) self.assertIs(operation.references_field('Through', 'second', 'migrations'), True)
00acb90c7b077ad09069ade6eff4f61443cef26ed701d385bef317e5dc3a4b58
from unittest import mock

from django.contrib.postgres.indexes import (
    BloomIndex, BrinIndex, BTreeIndex, GinIndex, GistIndex, HashIndex,
    SpGistIndex,
)
from django.db import NotSupportedError, connection
from django.db.models import CharField, Q
from django.db.models.functions import Length
from django.test import skipUnlessDBFeature
from django.test.utils import register_lookup

from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import CharFieldModel, IntegerArrayModel, Scene


class IndexTestMixin:

    def test_name_auto_generation(self):
        index = self.index_class(fields=['field'])
        index.set_name_with_model(CharFieldModel)
        self.assertRegex(index.name, r'postgres_te_field_[0-9a-f]{6}_%s' % self.index_class.suffix)

    def test_deconstruction_no_customization(self):
        index = self.index_class(fields=['title'], name='test_title_%s' % self.index_class.suffix)
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.%s' % self.index_class.__name__)
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_%s' % self.index_class.suffix})


class BloomIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    index_class = BloomIndex

    def test_suffix(self):
        self.assertEqual(BloomIndex.suffix, 'bloom')

    def test_deconstruction(self):
        index = BloomIndex(fields=['title'], name='test_bloom', length=80, columns=[4])
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.BloomIndex')
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {
            'fields': ['title'],
            'name': 'test_bloom',
            'length': 80,
            'columns': [4],
        })

    def test_invalid_fields(self):
        msg = 'Bloom indexes support a maximum of 32 fields.'
        with self.assertRaisesMessage(ValueError, msg):
            BloomIndex(fields=['title'] * 33, name='test_bloom')

    def test_invalid_columns(self):
        msg = 'BloomIndex.columns must be a list or tuple.'
        with self.assertRaisesMessage(ValueError, msg):
            BloomIndex(fields=['title'], name='test_bloom', columns='x')
        msg = 'BloomIndex.columns cannot have more values than fields.'
        with self.assertRaisesMessage(ValueError, msg):
            BloomIndex(fields=['title'], name='test_bloom', columns=[4, 3])

    def test_invalid_columns_value(self):
        msg = 'BloomIndex.columns must contain integers from 1 to 4095.'
        for length in (0, 4096):
            with self.subTest(length), self.assertRaisesMessage(ValueError, msg):
                BloomIndex(fields=['title'], name='test_bloom', columns=[length])

    def test_invalid_length(self):
        msg = 'BloomIndex.length must be None or an integer from 1 to 4096.'
        for length in (0, 4097):
            with self.subTest(length), self.assertRaisesMessage(ValueError, msg):
                BloomIndex(fields=['title'], name='test_bloom', length=length)


class BrinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    index_class = BrinIndex

    def test_suffix(self):
        self.assertEqual(BrinIndex.suffix, 'brin')

    def test_deconstruction(self):
        index = BrinIndex(fields=['title'], name='test_title_brin', autosummarize=True, pages_per_range=16)
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.BrinIndex')
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {
            'fields': ['title'],
            'name': 'test_title_brin',
            'autosummarize': True,
            'pages_per_range': 16,
        })

    def test_invalid_pages_per_range(self):
        with self.assertRaisesMessage(ValueError, 'pages_per_range must be None or a positive integer'):
            BrinIndex(fields=['title'], name='test_title_brin', pages_per_range=0)


class BTreeIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    index_class = BTreeIndex

    def test_suffix(self):
        self.assertEqual(BTreeIndex.suffix, 'btree')

    def test_deconstruction(self):
        index = BTreeIndex(fields=['title'], name='test_title_btree', fillfactor=80)
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.BTreeIndex')
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_btree', 'fillfactor': 80})


class GinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    index_class = GinIndex

    def test_suffix(self):
        self.assertEqual(GinIndex.suffix, 'gin')

    def test_deconstruction(self):
        index = GinIndex(
            fields=['title'],
            name='test_title_gin',
            fastupdate=True,
            gin_pending_list_limit=128,
        )
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.GinIndex')
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {
            'fields': ['title'],
            'name': 'test_title_gin',
            'fastupdate': True,
            'gin_pending_list_limit': 128,
        })


class GistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    index_class = GistIndex

    def test_suffix(self):
        self.assertEqual(GistIndex.suffix, 'gist')

    def test_deconstruction(self):
        index = GistIndex(fields=['title'], name='test_title_gist', buffering=False, fillfactor=80)
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.GistIndex')
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {
            'fields': ['title'],
            'name': 'test_title_gist',
            'buffering': False,
            'fillfactor': 80,
        })


class HashIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    index_class = HashIndex

    def test_suffix(self):
        self.assertEqual(HashIndex.suffix, 'hash')

    def test_deconstruction(self):
        index = HashIndex(fields=['title'], name='test_title_hash', fillfactor=80)
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.HashIndex')
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_hash', 'fillfactor': 80})


class SpGistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    index_class = SpGistIndex

    def test_suffix(self):
        self.assertEqual(SpGistIndex.suffix, 'spgist')

    def test_deconstruction(self):
        index = SpGistIndex(fields=['title'], name='test_title_spgist', fillfactor=80)
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.SpGistIndex')
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_spgist', 'fillfactor': 80})


class SchemaTests(PostgreSQLTestCase):

    def get_constraints(self, table):
        """
        Get the indexes on the table using a new cursor.
        """
        with connection.cursor() as cursor:
            return connection.introspection.get_constraints(cursor, table)

    def test_gin_index(self):
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn('field', self.get_constraints(IntegerArrayModel._meta.db_table))
        # Add the index
        index_name = 'integer_array_model_field_gin'
        index = GinIndex(fields=['field'], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(IntegerArrayModel, index)
        constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
        # Check gin index was added
        self.assertEqual(constraints[index_name]['type'], GinIndex.suffix)
        # Drop the index
        with connection.schema_editor() as editor:
            editor.remove_index(IntegerArrayModel, index)
        self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table))

    def test_gin_fastupdate(self):
        index_name = 'integer_array_gin_fastupdate'
        index = GinIndex(fields=['field'], name=index_name, fastupdate=False)
        with connection.schema_editor() as editor:
            editor.add_index(IntegerArrayModel, index)
        constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], 'gin')
        self.assertEqual(constraints[index_name]['options'], ['fastupdate=off'])
        with connection.schema_editor() as editor:
            editor.remove_index(IntegerArrayModel, index)
        self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table))

    def test_partial_gin_index(self):
        with register_lookup(CharField, Length):
            index_name = 'char_field_gin_partial_idx'
            index = GinIndex(fields=['field'], name=index_name, condition=Q(field__length=40))
            with connection.schema_editor() as editor:
                editor.add_index(CharFieldModel, index)
            constraints = self.get_constraints(CharFieldModel._meta.db_table)
            self.assertEqual(constraints[index_name]['type'], 'gin')
            with connection.schema_editor() as editor:
                editor.remove_index(CharFieldModel, index)
            self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_partial_gin_index_with_tablespace(self):
        with register_lookup(CharField, Length):
            index_name = 'char_field_gin_partial_idx'
            index = GinIndex(
                fields=['field'],
                name=index_name,
                condition=Q(field__length=40),
                db_tablespace='pg_default',
            )
            with connection.schema_editor() as editor:
                editor.add_index(CharFieldModel, index)
                self.assertIn('TABLESPACE "pg_default" ', str(index.create_sql(CharFieldModel, editor)))
            constraints = self.get_constraints(CharFieldModel._meta.db_table)
            self.assertEqual(constraints[index_name]['type'], 'gin')
            with connection.schema_editor() as editor:
                editor.remove_index(CharFieldModel, index)
            self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_gin_parameters(self):
        index_name = 'integer_array_gin_params'
        index = GinIndex(fields=['field'], name=index_name, fastupdate=True, gin_pending_list_limit=64)
        with connection.schema_editor() as editor:
            editor.add_index(IntegerArrayModel, index)
        constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], 'gin')
        self.assertEqual(constraints[index_name]['options'], ['gin_pending_list_limit=64', 'fastupdate=on'])
        with connection.schema_editor() as editor:
            editor.remove_index(IntegerArrayModel, index)
        self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table))

    def test_bloom_index(self):
        index_name = 'char_field_model_field_bloom'
        index = BloomIndex(fields=['field'], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], BloomIndex.suffix)
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_bloom_parameters(self):
        index_name = 'char_field_model_field_bloom_params'
        index = BloomIndex(fields=['field'], name=index_name, length=512, columns=[3])
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], BloomIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['length=512', 'col1=3'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_brin_index(self):
        index_name = 'char_field_model_field_brin'
        index = BrinIndex(fields=['field'], name=index_name, pages_per_range=4)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], BrinIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['pages_per_range=4'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    @skipUnlessDBFeature('has_brin_autosummarize')
    def test_brin_parameters(self):
        index_name = 'char_field_brin_params'
        index = BrinIndex(fields=['field'], name=index_name, autosummarize=True)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], BrinIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['autosummarize=on'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_brin_autosummarize_not_supported(self):
        index_name = 'brin_options_exception'
        index = BrinIndex(fields=['field'], name=index_name, autosummarize=True)
        with self.assertRaisesMessage(NotSupportedError, 'BRIN option autosummarize requires PostgreSQL 10+.'):
            with mock.patch('django.db.backends.postgresql.features.DatabaseFeatures.has_brin_autosummarize', False):
                with connection.schema_editor() as editor:
                    editor.add_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_btree_index(self):
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table))
        # Add the index.
        index_name = 'char_field_model_field_btree'
        index = BTreeIndex(fields=['field'], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        # The index was added.
        self.assertEqual(constraints[index_name]['type'], BTreeIndex.suffix)
        # Drop the index.
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_btree_parameters(self):
        index_name = 'integer_array_btree_fillfactor'
        index = BTreeIndex(fields=['field'], name=index_name, fillfactor=80)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], BTreeIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['fillfactor=80'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_gist_index(self):
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table))
        # Add the index.
        index_name = 'char_field_model_field_gist'
        index = GistIndex(fields=['field'], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        # The index was added.
        self.assertEqual(constraints[index_name]['type'], GistIndex.suffix)
        # Drop the index.
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_gist_parameters(self):
        index_name = 'integer_array_gist_buffering'
        index = GistIndex(fields=['field'], name=index_name, buffering=True, fillfactor=80)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], GistIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['buffering=on', 'fillfactor=80'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    @skipUnlessDBFeature('supports_covering_gist_indexes')
    def test_gist_include(self):
        index_name = 'scene_gist_include_setting'
        index = GistIndex(name=index_name, fields=['scene'], include=['setting'])
        with connection.schema_editor() as editor:
            editor.add_index(Scene, index)
        constraints = self.get_constraints(Scene._meta.db_table)
        self.assertIn(index_name, constraints)
        self.assertEqual(constraints[index_name]['type'], GistIndex.suffix)
        self.assertEqual(constraints[index_name]['columns'], ['scene', 'setting'])
        with connection.schema_editor() as editor:
            editor.remove_index(Scene, index)
        self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))

    def test_gist_include_not_supported(self):
        index_name = 'gist_include_exception'
        index = GistIndex(fields=['scene'], name=index_name, include=['setting'])
        msg = 'Covering GiST indexes requires PostgreSQL 12+.'
        with self.assertRaisesMessage(NotSupportedError, msg):
            with mock.patch(
                'django.db.backends.postgresql.features.DatabaseFeatures.supports_covering_gist_indexes',
                False,
            ):
                with connection.schema_editor() as editor:
                    editor.add_index(Scene, index)
        self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))

    def test_hash_index(self):
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table))
        # Add the index.
        index_name = 'char_field_model_field_hash'
        index = HashIndex(fields=['field'], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        # The index was added.
        self.assertEqual(constraints[index_name]['type'], HashIndex.suffix)
        # Drop the index.
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_hash_parameters(self):
        index_name = 'integer_array_hash_fillfactor'
        index = HashIndex(fields=['field'], name=index_name, fillfactor=80)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], HashIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['fillfactor=80'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_spgist_index(self):
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table))
        # Add the index.
        index_name = 'char_field_model_field_spgist'
        index = SpGistIndex(fields=['field'], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        # The index was added.
        self.assertEqual(constraints[index_name]['type'], SpGistIndex.suffix)
        # Drop the index.
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_spgist_parameters(self):
        index_name = 'integer_array_spgist_fillfactor'
        index = SpGistIndex(fields=['field'], name=index_name, fillfactor=80)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], SpGistIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['fillfactor=80'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
41da4f5d9c50eef7363d589ec4d3be921ff8ca819e7d02e4b892981b7a7062cb
import re from io import StringIO from unittest import mock, skipUnless from django.core.management import call_command from django.db import connection from django.db.backends.base.introspection import TableInfo from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature from .models import PeopleMoreData def inspectdb_tables_only(table_name): """ Limit introspection to tables created for models of this app. Some databases such as Oracle are extremely slow at introspection. """ return table_name.startswith('inspectdb_') def special_table_only(table_name): return table_name.startswith('inspectdb_special') class InspectDBTestCase(TestCase): unique_re = re.compile(r'.*unique_together = \((.+),\).*') def test_stealth_table_name_filter_option(self): out = StringIO() call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out) error_message = "inspectdb has examined a table that should have been filtered out." # contrib.contenttypes is one of the apps always installed when running # the Django test suite, check that one of its tables hasn't been # inspected self.assertNotIn("class DjangoContentType(models.Model):", out.getvalue(), msg=error_message) def test_table_option(self): """ inspectdb can inspect a subset of tables by passing the table names as arguments. """ out = StringIO() call_command('inspectdb', 'inspectdb_people', stdout=out) output = out.getvalue() self.assertIn('class InspectdbPeople(models.Model):', output) self.assertNotIn("InspectdbPeopledata", output) def make_field_type_asserter(self): """Call inspectdb and return a function to validate a field type in its output""" out = StringIO() call_command('inspectdb', 'inspectdb_columntypes', stdout=out) output = out.getvalue() def assertFieldType(name, definition): out_def = re.search(r'^\s*%s = (models.*)$' % name, output, re.MULTILINE)[1] self.assertEqual(definition, out_def) return assertFieldType def test_field_types(self): """Test introspection of various Django field types""" assertFieldType = self.make_field_type_asserter() introspected_field_types = connection.features.introspected_field_types char_field_type = introspected_field_types['CharField'] # Inspecting Oracle DB doesn't produce correct results (#19884): # - it reports fields as blank=True when they aren't. 
if not connection.features.interprets_empty_strings_as_nulls and char_field_type == 'CharField': assertFieldType('char_field', "models.CharField(max_length=10)") assertFieldType('null_char_field', "models.CharField(max_length=10, blank=True, null=True)") assertFieldType('email_field', "models.CharField(max_length=254)") assertFieldType('file_field', "models.CharField(max_length=100)") assertFieldType('file_path_field', "models.CharField(max_length=100)") assertFieldType('slug_field', "models.CharField(max_length=50)") assertFieldType('text_field', "models.TextField()") assertFieldType('url_field', "models.CharField(max_length=200)") if char_field_type == 'TextField': assertFieldType('char_field', 'models.TextField()') assertFieldType('null_char_field', 'models.TextField(blank=True, null=True)') assertFieldType('email_field', 'models.TextField()') assertFieldType('file_field', 'models.TextField()') assertFieldType('file_path_field', 'models.TextField()') assertFieldType('slug_field', 'models.TextField()') assertFieldType('text_field', 'models.TextField()') assertFieldType('url_field', 'models.TextField()') assertFieldType('date_field', "models.DateField()") assertFieldType('date_time_field', "models.DateTimeField()") if introspected_field_types['GenericIPAddressField'] == 'GenericIPAddressField': assertFieldType('gen_ip_address_field', "models.GenericIPAddressField()") elif not connection.features.interprets_empty_strings_as_nulls: assertFieldType('gen_ip_address_field', "models.CharField(max_length=39)") assertFieldType('time_field', 'models.%s()' % introspected_field_types['TimeField']) if connection.features.has_native_uuid_field: assertFieldType('uuid_field', "models.UUIDField()") elif not connection.features.interprets_empty_strings_as_nulls: assertFieldType('uuid_field', "models.CharField(max_length=32)") @skipUnlessDBFeature('can_introspect_json_field', 'supports_json_field') def test_json_field(self): out = StringIO() call_command('inspectdb', 'inspectdb_jsonfieldcolumntype', stdout=out) output = out.getvalue() if not connection.features.interprets_empty_strings_as_nulls: self.assertIn('json_field = models.JSONField()', output) self.assertIn('null_json_field = models.JSONField(blank=True, null=True)', output) def test_number_field_types(self): """Test introspection of various Django field types""" assertFieldType = self.make_field_type_asserter() introspected_field_types = connection.features.introspected_field_types auto_field_type = connection.features.introspected_field_types['AutoField'] if auto_field_type != 'AutoField': assertFieldType('id', "models.%s(primary_key=True) # AutoField?" 
% auto_field_type) assertFieldType('big_int_field', 'models.%s()' % introspected_field_types['BigIntegerField']) bool_field_type = introspected_field_types['BooleanField'] assertFieldType('bool_field', "models.{}()".format(bool_field_type)) assertFieldType('null_bool_field', 'models.{}(blank=True, null=True)'.format(bool_field_type)) if connection.vendor != 'sqlite': assertFieldType('decimal_field', "models.DecimalField(max_digits=6, decimal_places=1)") else: # Guessed arguments on SQLite, see #5014 assertFieldType('decimal_field', "models.DecimalField(max_digits=10, decimal_places=5) " "# max_digits and decimal_places have been guessed, " "as this database handles decimal fields as float") assertFieldType('float_field', "models.FloatField()") assertFieldType('int_field', 'models.%s()' % introspected_field_types['IntegerField']) assertFieldType('pos_int_field', 'models.%s()' % introspected_field_types['PositiveIntegerField']) assertFieldType('pos_big_int_field', 'models.%s()' % introspected_field_types['PositiveBigIntegerField']) assertFieldType('pos_small_int_field', 'models.%s()' % introspected_field_types['PositiveSmallIntegerField']) assertFieldType('small_int_field', 'models.%s()' % introspected_field_types['SmallIntegerField']) @skipUnlessDBFeature('can_introspect_foreign_keys') def test_attribute_name_not_python_keyword(self): out = StringIO() call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out) output = out.getvalue() error_message = "inspectdb generated an attribute name which is a Python keyword" # Recursive foreign keys should be set to 'self' self.assertIn("parent = models.ForeignKey('self', models.DO_NOTHING)", output) self.assertNotIn( "from = models.ForeignKey(InspectdbPeople, models.DO_NOTHING)", output, msg=error_message, ) # As InspectdbPeople model is defined after InspectdbMessage, it should be quoted self.assertIn( "from_field = models.ForeignKey('InspectdbPeople', models.DO_NOTHING, db_column='from_id')", output, ) self.assertIn( 'people_pk = models.OneToOneField(InspectdbPeople, models.DO_NOTHING, primary_key=True)', output, ) self.assertIn( 'people_unique = models.OneToOneField(InspectdbPeople, models.DO_NOTHING)', output, ) def test_digits_column_name_introspection(self): """Introspection of column names consist/start with digits (#16536/#17676)""" char_field_type = connection.features.introspected_field_types['CharField'] out = StringIO() call_command('inspectdb', 'inspectdb_digitsincolumnname', stdout=out) output = out.getvalue() error_message = "inspectdb generated a model field name which is a number" self.assertNotIn(' 123 = models.%s' % char_field_type, output, msg=error_message) self.assertIn('number_123 = models.%s' % char_field_type, output) error_message = "inspectdb generated a model field name which starts with a digit" self.assertNotIn(' 4extra = models.%s' % char_field_type, output, msg=error_message) self.assertIn('number_4extra = models.%s' % char_field_type, output) self.assertNotIn(' 45extra = models.%s' % char_field_type, output, msg=error_message) self.assertIn('number_45extra = models.%s' % char_field_type, output) def test_special_column_name_introspection(self): """ Introspection of column names containing special characters, unsuitable for Python identifiers """ out = StringIO() call_command('inspectdb', table_name_filter=special_table_only, stdout=out) output = out.getvalue() base_name = connection.introspection.identifier_converter('Field') integer_field_type = 
connection.features.introspected_field_types['IntegerField'] self.assertIn("field = models.%s()" % integer_field_type, output) self.assertIn("field_field = models.%s(db_column='%s_')" % (integer_field_type, base_name), output) self.assertIn("field_field_0 = models.%s(db_column='%s__')" % (integer_field_type, base_name), output) self.assertIn("field_field_1 = models.%s(db_column='__field')" % integer_field_type, output) self.assertIn("prc_x = models.{}(db_column='prc(%) x')".format(integer_field_type), output) self.assertIn("tamaño = models.%s()" % integer_field_type, output) def test_table_name_introspection(self): """ Introspection of table names containing special characters, unsuitable for Python identifiers """ out = StringIO() call_command('inspectdb', table_name_filter=special_table_only, stdout=out) output = out.getvalue() self.assertIn("class InspectdbSpecialTableName(models.Model):", output) def test_managed_models(self): """By default the command generates models with `Meta.managed = False` (#14305)""" out = StringIO() call_command('inspectdb', 'inspectdb_columntypes', stdout=out) output = out.getvalue() self.longMessage = False self.assertIn(" managed = False", output, msg='inspectdb should generate unmanaged models.') def test_unique_together_meta(self): out = StringIO() call_command('inspectdb', 'inspectdb_uniquetogether', stdout=out) output = out.getvalue() self.assertIn(" unique_together = (('", output) unique_together_match = self.unique_re.findall(output) # There should be one unique_together tuple. self.assertEqual(len(unique_together_match), 1) fields = unique_together_match[0] # Fields with db_column = field name. self.assertIn("('field1', 'field2')", fields) # Fields from columns whose names are Python keywords. self.assertIn("('field1', 'field2')", fields) # Fields whose names normalize to the same Python field name and hence # are given an integer suffix. self.assertIn("('non_unique_column', 'non_unique_column_0')", fields) @skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific SQL') def test_unsupported_unique_together(self): """Unsupported index types (COALESCE here) are skipped.""" with connection.cursor() as c: c.execute( 'CREATE UNIQUE INDEX Findex ON %s ' '(id, people_unique_id, COALESCE(message_id, -1))' % PeopleMoreData._meta.db_table ) try: out = StringIO() call_command( 'inspectdb', table_name_filter=lambda tn: tn.startswith(PeopleMoreData._meta.db_table), stdout=out, ) output = out.getvalue() self.assertIn('# A unique constraint could not be introspected.', output) self.assertEqual(self.unique_re.findall(output), ["('id', 'people_unique')"]) finally: with connection.cursor() as c: c.execute('DROP INDEX Findex') @skipUnless(connection.vendor == 'sqlite', "Only patched sqlite's DatabaseIntrospection.data_types_reverse for this test") def test_custom_fields(self): """ Introspection of columns with a custom field (#21090) """ out = StringIO() orig_data_types_reverse = connection.introspection.data_types_reverse try: connection.introspection.data_types_reverse = { 'text': 'myfields.TextField', 'bigint': 'BigIntegerField', } call_command('inspectdb', 'inspectdb_columntypes', stdout=out) output = out.getvalue() self.assertIn("text_field = myfields.TextField()", output) self.assertIn("big_int_field = models.BigIntegerField()", output) finally: connection.introspection.data_types_reverse = orig_data_types_reverse def test_introspection_errors(self): """ Introspection errors should not crash the command, and the error should be visible in the output. 
""" out = StringIO() with mock.patch('django.db.connection.introspection.get_table_list', return_value=[TableInfo(name='nonexistent', type='t')]): call_command('inspectdb', stdout=out) output = out.getvalue() self.assertIn("# Unable to inspect table 'nonexistent'", output) # The error message depends on the backend self.assertIn("# The error was:", output) class InspectDBTransactionalTests(TransactionTestCase): available_apps = ['inspectdb'] def test_include_views(self): """inspectdb --include-views creates models for database views.""" with connection.cursor() as cursor: cursor.execute( 'CREATE VIEW inspectdb_people_view AS ' 'SELECT id, name FROM inspectdb_people' ) out = StringIO() view_model = 'class InspectdbPeopleView(models.Model):' view_managed = 'managed = False # Created from a view.' try: call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out) no_views_output = out.getvalue() self.assertNotIn(view_model, no_views_output) self.assertNotIn(view_managed, no_views_output) call_command('inspectdb', table_name_filter=inspectdb_tables_only, include_views=True, stdout=out) with_views_output = out.getvalue() self.assertIn(view_model, with_views_output) self.assertIn(view_managed, with_views_output) finally: with connection.cursor() as cursor: cursor.execute('DROP VIEW inspectdb_people_view') @skipUnlessDBFeature('can_introspect_materialized_views') def test_include_materialized_views(self): """inspectdb --include-views creates models for materialized views.""" with connection.cursor() as cursor: cursor.execute( 'CREATE MATERIALIZED VIEW inspectdb_people_materialized AS ' 'SELECT id, name FROM inspectdb_people' ) out = StringIO() view_model = 'class InspectdbPeopleMaterialized(models.Model):' view_managed = 'managed = False # Created from a view.' try: call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out) no_views_output = out.getvalue() self.assertNotIn(view_model, no_views_output) self.assertNotIn(view_managed, no_views_output) call_command('inspectdb', table_name_filter=inspectdb_tables_only, include_views=True, stdout=out) with_views_output = out.getvalue() self.assertIn(view_model, with_views_output) self.assertIn(view_managed, with_views_output) finally: with connection.cursor() as cursor: cursor.execute('DROP MATERIALIZED VIEW inspectdb_people_materialized') @skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific SQL') @skipUnlessDBFeature('supports_table_partitions') def test_include_partitions(self): """inspectdb --include-partitions creates models for partitions.""" with connection.cursor() as cursor: cursor.execute('''\ CREATE TABLE inspectdb_partition_parent (name text not null) PARTITION BY LIST (left(upper(name), 1)) ''') cursor.execute('''\ CREATE TABLE inspectdb_partition_child PARTITION OF inspectdb_partition_parent FOR VALUES IN ('A', 'B', 'C') ''') out = StringIO() partition_model_parent = 'class InspectdbPartitionParent(models.Model):' partition_model_child = 'class InspectdbPartitionChild(models.Model):' partition_managed = 'managed = False # Created from a partition.' 
try: call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out) no_partitions_output = out.getvalue() self.assertIn(partition_model_parent, no_partitions_output) self.assertNotIn(partition_model_child, no_partitions_output) self.assertNotIn(partition_managed, no_partitions_output) call_command('inspectdb', table_name_filter=inspectdb_tables_only, include_partitions=True, stdout=out) with_partitions_output = out.getvalue() self.assertIn(partition_model_parent, with_partitions_output) self.assertIn(partition_model_child, with_partitions_output) self.assertIn(partition_managed, with_partitions_output) finally: with connection.cursor() as cursor: cursor.execute('DROP TABLE IF EXISTS inspectdb_partition_child') cursor.execute('DROP TABLE IF EXISTS inspectdb_partition_parent') @skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific SQL') def test_foreign_data_wrapper(self): with connection.cursor() as cursor: cursor.execute('CREATE EXTENSION IF NOT EXISTS file_fdw') cursor.execute('CREATE SERVER inspectdb_server FOREIGN DATA WRAPPER file_fdw') cursor.execute('''\ CREATE FOREIGN TABLE inspectdb_iris_foreign_table ( petal_length real, petal_width real, sepal_length real, sepal_width real ) SERVER inspectdb_server OPTIONS ( filename '/dev/null' ) ''') out = StringIO() foreign_table_model = 'class InspectdbIrisForeignTable(models.Model):' foreign_table_managed = 'managed = False' try: call_command('inspectdb', stdout=out) output = out.getvalue() self.assertIn(foreign_table_model, output) self.assertIn(foreign_table_managed, output) finally: with connection.cursor() as cursor: cursor.execute('DROP FOREIGN TABLE IF EXISTS inspectdb_iris_foreign_table') cursor.execute('DROP SERVER IF EXISTS inspectdb_server') cursor.execute('DROP EXTENSION IF EXISTS file_fdw')
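

# For reference, a minimal sketch (not part of the test suite) of the
# invocation pattern these tests exercise: run inspectdb against a filtered
# set of tables and capture the generated model source. The lambda filter
# below is hypothetical.
#
#   from io import StringIO
#   from django.core.management import call_command
#
#   out = StringIO()
#   call_command(
#       'inspectdb',
#       table_name_filter=lambda tn: tn.startswith('inspectdb_'),
#       stdout=out,
#   )
#   model_source = out.getvalue()  # Python source for the inspected tables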
# Unit tests for cache framework # Uses whatever cache backend is set in the test settings file. import copy import io import os import pickle import re import shutil import tempfile import threading import time import unittest from pathlib import Path from unittest import mock from django.conf import settings from django.core import management, signals from django.core.cache import ( DEFAULT_CACHE_ALIAS, CacheKeyWarning, InvalidCacheKey, cache, caches, ) from django.core.cache.backends.base import InvalidCacheBackendError from django.core.cache.utils import make_template_fragment_key from django.db import close_old_connections, connection, connections from django.http import ( HttpRequest, HttpResponse, HttpResponseNotModified, StreamingHttpResponse, ) from django.middleware.cache import ( CacheMiddleware, FetchFromCacheMiddleware, UpdateCacheMiddleware, ) from django.middleware.csrf import CsrfViewMiddleware from django.template import engines from django.template.context_processors import csrf from django.template.response import TemplateResponse from django.test import ( RequestFactory, SimpleTestCase, TestCase, TransactionTestCase, override_settings, ) from django.test.signals import setting_changed from django.utils import timezone, translation from django.utils.cache import ( get_cache_key, learn_cache_key, patch_cache_control, patch_vary_headers, ) from django.views.decorators.cache import cache_control, cache_page from .models import Poll, expensive_calculation # functions/classes for complex data type tests def f(): return 42 class C: def m(n): return 24 class Unpicklable: def __getstate__(self): raise pickle.PickleError() def empty_response(request): return HttpResponse() KEY_ERRORS_WITH_MEMCACHED_MSG = ( 'Cache key contains characters that will cause errors if used with ' 'memcached: %r' ) @override_settings(CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } }) class DummyCacheTests(SimpleTestCase): # The Dummy cache backend doesn't really behave like a test backend, # so it has its own test case. 
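    # The contract exercised below: writes (set/add/touch/delete) are
    # accepted but never stored, so every read misses and get() falls back
    # to its default.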
def test_simple(self): "Dummy cache backend ignores cache set calls" cache.set("key", "value") self.assertIsNone(cache.get("key")) def test_add(self): "Add doesn't do anything in dummy cache backend" self.assertIs(cache.add("addkey1", "value"), True) self.assertIs(cache.add("addkey1", "newvalue"), True) self.assertIsNone(cache.get("addkey1")) def test_non_existent(self): "Nonexistent keys aren't found in the dummy cache backend" self.assertIsNone(cache.get("does_not_exist")) self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") def test_get_many(self): "get_many returns nothing for the dummy cache backend" cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'}) self.assertEqual(cache.get_many(['a', 'c', 'd']), {}) self.assertEqual(cache.get_many(['a', 'b', 'e']), {}) def test_get_many_invalid_key(self): msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces' with self.assertWarnsMessage(CacheKeyWarning, msg): cache.get_many(['key with spaces']) def test_delete(self): "Cache deletion is transparently ignored on the dummy cache backend" cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertIsNone(cache.get("key1")) self.assertIs(cache.delete("key1"), False) self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_has_key(self): "The has_key method doesn't ever return True for the dummy cache backend" cache.set("hello1", "goodbye1") self.assertIs(cache.has_key("hello1"), False) self.assertIs(cache.has_key("goodbye1"), False) def test_in(self): "The in operator doesn't ever return True for the dummy cache backend" cache.set("hello2", "goodbye2") self.assertNotIn("hello2", cache) self.assertNotIn("goodbye2", cache) def test_incr(self): "Dummy cache values can't be incremented" cache.set('answer', 42) with self.assertRaises(ValueError): cache.incr('answer') with self.assertRaises(ValueError): cache.incr('does_not_exist') def test_decr(self): "Dummy cache values can't be decremented" cache.set('answer', 42) with self.assertRaises(ValueError): cache.decr('answer') with self.assertRaises(ValueError): cache.decr('does_not_exist') def test_touch(self): """Dummy cache can't do touch().""" self.assertIs(cache.touch('whatever'), False) def test_data_types(self): "All data types are ignored equally by the dummy cache" stuff = { 'string': 'this is a string', 'int': 42, 'list': [1, 2, 3, 4], 'tuple': (1, 2, 3, 4), 'dict': {'A': 1, 'B': 2}, 'function': f, 'class': C, } cache.set("stuff", stuff) self.assertIsNone(cache.get("stuff")) def test_expiration(self): "Expiration has no effect on the dummy cache" cache.set('expire1', 'very quickly', 1) cache.set('expire2', 'very quickly', 1) cache.set('expire3', 'very quickly', 1) time.sleep(2) self.assertIsNone(cache.get("expire1")) self.assertIs(cache.add("expire2", "newvalue"), True) self.assertIsNone(cache.get("expire2")) self.assertIs(cache.has_key("expire3"), False) def test_unicode(self): "Unicode values are ignored by the dummy cache" stuff = { 'ascii': 'ascii_value', 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1', 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2', 'ascii2': {'x': 1} } for (key, value) in stuff.items(): with self.subTest(key=key): cache.set(key, value) self.assertIsNone(cache.get(key)) def test_set_many(self): "set_many does nothing for the dummy cache backend" self.assertEqual(cache.set_many({'a': 1, 'b': 2}), []) self.assertEqual(cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1'), []) def test_set_many_invalid_key(self): msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces' with 
self.assertWarnsMessage(CacheKeyWarning, msg):
            cache.set_many({'key with spaces': 'foo'})

    def test_delete_many(self):
        "delete_many does nothing for the dummy cache backend"
        cache.delete_many(['a', 'b'])

    def test_delete_many_invalid_key(self):
        msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces'
        with self.assertWarnsMessage(CacheKeyWarning, msg):
            cache.delete_many({'key with spaces': 'foo'})

    def test_clear(self):
        "clear does nothing for the dummy cache backend"
        cache.clear()

    def test_incr_version(self):
        "Dummy cache versions can't be incremented"
        cache.set('answer', 42)
        with self.assertRaises(ValueError):
            cache.incr_version('answer')
        with self.assertRaises(ValueError):
            cache.incr_version('does_not_exist')

    def test_decr_version(self):
        "Dummy cache versions can't be decremented"
        cache.set('answer', 42)
        with self.assertRaises(ValueError):
            cache.decr_version('answer')
        with self.assertRaises(ValueError):
            cache.decr_version('does_not_exist')

    def test_get_or_set(self):
        self.assertEqual(cache.get_or_set('mykey', 'default'), 'default')
        self.assertIsNone(cache.get_or_set('mykey', None))

    def test_get_or_set_callable(self):
        def my_callable():
            return 'default'

        self.assertEqual(cache.get_or_set('mykey', my_callable), 'default')
        self.assertEqual(cache.get_or_set('mykey', my_callable()), 'default')


def custom_key_func(key, key_prefix, version):
    "A customized cache key function"
    return 'CUSTOM-' + '-'.join([key_prefix, str(version), key])


_caches_setting_base = {
    'default': {},
    'prefix': {'KEY_PREFIX': 'cacheprefix{}'.format(os.getpid())},
    'v2': {'VERSION': 2},
    'custom_key': {'KEY_FUNCTION': custom_key_func},
    'custom_key2': {'KEY_FUNCTION': 'cache.tests.custom_key_func'},
    'cull': {'OPTIONS': {'MAX_ENTRIES': 30}},
    'zero_cull': {'OPTIONS': {'CULL_FREQUENCY': 0, 'MAX_ENTRIES': 30}},
}


def caches_setting_for_tests(base=None, exclude=None, **params):
    # `base` is used to pull in the memcached config from the original settings,
    # `exclude` is a set of cache names denoting which `_caches_setting_base`
    # keys should be omitted.
    # `params` are test-specific overrides and `_caches_setting_base` is the
    # base config for the tests.
# This results in the following search order: # params -> _caches_setting_base -> base base = base or {} exclude = exclude or set() setting = {k: base.copy() for k in _caches_setting_base if k not in exclude} for key, cache_params in setting.items(): cache_params.update(_caches_setting_base[key]) cache_params.update(params) return setting class BaseCacheTests: # A common set of tests to apply to all cache backends factory = RequestFactory() def tearDown(self): cache.clear() def test_simple(self): # Simple cache set/get works cache.set("key", "value") self.assertEqual(cache.get("key"), "value") def test_default_used_when_none_is_set(self): """If None is cached, get() returns it instead of the default.""" cache.set('key_default_none', None) self.assertIsNone(cache.get('key_default_none', default='default')) def test_add(self): # A key can be added to a cache self.assertIs(cache.add("addkey1", "value"), True) self.assertIs(cache.add("addkey1", "newvalue"), False) self.assertEqual(cache.get("addkey1"), "value") def test_prefix(self): # Test for same cache key conflicts between shared backend cache.set('somekey', 'value') # should not be set in the prefixed cache self.assertIs(caches['prefix'].has_key('somekey'), False) caches['prefix'].set('somekey', 'value2') self.assertEqual(cache.get('somekey'), 'value') self.assertEqual(caches['prefix'].get('somekey'), 'value2') def test_non_existent(self): """Nonexistent cache keys return as None/default.""" self.assertIsNone(cache.get("does_not_exist")) self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") def test_get_many(self): # Multiple cache keys can be returned using get_many cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'}) self.assertEqual(cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'}) self.assertEqual(cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'}) self.assertEqual(cache.get_many(iter(['a', 'b', 'e'])), {'a': 'a', 'b': 'b'}) def test_delete(self): # Cache keys can be deleted cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertEqual(cache.get("key1"), "spam") self.assertIs(cache.delete("key1"), True) self.assertIsNone(cache.get("key1")) self.assertEqual(cache.get("key2"), "eggs") def test_delete_nonexistent(self): self.assertIs(cache.delete('nonexistent_key'), False) def test_has_key(self): # The cache can be inspected for cache keys cache.set("hello1", "goodbye1") self.assertIs(cache.has_key("hello1"), True) self.assertIs(cache.has_key("goodbye1"), False) cache.set("no_expiry", "here", None) self.assertIs(cache.has_key("no_expiry"), True) def test_in(self): # The in operator can be used to inspect cache contents cache.set("hello2", "goodbye2") self.assertIn("hello2", cache) self.assertNotIn("goodbye2", cache) def test_incr(self): # Cache values can be incremented cache.set('answer', 41) self.assertEqual(cache.incr('answer'), 42) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.incr('answer', 10), 52) self.assertEqual(cache.get('answer'), 52) self.assertEqual(cache.incr('answer', -10), 42) with self.assertRaises(ValueError): cache.incr('does_not_exist') def test_decr(self): # Cache values can be decremented cache.set('answer', 43) self.assertEqual(cache.decr('answer'), 42) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.decr('answer', 10), 32) self.assertEqual(cache.get('answer'), 32) self.assertEqual(cache.decr('answer', -10), 42) with self.assertRaises(ValueError): cache.decr('does_not_exist') def test_close(self): self.assertTrue(hasattr(cache, 'close')) 
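        # Assumed from the base backend's design: close() exists on every
        # backend and is a no-op for backends without persistent connections,
        # so calling it unconditionally is safe.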
cache.close() def test_data_types(self): # Many different data types can be cached stuff = { 'string': 'this is a string', 'int': 42, 'list': [1, 2, 3, 4], 'tuple': (1, 2, 3, 4), 'dict': {'A': 1, 'B': 2}, 'function': f, 'class': C, } cache.set("stuff", stuff) self.assertEqual(cache.get("stuff"), stuff) def test_cache_read_for_model_instance(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() my_poll = Poll.objects.create(question="Well?") self.assertEqual(Poll.objects.count(), 1) pub_date = my_poll.pub_date cache.set('question', my_poll) cached_poll = cache.get('question') self.assertEqual(cached_poll.pub_date, pub_date) # We only want the default expensive calculation run once self.assertEqual(expensive_calculation.num_runs, 1) def test_cache_write_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache write expensive_calculation.num_runs = 0 Poll.objects.all().delete() Poll.objects.create(question="What?") self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) self.assertEqual(expensive_calculation.num_runs, 1) cache.set('deferred_queryset', defer_qs) # cache set should not re-evaluate default functions self.assertEqual(expensive_calculation.num_runs, 1) def test_cache_read_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() Poll.objects.create(question="What?") self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) cache.set('deferred_queryset', defer_qs) self.assertEqual(expensive_calculation.num_runs, 1) runs_before_cache_read = expensive_calculation.num_runs cache.get('deferred_queryset') # We only want the default expensive calculation run on creation and set self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read) def test_expiration(self): # Cache values can be set to expire cache.set('expire1', 'very quickly', 1) cache.set('expire2', 'very quickly', 1) cache.set('expire3', 'very quickly', 1) time.sleep(2) self.assertIsNone(cache.get("expire1")) self.assertIs(cache.add("expire2", "newvalue"), True) self.assertEqual(cache.get("expire2"), "newvalue") self.assertIs(cache.has_key("expire3"), False) def test_touch(self): # cache.touch() updates the timeout. cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1', timeout=4), True) time.sleep(2) self.assertIs(cache.has_key('expire1'), True) time.sleep(3) self.assertIs(cache.has_key('expire1'), False) # cache.touch() works without the timeout argument. 
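        # With no timeout argument, touch() is expected to renew the key using
        # the backend's default timeout (300 seconds unless TIMEOUT overrides
        # it), so the key should outlive the short sleep below.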
cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1'), True) time.sleep(2) self.assertIs(cache.has_key('expire1'), True) self.assertIs(cache.touch('nonexistent'), False) def test_unicode(self): # Unicode values can be cached stuff = { 'ascii': 'ascii_value', 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1', 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2', 'ascii2': {'x': 1} } # Test `set` for (key, value) in stuff.items(): with self.subTest(key=key): cache.set(key, value) self.assertEqual(cache.get(key), value) # Test `add` for (key, value) in stuff.items(): with self.subTest(key=key): self.assertIs(cache.delete(key), True) self.assertIs(cache.add(key, value), True) self.assertEqual(cache.get(key), value) # Test `set_many` for (key, value) in stuff.items(): self.assertIs(cache.delete(key), True) cache.set_many(stuff) for (key, value) in stuff.items(): with self.subTest(key=key): self.assertEqual(cache.get(key), value) def test_binary_string(self): # Binary strings should be cacheable from zlib import compress, decompress value = 'value_to_be_compressed' compressed_value = compress(value.encode()) # Test set cache.set('binary1', compressed_value) compressed_result = cache.get('binary1') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test add self.assertIs(cache.add('binary1-add', compressed_value), True) compressed_result = cache.get('binary1-add') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test set_many cache.set_many({'binary1-set_many': compressed_value}) compressed_result = cache.get('binary1-set_many') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) def test_set_many(self): # Multiple keys can be set using set_many cache.set_many({"key1": "spam", "key2": "eggs"}) self.assertEqual(cache.get("key1"), "spam") self.assertEqual(cache.get("key2"), "eggs") def test_set_many_returns_empty_list_on_success(self): """set_many() returns an empty list when all keys are inserted.""" failing_keys = cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertEqual(failing_keys, []) def test_set_many_expiration(self): # set_many takes a second ``timeout`` parameter cache.set_many({"key1": "spam", "key2": "eggs"}, 1) time.sleep(2) self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_delete_many(self): # Multiple keys can be deleted using delete_many cache.set_many({'key1': 'spam', 'key2': 'eggs', 'key3': 'ham'}) cache.delete_many(["key1", "key2"]) self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) self.assertEqual(cache.get("key3"), "ham") def test_clear(self): # The cache can be emptied using clear cache.set_many({'key1': 'spam', 'key2': 'eggs'}) cache.clear() self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_long_timeout(self): """ Follow memcached's convention where a timeout greater than 30 days is treated as an absolute expiration timestamp instead of a relative offset (#12399). 
""" cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second self.assertEqual(cache.get('key1'), 'eggs') self.assertIs(cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1), True) self.assertEqual(cache.get('key2'), 'ham') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1) self.assertEqual(cache.get('key3'), 'sausage') self.assertEqual(cache.get('key4'), 'lobster bisque') def test_forever_timeout(self): """ Passing in None into timeout results in a value that is cached forever """ cache.set('key1', 'eggs', None) self.assertEqual(cache.get('key1'), 'eggs') self.assertIs(cache.add('key2', 'ham', None), True) self.assertEqual(cache.get('key2'), 'ham') self.assertIs(cache.add('key1', 'new eggs', None), False) self.assertEqual(cache.get('key1'), 'eggs') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None) self.assertEqual(cache.get('key3'), 'sausage') self.assertEqual(cache.get('key4'), 'lobster bisque') cache.set('key5', 'belgian fries', timeout=1) self.assertIs(cache.touch('key5', timeout=None), True) time.sleep(2) self.assertEqual(cache.get('key5'), 'belgian fries') def test_zero_timeout(self): """ Passing in zero into timeout results in a value that is not cached """ cache.set('key1', 'eggs', 0) self.assertIsNone(cache.get('key1')) self.assertIs(cache.add('key2', 'ham', 0), True) self.assertIsNone(cache.get('key2')) cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0) self.assertIsNone(cache.get('key3')) self.assertIsNone(cache.get('key4')) cache.set('key5', 'belgian fries', timeout=5) self.assertIs(cache.touch('key5', timeout=0), True) self.assertIsNone(cache.get('key5')) def test_float_timeout(self): # Make sure a timeout given as a float doesn't crash anything. cache.set("key1", "spam", 100.2) self.assertEqual(cache.get("key1"), "spam") def _perform_cull_test(self, cull_cache_name, initial_count, final_count): try: cull_cache = caches[cull_cache_name] except InvalidCacheBackendError: self.skipTest("Culling isn't implemented.") # Create initial cache key entries. This will overflow the cache, # causing a cull. for i in range(1, initial_count): cull_cache.set('cull%d' % i, 'value', 1000) count = 0 # Count how many keys are left in the cache. for i in range(1, initial_count): if cull_cache.has_key('cull%d' % i): count += 1 self.assertEqual(count, final_count) def test_cull(self): self._perform_cull_test('cull', 50, 29) def test_zero_cull(self): self._perform_cull_test('zero_cull', 50, 19) def _perform_invalid_key_test(self, key, expected_warning): """ All the builtin backends should warn (except memcached that should error) on keys that would be refused by memcached. This encourages portable caching code without making it too difficult to use production backends with more liberal key rules. Refs #6447. """ # mimic custom ``make_key`` method being defined since the default will # never show the below warnings def func(key, *args): return key old_func = cache.key_func cache.key_func = func try: with self.assertWarnsMessage(CacheKeyWarning, expected_warning): cache.set(key, 'value') finally: cache.key_func = old_func def test_invalid_key_characters(self): # memcached doesn't allow whitespace or control characters in keys. key = 'key with spaces and 清' self._perform_invalid_key_test(key, KEY_ERRORS_WITH_MEMCACHED_MSG % key) def test_invalid_key_length(self): # memcached limits key length to 250. 
key = ('a' * 250) + '清' expected_warning = ( 'Cache key will cause errors if used with memcached: ' '%r (longer than %s)' % (key, 250) ) self._perform_invalid_key_test(key, expected_warning) def test_cache_versioning_get_set(self): # set, using default version = 1 cache.set('answer1', 42) self.assertEqual(cache.get('answer1'), 42) self.assertEqual(cache.get('answer1', version=1), 42) self.assertIsNone(cache.get('answer1', version=2)) self.assertIsNone(caches['v2'].get('answer1')) self.assertEqual(caches['v2'].get('answer1', version=1), 42) self.assertIsNone(caches['v2'].get('answer1', version=2)) # set, default version = 1, but manually override version = 2 cache.set('answer2', 42, version=2) self.assertIsNone(cache.get('answer2')) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) # v2 set, using default version = 2 caches['v2'].set('answer3', 42) self.assertIsNone(cache.get('answer3')) self.assertIsNone(cache.get('answer3', version=1)) self.assertEqual(cache.get('answer3', version=2), 42) self.assertEqual(caches['v2'].get('answer3'), 42) self.assertIsNone(caches['v2'].get('answer3', version=1)) self.assertEqual(caches['v2'].get('answer3', version=2), 42) # v2 set, default version = 2, but manually override version = 1 caches['v2'].set('answer4', 42, version=1) self.assertEqual(cache.get('answer4'), 42) self.assertEqual(cache.get('answer4', version=1), 42) self.assertIsNone(cache.get('answer4', version=2)) self.assertIsNone(caches['v2'].get('answer4')) self.assertEqual(caches['v2'].get('answer4', version=1), 42) self.assertIsNone(caches['v2'].get('answer4', version=2)) def test_cache_versioning_add(self): # add, default version = 1, but manually override version = 2 self.assertIs(cache.add('answer1', 42, version=2), True) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) self.assertIs(cache.add('answer1', 37, version=2), False) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) self.assertIs(cache.add('answer1', 37, version=1), True) self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) # v2 add, using default version = 2 self.assertIs(caches['v2'].add('answer2', 42), True) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) self.assertIs(caches['v2'].add('answer2', 37), False) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) self.assertIs(caches['v2'].add('answer2', 37, version=1), True) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) # v2 add, default version = 2, but manually override version = 1 self.assertIs(caches['v2'].add('answer3', 42, version=1), True) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) self.assertIs(caches['v2'].add('answer3', 37, version=1), False) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) self.assertIs(caches['v2'].add('answer3', 37), True) self.assertEqual(cache.get('answer3', version=1), 42) self.assertEqual(cache.get('answer3', version=2), 37) def test_cache_versioning_has_key(self): 
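        # Written at the default version (1), so visibility must track the
        # requested version: the 'v2' alias misses unless version=1 is forced.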
cache.set('answer1', 42) # has_key self.assertIs(cache.has_key('answer1'), True) self.assertIs(cache.has_key('answer1', version=1), True) self.assertIs(cache.has_key('answer1', version=2), False) self.assertIs(caches['v2'].has_key('answer1'), False) self.assertIs(caches['v2'].has_key('answer1', version=1), True) self.assertIs(caches['v2'].has_key('answer1', version=2), False) def test_cache_versioning_delete(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) self.assertIs(cache.delete('answer1'), True) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) self.assertIs(cache.delete('answer2', version=2), True) self.assertEqual(cache.get('answer2', version=1), 37) self.assertIsNone(cache.get('answer2', version=2)) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) self.assertIs(caches['v2'].delete('answer3'), True) self.assertEqual(cache.get('answer3', version=1), 37) self.assertIsNone(cache.get('answer3', version=2)) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) self.assertIs(caches['v2'].delete('answer4', version=1), True) self.assertIsNone(cache.get('answer4', version=1)) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_incr_decr(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) self.assertEqual(cache.incr('answer1'), 38) self.assertEqual(cache.get('answer1', version=1), 38) self.assertEqual(cache.get('answer1', version=2), 42) self.assertEqual(cache.decr('answer1'), 37) self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) self.assertEqual(cache.incr('answer2', version=2), 43) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 43) self.assertEqual(cache.decr('answer2', version=2), 42) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) self.assertEqual(caches['v2'].incr('answer3'), 43) self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 43) self.assertEqual(caches['v2'].decr('answer3'), 42) self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 42) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) self.assertEqual(caches['v2'].incr('answer4', version=1), 38) self.assertEqual(cache.get('answer4', version=1), 38) self.assertEqual(cache.get('answer4', version=2), 42) self.assertEqual(caches['v2'].decr('answer4', version=1), 37) self.assertEqual(cache.get('answer4', version=1), 37) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_get_set_many(self): # set, using default version = 1 cache.set_many({'ford1': 37, 'arthur1': 42}) self.assertEqual(cache.get_many(['ford1', 'arthur1']), {'ford1': 37, 'arthur1': 42}) self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42}) self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=2), {}) self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1']), {}) self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42}) self.assertEqual(caches['v2'].get_many(['ford1', 
'arthur1'], version=2), {}) # set, default version = 1, but manually override version = 2 cache.set_many({'ford2': 37, 'arthur2': 42}, version=2) self.assertEqual(cache.get_many(['ford2', 'arthur2']), {}) self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=1), {}) self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}) self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2']), {'ford2': 37, 'arthur2': 42}) self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=1), {}) self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}) # v2 set, using default version = 2 caches['v2'].set_many({'ford3': 37, 'arthur3': 42}) self.assertEqual(cache.get_many(['ford3', 'arthur3']), {}) self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=1), {}) self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}) self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3']), {'ford3': 37, 'arthur3': 42}) self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=1), {}) self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}) # v2 set, default version = 2, but manually override version = 1 caches['v2'].set_many({'ford4': 37, 'arthur4': 42}, version=1) self.assertEqual(cache.get_many(['ford4', 'arthur4']), {'ford4': 37, 'arthur4': 42}) self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}) self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=2), {}) self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4']), {}) self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}) self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=2), {}) def test_incr_version(self): cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertEqual(cache.get('answer', version=2), 42) self.assertIsNone(cache.get('answer', version=3)) self.assertEqual(cache.incr_version('answer', version=2), 3) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertIsNone(cache.get('answer', version=2)) self.assertEqual(cache.get('answer', version=3), 42) caches['v2'].set('answer2', 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) self.assertIsNone(caches['v2'].get('answer2', version=3)) self.assertEqual(caches['v2'].incr_version('answer2'), 3) self.assertIsNone(caches['v2'].get('answer2')) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertIsNone(caches['v2'].get('answer2', version=2)) self.assertEqual(caches['v2'].get('answer2', version=3), 42) with self.assertRaises(ValueError): cache.incr_version('does_not_exist') def test_decr_version(self): cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertEqual(cache.get('answer', version=2), 42) self.assertEqual(cache.decr_version('answer', version=2), 1) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.get('answer', version=1), 42) self.assertIsNone(cache.get('answer', version=2)) caches['v2'].set('answer2', 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) 
self.assertEqual(caches['v2'].get('answer2', version=2), 42) self.assertEqual(caches['v2'].decr_version('answer2'), 1) self.assertIsNone(caches['v2'].get('answer2')) self.assertEqual(caches['v2'].get('answer2', version=1), 42) self.assertIsNone(caches['v2'].get('answer2', version=2)) with self.assertRaises(ValueError): cache.decr_version('does_not_exist', version=2) def test_custom_key_func(self): # Two caches with different key functions aren't visible to each other cache.set('answer1', 42) self.assertEqual(cache.get('answer1'), 42) self.assertIsNone(caches['custom_key'].get('answer1')) self.assertIsNone(caches['custom_key2'].get('answer1')) caches['custom_key'].set('answer2', 42) self.assertIsNone(cache.get('answer2')) self.assertEqual(caches['custom_key'].get('answer2'), 42) self.assertEqual(caches['custom_key2'].get('answer2'), 42) def test_cache_write_unpicklable_object(self): fetch_middleware = FetchFromCacheMiddleware(empty_response) fetch_middleware.cache = cache request = self.factory.get('/cache/test') request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNone(get_cache_data) content = 'Testing cookie serialization.' def get_response(req): response = HttpResponse(content) response.set_cookie('foo', 'bar') return response update_middleware = UpdateCacheMiddleware(get_response) update_middleware.cache = cache response = update_middleware(request) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) self.assertEqual(get_cache_data.cookies, response.cookies) UpdateCacheMiddleware(lambda req: get_cache_data)(request) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) self.assertEqual(get_cache_data.cookies, response.cookies) def test_add_fail_on_pickleerror(self): # Shouldn't fail silently if trying to cache an unpicklable type. with self.assertRaises(pickle.PickleError): cache.add('unpicklable', Unpicklable()) def test_set_fail_on_pickleerror(self): with self.assertRaises(pickle.PickleError): cache.set('unpicklable', Unpicklable()) def test_get_or_set(self): self.assertIsNone(cache.get('projector')) self.assertEqual(cache.get_or_set('projector', 42), 42) self.assertEqual(cache.get('projector'), 42) self.assertIsNone(cache.get_or_set('null', None)) def test_get_or_set_callable(self): def my_callable(): return 'value' self.assertEqual(cache.get_or_set('mykey', my_callable), 'value') self.assertEqual(cache.get_or_set('mykey', my_callable()), 'value') def test_get_or_set_callable_returning_none(self): self.assertIsNone(cache.get_or_set('mykey', lambda: None)) # Previous get_or_set() doesn't store None in the cache. 
self.assertEqual(cache.get('mykey', 'default'), 'default') def test_get_or_set_version(self): msg = "get_or_set() missing 1 required positional argument: 'default'" self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979) with self.assertRaisesMessage(TypeError, msg): cache.get_or_set('brian') with self.assertRaisesMessage(TypeError, msg): cache.get_or_set('brian', version=1) self.assertIsNone(cache.get('brian', version=1)) self.assertEqual(cache.get_or_set('brian', 42, version=1), 42) self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979) self.assertIsNone(cache.get('brian', version=3)) def test_get_or_set_racing(self): with mock.patch('%s.%s' % (settings.CACHES['default']['BACKEND'], 'add')) as cache_add: # Simulate cache.add() failing to add a value. In that case, the # default value should be returned. cache_add.return_value = False self.assertEqual(cache.get_or_set('key', 'default'), 'default') @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.db.DatabaseCache', # Spaces are used in the table name to ensure quoting/escaping is working LOCATION='test cache table' )) class DBCacheTests(BaseCacheTests, TransactionTestCase): available_apps = ['cache'] def setUp(self): # The super calls needs to happen first for the settings override. super().setUp() self.create_table() def tearDown(self): # The super call needs to happen first because it uses the database. super().tearDown() self.drop_table() def create_table(self): management.call_command('createcachetable', verbosity=0) def drop_table(self): with connection.cursor() as cursor: table_name = connection.ops.quote_name('test cache table') cursor.execute('DROP TABLE %s' % table_name) def test_get_many_num_queries(self): cache.set_many({'a': 1, 'b': 2}) cache.set('expired', 'expired', 0.01) with self.assertNumQueries(1): self.assertEqual(cache.get_many(['a', 'b']), {'a': 1, 'b': 2}) time.sleep(0.02) with self.assertNumQueries(2): self.assertEqual(cache.get_many(['a', 'b', 'expired']), {'a': 1, 'b': 2}) def test_delete_many_num_queries(self): cache.set_many({'a': 1, 'b': 2, 'c': 3}) with self.assertNumQueries(1): cache.delete_many(['a', 'b', 'c']) def test_zero_cull(self): self._perform_cull_test('zero_cull', 50, 18) def test_second_call_doesnt_crash(self): out = io.StringIO() management.call_command('createcachetable', stdout=out) self.assertEqual(out.getvalue(), "Cache table 'test cache table' already exists.\n" * len(settings.CACHES)) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.db.DatabaseCache', # Use another table name to avoid the 'table already exists' message. LOCATION='createcachetable_dry_run_mode' )) def test_createcachetable_dry_run_mode(self): out = io.StringIO() management.call_command('createcachetable', dry_run=True, stdout=out) output = out.getvalue() self.assertTrue(output.startswith("CREATE TABLE")) def test_createcachetable_with_table_argument(self): """ Delete and recreate cache table with legacy behavior (explicitly specifying the table name). 
""" self.drop_table() out = io.StringIO() management.call_command( 'createcachetable', 'test cache table', verbosity=2, stdout=out, ) self.assertEqual(out.getvalue(), "Cache table 'test cache table' created.\n") @override_settings(USE_TZ=True) class DBCacheWithTimeZoneTests(DBCacheTests): pass class DBCacheRouter: """A router that puts the cache table on the 'other' database.""" def db_for_read(self, model, **hints): if model._meta.app_label == 'django_cache': return 'other' return None def db_for_write(self, model, **hints): if model._meta.app_label == 'django_cache': return 'other' return None def allow_migrate(self, db, app_label, **hints): if app_label == 'django_cache': return db == 'other' return None @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'my_cache_table', }, }, ) class CreateCacheTableForDBCacheTests(TestCase): databases = {'default', 'other'} @override_settings(DATABASE_ROUTERS=[DBCacheRouter()]) def test_createcachetable_observes_database_router(self): # cache table should not be created on 'default' with self.assertNumQueries(0, using='default'): management.call_command('createcachetable', database='default', verbosity=0) # cache table should be created on 'other' # Queries: # 1: check table doesn't already exist # 2: create savepoint (if transactional DDL is supported) # 3: create the table # 4: create the index # 5: release savepoint (if transactional DDL is supported) num = 5 if connections['other'].features.can_rollback_ddl else 3 with self.assertNumQueries(num, using='other'): management.call_command('createcachetable', database='other', verbosity=0) class PicklingSideEffect: def __init__(self, cache): self.cache = cache self.locked = False def __getstate__(self): self.locked = self.cache._lock.locked() return {} limit_locmem_entries = override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.locmem.LocMemCache', OPTIONS={'MAX_ENTRIES': 9}, )) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.locmem.LocMemCache', )) class LocMemCacheTests(BaseCacheTests, TestCase): def setUp(self): super().setUp() # LocMem requires a hack to make the other caches # share a data store with the 'normal' cache. 
caches['prefix']._cache = cache._cache caches['prefix']._expire_info = cache._expire_info caches['v2']._cache = cache._cache caches['v2']._expire_info = cache._expire_info caches['custom_key']._cache = cache._cache caches['custom_key']._expire_info = cache._expire_info caches['custom_key2']._cache = cache._cache caches['custom_key2']._expire_info = cache._expire_info @override_settings(CACHES={ 'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}, 'other': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'other' }, }) def test_multiple_caches(self): "Multiple locmem caches are isolated" cache.set('value', 42) self.assertEqual(caches['default'].get('value'), 42) self.assertIsNone(caches['other'].get('value')) def test_locking_on_pickle(self): """#20613/#18541 -- Ensures pickling is done outside of the lock.""" bad_obj = PicklingSideEffect(cache) cache.set('set', bad_obj) self.assertFalse(bad_obj.locked, "Cache was locked during pickling") self.assertIs(cache.add('add', bad_obj), True) self.assertFalse(bad_obj.locked, "Cache was locked during pickling") def test_incr_decr_timeout(self): """incr/decr does not modify expiry time (matches memcached behavior)""" key = 'value' _key = cache.make_key(key) cache.set(key, 1, timeout=cache.default_timeout * 10) expire = cache._expire_info[_key] self.assertEqual(cache.incr(key), 2) self.assertEqual(expire, cache._expire_info[_key]) self.assertEqual(cache.decr(key), 1) self.assertEqual(expire, cache._expire_info[_key]) @limit_locmem_entries def test_lru_get(self): """get() moves cache keys.""" for key in range(9): cache.set(key, key, timeout=None) for key in range(6): self.assertEqual(cache.get(key), key) cache.set(9, 9, timeout=None) for key in range(6): self.assertEqual(cache.get(key), key) for key in range(6, 9): self.assertIsNone(cache.get(key)) self.assertEqual(cache.get(9), 9) @limit_locmem_entries def test_lru_set(self): """set() moves cache keys.""" for key in range(9): cache.set(key, key, timeout=None) for key in range(3, 9): cache.set(key, key, timeout=None) cache.set(9, 9, timeout=None) for key in range(3, 10): self.assertEqual(cache.get(key), key) for key in range(3): self.assertIsNone(cache.get(key)) @limit_locmem_entries def test_lru_incr(self): """incr() moves cache keys.""" for key in range(9): cache.set(key, key, timeout=None) for key in range(6): self.assertEqual(cache.incr(key), key + 1) cache.set(9, 9, timeout=None) for key in range(6): self.assertEqual(cache.get(key), key + 1) for key in range(6, 9): self.assertIsNone(cache.get(key)) self.assertEqual(cache.get(9), 9) # memcached backend isn't guaranteed to be available. # To check the memcached backend, the test settings file will # need to contain at least one cache backend setting that points at # your memcache server. configured_caches = {} for _cache_params in settings.CACHES.values(): configured_caches[_cache_params['BACKEND']] = _cache_params MemcachedCache_params = configured_caches.get('django.core.cache.backends.memcached.MemcachedCache') PyLibMCCache_params = configured_caches.get('django.core.cache.backends.memcached.PyLibMCCache') # The memcached backends don't support cull-related options like `MAX_ENTRIES`. memcached_excluded_caches = {'cull', 'zero_cull'} class BaseMemcachedTests(BaseCacheTests): # By default it's assumed that the client doesn't clean up connections # properly, in which case the backend must do so after each request. 
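    # Subclasses flip this to False when the client library manages its own
    # connections (see PyLibMCCacheTests below).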
should_disconnect_on_close = True def test_location_multiple_servers(self): locations = [ ['server1.tld', 'server2:11211'], 'server1.tld;server2:11211', 'server1.tld,server2:11211', ] for location in locations: with self.subTest(location=location): params = {'BACKEND': self.base_params['BACKEND'], 'LOCATION': location} with self.settings(CACHES={'default': params}): self.assertEqual(cache._servers, ['server1.tld', 'server2:11211']) def _perform_invalid_key_test(self, key, expected_warning): """ While other backends merely warn, memcached should raise for an invalid key. """ msg = expected_warning.replace(key, cache.make_key(key)) with self.assertRaisesMessage(InvalidCacheKey, msg): cache.set(key, 'value') def test_default_never_expiring_timeout(self): # Regression test for #22845 with self.settings(CACHES=caches_setting_for_tests( base=self.base_params, exclude=memcached_excluded_caches, TIMEOUT=None)): cache.set('infinite_foo', 'bar') self.assertEqual(cache.get('infinite_foo'), 'bar') def test_default_far_future_timeout(self): # Regression test for #22845 with self.settings(CACHES=caches_setting_for_tests( base=self.base_params, exclude=memcached_excluded_caches, # 60*60*24*365, 1 year TIMEOUT=31536000)): cache.set('future_foo', 'bar') self.assertEqual(cache.get('future_foo'), 'bar') def test_memcached_deletes_key_on_failed_set(self): # By default memcached allows objects up to 1MB. For the cache_db session # backend to always use the current session, memcached needs to delete # the old key if it fails to set. # pylibmc doesn't seem to have SERVER_MAX_VALUE_LENGTH as far as I can # tell from a quick check of its source code. This is falling back to # the default value exposed by python-memcached on my system. max_value_length = getattr(cache._lib, 'SERVER_MAX_VALUE_LENGTH', 1048576) cache.set('small_value', 'a') self.assertEqual(cache.get('small_value'), 'a') large_value = 'a' * (max_value_length + 1) try: cache.set('small_value', large_value) except Exception: # Some clients (e.g. pylibmc) raise when the value is too large, # while others (e.g. python-memcached) intentionally return True # indicating success. This test is primarily checking that the key # was deleted, so the return/exception behavior for the set() # itself is not important. pass # small_value should be deleted, or set if configured to accept larger values value = cache.get('small_value') self.assertTrue(value is None or value == large_value) def test_close(self): # For clients that don't manage their connections properly, the # connection is closed when the request is complete. 
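        # close_old_connections is detached first so that sending
        # request_finished below exercises only the cache's close handling,
        # not the database's; it is reconnected in the finally block.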
signals.request_finished.disconnect(close_old_connections) try: with mock.patch.object(cache._lib.Client, 'disconnect_all', autospec=True) as mock_disconnect: signals.request_finished.send(self.__class__) self.assertIs(mock_disconnect.called, self.should_disconnect_on_close) finally: signals.request_finished.connect(close_old_connections) def test_set_many_returns_failing_keys(self): def fail_set_multi(mapping, *args, **kwargs): return mapping.keys() with mock.patch('%s.Client.set_multi' % self.client_library_name, side_effect=fail_set_multi): failing_keys = cache.set_many({'key': 'value'}) self.assertEqual(failing_keys, ['key']) @unittest.skipUnless(MemcachedCache_params, "MemcachedCache backend not configured") @override_settings(CACHES=caches_setting_for_tests( base=MemcachedCache_params, exclude=memcached_excluded_caches, )) class MemcachedCacheTests(BaseMemcachedTests, TestCase): base_params = MemcachedCache_params client_library_name = 'memcache' def test_memcached_uses_highest_pickle_version(self): # Regression test for #19810 for cache_key in settings.CACHES: with self.subTest(cache_key=cache_key): self.assertEqual(caches[cache_key]._cache.pickleProtocol, pickle.HIGHEST_PROTOCOL) @override_settings(CACHES=caches_setting_for_tests( base=MemcachedCache_params, exclude=memcached_excluded_caches, OPTIONS={'server_max_value_length': 9999}, )) def test_memcached_options(self): self.assertEqual(cache._cache.server_max_value_length, 9999) def test_default_used_when_none_is_set(self): """ python-memcached doesn't support default in get() so this test overrides the one in BaseCacheTests. """ cache.set('key_default_none', None) self.assertEqual(cache.get('key_default_none', default='default'), 'default') @unittest.skipUnless(PyLibMCCache_params, "PyLibMCCache backend not configured") @override_settings(CACHES=caches_setting_for_tests( base=PyLibMCCache_params, exclude=memcached_excluded_caches, )) class PyLibMCCacheTests(BaseMemcachedTests, TestCase): base_params = PyLibMCCache_params client_library_name = 'pylibmc' # libmemcached manages its own connections. should_disconnect_on_close = False @override_settings(CACHES=caches_setting_for_tests( base=PyLibMCCache_params, exclude=memcached_excluded_caches, OPTIONS={ 'binary': True, 'behaviors': {'tcp_nodelay': True}, }, )) def test_pylibmc_options(self): self.assertTrue(cache._cache.binary) self.assertEqual(cache._cache.behaviors['tcp_nodelay'], int(True)) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.filebased.FileBasedCache', )) class FileBasedCacheTests(BaseCacheTests, TestCase): """ Specific test cases for the file-based cache. """ def setUp(self): super().setUp() self.dirname = self.mkdtemp() # Caches location cannot be modified through override_settings / modify_settings, # hence settings are manipulated directly here and the setting_changed signal # is triggered manually. 
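        # Rewriting LOCATION in place and re-sending setting_changed lets the
        # already-initialized cache objects pick up the temp directory.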
for cache_params in settings.CACHES.values(): cache_params['LOCATION'] = self.dirname setting_changed.send(self.__class__, setting='CACHES', enter=False) def tearDown(self): super().tearDown() # Call parent first, as cache.clear() may recreate cache base directory shutil.rmtree(self.dirname) def mkdtemp(self): return tempfile.mkdtemp() def test_ignores_non_cache_files(self): fname = os.path.join(self.dirname, 'not-a-cache-file') with open(fname, 'w'): os.utime(fname, None) cache.clear() self.assertTrue(os.path.exists(fname), 'Expected cache.clear to ignore non cache files') os.remove(fname) def test_clear_does_not_remove_cache_dir(self): cache.clear() self.assertTrue(os.path.exists(self.dirname), 'Expected cache.clear to keep the cache dir') def test_creates_cache_dir_if_nonexistent(self): os.rmdir(self.dirname) cache.set('foo', 'bar') self.assertTrue(os.path.exists(self.dirname)) def test_get_ignores_enoent(self): cache.set('foo', 'bar') os.unlink(cache._key_to_file('foo')) # Returns the default instead of erroring. self.assertEqual(cache.get('foo', 'baz'), 'baz') def test_get_does_not_ignore_non_filenotfound_exceptions(self): with mock.patch('builtins.open', side_effect=OSError): with self.assertRaises(OSError): cache.get('foo') def test_empty_cache_file_considered_expired(self): cache_file = cache._key_to_file('foo') with open(cache_file, 'wb') as fh: fh.write(b'') with open(cache_file, 'rb') as fh: self.assertIs(cache._is_expired(fh), True) class FileBasedCachePathLibTests(FileBasedCacheTests): def mkdtemp(self): tmp_dir = super().mkdtemp() return Path(tmp_dir) @override_settings(CACHES={ 'default': { 'BACKEND': 'cache.liberal_backend.CacheClass', }, }) class CustomCacheKeyValidationTests(SimpleTestCase): """ Tests for the ability to mixin a custom ``validate_key`` method to a custom cache backend that otherwise inherits from a builtin backend, and override the default key validation. Refs #6447. """ def test_custom_key_validation(self): # this key is both longer than 250 characters, and has spaces key = 'some key with spaces' * 15 val = 'a value' cache.set(key, val) self.assertEqual(cache.get(key), val) @override_settings( CACHES={ 'default': { 'BACKEND': 'cache.closeable_cache.CacheClass', } } ) class CacheClosingTests(SimpleTestCase): def test_close(self): self.assertFalse(cache.closed) signals.request_finished.send(self.__class__) self.assertTrue(cache.closed) DEFAULT_MEMORY_CACHES_SETTINGS = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'unique-snowflake', } } NEVER_EXPIRING_CACHES_SETTINGS = copy.deepcopy(DEFAULT_MEMORY_CACHES_SETTINGS) NEVER_EXPIRING_CACHES_SETTINGS['default']['TIMEOUT'] = None class DefaultNonExpiringCacheKeyTests(SimpleTestCase): """ Settings having Cache arguments with a TIMEOUT=None create Caches that will set non-expiring keys. """ def setUp(self): # The 5 minute (300 seconds) default expiration time for keys is # defined in the implementation of the initializer method of the # BaseCache type. self.DEFAULT_TIMEOUT = caches[DEFAULT_CACHE_ALIAS].default_timeout def tearDown(self): del(self.DEFAULT_TIMEOUT) def test_default_expiration_time_for_keys_is_5_minutes(self): """The default expiration time of a cache key is 5 minutes. This value is defined in django.core.cache.backends.base.BaseCache.__init__(). 
""" self.assertEqual(300, self.DEFAULT_TIMEOUT) def test_caches_with_unset_timeout_has_correct_default_timeout(self): """Caches that have the TIMEOUT parameter undefined in the default settings will use the default 5 minute timeout. """ cache = caches[DEFAULT_CACHE_ALIAS] self.assertEqual(self.DEFAULT_TIMEOUT, cache.default_timeout) @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS) def test_caches_set_with_timeout_as_none_has_correct_default_timeout(self): """Memory caches that have the TIMEOUT parameter set to `None` in the default settings with have `None` as the default timeout. This means "no timeout". """ cache = caches[DEFAULT_CACHE_ALIAS] self.assertIsNone(cache.default_timeout) self.assertIsNone(cache.get_backend_timeout()) @override_settings(CACHES=DEFAULT_MEMORY_CACHES_SETTINGS) def test_caches_with_unset_timeout_set_expiring_key(self): """Memory caches that have the TIMEOUT parameter unset will set cache keys having the default 5 minute timeout. """ key = "my-key" value = "my-value" cache = caches[DEFAULT_CACHE_ALIAS] cache.set(key, value) cache_key = cache.make_key(key) self.assertIsNotNone(cache._expire_info[cache_key]) @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS) def test_caches_set_with_timeout_as_none_set_non_expiring_key(self): """Memory caches that have the TIMEOUT parameter set to `None` will set a non expiring key by default. """ key = "another-key" value = "another-value" cache = caches[DEFAULT_CACHE_ALIAS] cache.set(key, value) cache_key = cache.make_key(key) self.assertIsNone(cache._expire_info[cache_key]) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHE_MIDDLEWARE_SECONDS=1, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, USE_I18N=False, ALLOWED_HOSTS=['.example.com'], ) class CacheUtils(SimpleTestCase): """TestCase for django.utils.cache functions.""" host = 'www.example.com' path = '/cache/test/' factory = RequestFactory(HTTP_HOST=host) def tearDown(self): cache.clear() def _get_request_cache(self, method='GET', query_string=None, update_cache=None): request = self._get_request(self.host, self.path, method, query_string=query_string) request._cache_update_cache = True if not update_cache else update_cache return request def test_patch_vary_headers(self): headers = ( # Initial vary, new headers, resulting vary. (None, ('Accept-Encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'), ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ('*', ('Accept-Language', 'Cookie'), '*'), ('Accept-Language, Cookie', ('*',), '*'), ) for initial_vary, newheaders, resulting_vary in headers: with self.subTest(initial_vary=initial_vary, newheaders=newheaders): response = HttpResponse() if initial_vary is not None: response['Vary'] = initial_vary patch_vary_headers(response, newheaders) self.assertEqual(response['Vary'], resulting_vary) def test_get_cache_key(self): request = self.factory.get(self.path) response = HttpResponse() # Expect None if no headers have been set yet. 
self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) # A specified key_prefix is taken into account. key_prefix = 'localprefix' learn_cache_key(request, response, key_prefix=key_prefix) self.assertEqual( get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.' '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) def test_get_cache_key_with_query(self): request = self.factory.get(self.path, {'test': 1}) response = HttpResponse() # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) # The querystring is taken into account. self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' 'beaf87a9a99ee81c673ea2d67ccbec2a.d41d8cd98f00b204e9800998ecf8427e' ) def test_cache_key_varies_by_url(self): """ get_cache_key keys differ by fully-qualified URL instead of path """ request1 = self.factory.get(self.path, HTTP_HOST='sub-1.example.com') learn_cache_key(request1, HttpResponse()) request2 = self.factory.get(self.path, HTTP_HOST='sub-2.example.com') learn_cache_key(request2, HttpResponse()) self.assertNotEqual(get_cache_key(request1), get_cache_key(request2)) def test_learn_cache_key(self): request = self.factory.head(self.path) response = HttpResponse() response['Vary'] = 'Pony' # Make sure that the Vary header is added to the key hash learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) def test_patch_cache_control(self): tests = ( # Initial Cache-Control, kwargs to patch_cache_control, expected Cache-Control parts (None, {'private': True}, {'private'}), ('', {'private': True}, {'private'}), # no-cache. 
('', {'no_cache': 'Set-Cookie'}, {'no-cache=Set-Cookie'}), ('', {'no-cache': 'Set-Cookie'}, {'no-cache=Set-Cookie'}), ('no-cache=Set-Cookie', {'no_cache': True}, {'no-cache'}), ('no-cache=Set-Cookie,no-cache=Link', {'no_cache': True}, {'no-cache'}), ('no-cache=Set-Cookie', {'no_cache': 'Link'}, {'no-cache=Set-Cookie', 'no-cache=Link'}), ( 'no-cache=Set-Cookie,no-cache=Link', {'no_cache': 'Custom'}, {'no-cache=Set-Cookie', 'no-cache=Link', 'no-cache=Custom'}, ), # Test whether private/public attributes are mutually exclusive ('private', {'private': True}, {'private'}), ('private', {'public': True}, {'public'}), ('public', {'public': True}, {'public'}), ('public', {'private': True}, {'private'}), ('must-revalidate,max-age=60,private', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}), ('must-revalidate,max-age=60,public', {'private': True}, {'must-revalidate', 'max-age=60', 'private'}), ('must-revalidate,max-age=60', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}), ) cc_delim_re = re.compile(r'\s*,\s*') for initial_cc, newheaders, expected_cc in tests: with self.subTest(initial_cc=initial_cc, newheaders=newheaders): response = HttpResponse() if initial_cc is not None: response['Cache-Control'] = initial_cc patch_cache_control(response, **newheaders) parts = set(cc_delim_re.split(response['Cache-Control'])) self.assertEqual(parts, expected_cc) @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'KEY_PREFIX': 'cacheprefix', }, }, ) class PrefixedCacheUtils(CacheUtils): pass @override_settings( CACHE_MIDDLEWARE_SECONDS=60, CACHE_MIDDLEWARE_KEY_PREFIX='test', CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, ) class CacheHEADTest(SimpleTestCase): path = '/cache/test/' factory = RequestFactory() def tearDown(self): cache.clear() def _set_cache(self, request, msg): return UpdateCacheMiddleware(lambda req: HttpResponse(msg))(request) def test_head_caches_correctly(self): test_content = 'test content' request = self.factory.head(self.path) request._cache_update_cache = True self._set_cache(request, test_content) request = self.factory.head(self.path) request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(test_content.encode(), get_cache_data.content) def test_head_with_cached_get(self): test_content = 'test content' request = self.factory.get(self.path) request._cache_update_cache = True self._set_cache(request, test_content) request = self.factory.head(self.path) get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(test_content.encode(), get_cache_data.content) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, LANGUAGES=[ ('en', 'English'), ('es', 'Spanish'), ], ) class CacheI18nTest(SimpleTestCase): path = '/cache/test/' factory = RequestFactory() def tearDown(self): cache.clear() @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False) def test_cache_key_i18n_translation(self): request = self.factory.get(self.path) lang = translation.get_language() response = HttpResponse() key = learn_cache_key(request, response) self.assertIn(lang, key, "Cache keys should include the language name when translation is active") key2 = get_cache_key(request) self.assertEqual(key, key2) def 
check_accept_language_vary(self, accept_language, vary, reference_key): request = self.factory.get(self.path) request.META['HTTP_ACCEPT_LANGUAGE'] = accept_language request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0' response = HttpResponse() response['Vary'] = vary key = learn_cache_key(request, response) key2 = get_cache_key(request) self.assertEqual(key, reference_key) self.assertEqual(key2, reference_key) @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False) def test_cache_key_i18n_translation_accept_language(self): lang = translation.get_language() self.assertEqual(lang, 'en') request = self.factory.get(self.path) request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0' response = HttpResponse() response['Vary'] = 'accept-encoding' key = learn_cache_key(request, response) self.assertIn(lang, key, "Cache keys should include the language name when translation is active") self.check_accept_language_vary( 'en-us', 'cookie, accept-language, accept-encoding', key ) self.check_accept_language_vary( 'en-US', 'cookie, accept-encoding, accept-language', key ) self.check_accept_language_vary( 'en-US,en;q=0.8', 'accept-encoding, accept-language, cookie', key ) self.check_accept_language_vary( 'en-US,en;q=0.8,ko;q=0.6', 'accept-language, cookie, accept-encoding', key ) self.check_accept_language_vary( 'ko-kr,ko;q=0.8,en-us;q=0.5,en;q=0.3 ', 'accept-encoding, cookie, accept-language', key ) self.check_accept_language_vary( 'ko-KR,ko;q=0.8,en-US;q=0.6,en;q=0.4', 'accept-language, accept-encoding, cookie', key ) self.check_accept_language_vary( 'ko;q=1.0,en;q=0.5', 'cookie, accept-language, accept-encoding', key ) self.check_accept_language_vary( 'ko, en', 'cookie, accept-encoding, accept-language', key ) self.check_accept_language_vary( 'ko-KR, en-US', 'accept-encoding, accept-language, cookie', key ) @override_settings(USE_I18N=False, USE_L10N=True, USE_TZ=False) def test_cache_key_i18n_formatting(self): request = self.factory.get(self.path) lang = translation.get_language() response = HttpResponse() key = learn_cache_key(request, response) self.assertIn(lang, key, "Cache keys should include the language name when formatting is active") key2 = get_cache_key(request) self.assertEqual(key, key2) @override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True) def test_cache_key_i18n_timezone(self): request = self.factory.get(self.path) tz = timezone.get_current_timezone_name() response = HttpResponse() key = learn_cache_key(request, response) self.assertIn(tz, key, "Cache keys should include the time zone name when time zones are active") key2 = get_cache_key(request) self.assertEqual(key, key2) @override_settings(USE_I18N=False, USE_L10N=False) def test_cache_key_no_i18n(self): request = self.factory.get(self.path) lang = translation.get_language() tz = timezone.get_current_timezone_name() response = HttpResponse() key = learn_cache_key(request, response) self.assertNotIn(lang, key, "Cache keys shouldn't include the language name when i18n isn't active") self.assertNotIn(tz, key, "Cache keys shouldn't include the time zone name when i18n isn't active") @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX="test", CACHE_MIDDLEWARE_SECONDS=60, USE_I18N=True, ) def test_middleware(self): def set_cache(request, lang, msg): def get_response(req): return HttpResponse(msg) translation.activate(lang) return UpdateCacheMiddleware(get_response)(request) # cache with non empty request.GET request = self.factory.get(self.path, {'foo': 'bar', 'other': 'true'}) 
request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) # first access, cache must return None self.assertIsNone(get_cache_data) content = 'Check for cache with QUERY_STRING' def get_response(req): return HttpResponse(content) UpdateCacheMiddleware(get_response)(request) get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) # cache must return content self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) # different QUERY_STRING, cache must be empty request = self.factory.get(self.path, {'foo': 'bar', 'somethingelse': 'true'}) request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNone(get_cache_data) # i18n tests en_message = "Hello world!" es_message = "Hola mundo!" request = self.factory.get(self.path) request._cache_update_cache = True set_cache(request, 'en', en_message) get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) # The cache can be recovered self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, en_message.encode()) # change the session language and set content request = self.factory.get(self.path) request._cache_update_cache = True set_cache(request, 'es', es_message) # change again the language translation.activate('en') # retrieve the content from cache get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertEqual(get_cache_data.content, en_message.encode()) # change again the language translation.activate('es') get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertEqual(get_cache_data.content, es_message.encode()) # reset the language translation.deactivate() @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX="test", CACHE_MIDDLEWARE_SECONDS=60, ) def test_middleware_doesnt_cache_streaming_response(self): request = self.factory.get(self.path) get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNone(get_cache_data) def get_stream_response(req): return StreamingHttpResponse(['Check for cache with streaming content.']) UpdateCacheMiddleware(get_stream_response)(request) get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNone(get_cache_data) @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'KEY_PREFIX': 'cacheprefix' }, }, ) class PrefixedCacheI18nTest(CacheI18nTest): pass def hello_world_view(request, value): return HttpResponse('Hello World %s' % value) def csrf_view(request): return HttpResponse(csrf(request)['csrf_token']) @override_settings( CACHE_MIDDLEWARE_ALIAS='other', CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix', CACHE_MIDDLEWARE_SECONDS=30, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, 'other': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'other', 'TIMEOUT': '1', }, }, ) class CacheMiddlewareTest(SimpleTestCase): factory = RequestFactory() def setUp(self): self.default_cache = caches['default'] self.other_cache = caches['other'] def tearDown(self): self.default_cache.clear() self.other_cache.clear() super().tearDown() def test_constructor(self): """ Ensure the constructor is correctly distinguishing between usage of CacheMiddleware as Middleware vs. 
usage of CacheMiddleware as view decorator and setting attributes appropriately. """ # If only one argument is passed in construction, it's being used as # middleware. middleware = CacheMiddleware(empty_response) # Now test object attributes against values defined in setUp above self.assertEqual(middleware.cache_timeout, 30) self.assertEqual(middleware.key_prefix, 'middlewareprefix') self.assertEqual(middleware.cache_alias, 'other') # If more arguments are being passed in construction, it's being used # as a decorator. First, test with "defaults": as_view_decorator = CacheMiddleware(empty_response, cache_alias=None, key_prefix=None) self.assertEqual(as_view_decorator.cache_timeout, 30) # Timeout value for 'default' cache, i.e. 30 self.assertEqual(as_view_decorator.key_prefix, '') # Value of DEFAULT_CACHE_ALIAS from django.core.cache self.assertEqual(as_view_decorator.cache_alias, 'default') # Next, test with custom values: as_view_decorator_with_custom = CacheMiddleware( hello_world_view, cache_timeout=60, cache_alias='other', key_prefix='foo' ) self.assertEqual(as_view_decorator_with_custom.cache_timeout, 60) self.assertEqual(as_view_decorator_with_custom.key_prefix, 'foo') self.assertEqual(as_view_decorator_with_custom.cache_alias, 'other') def test_middleware(self): middleware = CacheMiddleware(hello_world_view) prefix_middleware = CacheMiddleware(hello_world_view, key_prefix='prefix1') timeout_middleware = CacheMiddleware(hello_world_view, cache_timeout=1) request = self.factory.get('/view/') # Put the request through the request middleware result = middleware.process_request(request) self.assertIsNone(result) response = hello_world_view(request, '1') # Now put the response through the response middleware response = middleware.process_response(request, response) # Repeating the request should result in a cache hit result = middleware.process_request(request) self.assertIsNotNone(result) self.assertEqual(result.content, b'Hello World 1') # The same request through a different middleware won't hit result = prefix_middleware.process_request(request) self.assertIsNone(result) # The same request with a timeout _will_ hit result = timeout_middleware.process_request(request) self.assertIsNotNone(result) self.assertEqual(result.content, b'Hello World 1') def test_view_decorator(self): # decorate the same view with different cache decorators default_view = cache_page(3)(hello_world_view) default_with_prefix_view = cache_page(3, key_prefix='prefix1')(hello_world_view) explicit_default_view = cache_page(3, cache='default')(hello_world_view) explicit_default_with_prefix_view = cache_page(3, cache='default', key_prefix='prefix1')(hello_world_view) other_view = cache_page(1, cache='other')(hello_world_view) other_with_prefix_view = cache_page(1, cache='other', key_prefix='prefix2')(hello_world_view) request = self.factory.get('/view/') # Request the view once response = default_view(request, '1') self.assertEqual(response.content, b'Hello World 1') # Request again -- hit the cache response = default_view(request, '2') self.assertEqual(response.content, b'Hello World 1') # Requesting the same view with the explicit cache should yield the same result response = explicit_default_view(request, '3') self.assertEqual(response.content, b'Hello World 1') # Requesting with a prefix will hit a different cache key response = explicit_default_with_prefix_view(request, '4') self.assertEqual(response.content, b'Hello World 4') # Hitting the same view again gives a cache hit response = 
explicit_default_with_prefix_view(request, '5') self.assertEqual(response.content, b'Hello World 4') # And going back to the implicit cache will hit the same cache response = default_with_prefix_view(request, '6') self.assertEqual(response.content, b'Hello World 4') # Requesting from an alternate cache won't hit cache response = other_view(request, '7') self.assertEqual(response.content, b'Hello World 7') # But a repeated hit will hit cache response = other_view(request, '8') self.assertEqual(response.content, b'Hello World 7') # And prefixing the alternate cache yields yet another cache entry response = other_with_prefix_view(request, '9') self.assertEqual(response.content, b'Hello World 9') # But if we wait a couple of seconds... time.sleep(2) # ... the default cache will still hit caches['default'] response = default_view(request, '11') self.assertEqual(response.content, b'Hello World 1') # ... the default cache with a prefix will still hit response = default_with_prefix_view(request, '12') self.assertEqual(response.content, b'Hello World 4') # ... the explicit default cache will still hit response = explicit_default_view(request, '13') self.assertEqual(response.content, b'Hello World 1') # ... the explicit default cache with a prefix will still hit response = explicit_default_with_prefix_view(request, '14') self.assertEqual(response.content, b'Hello World 4') # .. but a rapidly expiring cache won't hit response = other_view(request, '15') self.assertEqual(response.content, b'Hello World 15') # .. even if it has a prefix response = other_with_prefix_view(request, '16') self.assertEqual(response.content, b'Hello World 16') def test_cache_page_timeout(self): # Page timeout takes precedence over the "max-age" section of the # "Cache-Control". tests = [ (1, 3), # max_age < page_timeout. (3, 1), # max_age > page_timeout. ] for max_age, page_timeout in tests: with self.subTest(max_age=max_age, page_timeout=page_timeout): view = cache_page(timeout=page_timeout)( cache_control(max_age=max_age)(hello_world_view) ) request = self.factory.get('/view/') response = view(request, '1') self.assertEqual(response.content, b'Hello World 1') time.sleep(1) response = view(request, '2') self.assertEqual( response.content, b'Hello World 1' if page_timeout > max_age else b'Hello World 2', ) cache.clear() def test_cached_control_private_not_cached(self): """Responses with 'Cache-Control: private' are not cached.""" view_with_private_cache = cache_page(3)(cache_control(private=True)(hello_world_view)) request = self.factory.get('/view/') response = view_with_private_cache(request, '1') self.assertEqual(response.content, b'Hello World 1') response = view_with_private_cache(request, '2') self.assertEqual(response.content, b'Hello World 2') def test_sensitive_cookie_not_cached(self): """ Django must prevent caching of responses that set a user-specific (and maybe security sensitive) cookie in response to a cookie-less request. """ request = self.factory.get('/view/') csrf_middleware = CsrfViewMiddleware(csrf_view) csrf_middleware.process_view(request, csrf_view, (), {}) cache_middleware = CacheMiddleware(csrf_middleware) self.assertIsNone(cache_middleware.process_request(request)) cache_middleware(request) # Inserting a CSRF cookie in a cookie-less request prevented caching. 
self.assertIsNone(cache_middleware.process_request(request)) def test_304_response_has_http_caching_headers_but_not_cached(self): original_view = mock.Mock(return_value=HttpResponseNotModified()) view = cache_page(2)(original_view) request = self.factory.get('/view/') # The view shouldn't be cached on the second call. view(request).close() response = view(request) response.close() self.assertEqual(original_view.call_count, 2) self.assertIsInstance(response, HttpResponseNotModified) self.assertIn('Cache-Control', response) self.assertIn('Expires', response) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHE_MIDDLEWARE_SECONDS=1, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, USE_I18N=False, ) class TestWithTemplateResponse(SimpleTestCase): """ Tests various headers w/ TemplateResponse. Most are probably redundant since they manipulate the same object anyway but the ETag header is 'special' because it relies on the content being complete (which is not necessarily always the case with a TemplateResponse) """ path = '/cache/test/' factory = RequestFactory() def tearDown(self): cache.clear() def test_patch_vary_headers(self): headers = ( # Initial vary, new headers, resulting vary. (None, ('Accept-Encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'), ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ) for initial_vary, newheaders, resulting_vary in headers: with self.subTest(initial_vary=initial_vary, newheaders=newheaders): template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) if initial_vary is not None: response['Vary'] = initial_vary patch_vary_headers(response, newheaders) self.assertEqual(response['Vary'], resulting_vary) def test_get_cache_key(self): request = self.factory.get(self.path) template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) key_prefix = 'localprefix' # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e' ) # A specified key_prefix is taken into account. learn_cache_key(request, response, key_prefix=key_prefix) self.assertEqual( get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.' '58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e' ) def test_get_cache_key_with_query(self): request = self.factory.get(self.path, {'test': 1}) template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) # The querystring is taken into account. 
self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '0f1c2d56633c943073c4569d9a9502fe.d41d8cd98f00b204e9800998ecf8427e' ) class TestMakeTemplateFragmentKey(SimpleTestCase): def test_without_vary_on(self): key = make_template_fragment_key('a.fragment') self.assertEqual(key, 'template.cache.a.fragment.d41d8cd98f00b204e9800998ecf8427e') def test_with_one_vary_on(self): key = make_template_fragment_key('foo', ['abc']) self.assertEqual(key, 'template.cache.foo.493e283d571a73056196f1a68efd0f66') def test_with_many_vary_on(self): key = make_template_fragment_key('bar', ['abc', 'def']) self.assertEqual(key, 'template.cache.bar.17c1a507a0cb58384f4c639067a93520') def test_proper_escaping(self): key = make_template_fragment_key('spam', ['abc:def%']) self.assertEqual(key, 'template.cache.spam.06c8ae8e8c430b69fb0a6443504153dc') def test_with_ints_vary_on(self): key = make_template_fragment_key('foo', [1, 2, 3, 4, 5]) self.assertEqual(key, 'template.cache.foo.7ae8fd2e0d25d651c683bdeebdb29461') def test_with_unicode_vary_on(self): key = make_template_fragment_key('foo', ['42º', '😀']) self.assertEqual(key, 'template.cache.foo.7ced1c94e543668590ba39b3c08b0237') def test_long_vary_on(self): key = make_template_fragment_key('foo', ['x' * 10000]) self.assertEqual(key, 'template.cache.foo.3670b349b5124aa56bdb50678b02b23a') class CacheHandlerTest(SimpleTestCase): def test_same_instance(self): """ Attempting to retrieve the same alias should yield the same instance. """ cache1 = caches['default'] cache2 = caches['default'] self.assertIs(cache1, cache2) def test_per_thread(self): """ Requesting the same alias from separate threads should yield separate instances. """ c = [] def runner(): c.append(caches['default']) for x in range(2): t = threading.Thread(target=runner) t.start() t.join() self.assertIsNot(c[0], c[1])
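# Illustrative sketch (not Django's actual CacheHandler implementation): the
# per-thread behavior asserted in CacheHandlerTest.test_per_thread above falls
# out naturally once backend instances are stored in a threading.local, as
# below. All names in this snippet are hypothetical.
import threading


class PerThreadCacheHandler:
    def __init__(self):
        self._local = threading.local()

    def __getitem__(self, alias):
        # Each thread lazily gets its own dict, and therefore its own
        # backend instance for every alias it asks for.
        if not hasattr(self._local, 'caches'):
            self._local.caches = {}
        if alias not in self._local.caches:
            self._local.caches[alias] = object()  # stand-in for a real backend
        return self._local.caches[alias]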
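# Usage sketch for TestMakeTemplateFragmentKey above: make_template_fragment_key()
# computes the same key the {% cache %} template tag uses, which allows manual
# invalidation of a cached fragment. The fragment name 'sidebar' and the
# vary-on value 'alice' are hypothetical.
from django.core.cache import cache
from django.core.cache.utils import make_template_fragment_key

key = make_template_fragment_key('sidebar', ['alice'])
cache.delete(key)  # drops only the fragment cached for that vary-on value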
import json import sys from django.test import SimpleTestCase, ignore_warnings from django.utils import text from django.utils.deprecation import RemovedInDjango40Warning from django.utils.functional import lazystr from django.utils.text import format_lazy from django.utils.translation import gettext_lazy, override IS_WIDE_BUILD = (len('\U0001F4A9') == 1) class TestUtilsText(SimpleTestCase): def test_get_text_list(self): self.assertEqual(text.get_text_list(['a', 'b', 'c', 'd']), 'a, b, c or d') self.assertEqual(text.get_text_list(['a', 'b', 'c'], 'and'), 'a, b and c') self.assertEqual(text.get_text_list(['a', 'b'], 'and'), 'a and b') self.assertEqual(text.get_text_list(['a']), 'a') self.assertEqual(text.get_text_list([]), '') with override('ar'): self.assertEqual(text.get_text_list(['a', 'b', 'c']), "a، b أو c") def test_smart_split(self): testdata = [ ('This is "a person" test.', ['This', 'is', '"a person"', 'test.']), ('This is "a person\'s" test.', ['This', 'is', '"a person\'s"', 'test.']), ('This is "a person\\"s" test.', ['This', 'is', '"a person\\"s"', 'test.']), ('"a \'one', ['"a', "'one"]), ('all friends\' tests', ['all', 'friends\'', 'tests']), ('url search_page words="something else"', ['url', 'search_page', 'words="something else"']), ("url search_page words='something else'", ['url', 'search_page', "words='something else'"]), ('url search_page words "something else"', ['url', 'search_page', 'words', '"something else"']), ('url search_page words-"something else"', ['url', 'search_page', 'words-"something else"']), ('url search_page words=hello', ['url', 'search_page', 'words=hello']), ('url search_page words="something else', ['url', 'search_page', 'words="something', 'else']), ("cut:','|cut:' '", ["cut:','|cut:' '"]), (lazystr("a b c d"), # Test for #20231 ['a', 'b', 'c', 'd']), ] for test, expected in testdata: with self.subTest(value=test): self.assertEqual(list(text.smart_split(test)), expected) def test_truncate_chars(self): truncator = text.Truncator('The quick brown fox jumped over the lazy dog.') self.assertEqual('The quick brown fox jumped over the lazy dog.', truncator.chars(100)), self.assertEqual('The quick brown fox …', truncator.chars(21)), self.assertEqual('The quick brown fo.....', truncator.chars(23, '.....')), self.assertEqual('.....', truncator.chars(4, '.....')), nfc = text.Truncator('o\xfco\xfco\xfco\xfc') nfd = text.Truncator('ou\u0308ou\u0308ou\u0308ou\u0308') self.assertEqual('oüoüoüoü', nfc.chars(8)) self.assertEqual('oüoüoüoü', nfd.chars(8)) self.assertEqual('oü…', nfc.chars(3)) self.assertEqual('oü…', nfd.chars(3)) # Ensure the final length is calculated correctly when there are # combining characters with no precomposed form, and that combining # characters are not split up. truncator = text.Truncator('-B\u030AB\u030A----8') self.assertEqual('-B\u030A…', truncator.chars(3)) self.assertEqual('-B\u030AB\u030A-…', truncator.chars(5)) self.assertEqual('-B\u030AB\u030A----8', truncator.chars(8)) # Ensure the length of the end text is correctly calculated when it # contains combining characters with no precomposed form. 
truncator = text.Truncator('-----') self.assertEqual('---B\u030A', truncator.chars(4, 'B\u030A')) self.assertEqual('-----', truncator.chars(5, 'B\u030A')) # Make a best effort to shorten to the desired length, but requesting # a length shorter than the ellipsis shouldn't break self.assertEqual('…', text.Truncator('asdf').chars(0)) # lazy strings are handled correctly self.assertEqual(text.Truncator(lazystr('The quick brown fox')).chars(10), 'The quick…') def test_truncate_chars_html(self): perf_test_values = [ (('</a' + '\t' * 50000) + '//>', None), ('&' * 50000, '&' * 9 + '…'), ('_X<<<<<<<<<<<>', None), ] for value, expected in perf_test_values: with self.subTest(value=value): truncator = text.Truncator(value) self.assertEqual(expected if expected else value, truncator.chars(10, html=True)) def test_truncate_words(self): truncator = text.Truncator('The quick brown fox jumped over the lazy dog.') self.assertEqual('The quick brown fox jumped over the lazy dog.', truncator.words(10)) self.assertEqual('The quick brown fox…', truncator.words(4)) self.assertEqual('The quick brown fox[snip]', truncator.words(4, '[snip]')) # lazy strings are handled correctly truncator = text.Truncator(lazystr('The quick brown fox jumped over the lazy dog.')) self.assertEqual('The quick brown fox…', truncator.words(4)) def test_truncate_html_words(self): truncator = text.Truncator( '<p id="par"><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>' ) self.assertEqual( '<p id="par"><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>', truncator.words(10, html=True) ) self.assertEqual( '<p id="par"><strong><em>The quick brown fox…</em></strong></p>', truncator.words(4, html=True) ) self.assertEqual( '<p id="par"><strong><em>The quick brown fox....</em></strong></p>', truncator.words(4, '....', html=True) ) self.assertEqual( '<p id="par"><strong><em>The quick brown fox</em></strong></p>', truncator.words(4, '', html=True) ) # Test with new line inside tag truncator = text.Truncator( '<p>The quick <a href="xyz.html"\n id="mylink">brown fox</a> jumped over the lazy dog.</p>' ) self.assertEqual( '<p>The quick <a href="xyz.html"\n id="mylink">brown…</a></p>', truncator.words(3, html=True) ) # Test self-closing tags truncator = text.Truncator('<br/>The <hr />quick brown fox jumped over the lazy dog.') self.assertEqual('<br/>The <hr />quick brown…', truncator.words(3, html=True)) truncator = text.Truncator('<br>The <hr/>quick <em>brown fox</em> jumped over the lazy dog.') self.assertEqual('<br>The <hr/>quick <em>brown…</em>', truncator.words(3, html=True)) # Test html entities truncator = text.Truncator('<i>Buenos d&iacute;as! &#x00bf;C&oacute;mo est&aacute;?</i>') self.assertEqual('<i>Buenos d&iacute;as! &#x00bf;C&oacute;mo…</i>', truncator.words(3, html=True)) truncator = text.Truncator('<p>I &lt;3 python, what about you?</p>') self.assertEqual('<p>I &lt;3 python,…</p>', truncator.words(3, html=True)) perf_test_values = [ ('</a' + '\t' * 50000) + '//>', '&' * 50000, '_X<<<<<<<<<<<>', ] for value in perf_test_values: with self.subTest(value=value): truncator = text.Truncator(value) self.assertEqual(value, truncator.words(50, html=True)) def test_wrap(self): digits = '1234 67 9' self.assertEqual(text.wrap(digits, 100), '1234 67 9') self.assertEqual(text.wrap(digits, 9), '1234 67 9') self.assertEqual(text.wrap(digits, 8), '1234 67\n9') self.assertEqual(text.wrap('short\na long line', 7), 'short\na long\nline') self.assertEqual(text.wrap('do-not-break-long-words please? 
ok', 8), 'do-not-break-long-words\nplease?\nok') long_word = 'l%sng' % ('o' * 20) self.assertEqual(text.wrap(long_word, 20), long_word) self.assertEqual(text.wrap('a %s word' % long_word, 10), 'a\n%s\nword' % long_word) self.assertEqual(text.wrap(lazystr(digits), 100), '1234 67 9') def test_normalize_newlines(self): self.assertEqual(text.normalize_newlines("abc\ndef\rghi\r\n"), "abc\ndef\nghi\n") self.assertEqual(text.normalize_newlines("\n\r\r\n\r"), "\n\n\n\n") self.assertEqual(text.normalize_newlines("abcdefghi"), "abcdefghi") self.assertEqual(text.normalize_newlines(""), "") self.assertEqual(text.normalize_newlines(lazystr("abc\ndef\rghi\r\n")), "abc\ndef\nghi\n") def test_phone2numeric(self): numeric = text.phone2numeric('0800 flowers') self.assertEqual(numeric, '0800 3569377') lazy_numeric = lazystr(text.phone2numeric('0800 flowers')) self.assertEqual(lazy_numeric, '0800 3569377') def test_slugify(self): items = ( # given - expected - Unicode? ('Hello, World!', 'hello-world', False), ('spam & eggs', 'spam-eggs', False), (' multiple---dash and space ', 'multiple-dash-and-space', False), ('\t whitespace-in-value \n', 'whitespace-in-value', False), ('underscore_in-value', 'underscore_in-value', False), ('__strip__underscore-value___', 'strip__underscore-value', False), ('--strip-dash-value---', 'strip-dash-value', False), ('__strip-mixed-value---', 'strip-mixed-value', False), ('_ -strip-mixed-value _-', 'strip-mixed-value', False), ('spam & ıçüş', 'spam-ıçüş', True), ('foo ıç bar', 'foo-ıç-bar', True), (' foo ıç bar', 'foo-ıç-bar', True), ('你好', '你好', True), ('İstanbul', 'istanbul', True), ) for value, output, is_unicode in items: with self.subTest(value=value): self.assertEqual(text.slugify(value, allow_unicode=is_unicode), output) # Interning the result may be useful, e.g. when fed to Path. with self.subTest('intern'): self.assertEqual(sys.intern(text.slugify('a')), 'a') @ignore_warnings(category=RemovedInDjango40Warning) def test_unescape_entities(self): items = [ ('', ''), ('foo', 'foo'), ('&amp;', '&'), ('&am;', '&am;'), ('&#x26;', '&'), ('&#xk;', '&#xk;'), ('&#38;', '&'), ('foo &amp; bar', 'foo & bar'), ('foo & bar', 'foo & bar'), ] for value, output in items: with self.subTest(value=value): self.assertEqual(text.unescape_entities(value), output) self.assertEqual(text.unescape_entities(lazystr(value)), output) def test_unescape_entities_deprecated(self): msg = ( 'django.utils.text.unescape_entities() is deprecated in favor of ' 'html.unescape().' 
) with self.assertWarnsMessage(RemovedInDjango40Warning, msg): text.unescape_entities('foo') def test_unescape_string_literal(self): items = [ ('"abc"', 'abc'), ("'abc'", 'abc'), ('"a \"bc\""', 'a "bc"'), ("'\'ab\' c'", "'ab' c"), ] for value, output in items: with self.subTest(value=value): self.assertEqual(text.unescape_string_literal(value), output) self.assertEqual(text.unescape_string_literal(lazystr(value)), output) def test_get_valid_filename(self): filename = "^&'@{}[],$=!-#()%+~_123.txt" self.assertEqual(text.get_valid_filename(filename), "-_123.txt") self.assertEqual(text.get_valid_filename(lazystr(filename)), "-_123.txt") def test_compress_sequence(self): data = [{'key': i} for i in range(10)] seq = list(json.JSONEncoder().iterencode(data)) seq = [s.encode() for s in seq] actual_length = len(b''.join(seq)) out = text.compress_sequence(seq) compressed_length = len(b''.join(out)) self.assertLess(compressed_length, actual_length) def test_format_lazy(self): self.assertEqual('django/test', format_lazy('{}/{}', 'django', lazystr('test'))) self.assertEqual('django/test', format_lazy('{0}/{1}', *('django', 'test'))) self.assertEqual('django/test', format_lazy('{a}/{b}', **{'a': 'django', 'b': 'test'})) self.assertEqual('django/test', format_lazy('{a[0]}/{a[1]}', a=('django', 'test'))) t = {} s = format_lazy('{0[a]}-{p[a]}', t, p=t) t['a'] = lazystr('django') self.assertEqual('django-django', s) t['a'] = 'update' self.assertEqual('update-update', s) # The format string can be lazy. (string comes from contrib.admin) s = format_lazy( gettext_lazy('Added {name} “{object}”.'), name='article', object='My first try', ) with override('fr'): self.assertEqual('Ajout de article «\xa0My first try\xa0».', s)
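# A standalone sketch of the laziness test_format_lazy exercises above:
# format_lazy() stores its arguments by reference and only renders when the
# result is coerced to str, so later mutations of a mutable argument are
# visible.
from django.utils.text import format_lazy

parts = {'who': 'world'}
message = format_lazy('hello {0[who]}', parts)
parts['who'] = 'django'
assert str(message) == 'hello django'  # rendered at str() time, not at creation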
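# Footnote sketch on the Unicode behavior pinned down in test_truncate_chars
# above: Truncator.chars() normalizes to NFC before counting, so decomposed
# ('u' + combining diaeresis) and precomposed spellings of 'ö' truncate the
# same way. The assertions mirror values from the tests above.
from django.utils.text import Truncator

assert Truncator('o\xfco\xfco\xfco\xfc').chars(3) == 'oü…'
assert Truncator('ou\u0308ou\u0308ou\u0308ou\u0308').chars(3) == 'oü…'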
from unittest import mock from django.conf import settings from django.db import connection, models from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature from django.test.utils import isolate_apps from .models import Book, ChildModel1, ChildModel2 class SimpleIndexesTests(SimpleTestCase): def test_suffix(self): self.assertEqual(models.Index.suffix, 'idx') def test_repr(self): index = models.Index(fields=['title']) multi_col_index = models.Index(fields=['title', 'author']) partial_index = models.Index(fields=['title'], name='long_books_idx', condition=models.Q(pages__gt=400)) covering_index = models.Index( fields=['title'], name='include_idx', include=['author', 'pages'], ) self.assertEqual(repr(index), "<Index: fields='title'>") self.assertEqual(repr(multi_col_index), "<Index: fields='title, author'>") self.assertEqual(repr(partial_index), "<Index: fields='title', condition=(AND: ('pages__gt', 400))>") self.assertEqual( repr(covering_index), "<Index: fields='title', include='author, pages'>", ) def test_eq(self): index = models.Index(fields=['title']) same_index = models.Index(fields=['title']) another_index = models.Index(fields=['title', 'author']) index.model = Book same_index.model = Book another_index.model = Book self.assertEqual(index, same_index) self.assertEqual(index, mock.ANY) self.assertNotEqual(index, another_index) def test_index_fields_type(self): with self.assertRaisesMessage(ValueError, 'Index.fields must be a list or tuple.'): models.Index(fields='title') def test_fields_tuple(self): self.assertEqual(models.Index(fields=('title',)).fields, ['title']) def test_raises_error_without_field(self): msg = 'At least one field is required to define an index.' with self.assertRaisesMessage(ValueError, msg): models.Index() def test_opclasses_requires_index_name(self): with self.assertRaisesMessage(ValueError, 'An index must be named to use opclasses.'): models.Index(opclasses=['jsonb_path_ops']) def test_opclasses_requires_list_or_tuple(self): with self.assertRaisesMessage(ValueError, 'Index.opclasses must be a list or tuple.'): models.Index(name='test_opclass', fields=['field'], opclasses='jsonb_path_ops') def test_opclasses_and_fields_same_length(self): msg = 'Index.fields and Index.opclasses must have the same number of elements.' with self.assertRaisesMessage(ValueError, msg): models.Index(name='test_opclass', fields=['field', 'other'], opclasses=['jsonb_path_ops']) def test_condition_requires_index_name(self): with self.assertRaisesMessage(ValueError, 'An index must be named to use condition.'): models.Index(condition=models.Q(pages__gt=400)) def test_condition_must_be_q(self): with self.assertRaisesMessage(ValueError, 'Index.condition must be a Q instance.'): models.Index(condition='invalid', name='long_book_idx') def test_include_requires_list_or_tuple(self): msg = 'Index.include must be a list or tuple.' with self.assertRaisesMessage(ValueError, msg): models.Index(name='test_include', fields=['field'], include='other') def test_include_requires_index_name(self): msg = 'A covering index must be named.' with self.assertRaisesMessage(ValueError, msg): models.Index(fields=['field'], include=['other']) def test_name_auto_generation(self): index = models.Index(fields=['author']) index.set_name_with_model(Book) self.assertEqual(index.name, 'model_index_author_0f5565_idx') # '-' for DESC columns should be accounted for in the index name. 
index = models.Index(fields=['-author']) index.set_name_with_model(Book) self.assertEqual(index.name, 'model_index_author_708765_idx') # fields may be truncated in the name. db_column is used for naming. long_field_index = models.Index(fields=['pages']) long_field_index.set_name_with_model(Book) self.assertEqual(long_field_index.name, 'model_index_page_co_69235a_idx') # suffix can't be longer than 3 characters. long_field_index.suffix = 'suff' msg = 'Index too long for multiple database support. Is self.suffix longer than 3 characters?' with self.assertRaisesMessage(AssertionError, msg): long_field_index.set_name_with_model(Book) @isolate_apps('model_indexes') def test_name_auto_generation_with_quoted_db_table(self): class QuotedDbTable(models.Model): name = models.CharField(max_length=50) class Meta: db_table = '"t_quoted"' index = models.Index(fields=['name']) index.set_name_with_model(QuotedDbTable) self.assertEqual(index.name, 't_quoted_name_e4ed1b_idx') def test_deconstruction(self): index = models.Index(fields=['title'], db_tablespace='idx_tbls') index.set_name_with_model(Book) path, args, kwargs = index.deconstruct() self.assertEqual(path, 'django.db.models.Index') self.assertEqual(args, ()) self.assertEqual( kwargs, {'fields': ['title'], 'name': 'model_index_title_196f42_idx', 'db_tablespace': 'idx_tbls'} ) def test_deconstruct_with_condition(self): index = models.Index( name='big_book_index', fields=['title'], condition=models.Q(pages__gt=400), ) index.set_name_with_model(Book) path, args, kwargs = index.deconstruct() self.assertEqual(path, 'django.db.models.Index') self.assertEqual(args, ()) self.assertEqual( kwargs, { 'fields': ['title'], 'name': 'model_index_title_196f42_idx', 'condition': models.Q(pages__gt=400), } ) def test_deconstruct_with_include(self): index = models.Index( name='book_include_idx', fields=['title'], include=['author'], ) index.set_name_with_model(Book) path, args, kwargs = index.deconstruct() self.assertEqual(path, 'django.db.models.Index') self.assertEqual(args, ()) self.assertEqual( kwargs, { 'fields': ['title'], 'name': 'model_index_title_196f42_idx', 'include': ('author',), }, ) def test_clone(self): index = models.Index(fields=['title']) new_index = index.clone() self.assertIsNot(index, new_index) self.assertEqual(index.fields, new_index.fields) def test_name_set(self): index_names = [index.name for index in Book._meta.indexes] self.assertCountEqual( index_names, [ 'model_index_title_196f42_idx', 'model_index_isbn_34f975_idx', 'model_indexes_book_barcode_idx', ], ) def test_abstract_children(self): index_names = [index.name for index in ChildModel1._meta.indexes] self.assertEqual( index_names, ['model_index_name_440998_idx', 'model_indexes_childmodel1_idx'], ) index_names = [index.name for index in ChildModel2._meta.indexes] self.assertEqual( index_names, ['model_index_name_b6c374_idx', 'model_indexes_childmodel2_idx'], ) class IndexesTests(TestCase): @skipUnlessDBFeature('supports_tablespaces') def test_db_tablespace(self): editor = connection.schema_editor() # Index with db_tablespace attribute. for fields in [ # Field with db_tablespace specified on model. ['shortcut'], # Field without db_tablespace specified on model. ['author'], # Multi-column with db_tablespaces specified on model. ['shortcut', 'isbn'], # Multi-column without db_tablespace specified on model. 
['title', 'author'], ]: with self.subTest(fields=fields): index = models.Index(fields=fields, db_tablespace='idx_tbls2') self.assertIn('"idx_tbls2"', str(index.create_sql(Book, editor)).lower()) # Indexes without db_tablespace attribute. for fields in [['author'], ['shortcut', 'isbn'], ['title', 'author']]: with self.subTest(fields=fields): index = models.Index(fields=fields) # The DEFAULT_INDEX_TABLESPACE setting can't be tested because # it's evaluated when the model class is defined. As a # consequence, @override_settings doesn't work. if settings.DEFAULT_INDEX_TABLESPACE: self.assertIn( '"%s"' % settings.DEFAULT_INDEX_TABLESPACE, str(index.create_sql(Book, editor)).lower() ) else: self.assertNotIn('TABLESPACE', str(index.create_sql(Book, editor))) # Field with db_tablespace specified on the model and an index without # db_tablespace. index = models.Index(fields=['shortcut']) self.assertIn('"idx_tbls"', str(index.create_sql(Book, editor)).lower())
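# Declarative sketch tying IndexesTests.test_db_tablespace above together:
# where a project would actually declare db_tablespace, both on a field and
# on an explicit index. The model below is hypothetical, not one of this
# app's test models.
from django.db import models


class TablespaceDemo(models.Model):
    shortcut = models.CharField(max_length=40, db_tablespace='idx_tbls')

    class Meta:
        app_label = 'model_indexes'  # needed when defined outside models.py
        indexes = [
            models.Index(
                fields=['shortcut'],
                name='demo_shortcut_idx',
                db_tablespace='idx_tbls2',
            ),
        ]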
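# Generic illustration (not Django's exact algorithm) of the naming scheme the
# index-name tests below pin down: over-long identifiers are truncated, then
# disambiguated with a short, deterministic digest of the full inputs, keeping
# names stable across runs and under the backend's identifier length limit.
import hashlib


def short_index_name(table, columns, suffix, max_length=30):
    digest = hashlib.md5('_'.join([table, *columns, suffix]).encode()).hexdigest()[:8]
    base = '%s_%s' % (table, '_'.join(columns))
    return '%s_%s%s' % (base[:max_length - len(digest) - len(suffix) - 1], digest, suffix)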
import datetime from unittest import skipIf, skipUnless from django.db import connection from django.db.models import CASCADE, ForeignKey, Index, Q from django.test import ( TestCase, TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature, ) from django.test.utils import override_settings from django.utils import timezone from .models import ( Article, ArticleTranslation, IndexedArticle2, IndexTogetherSingleList, ) class SchemaIndexesTests(TestCase): """ Test index handling by the db.backends.schema infrastructure. """ def test_index_name_hash(self): """ Index names should be deterministic. """ editor = connection.schema_editor() index_name = editor._create_index_name( table_name=Article._meta.db_table, column_names=("c1",), suffix="123", ) self.assertEqual(index_name, "indexes_article_c1_a52bd80b123") def test_index_name(self): """ Index names on the built-in database backends:: * Are truncated as needed. * Include all the column names. * Include a deterministic hash. """ long_name = 'l%sng' % ('o' * 100) editor = connection.schema_editor() index_name = editor._create_index_name( table_name=Article._meta.db_table, column_names=('c1', 'c2', long_name), suffix='ix', ) expected = { 'mysql': 'indexes_article_c1_c2_looooooooooooooooooo_255179b2ix', 'oracle': 'indexes_a_c1_c2_loo_255179b2ix', 'postgresql': 'indexes_article_c1_c2_loooooooooooooooooo_255179b2ix', 'sqlite': 'indexes_article_c1_c2_l%sng_255179b2ix' % ('o' * 100), } if connection.vendor not in expected: self.skipTest('This test is only supported on the built-in database backends.') self.assertEqual(index_name, expected[connection.vendor]) def test_index_together(self): editor = connection.schema_editor() index_sql = [str(statement) for statement in editor._model_indexes_sql(Article)] self.assertEqual(len(index_sql), 1) # Ensure the index name is properly quoted self.assertIn( connection.ops.quote_name( editor._create_index_name(Article._meta.db_table, ['headline', 'pub_date'], suffix='_idx') ), index_sql[0] ) def test_index_together_single_list(self): # Test for using index_together with a single list (#22172) index_sql = connection.schema_editor()._model_indexes_sql(IndexTogetherSingleList) self.assertEqual(len(index_sql), 1) def test_columns_list_sql(self): index = Index(fields=['headline'], name='whitespace_idx') editor = connection.schema_editor() self.assertIn( '(%s)' % editor.quote_name('headline'), str(index.create_sql(Article, editor)), ) def test_descending_columns_list_sql(self): index = Index(fields=['-headline'], name='whitespace_idx') editor = connection.schema_editor() self.assertIn( '(%s DESC)' % editor.quote_name('headline'), str(index.create_sql(Article, editor)), ) @skipIf(connection.vendor == 'postgresql', 'opclasses are PostgreSQL only') class SchemaIndexesNotPostgreSQLTests(TransactionTestCase): available_apps = ['indexes'] def test_create_index_ignores_opclasses(self): index = Index( name='test_ops_class', fields=['headline'], opclasses=['varchar_pattern_ops'], ) with connection.schema_editor() as editor: # This would error if opclasses weren't ignored. editor.add_index(IndexedArticle2, index) # The `condition` parameter is ignored by databases that don't support partial # indexes. 
@skipIfDBFeature('supports_partial_indexes') class PartialIndexConditionIgnoredTests(TransactionTestCase): available_apps = ['indexes'] def test_condition_ignored(self): index = Index( name='test_condition_ignored', fields=['published'], condition=Q(published=True), ) with connection.schema_editor() as editor: # This would error if condition weren't ignored. editor.add_index(Article, index) self.assertNotIn( 'WHERE %s' % editor.quote_name('published'), str(index.create_sql(Article, editor)) ) @skipUnless(connection.vendor == 'postgresql', 'PostgreSQL tests') class SchemaIndexesPostgreSQLTests(TransactionTestCase): available_apps = ['indexes'] get_opclass_query = ''' SELECT opcname, c.relname FROM pg_opclass AS oc JOIN pg_index as i on oc.oid = ANY(i.indclass) JOIN pg_class as c on c.oid = i.indexrelid WHERE c.relname = '%s' ''' def test_text_indexes(self): """Test creation of PostgreSQL-specific text indexes (#12234)""" from .models import IndexedArticle index_sql = [str(statement) for statement in connection.schema_editor()._model_indexes_sql(IndexedArticle)] self.assertEqual(len(index_sql), 5) self.assertIn('("headline" varchar_pattern_ops)', index_sql[1]) self.assertIn('("body" text_pattern_ops)', index_sql[3]) # unique=True and db_index=True should only create the varchar-specific # index (#19441). self.assertIn('("slug" varchar_pattern_ops)', index_sql[4]) def test_virtual_relation_indexes(self): """Test indexes are not created for related objects""" index_sql = connection.schema_editor()._model_indexes_sql(Article) self.assertEqual(len(index_sql), 1) def test_ops_class(self): index = Index( name='test_ops_class', fields=['headline'], opclasses=['varchar_pattern_ops'], ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % 'test_ops_class') self.assertEqual(cursor.fetchall(), [('varchar_pattern_ops', 'test_ops_class')]) def test_ops_class_multiple_columns(self): index = Index( name='test_ops_class_multiple', fields=['headline', 'body'], opclasses=['varchar_pattern_ops', 'text_pattern_ops'], ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % 'test_ops_class_multiple') expected_ops_classes = ( ('varchar_pattern_ops', 'test_ops_class_multiple'), ('text_pattern_ops', 'test_ops_class_multiple'), ) self.assertCountEqual(cursor.fetchall(), expected_ops_classes) def test_ops_class_partial(self): index = Index( name='test_ops_class_partial', fields=['body'], opclasses=['text_pattern_ops'], condition=Q(headline__contains='China'), ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % 'test_ops_class_partial') self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', 'test_ops_class_partial')]) def test_ops_class_partial_tablespace(self): indexname = 'test_ops_class_tblspace' index = Index( name=indexname, fields=['body'], opclasses=['text_pattern_ops'], condition=Q(headline__contains='China'), db_tablespace='pg_default', ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) self.assertIn('TABLESPACE "pg_default" ', str(index.create_sql(IndexedArticle2, editor))) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % indexname) self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', 
indexname)])

    def test_ops_class_descending(self):
        indexname = 'test_ops_class_ordered'
        index = Index(
            name=indexname,
            fields=['-body'],
            opclasses=['text_pattern_ops'],
        )
        with connection.schema_editor() as editor:
            editor.add_index(IndexedArticle2, index)
        with editor.connection.cursor() as cursor:
            cursor.execute(self.get_opclass_query % indexname)
            self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', indexname)])

    def test_ops_class_descending_partial(self):
        indexname = 'test_ops_class_ordered_partial'
        index = Index(
            name=indexname,
            fields=['-body'],
            opclasses=['text_pattern_ops'],
            condition=Q(headline__contains='China'),
        )
        with connection.schema_editor() as editor:
            editor.add_index(IndexedArticle2, index)
        with editor.connection.cursor() as cursor:
            cursor.execute(self.get_opclass_query % indexname)
            self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', indexname)])

    @skipUnlessDBFeature('supports_covering_indexes')
    def test_ops_class_include(self):
        index_name = 'test_ops_class_include'
        index = Index(
            name=index_name,
            fields=['body'],
            opclasses=['text_pattern_ops'],
            include=['headline'],
        )
        with connection.schema_editor() as editor:
            editor.add_index(IndexedArticle2, index)
        with editor.connection.cursor() as cursor:
            cursor.execute(self.get_opclass_query % index_name)
            self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', index_name)])

    @skipUnlessDBFeature('supports_covering_indexes')
    def test_ops_class_include_tablespace(self):
        index_name = 'test_ops_class_include_tblspace'
        index = Index(
            name=index_name,
            fields=['body'],
            opclasses=['text_pattern_ops'],
            include=['headline'],
            db_tablespace='pg_default',
        )
        with connection.schema_editor() as editor:
            editor.add_index(IndexedArticle2, index)
            self.assertIn(
                'TABLESPACE "pg_default"',
                str(index.create_sql(IndexedArticle2, editor)),
            )
        with editor.connection.cursor() as cursor:
            cursor.execute(self.get_opclass_query % index_name)
            self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', index_name)])

    def test_ops_class_columns_lists_sql(self):
        index = Index(
            fields=['headline'],
            name='whitespace_idx',
            opclasses=['text_pattern_ops'],
        )
        with connection.schema_editor() as editor:
            self.assertIn(
                '(%s text_pattern_ops)' % editor.quote_name('headline'),
                str(index.create_sql(Article, editor)),
            )

    def test_ops_class_descending_columns_list_sql(self):
        index = Index(
            fields=['-headline'],
            name='whitespace_idx',
            opclasses=['text_pattern_ops'],
        )
        with connection.schema_editor() as editor:
            self.assertIn(
                '(%s text_pattern_ops DESC)' % editor.quote_name('headline'),
                str(index.create_sql(Article, editor)),
            )


@skipUnless(connection.vendor == 'mysql', 'MySQL tests')
class SchemaIndexesMySQLTests(TransactionTestCase):
    available_apps = ['indexes']

    def test_no_index_for_foreignkey(self):
        """
        MySQL on InnoDB already creates indexes automatically for foreign
        keys. (#14180). An index should be created if db_constraint=False
        (#26171).
        """
        with connection.cursor() as cursor:
            storage = connection.introspection.get_storage_engine(
                cursor, ArticleTranslation._meta.db_table,
            )
        if storage != "InnoDB":
            self.skipTest("This test only applies to the InnoDB storage engine")
        index_sql = [str(statement) for statement in connection.schema_editor()._model_indexes_sql(ArticleTranslation)]
        self.assertEqual(index_sql, [
            'CREATE INDEX `indexes_articletranslation_article_no_constraint_id_d6c0806b` '
            'ON `indexes_articletranslation` (`article_no_constraint_id`)'
        ])

        # The index also shouldn't be created if the ForeignKey is added after
        # the model was created.
field_created = False try: with connection.schema_editor() as editor: new_field = ForeignKey(Article, CASCADE) new_field.set_attributes_from_name('new_foreign_key') editor.add_field(ArticleTranslation, new_field) field_created = True # No deferred SQL. The FK constraint is included in the # statement to add the field. self.assertFalse(editor.deferred_sql) finally: if field_created: with connection.schema_editor() as editor: editor.remove_field(ArticleTranslation, new_field) @skipUnlessDBFeature('supports_partial_indexes') # SQLite doesn't support timezone-aware datetimes when USE_TZ is False. @override_settings(USE_TZ=True) class PartialIndexTests(TransactionTestCase): # Schema editor is used to create the index to test that it works. available_apps = ['indexes'] def test_partial_index(self): with connection.schema_editor() as editor: index = Index( name='recent_article_idx', fields=['pub_date'], condition=Q( pub_date__gt=datetime.datetime( year=2015, month=1, day=1, # PostgreSQL would otherwise complain about the lookup # being converted to a mutable function (by removing # the timezone in the cast) which is forbidden. tzinfo=timezone.get_current_timezone(), ), ) ) self.assertIn( 'WHERE %s' % editor.quote_name('pub_date'), str(index.create_sql(Article, schema_editor=editor)) ) editor.add_index(index=index, model=Article) with connection.cursor() as cursor: self.assertIn(index.name, connection.introspection.get_constraints( cursor=cursor, table_name=Article._meta.db_table, )) editor.remove_index(index=index, model=Article) def test_integer_restriction_partial(self): with connection.schema_editor() as editor: index = Index( name='recent_article_idx', fields=['id'], condition=Q(pk__gt=1), ) self.assertIn( 'WHERE %s' % editor.quote_name('id'), str(index.create_sql(Article, schema_editor=editor)) ) editor.add_index(index=index, model=Article) with connection.cursor() as cursor: self.assertIn(index.name, connection.introspection.get_constraints( cursor=cursor, table_name=Article._meta.db_table, )) editor.remove_index(index=index, model=Article) def test_boolean_restriction_partial(self): with connection.schema_editor() as editor: index = Index( name='published_index', fields=['published'], condition=Q(published=True), ) self.assertIn( 'WHERE %s' % editor.quote_name('published'), str(index.create_sql(Article, schema_editor=editor)) ) editor.add_index(index=index, model=Article) with connection.cursor() as cursor: self.assertIn(index.name, connection.introspection.get_constraints( cursor=cursor, table_name=Article._meta.db_table, )) editor.remove_index(index=index, model=Article) @skipUnlessDBFeature('supports_functions_in_partial_indexes') def test_multiple_conditions(self): with connection.schema_editor() as editor: index = Index( name='recent_article_idx', fields=['pub_date', 'headline'], condition=( Q(pub_date__gt=datetime.datetime( year=2015, month=1, day=1, tzinfo=timezone.get_current_timezone(), )) & Q(headline__contains='China') ), ) sql = str(index.create_sql(Article, schema_editor=editor)) where = sql.find('WHERE') self.assertIn( 'WHERE (%s' % editor.quote_name('pub_date'), sql ) # Because each backend has different syntax for the operators, # check ONLY the occurrence of headline in the SQL. 
            self.assertGreater(sql.rfind('headline'), where)
            editor.add_index(index=index, model=Article)
            with connection.cursor() as cursor:
                self.assertIn(index.name, connection.introspection.get_constraints(
                    cursor=cursor, table_name=Article._meta.db_table,
                ))
            editor.remove_index(index=index, model=Article)

    def test_is_null_condition(self):
        with connection.schema_editor() as editor:
            index = Index(
                name='recent_article_idx',
                fields=['pub_date'],
                condition=Q(pub_date__isnull=False),
            )
            self.assertIn(
                'WHERE %s IS NOT NULL' % editor.quote_name('pub_date'),
                str(index.create_sql(Article, schema_editor=editor))
            )
            editor.add_index(index=index, model=Article)
            with connection.cursor() as cursor:
                self.assertIn(index.name, connection.introspection.get_constraints(
                    cursor=cursor, table_name=Article._meta.db_table,
                ))
            editor.remove_index(index=index, model=Article)


@skipUnlessDBFeature('supports_covering_indexes')
class CoveringIndexTests(TransactionTestCase):
    available_apps = ['indexes']

    def test_covering_index(self):
        index = Index(
            name='covering_headline_idx',
            fields=['headline'],
            include=['pub_date', 'published'],
        )
        with connection.schema_editor() as editor:
            self.assertIn(
                '(%s) INCLUDE (%s, %s)' % (
                    editor.quote_name('headline'),
                    editor.quote_name('pub_date'),
                    editor.quote_name('published'),
                ),
                str(index.create_sql(Article, editor)),
            )
            editor.add_index(Article, index)
            with connection.cursor() as cursor:
                constraints = connection.introspection.get_constraints(
                    cursor=cursor, table_name=Article._meta.db_table,
                )
                self.assertIn(index.name, constraints)
                self.assertEqual(
                    constraints[index.name]['columns'],
                    ['headline', 'pub_date', 'published'],
                )
            editor.remove_index(Article, index)
            with connection.cursor() as cursor:
                self.assertNotIn(index.name, connection.introspection.get_constraints(
                    cursor=cursor, table_name=Article._meta.db_table,
                ))

    def test_covering_partial_index(self):
        index = Index(
            name='covering_partial_headline_idx',
            fields=['headline'],
            include=['pub_date'],
            condition=Q(pub_date__isnull=False),
        )
        with connection.schema_editor() as editor:
            self.assertIn(
                '(%s) INCLUDE (%s) WHERE %s ' % (
                    editor.quote_name('headline'),
                    editor.quote_name('pub_date'),
                    editor.quote_name('pub_date'),
                ),
                str(index.create_sql(Article, editor)),
            )
            editor.add_index(Article, index)
            with connection.cursor() as cursor:
                constraints = connection.introspection.get_constraints(
                    cursor=cursor, table_name=Article._meta.db_table,
                )
                self.assertIn(index.name, constraints)
                self.assertEqual(
                    constraints[index.name]['columns'],
                    ['headline', 'pub_date'],
                )
            editor.remove_index(Article, index)
            with connection.cursor() as cursor:
                self.assertNotIn(index.name, connection.introspection.get_constraints(
                    cursor=cursor, table_name=Article._meta.db_table,
                ))


@skipIfDBFeature('supports_covering_indexes')
class CoveringIndexIgnoredTests(TransactionTestCase):
    available_apps = ['indexes']

    def test_covering_ignored(self):
        index = Index(
            name='test_covering_ignored',
            fields=['headline'],
            include=['pub_date'],
        )
        with connection.schema_editor() as editor:
            editor.add_index(Article, index)
            # An INCLUDE clause, if one were emitted, would name the included
            # column ('pub_date'), so that's what must be absent.
            self.assertNotIn(
                'INCLUDE (%s)' % editor.quote_name('pub_date'),
                str(index.create_sql(Article, editor)),
            )
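# For reference (illustrative SQL, index name shortened): on a backend with
# covering-index support, Index(fields=['headline'], include=['pub_date'])
# renders roughly as
#   CREATE INDEX "idx" ON "indexes_article" ("headline") INCLUDE ("pub_date")
# while backends without support omit the INCLUDE clause entirely, which is
# what CoveringIndexIgnoredTests asserts.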
0634efaa2d1dee1883455c62d7b8124ebae61ce3d3ab96a935fd123484607c7a
import datetime import itertools import unittest from copy import copy from unittest import mock from django.core.management.color import no_style from django.db import ( DatabaseError, DataError, IntegrityError, OperationalError, connection, ) from django.db.models import ( CASCADE, PROTECT, AutoField, BigAutoField, BigIntegerField, BinaryField, BooleanField, CharField, CheckConstraint, DateField, DateTimeField, ForeignKey, ForeignObject, Index, IntegerField, ManyToManyField, Model, OneToOneField, PositiveIntegerField, Q, SlugField, SmallAutoField, SmallIntegerField, TextField, TimeField, UniqueConstraint, UUIDField, ) from django.db.transaction import TransactionManagementError, atomic from django.test import ( TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature, ) from django.test.utils import CaptureQueriesContext, isolate_apps from django.utils import timezone from .fields import ( CustomManyToManyField, InheritedManyToManyField, MediumBlobField, ) from .models import ( Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex, AuthorWithDefaultHeight, AuthorWithEvenLongerName, AuthorWithIndexedName, AuthorWithIndexedNameAndBirthday, AuthorWithUniqueName, AuthorWithUniqueNameAndBirthday, Book, BookForeignObj, BookWeak, BookWithLongName, BookWithO2O, BookWithoutAuthor, BookWithSlug, IntegerPK, Node, Note, NoteRename, Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest, new_apps, ) class SchemaTests(TransactionTestCase): """ Tests for the schema-alteration code. Be aware that these tests are more liable than most to false results, as sometimes the code to check if a test has worked is almost as complex as the code it is testing. """ available_apps = [] models = [ Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex, AuthorWithDefaultHeight, AuthorWithEvenLongerName, Book, BookWeak, BookWithLongName, BookWithO2O, BookWithSlug, IntegerPK, Node, Note, Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest, ] # Utility functions def setUp(self): # local_models should contain test dependent model classes that will be # automatically removed from the app cache on test tear down. self.local_models = [] # isolated_local_models contains models that are in test methods # decorated with @isolate_apps. 
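        # Unlike local_models, these aren't purged from the app registry here
        # (isolate_apps() restores the registry itself); tearDown() only drops
        # their tables via editor.delete_model().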
        self.isolated_local_models = []

    def tearDown(self):
        # Delete any tables made for our models
        self.delete_tables()
        new_apps.clear_cache()
        for model in new_apps.get_models():
            model._meta._expire_cache()
        if 'schema' in new_apps.all_models:
            for model in self.local_models:
                for many_to_many in model._meta.many_to_many:
                    through = many_to_many.remote_field.through
                    if through and through._meta.auto_created:
                        del new_apps.all_models['schema'][through._meta.model_name]
                del new_apps.all_models['schema'][model._meta.model_name]
        if self.isolated_local_models:
            with connection.schema_editor() as editor:
                for model in self.isolated_local_models:
                    editor.delete_model(model)

    def delete_tables(self):
        "Delete all model tables for a clean test environment"
        converter = connection.introspection.identifier_converter
        with connection.schema_editor() as editor:
            connection.disable_constraint_checking()
            table_names = connection.introspection.table_names()
            for model in itertools.chain(SchemaTests.models, self.local_models):
                tbl = converter(model._meta.db_table)
                if tbl in table_names:
                    editor.delete_model(model)
                    table_names.remove(tbl)
            connection.enable_constraint_checking()

    def column_classes(self, model):
        with connection.cursor() as cursor:
            columns = {
                d[0]: (connection.introspection.get_field_type(d[1], d), d)
                for d in connection.introspection.get_table_description(
                    cursor,
                    model._meta.db_table,
                )
            }
        # SQLite has a different format for field_type
        for name, (type, desc) in columns.items():
            if isinstance(type, tuple):
                columns[name] = (type[0], desc)
        # SQLite also doesn't error properly
        if not columns:
            raise DatabaseError("Table does not exist (empty pragma)")
        return columns

    def get_primary_key(self, table):
        with connection.cursor() as cursor:
            return connection.introspection.get_primary_key_column(cursor, table)

    def get_indexes(self, table):
        """
        Get the indexes on the table using a new cursor.
        """
        with connection.cursor() as cursor:
            return [
                c['columns'][0]
                for c in connection.introspection.get_constraints(cursor, table).values()
                if c['index'] and len(c['columns']) == 1
            ]

    def get_uniques(self, table):
        with connection.cursor() as cursor:
            return [
                c['columns'][0]
                for c in connection.introspection.get_constraints(cursor, table).values()
                if c['unique'] and len(c['columns']) == 1
            ]

    def get_constraints(self, table):
        """
        Get the constraints on a table using a new cursor.
        """
        with connection.cursor() as cursor:
            return connection.introspection.get_constraints(cursor, table)

    def get_constraints_for_column(self, model, column_name):
        constraints = self.get_constraints(model._meta.db_table)
        constraints_for_column = []
        for name, details in constraints.items():
            if details['columns'] == [column_name]:
                constraints_for_column.append(name)
        return sorted(constraints_for_column)

    def check_added_field_default(self, schema_editor, model, field, field_name,
                                  expected_default, cast_function=None):
        with connection.cursor() as cursor:
            schema_editor.add_field(model, field)
            cursor.execute("SELECT {} FROM {};".format(field_name, model._meta.db_table))
            database_default = cursor.fetchall()[0][0]
            if cast_function and type(database_default) is not type(expected_default):
                database_default = cast_function(database_default)
            self.assertEqual(database_default, expected_default)

    def get_constraints_count(self, table, column, fk_to):
        """
        Return a dict with keys 'fks', 'uniques', and 'indexes' indicating the
        number of foreign keys, unique constraints, and indexes on
        `table`.`column`.
The `fk_to` argument is a 2-tuple specifying the expected foreign key relationship's (table, column). """ with connection.cursor() as cursor: constraints = connection.introspection.get_constraints(cursor, table) counts = {'fks': 0, 'uniques': 0, 'indexes': 0} for c in constraints.values(): if c['columns'] == [column]: if c['foreign_key'] == fk_to: counts['fks'] += 1 if c['unique']: counts['uniques'] += 1 elif c['index']: counts['indexes'] += 1 return counts def assertIndexOrder(self, table, index, order): constraints = self.get_constraints(table) self.assertIn(index, constraints) index_orders = constraints[index]['orders'] self.assertTrue(all(val == expected for val, expected in zip(index_orders, order))) def assertForeignKeyExists(self, model, column, expected_fk_table, field='id'): """ Fail if the FK constraint on `model.Meta.db_table`.`column` to `expected_fk_table`.id doesn't exist. """ constraints = self.get_constraints(model._meta.db_table) constraint_fk = None for details in constraints.values(): if details['columns'] == [column] and details['foreign_key']: constraint_fk = details['foreign_key'] break self.assertEqual(constraint_fk, (expected_fk_table, field)) def assertForeignKeyNotExists(self, model, column, expected_fk_table): with self.assertRaises(AssertionError): self.assertForeignKeyExists(model, column, expected_fk_table) # Tests def test_creation_deletion(self): """ Tries creating a model's table, and then deleting it. """ with connection.schema_editor() as editor: # Create the table editor.create_model(Author) # The table is there list(Author.objects.all()) # Clean up that table editor.delete_model(Author) # No deferred SQL should be left over. self.assertEqual(editor.deferred_sql, []) # The table is gone with self.assertRaises(DatabaseError): list(Author.objects.all()) @skipUnlessDBFeature('supports_foreign_keys') def test_fk(self): "Creating tables out of FK order, then repointing, works" # Create the table with connection.schema_editor() as editor: editor.create_model(Book) editor.create_model(Author) editor.create_model(Tag) # Initial tables are there list(Author.objects.all()) list(Book.objects.all()) # Make sure the FK constraint is present with self.assertRaises(IntegrityError): Book.objects.create( author_id=1, title="Much Ado About Foreign Keys", pub_date=datetime.datetime.now(), ) # Repoint the FK constraint old_field = Book._meta.get_field("author") new_field = ForeignKey(Tag, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) self.assertForeignKeyExists(Book, 'author_id', 'schema_tag') @skipUnlessDBFeature('can_create_inline_fk') def test_inline_fk(self): # Create some tables. with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) editor.create_model(Note) self.assertForeignKeyNotExists(Note, 'book_id', 'schema_book') # Add a foreign key from one to the other. with connection.schema_editor() as editor: new_field = ForeignKey(Book, CASCADE) new_field.set_attributes_from_name('book') editor.add_field(Note, new_field) self.assertForeignKeyExists(Note, 'book_id', 'schema_book') # Creating a FK field with a constraint uses a single statement without # a deferred ALTER TABLE. 
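        # On such backends the REFERENCES clause ships inline with the column
        # definition, roughly (illustrative SQL):
        #   ALTER TABLE "schema_note" ADD COLUMN "book_id" integer
        #       REFERENCES "schema_book" ("id")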
self.assertFalse([ sql for sql in (str(statement) for statement in editor.deferred_sql) if sql.startswith('ALTER TABLE') and 'ADD CONSTRAINT' in sql ]) @skipUnlessDBFeature('can_create_inline_fk') def test_add_inline_fk_update_data(self): with connection.schema_editor() as editor: editor.create_model(Node) # Add an inline foreign key and update data in the same transaction. new_field = ForeignKey(Node, CASCADE, related_name='new_fk', null=True) new_field.set_attributes_from_name('new_parent_fk') parent = Node.objects.create() with connection.schema_editor() as editor: editor.add_field(Node, new_field) editor.execute('UPDATE schema_node SET new_parent_fk_id = %s;', [parent.pk]) self.assertIn('new_parent_fk_id', self.get_indexes(Node._meta.db_table)) @skipUnlessDBFeature( 'can_create_inline_fk', 'allows_multiple_constraints_on_same_fields', ) @isolate_apps('schema') def test_add_inline_fk_index_update_data(self): class Node(Model): class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Node) # Add an inline foreign key, update data, and an index in the same # transaction. new_field = ForeignKey(Node, CASCADE, related_name='new_fk', null=True) new_field.set_attributes_from_name('new_parent_fk') parent = Node.objects.create() with connection.schema_editor() as editor: editor.add_field(Node, new_field) Node._meta.add_field(new_field) editor.execute('UPDATE schema_node SET new_parent_fk_id = %s;', [parent.pk]) editor.add_index(Node, Index(fields=['new_parent_fk'], name='new_parent_inline_fk_idx')) self.assertIn('new_parent_fk_id', self.get_indexes(Node._meta.db_table)) @skipUnlessDBFeature('supports_foreign_keys') def test_char_field_with_db_index_to_fk(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorCharFieldWithIndex) # Change CharField to FK old_field = AuthorCharFieldWithIndex._meta.get_field('char_field') new_field = ForeignKey(Author, CASCADE, blank=True) new_field.set_attributes_from_name('char_field') with connection.schema_editor() as editor: editor.alter_field(AuthorCharFieldWithIndex, old_field, new_field, strict=True) self.assertForeignKeyExists(AuthorCharFieldWithIndex, 'char_field_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') @skipUnlessDBFeature('supports_index_on_text_field') def test_text_field_with_db_index_to_fk(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorTextFieldWithIndex) # Change TextField to FK old_field = AuthorTextFieldWithIndex._meta.get_field('text_field') new_field = ForeignKey(Author, CASCADE, blank=True) new_field.set_attributes_from_name('text_field') with connection.schema_editor() as editor: editor.alter_field(AuthorTextFieldWithIndex, old_field, new_field, strict=True) self.assertForeignKeyExists(AuthorTextFieldWithIndex, 'text_field_id', 'schema_author') @isolate_apps('schema') def test_char_field_pk_to_auto_field(self): class Foo(Model): id = CharField(max_length=255, primary_key=True) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('id') new_field = AutoField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Foo with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) @skipUnlessDBFeature('supports_foreign_keys') def test_fk_to_proxy(self): "Creating a FK 
to a proxy model creates database constraints." class AuthorProxy(Author): class Meta: app_label = 'schema' apps = new_apps proxy = True class AuthorRef(Model): author = ForeignKey(AuthorProxy, on_delete=CASCADE) class Meta: app_label = 'schema' apps = new_apps self.local_models = [AuthorProxy, AuthorRef] # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorRef) self.assertForeignKeyExists(AuthorRef, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_fk_db_constraint(self): "The db_constraint parameter is respected" # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(Author) editor.create_model(BookWeak) # Initial tables are there list(Author.objects.all()) list(Tag.objects.all()) list(BookWeak.objects.all()) self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author') # Make a db_constraint=False FK new_field = ForeignKey(Tag, CASCADE, db_constraint=False) new_field.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.add_field(Author, new_field) self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag') # Alter to one with a constraint new_field2 = ForeignKey(Tag, CASCADE) new_field2.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) self.assertForeignKeyExists(Author, 'tag_id', 'schema_tag') # Alter to one without a constraint again new_field2 = ForeignKey(Tag, CASCADE) new_field2.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.alter_field(Author, new_field2, new_field, strict=True) self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag') @isolate_apps('schema') def test_no_db_constraint_added_during_primary_key_change(self): """ When a primary key that's pointed to by a ForeignKey with db_constraint=False is altered, a foreign key constraint isn't added. """ class Author(Model): class Meta: app_label = 'schema' class BookWeak(Model): author = ForeignKey(Author, CASCADE, db_constraint=False) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWeak) self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author') old_field = Author._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.model = Author new_field.set_attributes_from_name('id') # @isolate_apps() and inner models are needed to have the model # relations populated, otherwise this doesn't act as a regression test. 
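        # Sanity check: alter_field() walks the incoming relations
        # (related_objects) to decide which FK constraints to recreate around
        # a PK type change, so the BookWeak relation must be visible here.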
self.assertEqual(len(new_field.model._meta.related_objects), 1) with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author') def _test_m2m_db_constraint(self, M2MFieldClass): class LocalAuthorWithM2M(Model): name = CharField(max_length=255) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorWithM2M] # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(LocalAuthorWithM2M) # Initial tables are there list(LocalAuthorWithM2M.objects.all()) list(Tag.objects.all()) # Make a db_constraint=False FK new_field = M2MFieldClass(Tag, related_name="authors", db_constraint=False) new_field.contribute_to_class(LocalAuthorWithM2M, "tags") # Add the field with connection.schema_editor() as editor: editor.add_field(LocalAuthorWithM2M, new_field) self.assertForeignKeyNotExists(new_field.remote_field.through, 'tag_id', 'schema_tag') @skipUnlessDBFeature('supports_foreign_keys') def test_m2m_db_constraint(self): self._test_m2m_db_constraint(ManyToManyField) @skipUnlessDBFeature('supports_foreign_keys') def test_m2m_db_constraint_custom(self): self._test_m2m_db_constraint(CustomManyToManyField) @skipUnlessDBFeature('supports_foreign_keys') def test_m2m_db_constraint_inherited(self): self._test_m2m_db_constraint(InheritedManyToManyField) def test_add_field(self): """ Tests adding fields to models """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add the new field new_field = IntegerField(null=True) new_field.set_attributes_from_name("age") with CaptureQueriesContext(connection) as ctx, connection.schema_editor() as editor: editor.add_field(Author, new_field) drop_default_sql = editor.sql_alter_column_no_default % { 'column': editor.quote_name(new_field.name), } self.assertFalse(any(drop_default_sql in query['sql'] for query in ctx.captured_queries)) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['age'][0], connection.features.introspected_field_types['IntegerField']) self.assertTrue(columns['age'][1][6]) def test_add_field_remove_field(self): """ Adding a field and removing it removes all deferred sql referring to it. """ with connection.schema_editor() as editor: # Create a table with a unique constraint on the slug field. editor.create_model(Tag) # Remove the slug column. 
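            # Dropping the column in the same schema_editor block must also
            # discard any deferred statements that create_model() queued for
            # that column (e.g. PostgreSQL's extra *_like index for the
            # unique slug field).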
editor.remove_field(Tag, Tag._meta.get_field('slug')) self.assertEqual(editor.deferred_sql, []) def test_add_field_temp_default(self): """ Tests adding fields to models with a temporary default """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add a not-null field new_field = CharField(max_length=30, default="Godwin") new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['surname'][0], connection.features.introspected_field_types['CharField']) self.assertEqual(columns['surname'][1][6], connection.features.interprets_empty_strings_as_nulls) def test_add_field_temp_default_boolean(self): """ Tests adding fields to models with a temporary default where the default is False. (#21783) """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add a not-null field new_field = BooleanField(default=False) new_field.set_attributes_from_name("awesome") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is right afterwards columns = self.column_classes(Author) # BooleanField are stored as TINYINT(1) on MySQL. field_type = columns['awesome'][0] self.assertEqual(field_type, connection.features.introspected_field_types['BooleanField']) def test_add_field_default_transform(self): """ Tests adding fields to models with a default that is not directly valid in the database (#22581) """ class TestTransformField(IntegerField): # Weird field that saves the count of items in its value def get_default(self): return self.default def get_prep_value(self, value): if value is None: return 0 return len(value) # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add the field with a default it needs to cast (to string in this case) new_field = TestTransformField(default={1: 2}) new_field.set_attributes_from_name("thing") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is there columns = self.column_classes(Author) field_type, field_info = columns['thing'] self.assertEqual(field_type, connection.features.introspected_field_types['IntegerField']) # Make sure the values were transformed correctly self.assertEqual(Author.objects.extra(where=["thing = 1"]).count(), 2) def test_add_field_binary(self): """ Tests binary fields get a sane default (#22851) """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Add the new field new_field = BinaryField(blank=True) new_field.set_attributes_from_name("bits") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is right afterwards columns = self.column_classes(Author) # MySQL annoyingly uses the same backend, so it'll come back as one of # these two types. 
self.assertIn(columns['bits'][0], ("BinaryField", "TextField")) @unittest.skipUnless(connection.vendor == 'mysql', "MySQL specific") def test_add_binaryfield_mediumblob(self): """ Test adding a custom-sized binary field on MySQL (#24846). """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Add the new field with default new_field = MediumBlobField(blank=True, default=b'123') new_field.set_attributes_from_name('bits') with connection.schema_editor() as editor: editor.add_field(Author, new_field) columns = self.column_classes(Author) # Introspection treats BLOBs as TextFields self.assertEqual(columns['bits'][0], "TextField") def test_alter(self): """ Tests simple altering of fields """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls)) # Alter the name field to a TextField old_field = Author._meta.get_field("name") new_field = TextField(null=True) new_field.set_attributes_from_name("name") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") self.assertTrue(columns['name'][1][6]) # Change nullability again new_field2 = TextField(null=False) new_field2.set_attributes_from_name("name") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls)) def test_alter_auto_field_to_integer_field(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Change AutoField to IntegerField old_field = Author._meta.get_field('id') new_field = IntegerField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) def test_alter_auto_field_to_char_field(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Change AutoField to CharField old_field = Author._meta.get_field('id') new_field = CharField(primary_key=True, max_length=50) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) @isolate_apps('schema') def test_alter_auto_field_quoted_db_column(self): class Foo(Model): id = AutoField(primary_key=True, db_column='"quoted_id"') class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.model = Foo new_field.db_column = '"quoted_id"' new_field.set_attributes_from_name('id') with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) Foo.objects.create() def test_alter_not_unique_field_to_primary_key(self): # Create the table. 
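        # Author declares a plain, non-unique UUIDField; it can only become
        # the primary key after the existing "id" column is dropped below.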
with connection.schema_editor() as editor: editor.create_model(Author) # Change UUIDField to primary key. old_field = Author._meta.get_field('uuid') new_field = UUIDField(primary_key=True) new_field.set_attributes_from_name('uuid') new_field.model = Author with connection.schema_editor() as editor: editor.remove_field(Author, Author._meta.get_field('id')) editor.alter_field(Author, old_field, new_field, strict=True) @isolate_apps('schema') def test_alter_primary_key_quoted_db_table(self): class Foo(Model): class Meta: app_label = 'schema' db_table = '"foo"' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.model = Foo new_field.set_attributes_from_name('id') with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) Foo.objects.create() def test_alter_text_field(self): # Regression for "BLOB/TEXT column 'info' can't have a default value") # on MySQL. # Create the table with connection.schema_editor() as editor: editor.create_model(Note) old_field = Note._meta.get_field("info") new_field = TextField(blank=True) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) @skipUnlessDBFeature('can_defer_constraint_checks', 'can_rollback_ddl') def test_alter_fk_checks_deferred_constraints(self): """ #25492 - Altering a foreign key's structure and data in the same transaction. """ with connection.schema_editor() as editor: editor.create_model(Node) old_field = Node._meta.get_field('parent') new_field = ForeignKey(Node, CASCADE) new_field.set_attributes_from_name('parent') parent = Node.objects.create() with connection.schema_editor() as editor: # Update the parent FK to create a deferred constraint check. Node.objects.update(parent=parent) editor.alter_field(Node, old_field, new_field, strict=True) def test_alter_text_field_to_date_field(self): """ #25002 - Test conversion of text field to date field. """ with connection.schema_editor() as editor: editor.create_model(Note) Note.objects.create(info='1988-05-05') old_field = Note._meta.get_field('info') new_field = DateField(blank=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) # Make sure the field isn't nullable columns = self.column_classes(Note) self.assertFalse(columns['info'][1][6]) def test_alter_text_field_to_datetime_field(self): """ #25002 - Test conversion of text field to datetime field. """ with connection.schema_editor() as editor: editor.create_model(Note) Note.objects.create(info='1988-05-05 3:16:17.4567') old_field = Note._meta.get_field('info') new_field = DateTimeField(blank=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) # Make sure the field isn't nullable columns = self.column_classes(Note) self.assertFalse(columns['info'][1][6]) def test_alter_text_field_to_time_field(self): """ #25002 - Test conversion of text field to time field. 
""" with connection.schema_editor() as editor: editor.create_model(Note) Note.objects.create(info='3:16:17.4567') old_field = Note._meta.get_field('info') new_field = TimeField(blank=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) # Make sure the field isn't nullable columns = self.column_classes(Note) self.assertFalse(columns['info'][1][6]) @skipIfDBFeature('interprets_empty_strings_as_nulls') def test_alter_textual_field_keep_null_status(self): """ Changing a field type shouldn't affect the not null status. """ with connection.schema_editor() as editor: editor.create_model(Note) with self.assertRaises(IntegrityError): Note.objects.create(info=None) old_field = Note._meta.get_field("info") new_field = CharField(max_length=50) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) with self.assertRaises(IntegrityError): Note.objects.create(info=None) def test_alter_numeric_field_keep_null_status(self): """ Changing a field type shouldn't affect the not null status. """ with connection.schema_editor() as editor: editor.create_model(UniqueTest) with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=None, slug='aaa') old_field = UniqueTest._meta.get_field("year") new_field = BigIntegerField() new_field.set_attributes_from_name("year") with connection.schema_editor() as editor: editor.alter_field(UniqueTest, old_field, new_field, strict=True) with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=None, slug='bbb') def test_alter_null_to_not_null(self): """ #23609 - Tests handling of default values when altering from NULL to NOT NULL. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertTrue(columns['height'][1][6]) # Create some test data Author.objects.create(name='Not null author', height=12) Author.objects.create(name='Null author') # Verify null value self.assertEqual(Author.objects.get(name='Not null author').height, 12) self.assertIsNone(Author.objects.get(name='Null author').height) # Alter the height field to NOT NULL with default old_field = Author._meta.get_field("height") new_field = PositiveIntegerField(default=42) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertFalse(columns['height'][1][6]) # Verify default value self.assertEqual(Author.objects.get(name='Not null author').height, 12) self.assertEqual(Author.objects.get(name='Null author').height, 42) def test_alter_charfield_to_null(self): """ #24307 - Should skip an alter statement on databases with interprets_empty_strings_as_null when changing a CharField to null. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Change the CharField to null old_field = Author._meta.get_field('name') new_field = copy(old_field) new_field.null = True with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_char_field_decrease_length(self): # Create the table. 
with connection.schema_editor() as editor: editor.create_model(Author) Author.objects.create(name='x' * 255) # Change max_length of CharField. old_field = Author._meta.get_field('name') new_field = CharField(max_length=254) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: msg = 'value too long for type character varying(254)' with self.assertRaisesMessage(DataError, msg): editor.alter_field(Author, old_field, new_field, strict=True) @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_field_with_custom_db_type(self): from django.contrib.postgres.fields import ArrayField class Foo(Model): field = ArrayField(CharField(max_length=255)) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('field') new_field = ArrayField(CharField(max_length=16)) new_field.set_attributes_from_name('field') new_field.model = Foo with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) @isolate_apps('schema') @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_array_field_decrease_base_field_length(self): from django.contrib.postgres.fields import ArrayField class ArrayModel(Model): field = ArrayField(CharField(max_length=16)) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(ArrayModel) self.isolated_local_models = [ArrayModel] ArrayModel.objects.create(field=['x' * 16]) old_field = ArrayModel._meta.get_field('field') new_field = ArrayField(CharField(max_length=15)) new_field.set_attributes_from_name('field') new_field.model = ArrayModel with connection.schema_editor() as editor: msg = 'value too long for type character varying(15)' with self.assertRaisesMessage(DataError, msg): editor.alter_field(ArrayModel, old_field, new_field, strict=True) @isolate_apps('schema') @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_array_field_decrease_nested_base_field_length(self): from django.contrib.postgres.fields import ArrayField class ArrayModel(Model): field = ArrayField(ArrayField(CharField(max_length=16))) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(ArrayModel) self.isolated_local_models = [ArrayModel] ArrayModel.objects.create(field=[['x' * 16]]) old_field = ArrayModel._meta.get_field('field') new_field = ArrayField(ArrayField(CharField(max_length=15))) new_field.set_attributes_from_name('field') new_field.model = ArrayModel with connection.schema_editor() as editor: msg = 'value too long for type character varying(15)' with self.assertRaisesMessage(DataError, msg): editor.alter_field(ArrayModel, old_field, new_field, strict=True) def test_alter_textfield_to_null(self): """ #24307 - Should skip an alter statement on databases with interprets_empty_strings_as_null when changing a TextField to null. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Note) # Change the TextField to null old_field = Note._meta.get_field('info') new_field = copy(old_field) new_field.null = True with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) @skipUnlessDBFeature('supports_combined_alters') def test_alter_null_to_not_null_keeping_default(self): """ #23738 - Can change a nullable field with default to non-nullable with the same default. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(AuthorWithDefaultHeight) # Ensure the field is right to begin with columns = self.column_classes(AuthorWithDefaultHeight) self.assertTrue(columns['height'][1][6]) # Alter the height field to NOT NULL keeping the previous default old_field = AuthorWithDefaultHeight._meta.get_field("height") new_field = PositiveIntegerField(default=42) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(AuthorWithDefaultHeight) self.assertFalse(columns['height'][1][6]) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_fk(self): """ Tests altering of FKs """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the field is right to begin with columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) self.assertForeignKeyExists(Book, 'author_id', 'schema_author') # Alter the FK old_field = Book._meta.get_field("author") new_field = ForeignKey(Author, CASCADE, editable=False) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) self.assertForeignKeyExists(Book, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_alter_to_fk(self): """ #24447 - Tests adding a FK constraint for an existing column """ class LocalBook(Model): author = IntegerField() title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalBook] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(LocalBook) # Ensure no FK constraint exists constraints = self.get_constraints(LocalBook._meta.db_table) for details in constraints.values(): if details['foreign_key']: self.fail('Found an unexpected FK constraint to %s' % details['columns']) old_field = LocalBook._meta.get_field("author") new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(LocalBook, old_field, new_field, strict=True) self.assertForeignKeyExists(LocalBook, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_alter_o2o_to_fk(self): """ #24163 - Tests altering of OneToOneField to ForeignKey """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithO2O) # Ensure the field is right to begin with columns = self.column_classes(BookWithO2O) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is unique author = Author.objects.create(name="Joe") BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) with self.assertRaises(IntegrityError): BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) BookWithO2O.objects.all().delete() self.assertForeignKeyExists(BookWithO2O, 'author_id', 
'schema_author') # Alter the OneToOneField to ForeignKey old_field = BookWithO2O._meta.get_field("author") new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(BookWithO2O, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is not unique anymore Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) self.assertForeignKeyExists(Book, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_alter_fk_to_o2o(self): """ #24163 - Tests altering of ForeignKey to OneToOneField """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the field is right to begin with columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is not unique author = Author.objects.create(name="Joe") Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) Book.objects.all().delete() self.assertForeignKeyExists(Book, 'author_id', 'schema_author') # Alter the ForeignKey to OneToOneField old_field = Book._meta.get_field("author") new_field = OneToOneField(Author, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(BookWithO2O) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is unique now BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) with self.assertRaises(IntegrityError): BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author') def test_alter_field_fk_to_o2o(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) expected_fks = 1 if connection.features.supports_foreign_keys else 0 # Check the index is right to begin with. counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1}) old_field = Book._meta.get_field('author') new_field = OneToOneField(Author, CASCADE) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The index on ForeignKey is replaced with a unique constraint for OneToOneField. 
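        # (A unique constraint is backed by its own unique index on the
        # supported backends, so keeping the plain FK index as well would be
        # redundant.)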
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) def test_alter_field_fk_keeps_index(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) expected_fks = 1 if connection.features.supports_foreign_keys else 0 # Check the index is right to begin with. counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1}) old_field = Book._meta.get_field('author') # on_delete changed from CASCADE. new_field = ForeignKey(Author, PROTECT) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The index remains. self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1}) def test_alter_field_o2o_to_fk(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithO2O) expected_fks = 1 if connection.features.supports_foreign_keys else 0 # Check the unique constraint is right to begin with. counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) old_field = BookWithO2O._meta.get_field('author') new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(BookWithO2O, old_field, new_field, strict=True) counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The unique constraint on OneToOneField is replaced with an index for ForeignKey. self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1}) def test_alter_field_o2o_keeps_unique(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithO2O) expected_fks = 1 if connection.features.supports_foreign_keys else 0 # Check the unique constraint is right to begin with. counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) old_field = BookWithO2O._meta.get_field('author') # on_delete changed from CASCADE. new_field = OneToOneField(Author, PROTECT) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(BookWithO2O, old_field, new_field, strict=True) counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The unique constraint remains. 
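        # Only on_delete changed (CASCADE -> PROTECT), which Django enforces
        # in Python rather than in the database, so no constraint should have
        # been touched.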
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) @skipUnlessDBFeature('ignores_table_name_case') def test_alter_db_table_case(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Alter the case of the table old_table_name = Author._meta.db_table with connection.schema_editor() as editor: editor.alter_db_table(Author, old_table_name, old_table_name.upper()) def test_alter_implicit_id_to_explicit(self): """ Should be able to convert an implicit "id" field to an explicit "id" primary key field. """ with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field("id") new_field = AutoField(primary_key=True) new_field.set_attributes_from_name("id") new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # This will fail if DROP DEFAULT is inadvertently executed on this # field which drops the id sequence, at least on PostgreSQL. Author.objects.create(name='Foo') Author.objects.create(name='Bar') def test_alter_autofield_pk_to_bigautofield_pk_sequence_owner(self): """ Converting an implicit PK to BigAutoField(primary_key=True) should keep a sequence owner on PostgreSQL. """ with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) Author.objects.create(name='Foo', pk=1) with connection.cursor() as cursor: sequence_reset_sqls = connection.ops.sequence_reset_sql(no_style(), [Author]) if sequence_reset_sqls: cursor.execute(sequence_reset_sqls[0]) # Fail on PostgreSQL if sequence is missing an owner. self.assertIsNotNone(Author.objects.create(name='Bar')) def test_alter_autofield_pk_to_smallautofield_pk_sequence_owner(self): """ Converting an implicit PK to SmallAutoField(primary_key=True) should keep a sequence owner on PostgreSQL. """ with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field('id') new_field = SmallAutoField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) Author.objects.create(name='Foo', pk=1) with connection.cursor() as cursor: sequence_reset_sqls = connection.ops.sequence_reset_sql(no_style(), [Author]) if sequence_reset_sqls: cursor.execute(sequence_reset_sqls[0]) # Fail on PostgreSQL if sequence is missing an owner. self.assertIsNotNone(Author.objects.create(name='Bar')) def test_alter_int_pk_to_autofield_pk(self): """ Should be able to rename an IntegerField(primary_key=True) to AutoField(primary_key=True). """ with connection.schema_editor() as editor: editor.create_model(IntegerPK) old_field = IntegerPK._meta.get_field('i') new_field = AutoField(primary_key=True) new_field.model = IntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(IntegerPK, old_field, new_field, strict=True) def test_alter_int_pk_to_bigautofield_pk(self): """ Should be able to rename an IntegerField(primary_key=True) to BigAutoField(primary_key=True). 
""" with connection.schema_editor() as editor: editor.create_model(IntegerPK) old_field = IntegerPK._meta.get_field('i') new_field = BigAutoField(primary_key=True) new_field.model = IntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(IntegerPK, old_field, new_field, strict=True) @isolate_apps('schema') def test_alter_smallint_pk_to_smallautofield_pk(self): """ Should be able to rename an SmallIntegerField(primary_key=True) to SmallAutoField(primary_key=True). """ class SmallIntegerPK(Model): i = SmallIntegerField(primary_key=True) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(SmallIntegerPK) self.isolated_local_models = [SmallIntegerPK] old_field = SmallIntegerPK._meta.get_field('i') new_field = SmallAutoField(primary_key=True) new_field.model = SmallIntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(SmallIntegerPK, old_field, new_field, strict=True) def test_alter_int_pk_to_int_unique(self): """ Should be able to rename an IntegerField(primary_key=True) to IntegerField(unique=True). """ with connection.schema_editor() as editor: editor.create_model(IntegerPK) # Delete the old PK old_field = IntegerPK._meta.get_field('i') new_field = IntegerField(unique=True) new_field.model = IntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(IntegerPK, old_field, new_field, strict=True) # The primary key constraint is gone. Result depends on database: # 'id' for SQLite, None for others (must not be 'i'). self.assertIn(self.get_primary_key(IntegerPK._meta.db_table), ('id', None)) # Set up a model class as it currently stands. The original IntegerPK # class is now out of date and some backends make use of the whole # model class when modifying a field (such as sqlite3 when remaking a # table) so an outdated model class leads to incorrect results. class Transitional(Model): i = IntegerField(unique=True) j = IntegerField(unique=True) class Meta: app_label = 'schema' apps = new_apps db_table = 'INTEGERPK' # model requires a new PK old_field = Transitional._meta.get_field('j') new_field = IntegerField(primary_key=True) new_field.model = Transitional new_field.set_attributes_from_name('j') with connection.schema_editor() as editor: editor.alter_field(Transitional, old_field, new_field, strict=True) # Create a model class representing the updated model. class IntegerUnique(Model): i = IntegerField(unique=True) j = IntegerField(primary_key=True) class Meta: app_label = 'schema' apps = new_apps db_table = 'INTEGERPK' # Ensure unique constraint works. 
IntegerUnique.objects.create(i=1, j=1) with self.assertRaises(IntegrityError): IntegerUnique.objects.create(i=1, j=2) def test_rename(self): """ Tests simple altering of fields """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) self.assertNotIn("display_name", columns) # Alter the name field's name old_field = Author._meta.get_field("name") new_field = CharField(max_length=254) new_field.set_attributes_from_name("display_name") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['display_name'][0], connection.features.introspected_field_types['CharField']) self.assertNotIn("name", columns) @isolate_apps('schema') def test_rename_referenced_field(self): class Author(Model): name = CharField(max_length=255, unique=True) class Meta: app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE, to_field='name') class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('renamed') with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: editor.alter_field(Author, Author._meta.get_field('name'), new_field) # Ensure the foreign key reference was updated. self.assertForeignKeyExists(Book, 'author_id', 'schema_author', 'renamed') @skipIfDBFeature('interprets_empty_strings_as_nulls') def test_rename_keep_null_status(self): """ Renaming a field shouldn't affect the not null status. 
""" with connection.schema_editor() as editor: editor.create_model(Note) with self.assertRaises(IntegrityError): Note.objects.create(info=None) old_field = Note._meta.get_field("info") new_field = TextField() new_field.set_attributes_from_name("detail_info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) columns = self.column_classes(Note) self.assertEqual(columns['detail_info'][0], "TextField") self.assertNotIn("info", columns) with self.assertRaises(IntegrityError): NoteRename.objects.create(detail_info=None) def _test_m2m_create(self, M2MFieldClass): """ Tests M2M fields on models during creation """ class LocalBookWithM2M(Model): author = ForeignKey(Author, CASCADE) title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() tags = M2MFieldClass("TagM2MTest", related_name="books") class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalBookWithM2M] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(TagM2MTest) editor.create_model(LocalBookWithM2M) # Ensure there is now an m2m table there columns = self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through) self.assertEqual(columns['tagm2mtest_id'][0], connection.features.introspected_field_types['IntegerField']) def test_m2m_create(self): self._test_m2m_create(ManyToManyField) def test_m2m_create_custom(self): self._test_m2m_create(CustomManyToManyField) def test_m2m_create_inherited(self): self._test_m2m_create(InheritedManyToManyField) def _test_m2m_create_through(self, M2MFieldClass): """ Tests M2M fields on models during creation with through models """ class LocalTagThrough(Model): book = ForeignKey("schema.LocalBookWithM2MThrough", CASCADE) tag = ForeignKey("schema.TagM2MTest", CASCADE) class Meta: app_label = 'schema' apps = new_apps class LocalBookWithM2MThrough(Model): tags = M2MFieldClass("TagM2MTest", related_name="books", through=LocalTagThrough) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalTagThrough, LocalBookWithM2MThrough] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalTagThrough) editor.create_model(TagM2MTest) editor.create_model(LocalBookWithM2MThrough) # Ensure there is now an m2m table there columns = self.column_classes(LocalTagThrough) self.assertEqual(columns['book_id'][0], connection.features.introspected_field_types['IntegerField']) self.assertEqual(columns['tag_id'][0], connection.features.introspected_field_types['IntegerField']) def test_m2m_create_through(self): self._test_m2m_create_through(ManyToManyField) def test_m2m_create_through_custom(self): self._test_m2m_create_through(CustomManyToManyField) def test_m2m_create_through_inherited(self): self._test_m2m_create_through(InheritedManyToManyField) def _test_m2m(self, M2MFieldClass): """ Tests adding/removing M2M fields on models """ class LocalAuthorWithM2M(Model): name = CharField(max_length=255) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorWithM2M] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalAuthorWithM2M) editor.create_model(TagM2MTest) # Create an M2M field new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors") new_field.contribute_to_class(LocalAuthorWithM2M, "tags") # Ensure there's no m2m table there with self.assertRaises(DatabaseError): self.column_classes(new_field.remote_field.through) # Add the field with 
connection.schema_editor() as editor: editor.add_field(LocalAuthorWithM2M, new_field) # Ensure there is now an m2m table there columns = self.column_classes(new_field.remote_field.through) self.assertEqual(columns['tagm2mtest_id'][0], connection.features.introspected_field_types['IntegerField']) # "Alter" the field. This should not rename the DB table to itself. with connection.schema_editor() as editor: editor.alter_field(LocalAuthorWithM2M, new_field, new_field, strict=True) # Remove the M2M table again with connection.schema_editor() as editor: editor.remove_field(LocalAuthorWithM2M, new_field) # Ensure there's no m2m table there with self.assertRaises(DatabaseError): self.column_classes(new_field.remote_field.through) # Make sure the model state is coherent with the table one now that # we've removed the tags field. opts = LocalAuthorWithM2M._meta opts.local_many_to_many.remove(new_field) del new_apps.all_models['schema'][new_field.remote_field.through._meta.model_name] opts._expire_cache() def test_m2m(self): self._test_m2m(ManyToManyField) def test_m2m_custom(self): self._test_m2m(CustomManyToManyField) def test_m2m_inherited(self): self._test_m2m(InheritedManyToManyField) def _test_m2m_through_alter(self, M2MFieldClass): """ Tests altering M2Ms with explicit through models (should no-op) """ class LocalAuthorTag(Model): author = ForeignKey("schema.LocalAuthorWithM2MThrough", CASCADE) tag = ForeignKey("schema.TagM2MTest", CASCADE) class Meta: app_label = 'schema' apps = new_apps class LocalAuthorWithM2MThrough(Model): name = CharField(max_length=255) tags = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorTag, LocalAuthorWithM2MThrough] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalAuthorTag) editor.create_model(LocalAuthorWithM2MThrough) editor.create_model(TagM2MTest) # Ensure the m2m table is there self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3) # "Alter" the field's blankness. This should not actually do anything. 
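        # Sketch of why this is a no-op (editorial note, not part of the
        # original suite): with an explicit through= model the intermediary
        # table is owned by LocalAuthorTag, so alter_field() on the M2M has no
        # implicit through table to create, rename, or drop -- roughly, no
        # CREATE/ALTER/DROP TABLE statements should be emitted for it here.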
old_field = LocalAuthorWithM2MThrough._meta.get_field("tags") new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag) new_field.contribute_to_class(LocalAuthorWithM2MThrough, "tags") with connection.schema_editor() as editor: editor.alter_field(LocalAuthorWithM2MThrough, old_field, new_field, strict=True) # Ensure the m2m table is still there self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3) def test_m2m_through_alter(self): self._test_m2m_through_alter(ManyToManyField) def test_m2m_through_alter_custom(self): self._test_m2m_through_alter(CustomManyToManyField) def test_m2m_through_alter_inherited(self): self._test_m2m_through_alter(InheritedManyToManyField) def _test_m2m_repoint(self, M2MFieldClass): """ Tests repointing M2M fields """ class LocalBookWithM2M(Model): author = ForeignKey(Author, CASCADE) title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() tags = M2MFieldClass("TagM2MTest", related_name="books") class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalBookWithM2M] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(LocalBookWithM2M) editor.create_model(TagM2MTest) editor.create_model(UniqueTest) # Ensure the M2M exists and points to TagM2MTest if connection.features.supports_foreign_keys: self.assertForeignKeyExists( LocalBookWithM2M._meta.get_field("tags").remote_field.through, 'tagm2mtest_id', 'schema_tagm2mtest', ) # Repoint the M2M old_field = LocalBookWithM2M._meta.get_field("tags") new_field = M2MFieldClass(UniqueTest) new_field.contribute_to_class(LocalBookWithM2M, "uniques") with connection.schema_editor() as editor: editor.alter_field(LocalBookWithM2M, old_field, new_field, strict=True) # Ensure old M2M is gone with self.assertRaises(DatabaseError): self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through) # This model looks like the new model and is used for teardown. opts = LocalBookWithM2M._meta opts.local_many_to_many.remove(old_field) # Ensure the new M2M exists and points to UniqueTest if connection.features.supports_foreign_keys: self.assertForeignKeyExists(new_field.remote_field.through, 'uniquetest_id', 'schema_uniquetest') def test_m2m_repoint(self): self._test_m2m_repoint(ManyToManyField) def test_m2m_repoint_custom(self): self._test_m2m_repoint(CustomManyToManyField) def test_m2m_repoint_inherited(self): self._test_m2m_repoint(InheritedManyToManyField) @isolate_apps('schema') def test_m2m_rename_field_in_target_model(self): class LocalTagM2MTest(Model): title = CharField(max_length=255) class Meta: app_label = 'schema' class LocalM2M(Model): tags = ManyToManyField(LocalTagM2MTest) class Meta: app_label = 'schema' # Create the tables. with connection.schema_editor() as editor: editor.create_model(LocalM2M) editor.create_model(LocalTagM2MTest) self.isolated_local_models = [LocalM2M, LocalTagM2MTest] # Ensure the m2m table is there. self.assertEqual(len(self.column_classes(LocalM2M)), 1) # Alter a field in LocalTagM2MTest. old_field = LocalTagM2MTest._meta.get_field('title') new_field = CharField(max_length=254) new_field.contribute_to_class(LocalTagM2MTest, 'title1') # @isolate_apps() and inner models are needed to have the model # relations populated, otherwise this doesn't act as a regression test. 
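        # Editorial note (an interpretation, not asserted upstream): the single
        # entry in related_objects is assumed to be the reverse relation from
        # LocalM2M.tags; if the relation tree were not populated, alter_field()
        # below would never traverse the M2M and the rename could not exercise
        # the regression this test guards against.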
self.assertEqual(len(new_field.model._meta.related_objects), 1) with connection.schema_editor() as editor: editor.alter_field(LocalTagM2MTest, old_field, new_field, strict=True) # Ensure the m2m table is still there. self.assertEqual(len(self.column_classes(LocalM2M)), 1) @skipUnlessDBFeature('supports_column_check_constraints', 'can_introspect_check_constraints') def test_check_constraints(self): """ Tests creating/deleting CHECK constraints """ # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the constraint exists constraints = self.get_constraints(Author._meta.db_table) if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()): self.fail("No check constraint for height found") # Alter the column to remove it old_field = Author._meta.get_field("height") new_field = IntegerField(null=True, blank=True) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) constraints = self.get_constraints(Author._meta.db_table) for details in constraints.values(): if details['columns'] == ["height"] and details['check']: self.fail("Check constraint for height found") # Alter the column to re-add it new_field2 = Author._meta.get_field("height") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) constraints = self.get_constraints(Author._meta.db_table) if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()): self.fail("No check constraint for height found") @skipUnlessDBFeature('supports_column_check_constraints', 'can_introspect_check_constraints') def test_remove_field_check_does_not_remove_meta_constraints(self): with connection.schema_editor() as editor: editor.create_model(Author) # Add the custom check constraint constraint = CheckConstraint(check=Q(height__gte=0), name='author_height_gte_0_check') custom_constraint_name = constraint.name Author._meta.constraints = [constraint] with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) # Ensure the constraints exist constraints = self.get_constraints(Author._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Alter the column to remove field check old_field = Author._meta.get_field('height') new_field = IntegerField(null=True, blank=True) new_field.set_attributes_from_name('height') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) constraints = self.get_constraints(Author._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 0) # Alter the column to re-add field check new_field2 = Author._meta.get_field('height') with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) constraints = self.get_constraints(Author._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['height'] and details['check'] and name 
!= custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Drop the check constraint with connection.schema_editor() as editor: Author._meta.constraints = [] editor.remove_constraint(Author, constraint) def test_unique(self): """ Tests removing and adding unique constraints to a single column. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure the field is unique to begin with Tag.objects.create(title="foo", slug="foo") with self.assertRaises(IntegrityError): Tag.objects.create(title="bar", slug="foo") Tag.objects.all().delete() # Alter the slug field to be non-unique old_field = Tag._meta.get_field("slug") new_field = SlugField(unique=False) new_field.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_field(Tag, old_field, new_field, strict=True) # Ensure the field is no longer unique Tag.objects.create(title="foo", slug="foo") Tag.objects.create(title="bar", slug="foo") Tag.objects.all().delete() # Alter the slug field to be unique new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_field(Tag, new_field, new_field2, strict=True) # Ensure the field is unique again Tag.objects.create(title="foo", slug="foo") with self.assertRaises(IntegrityError): Tag.objects.create(title="bar", slug="foo") Tag.objects.all().delete() # Rename the field new_field3 = SlugField(unique=True) new_field3.set_attributes_from_name("slug2") with connection.schema_editor() as editor: editor.alter_field(Tag, new_field2, new_field3, strict=True) # Ensure the field is still unique TagUniqueRename.objects.create(title="foo", slug2="foo") with self.assertRaises(IntegrityError): TagUniqueRename.objects.create(title="bar", slug2="foo") Tag.objects.all().delete() def test_unique_name_quoting(self): old_table_name = TagUniqueRename._meta.db_table try: with connection.schema_editor() as editor: editor.create_model(TagUniqueRename) editor.alter_db_table(TagUniqueRename, old_table_name, 'unique-table') TagUniqueRename._meta.db_table = 'unique-table' # This fails if the unique index name isn't quoted. editor.alter_unique_together(TagUniqueRename, [], (('title', 'slug2'),)) finally: TagUniqueRename._meta.db_table = old_table_name @isolate_apps('schema') @unittest.skipIf(connection.vendor == 'sqlite', 'SQLite naively remakes the table on field alteration.') @skipUnlessDBFeature('supports_foreign_keys') def test_unique_no_unnecessary_fk_drops(self): """ If AlterField isn't selective about dropping foreign key constraints when modifying a field with a unique constraint, the AlterField incorrectly drops and recreates the Book.author foreign key even though it doesn't restrict the field being changed (#29193). """ class Author(Model): name = CharField(max_length=254, unique=True) class Meta: app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) new_field = CharField(max_length=255, unique=True) new_field.model = Author new_field.set_attributes_from_name('name') with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm: with connection.schema_editor() as editor: editor.alter_field(Author, Author._meta.get_field('name'), new_field) # One SQL statement is executed to alter the field. 
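        # For illustration only (backend-dependent, an assumption rather than
        # something the test asserts): on PostgreSQL the single logged
        # statement is roughly
        #   ALTER TABLE "schema_author" ALTER COLUMN "name" TYPE varchar(255);
        # crucially with no accompanying DROP CONSTRAINT/ADD CONSTRAINT pair
        # for the Book.author foreign key.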
self.assertEqual(len(cm.records), 1) @isolate_apps('schema') @unittest.skipIf(connection.vendor == 'sqlite', 'SQLite remakes the table on field alteration.') def test_unique_and_reverse_m2m(self): """ AlterField can modify a unique field when there's a reverse M2M relation on the model. """ class Tag(Model): title = CharField(max_length=255) slug = SlugField(unique=True) class Meta: app_label = 'schema' class Book(Model): tags = ManyToManyField(Tag, related_name='books') class Meta: app_label = 'schema' self.isolated_local_models = [Book._meta.get_field('tags').remote_field.through] with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(Book) new_field = SlugField(max_length=75, unique=True) new_field.model = Tag new_field.set_attributes_from_name('slug') with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm: with connection.schema_editor() as editor: editor.alter_field(Tag, Tag._meta.get_field('slug'), new_field) # One SQL statement is executed to alter the field. self.assertEqual(len(cm.records), 1) # Ensure that the field is still unique. Tag.objects.create(title='foo', slug='foo') with self.assertRaises(IntegrityError): Tag.objects.create(title='bar', slug='foo') @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_remove_field_unique_does_not_remove_meta_constraints(self): with connection.schema_editor() as editor: editor.create_model(AuthorWithUniqueName) # Add the custom unique constraint constraint = UniqueConstraint(fields=['name'], name='author_name_uniq') custom_constraint_name = constraint.name AuthorWithUniqueName._meta.constraints = [constraint] with connection.schema_editor() as editor: editor.add_constraint(AuthorWithUniqueName, constraint) # Ensure the constraints exist constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Alter the column to remove field uniqueness old_field = AuthorWithUniqueName._meta.get_field('name') new_field = CharField(max_length=255) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(AuthorWithUniqueName, old_field, new_field, strict=True) constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 0) # Alter the column to re-add field uniqueness new_field2 = AuthorWithUniqueName._meta.get_field('name') with connection.schema_editor() as editor: editor.alter_field(AuthorWithUniqueName, new_field, new_field2, strict=True) constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Drop the unique constraint with connection.schema_editor() as editor: AuthorWithUniqueName._meta.constraints = [] editor.remove_constraint(AuthorWithUniqueName, constraint) def test_unique_together(self): """ Tests removing and adding unique_together 
constraints on a model. """ # Create the table with connection.schema_editor() as editor: editor.create_model(UniqueTest) # Ensure the fields are unique to begin with UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.create(year=2011, slug="foo") UniqueTest.objects.create(year=2011, slug="bar") with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.all().delete() # Alter the model to its non-unique-together companion with connection.schema_editor() as editor: editor.alter_unique_together(UniqueTest, UniqueTest._meta.unique_together, []) # Ensure the fields are no longer unique UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.all().delete() # Alter it back new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_unique_together(UniqueTest, [], UniqueTest._meta.unique_together) # Ensure the fields are unique again UniqueTest.objects.create(year=2012, slug="foo") with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.all().delete() def test_unique_together_with_fk(self): """ Tests removing and adding unique_together constraints that include a foreign key. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the fields are unique to begin with self.assertEqual(Book._meta.unique_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_unique_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_unique_together(Book, [['author', 'title']], []) def test_unique_together_with_fk_with_existing_index(self): """ Tests removing and adding unique_together constraints that include a foreign key, where the foreign key is added after the model is created. 
""" # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithoutAuthor) new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name('author') editor.add_field(BookWithoutAuthor, new_field) # Ensure the fields aren't unique to begin with self.assertEqual(Book._meta.unique_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_unique_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_unique_together(Book, [['author', 'title']], []) @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_remove_unique_together_does_not_remove_meta_constraints(self): with connection.schema_editor() as editor: editor.create_model(AuthorWithUniqueNameAndBirthday) # Add the custom unique constraint constraint = UniqueConstraint(fields=['name', 'birthday'], name='author_name_birthday_uniq') custom_constraint_name = constraint.name AuthorWithUniqueNameAndBirthday._meta.constraints = [constraint] with connection.schema_editor() as editor: editor.add_constraint(AuthorWithUniqueNameAndBirthday, constraint) # Ensure the constraints exist constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Remove unique together unique_together = AuthorWithUniqueNameAndBirthday._meta.unique_together with connection.schema_editor() as editor: editor.alter_unique_together(AuthorWithUniqueNameAndBirthday, unique_together, []) constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 0) # Re-add unique together with connection.schema_editor() as editor: editor.alter_unique_together(AuthorWithUniqueNameAndBirthday, [], unique_together) constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Drop the unique constraint with connection.schema_editor() as editor: AuthorWithUniqueNameAndBirthday._meta.constraints = [] editor.remove_constraint(AuthorWithUniqueNameAndBirthday, constraint) def test_index_together(self): """ Tests removing and adding index_together constraints on a model. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure there's no index on the year/slug columns first self.assertIs( any( c["index"] for c in self.get_constraints("schema_tag").values() if c['columns'] == ["slug", "title"] ), False, ) # Alter the model to add an index with connection.schema_editor() as editor: editor.alter_index_together(Tag, [], [("slug", "title")]) # Ensure there is now an index self.assertIs( any( c["index"] for c in self.get_constraints("schema_tag").values() if c['columns'] == ["slug", "title"] ), True, ) # Alter it back new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_index_together(Tag, [("slug", "title")], []) # Ensure there's no index self.assertIs( any( c["index"] for c in self.get_constraints("schema_tag").values() if c['columns'] == ["slug", "title"] ), False, ) def test_index_together_with_fk(self): """ Tests removing and adding index_together constraints that include a foreign key. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the fields are unique to begin with self.assertEqual(Book._meta.index_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_index_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_index_together(Book, [['author', 'title']], []) def test_create_index_together(self): """ Tests creating models with index_together already defined """ # Create the table with connection.schema_editor() as editor: editor.create_model(TagIndexed) # Ensure there is an index self.assertIs( any( c["index"] for c in self.get_constraints("schema_tagindexed").values() if c['columns'] == ["slug", "title"] ), True, ) @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_remove_index_together_does_not_remove_meta_indexes(self): with connection.schema_editor() as editor: editor.create_model(AuthorWithIndexedNameAndBirthday) # Add the custom index index = Index(fields=['name', 'birthday'], name='author_name_birthday_idx') custom_index_name = index.name AuthorWithIndexedNameAndBirthday._meta.indexes = [index] with connection.schema_editor() as editor: editor.add_index(AuthorWithIndexedNameAndBirthday, index) # Ensure the indexes exist constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table) self.assertIn(custom_index_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name ] self.assertEqual(len(other_constraints), 1) # Remove index together index_together = AuthorWithIndexedNameAndBirthday._meta.index_together with connection.schema_editor() as editor: editor.alter_index_together(AuthorWithIndexedNameAndBirthday, index_together, []) constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table) self.assertIn(custom_index_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name ] self.assertEqual(len(other_constraints), 0) # Re-add index together with connection.schema_editor() as editor: editor.alter_index_together(AuthorWithIndexedNameAndBirthday, [], index_together) constraints = 
self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table) self.assertIn(custom_index_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name ] self.assertEqual(len(other_constraints), 1) # Drop the index with connection.schema_editor() as editor: AuthorWithIndexedNameAndBirthday._meta.indexes = [] editor.remove_index(AuthorWithIndexedNameAndBirthday, index) @isolate_apps('schema') def test_db_table(self): """ Tests renaming of the table """ class Author(Model): name = CharField(max_length=255) class Meta: app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE) class Meta: app_label = 'schema' # Create the table and one referring it. with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the table is there to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) # Alter the table with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: editor.alter_db_table(Author, "schema_author", "schema_otherauthor") # Ensure the table is there afterwards Author._meta.db_table = "schema_otherauthor" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) # Ensure the foreign key reference was updated self.assertForeignKeyExists(Book, "author_id", "schema_otherauthor") # Alter the table again with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: editor.alter_db_table(Author, "schema_otherauthor", "schema_author") # Ensure the table is still there Author._meta.db_table = "schema_author" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) def test_add_remove_index(self): """ Tests index addition and removal """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the table is there and has no index self.assertNotIn('title', self.get_indexes(Author._meta.db_table)) # Add the index index = Index(fields=['name'], name='author_title_idx') with connection.schema_editor() as editor: editor.add_index(Author, index) self.assertIn('name', self.get_indexes(Author._meta.db_table)) # Drop the index with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn('name', self.get_indexes(Author._meta.db_table)) def test_remove_db_index_doesnt_remove_custom_indexes(self): """ Changing db_index to False doesn't remove indexes from Meta.indexes. 
""" with connection.schema_editor() as editor: editor.create_model(AuthorWithIndexedName) # Ensure the table has its index self.assertIn('name', self.get_indexes(AuthorWithIndexedName._meta.db_table)) # Add the custom index index = Index(fields=['-name'], name='author_name_idx') author_index_name = index.name with connection.schema_editor() as editor: db_index_name = editor._create_index_name( table_name=AuthorWithIndexedName._meta.db_table, column_names=('name',), ) try: AuthorWithIndexedName._meta.indexes = [index] with connection.schema_editor() as editor: editor.add_index(AuthorWithIndexedName, index) old_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table) self.assertIn(author_index_name, old_constraints) self.assertIn(db_index_name, old_constraints) # Change name field to db_index=False old_field = AuthorWithIndexedName._meta.get_field('name') new_field = CharField(max_length=255) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(AuthorWithIndexedName, old_field, new_field, strict=True) new_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table) self.assertNotIn(db_index_name, new_constraints) # The index from Meta.indexes is still in the database. self.assertIn(author_index_name, new_constraints) # Drop the index with connection.schema_editor() as editor: editor.remove_index(AuthorWithIndexedName, index) finally: AuthorWithIndexedName._meta.indexes = [] def test_order_index(self): """ Indexes defined with ordering (ASC/DESC) defined on column """ with connection.schema_editor() as editor: editor.create_model(Author) # The table doesn't have an index self.assertNotIn('title', self.get_indexes(Author._meta.db_table)) index_name = 'author_name_idx' # Add the index index = Index(fields=['name', '-weight'], name=index_name) with connection.schema_editor() as editor: editor.add_index(Author, index) if connection.features.supports_index_column_ordering: self.assertIndexOrder(Author._meta.db_table, index_name, ['ASC', 'DESC']) # Drop the index with connection.schema_editor() as editor: editor.remove_index(Author, index) def test_indexes(self): """ Tests creation/altering of indexes """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the table is there and has the right index self.assertIn( "title", self.get_indexes(Book._meta.db_table), ) # Alter to remove the index old_field = Book._meta.get_field("title") new_field = CharField(max_length=100, db_index=False) new_field.set_attributes_from_name("title") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Ensure the table is there and has no index self.assertNotIn( "title", self.get_indexes(Book._meta.db_table), ) # Alter to re-add the index new_field2 = Book._meta.get_field("title") with connection.schema_editor() as editor: editor.alter_field(Book, new_field, new_field2, strict=True) # Ensure the table is there and has the index again self.assertIn( "title", self.get_indexes(Book._meta.db_table), ) # Add a unique column, verify that creates an implicit index new_field3 = BookWithSlug._meta.get_field("slug") with connection.schema_editor() as editor: editor.add_field(Book, new_field3) self.assertIn( "slug", self.get_uniques(Book._meta.db_table), ) # Remove the unique, check the index goes with it new_field4 = CharField(max_length=20, unique=False) new_field4.set_attributes_from_name("slug") with connection.schema_editor() 
as editor: editor.alter_field(BookWithSlug, new_field3, new_field4, strict=True) self.assertNotIn( "slug", self.get_uniques(Book._meta.db_table), ) def test_text_field_with_db_index(self): with connection.schema_editor() as editor: editor.create_model(AuthorTextFieldWithIndex) # The text_field index is present if the database supports it. assertion = self.assertIn if connection.features.supports_index_on_text_field else self.assertNotIn assertion('text_field', self.get_indexes(AuthorTextFieldWithIndex._meta.db_table)) def test_primary_key(self): """ Tests altering of the primary key """ # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure the table is there and has the right PK self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'id') # Alter to change the PK id_field = Tag._meta.get_field("id") old_field = Tag._meta.get_field("slug") new_field = SlugField(primary_key=True) new_field.set_attributes_from_name("slug") new_field.model = Tag with connection.schema_editor() as editor: editor.remove_field(Tag, id_field) editor.alter_field(Tag, old_field, new_field) # Ensure the PK changed self.assertNotIn( 'id', self.get_indexes(Tag._meta.db_table), ) self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'slug') def test_context_manager_exit(self): """ Ensures transaction is correctly closed when an error occurs inside a SchemaEditor context. """ class SomeError(Exception): pass try: with connection.schema_editor(): raise SomeError except SomeError: self.assertFalse(connection.in_atomic_block) @skipIfDBFeature('can_rollback_ddl') def test_unsupported_transactional_ddl_disallowed(self): message = ( "Executing DDL statements while in a transaction on databases " "that can't perform a rollback is prohibited." ) with atomic(), connection.schema_editor() as editor: with self.assertRaisesMessage(TransactionManagementError, message): editor.execute(editor.sql_create_table % {'table': 'foo', 'definition': ''}) @skipUnlessDBFeature('supports_foreign_keys') def test_foreign_key_index_long_names_regression(self): """ Regression test for #21497. Only affects databases that supports foreign keys. """ # Create the table with connection.schema_editor() as editor: editor.create_model(AuthorWithEvenLongerName) editor.create_model(BookWithLongName) # Find the properly shortened column name column_name = connection.ops.quote_name("author_foreign_key_with_really_long_field_name_id") column_name = column_name[1:-1].lower() # unquote, and, for Oracle, un-upcase # Ensure the table is there and has an index on the column self.assertIn( column_name, self.get_indexes(BookWithLongName._meta.db_table), ) @skipUnlessDBFeature('supports_foreign_keys') def test_add_foreign_key_long_names(self): """ Regression test for #23009. Only affects databases that supports foreign keys. 
""" # Create the initial tables with connection.schema_editor() as editor: editor.create_model(AuthorWithEvenLongerName) editor.create_model(BookWithLongName) # Add a second FK, this would fail due to long ref name before the fix new_field = ForeignKey(AuthorWithEvenLongerName, CASCADE, related_name="something") new_field.set_attributes_from_name("author_other_really_long_named_i_mean_so_long_fk") with connection.schema_editor() as editor: editor.add_field(BookWithLongName, new_field) @isolate_apps('schema') @skipUnlessDBFeature('supports_foreign_keys') def test_add_foreign_key_quoted_db_table(self): class Author(Model): class Meta: db_table = '"table_author_double_quoted"' app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) if connection.vendor == 'mysql': self.assertForeignKeyExists(Book, 'author_id', '"table_author_double_quoted"') else: self.assertForeignKeyExists(Book, 'author_id', 'table_author_double_quoted') def test_add_foreign_object(self): with connection.schema_editor() as editor: editor.create_model(BookForeignObj) new_field = ForeignObject(Author, on_delete=CASCADE, from_fields=['author_id'], to_fields=['id']) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.add_field(BookForeignObj, new_field) def test_creation_deletion_reserved_names(self): """ Tries creating a model's table, and then deleting it when it has a SQL reserved name. """ # Create the table with connection.schema_editor() as editor: try: editor.create_model(Thing) except OperationalError as e: self.fail("Errors when applying initial migration for a model " "with a table named after an SQL reserved word: %s" % e) # The table is there list(Thing.objects.all()) # Clean up that table with connection.schema_editor() as editor: editor.delete_model(Thing) # The table is gone with self.assertRaises(DatabaseError): list(Thing.objects.all()) def test_remove_constraints_capital_letters(self): """ #23065 - Constraint names must be quoted if they contain capital letters. 
""" def get_field(*args, field_class=IntegerField, **kwargs): kwargs['db_column'] = "CamelCase" field = field_class(*args, **kwargs) field.set_attributes_from_name("CamelCase") return field model = Author field = get_field() table = model._meta.db_table column = field.column identifier_converter = connection.introspection.identifier_converter with connection.schema_editor() as editor: editor.create_model(model) editor.add_field(model, field) constraint_name = 'CamelCaseIndex' expected_constraint_name = identifier_converter(constraint_name) editor.execute( editor.sql_create_index % { "table": editor.quote_name(table), "name": editor.quote_name(constraint_name), "using": "", "columns": editor.quote_name(column), "extra": "", "condition": "", "include": "", } ) self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) editor.alter_field(model, get_field(db_index=True), field, strict=True) self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) constraint_name = 'CamelCaseUniqConstraint' expected_constraint_name = identifier_converter(constraint_name) editor.execute(editor._create_unique_sql(model, [field.column], constraint_name)) self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) editor.alter_field(model, get_field(unique=True), field, strict=True) self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) if editor.sql_create_fk: constraint_name = 'CamelCaseFKConstraint' expected_constraint_name = identifier_converter(constraint_name) editor.execute( editor.sql_create_fk % { "table": editor.quote_name(table), "name": editor.quote_name(constraint_name), "column": editor.quote_name(column), "to_table": editor.quote_name(table), "to_column": editor.quote_name(model._meta.auto_field.column), "deferrable": connection.ops.deferrable_sql(), } ) self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) editor.alter_field(model, get_field(Author, CASCADE, field_class=ForeignKey), field, strict=True) self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) def test_add_field_use_effective_default(self): """ #23987 - effective_default() should be used as the field default when adding a new field. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no surname field columns = self.column_classes(Author) self.assertNotIn("surname", columns) # Create a row Author.objects.create(name='Anonymous1') # Add new CharField to ensure default will be used from effective_default new_field = CharField(max_length=15, blank=True) new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure field was added with the right default with connection.cursor() as cursor: cursor.execute("SELECT surname FROM schema_author;") item = cursor.fetchall()[0] self.assertEqual(item[0], None if connection.features.interprets_empty_strings_as_nulls else '') def test_add_field_default_dropped(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no surname field columns = self.column_classes(Author) self.assertNotIn("surname", columns) # Create a row Author.objects.create(name='Anonymous1') # Add new CharField with a default new_field = CharField(max_length=15, blank=True, default='surname default') new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure field was added with the right default with connection.cursor() as cursor: cursor.execute("SELECT surname FROM schema_author;") item = cursor.fetchall()[0] self.assertEqual(item[0], 'surname default') # And that the default is no longer set in the database. field = next( f for f in connection.introspection.get_table_description(cursor, "schema_author") if f.name == "surname" ) if connection.features.can_introspect_default: self.assertIsNone(field.default) def test_alter_field_default_dropped(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Create a row Author.objects.create(name='Anonymous1') self.assertIsNone(Author.objects.get().height) old_field = Author._meta.get_field('height') # The default from the new field is used in updating existing rows. new_field = IntegerField(blank=True, default=42) new_field.set_attributes_from_name('height') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual(Author.objects.get().height, 42) # The database default should be removed. with connection.cursor() as cursor: field = next( f for f in connection.introspection.get_table_description(cursor, "schema_author") if f.name == "height" ) if connection.features.can_introspect_default: self.assertIsNone(field.default) @unittest.skipIf(connection.vendor == 'sqlite', 'SQLite naively remakes the table on field alteration.') def test_alter_field_default_doesnt_perform_queries(self): """ No queries are performed if a field default changes and the field's not changing from null to non-null. 
""" with connection.schema_editor() as editor: editor.create_model(AuthorWithDefaultHeight) old_field = AuthorWithDefaultHeight._meta.get_field('height') new_default = old_field.default * 2 new_field = PositiveIntegerField(null=True, blank=True, default=new_default) new_field.set_attributes_from_name('height') with connection.schema_editor() as editor, self.assertNumQueries(0): editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True) def test_add_textfield_unhashable_default(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Create a row Author.objects.create(name='Anonymous1') # Create a field that has an unhashable default new_field = TextField(default={}) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.add_field(Author, new_field) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_add_indexed_charfield(self): field = CharField(max_length=255, db_index=True) field.set_attributes_from_name('nom_de_plume') with connection.schema_editor() as editor: editor.create_model(Author) editor.add_field(Author, field) # Should create two indexes; one for like operator. self.assertEqual( self.get_constraints_for_column(Author, 'nom_de_plume'), ['schema_author_nom_de_plume_7570a851', 'schema_author_nom_de_plume_7570a851_like'], ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_add_unique_charfield(self): field = CharField(max_length=255, unique=True) field.set_attributes_from_name('nom_de_plume') with connection.schema_editor() as editor: editor.create_model(Author) editor.add_field(Author, field) # Should create two indexes; one for like operator. self.assertEqual( self.get_constraints_for_column(Author, 'nom_de_plume'), ['schema_author_nom_de_plume_7570a851_like', 'schema_author_nom_de_plume_key'] ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_index_to_charfield(self): # Create the table and verify no initial indexes. with connection.schema_editor() as editor: editor.create_model(Author) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) # Alter to add db_index=True and create 2 indexes. old_field = Author._meta.get_field('name') new_field = CharField(max_length=255, db_index=True) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Author, 'name'), ['schema_author_name_1fbc5617', 'schema_author_name_1fbc5617_like'] ) # Remove db_index=True to drop both indexes. with connection.schema_editor() as editor: editor.alter_field(Author, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_unique_to_charfield(self): # Create the table and verify no initial indexes. with connection.schema_editor() as editor: editor.create_model(Author) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) # Alter to add unique=True and create 2 indexes. 
old_field = Author._meta.get_field('name') new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Author, 'name'), ['schema_author_name_1fbc5617_like', 'schema_author_name_1fbc5617_uniq'] ) # Remove unique=True to drop both indexes. with connection.schema_editor() as editor: editor.alter_field(Author, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_index_to_textfield(self): # Create the table and verify no initial indexes. with connection.schema_editor() as editor: editor.create_model(Note) self.assertEqual(self.get_constraints_for_column(Note, 'info'), []) # Alter to add db_index=True and create 2 indexes. old_field = Note._meta.get_field('info') new_field = TextField(db_index=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Note, 'info'), ['schema_note_info_4b0ea695', 'schema_note_info_4b0ea695_like'] ) # Remove db_index=True to drop both indexes. with connection.schema_editor() as editor: editor.alter_field(Note, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Note, 'info'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_unique_to_charfield_with_db_index(self): # Create the table and verify initial indexes. with connection.schema_editor() as editor: editor.create_model(BookWithoutAuthor) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) # Alter to add unique=True (should replace the index) old_field = BookWithoutAuthor._meta.get_field('title') new_field = CharField(max_length=100, db_index=True, unique=True) new_field.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq'] ) # Alter to remove unique=True (should drop unique index) new_field2 = CharField(max_length=100, db_index=True) new_field2.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_remove_unique_and_db_index_from_charfield(self): # Create the table and verify initial indexes. 
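        # Editorial note: BookWithoutAuthor declares title with db_index=True,
        # so two indexes are assumed to exist up front on PostgreSQL -- the
        # plain btree index and its varchar_pattern_ops "_like" companion --
        # matching the two names asserted just below.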
with connection.schema_editor() as editor: editor.create_model(BookWithoutAuthor) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) # Alter to add unique=True (should replace the index) old_field = BookWithoutAuthor._meta.get_field('title') new_field = CharField(max_length=100, db_index=True, unique=True) new_field.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq'] ) # Alter to remove both unique=True and db_index=True (should drop all indexes) new_field2 = CharField(max_length=100) new_field2.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) self.assertEqual(self.get_constraints_for_column(BookWithoutAuthor, 'title'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_swap_unique_and_db_index_with_charfield(self): # Create the table and verify initial indexes. with connection.schema_editor() as editor: editor.create_model(BookWithoutAuthor) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) # Alter to set unique=True and remove db_index=True (should replace the index) old_field = BookWithoutAuthor._meta.get_field('title') new_field = CharField(max_length=100, unique=True) new_field.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq'] ) # Alter to set db_index=True and remove unique=True (should restore index) new_field2 = CharField(max_length=100, db_index=True) new_field2.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_db_index_to_charfield_with_unique(self): # Create the table and verify initial indexes. 
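        # Editorial note: Tag.slug is unique, so PostgreSQL already backs it
        # with the "_key" unique constraint plus the "_like" index. Adding
        # db_index=True on top of unique=True is expected to be a no-op, which
        # is why every assertion in this test checks the same pair of names.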
with connection.schema_editor() as editor: editor.create_model(Tag) self.assertEqual( self.get_constraints_for_column(Tag, 'slug'), ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key'] ) # Alter to add db_index=True old_field = Tag._meta.get_field('slug') new_field = SlugField(db_index=True, unique=True) new_field.set_attributes_from_name('slug') with connection.schema_editor() as editor: editor.alter_field(Tag, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Tag, 'slug'), ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key'] ) # Alter to remove db_index=True new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name('slug') with connection.schema_editor() as editor: editor.alter_field(Tag, new_field, new_field2, strict=True) self.assertEqual( self.get_constraints_for_column(Tag, 'slug'), ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key'] ) def test_alter_field_add_index_to_integerfield(self): # Create the table and verify no initial indexes. with connection.schema_editor() as editor: editor.create_model(Author) self.assertEqual(self.get_constraints_for_column(Author, 'weight'), []) # Alter to add db_index=True and create index. old_field = Author._meta.get_field('weight') new_field = IntegerField(null=True, db_index=True) new_field.set_attributes_from_name('weight') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'weight'), ['schema_author_weight_587740f9']) # Remove db_index=True to drop index. with connection.schema_editor() as editor: editor.alter_field(Author, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'weight'), []) def test_alter_pk_with_self_referential_field(self): """ Changing the primary key field name of a model with a self-referential foreign key (#26384). """ with connection.schema_editor() as editor: editor.create_model(Node) old_field = Node._meta.get_field('node_id') new_field = AutoField(primary_key=True) new_field.set_attributes_from_name('id') with connection.schema_editor() as editor: editor.alter_field(Node, old_field, new_field, strict=True) self.assertForeignKeyExists(Node, 'parent_id', Node._meta.db_table) @mock.patch('django.db.backends.base.schema.datetime') @mock.patch('django.db.backends.base.schema.timezone') def test_add_datefield_and_datetimefield_use_effective_default(self, mocked_datetime, mocked_tz): """ effective_default() should be used for DateField, DateTimeField, and TimeField if auto_now or auto_now_add is set (#25005). 
""" now = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1) now_tz = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1, tzinfo=timezone.utc) mocked_datetime.now = mock.MagicMock(return_value=now) mocked_tz.now = mock.MagicMock(return_value=now_tz) # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Check auto_now/auto_now_add attributes are not defined columns = self.column_classes(Author) self.assertNotIn("dob_auto_now", columns) self.assertNotIn("dob_auto_now_add", columns) self.assertNotIn("dtob_auto_now", columns) self.assertNotIn("dtob_auto_now_add", columns) self.assertNotIn("tob_auto_now", columns) self.assertNotIn("tob_auto_now_add", columns) # Create a row Author.objects.create(name='Anonymous1') # Ensure fields were added with the correct defaults dob_auto_now = DateField(auto_now=True) dob_auto_now.set_attributes_from_name('dob_auto_now') self.check_added_field_default( editor, Author, dob_auto_now, 'dob_auto_now', now.date(), cast_function=lambda x: x.date(), ) dob_auto_now_add = DateField(auto_now_add=True) dob_auto_now_add.set_attributes_from_name('dob_auto_now_add') self.check_added_field_default( editor, Author, dob_auto_now_add, 'dob_auto_now_add', now.date(), cast_function=lambda x: x.date(), ) dtob_auto_now = DateTimeField(auto_now=True) dtob_auto_now.set_attributes_from_name('dtob_auto_now') self.check_added_field_default( editor, Author, dtob_auto_now, 'dtob_auto_now', now, ) dt_tm_of_birth_auto_now_add = DateTimeField(auto_now_add=True) dt_tm_of_birth_auto_now_add.set_attributes_from_name('dtob_auto_now_add') self.check_added_field_default( editor, Author, dt_tm_of_birth_auto_now_add, 'dtob_auto_now_add', now, ) tob_auto_now = TimeField(auto_now=True) tob_auto_now.set_attributes_from_name('tob_auto_now') self.check_added_field_default( editor, Author, tob_auto_now, 'tob_auto_now', now.time(), cast_function=lambda x: x.time(), ) tob_auto_now_add = TimeField(auto_now_add=True) tob_auto_now_add.set_attributes_from_name('tob_auto_now_add') self.check_added_field_default( editor, Author, tob_auto_now_add, 'tob_auto_now_add', now.time(), cast_function=lambda x: x.time(), ) def test_namespaced_db_table_create_index_name(self): """ Table names are stripped of their namespace/schema before being used to generate index names. 
""" with connection.schema_editor() as editor: max_name_length = connection.ops.max_name_length() or 200 namespace = 'n' * max_name_length table_name = 't' * max_name_length namespaced_table_name = '"%s"."%s"' % (namespace, table_name) self.assertEqual( editor._create_index_name(table_name, []), editor._create_index_name(namespaced_table_name, []), ) @unittest.skipUnless(connection.vendor == 'oracle', 'Oracle specific db_table syntax') def test_creation_with_db_table_double_quotes(self): oracle_user = connection.creation._test_database_user() class Student(Model): name = CharField(max_length=30) class Meta: app_label = 'schema' apps = new_apps db_table = '"%s"."DJANGO_STUDENT_TABLE"' % oracle_user class Document(Model): name = CharField(max_length=30) students = ManyToManyField(Student) class Meta: app_label = 'schema' apps = new_apps db_table = '"%s"."DJANGO_DOCUMENT_TABLE"' % oracle_user self.local_models = [Student, Document] with connection.schema_editor() as editor: editor.create_model(Student) editor.create_model(Document) doc = Document.objects.create(name='Test Name') student = Student.objects.create(name='Some man') doc.students.add(student) def test_rename_table_renames_deferred_sql_references(self): atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: editor.create_model(Author) editor.create_model(Book) editor.alter_db_table(Author, 'schema_author', 'schema_renamed_author') editor.alter_db_table(Author, 'schema_book', 'schema_renamed_book') self.assertGreater(len(editor.deferred_sql), 0) for statement in editor.deferred_sql: self.assertIs(statement.references_table('schema_author'), False) self.assertIs(statement.references_table('schema_book'), False) @unittest.skipIf(connection.vendor == 'sqlite', 'SQLite naively remakes the table on field alteration.') def test_rename_column_renames_deferred_sql_references(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) old_title = Book._meta.get_field('title') new_title = CharField(max_length=100, db_index=True) new_title.set_attributes_from_name('renamed_title') editor.alter_field(Book, old_title, new_title) old_author = Book._meta.get_field('author') new_author = ForeignKey(Author, CASCADE) new_author.set_attributes_from_name('renamed_author') editor.alter_field(Book, old_author, new_author) self.assertGreater(len(editor.deferred_sql), 0) for statement in editor.deferred_sql: self.assertIs(statement.references_column('book', 'title'), False) self.assertIs(statement.references_column('book', 'author_id'), False) @isolate_apps('schema') def test_referenced_field_without_constraint_rename_inside_atomic_block(self): """ Foreign keys without database level constraint don't prevent the field they reference from being renamed in an atomic block. 
""" class Foo(Model): field = CharField(max_length=255, unique=True) class Meta: app_label = 'schema' class Bar(Model): foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False) class Meta: app_label = 'schema' self.isolated_local_models = [Foo, Bar] with connection.schema_editor() as editor: editor.create_model(Foo) editor.create_model(Bar) new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('renamed') with connection.schema_editor(atomic=True) as editor: editor.alter_field(Foo, Foo._meta.get_field('field'), new_field) @isolate_apps('schema') def test_referenced_table_without_constraint_rename_inside_atomic_block(self): """ Foreign keys without database level constraint don't prevent the table they reference from being renamed in an atomic block. """ class Foo(Model): field = CharField(max_length=255, unique=True) class Meta: app_label = 'schema' class Bar(Model): foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False) class Meta: app_label = 'schema' self.isolated_local_models = [Foo, Bar] with connection.schema_editor() as editor: editor.create_model(Foo) editor.create_model(Bar) new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('renamed') with connection.schema_editor(atomic=True) as editor: editor.alter_db_table(Foo, Foo._meta.db_table, 'renamed_table') Foo._meta.db_table = 'renamed_table'
4d7f50ed964c245275f6802b7a40e1e0d22a3824f8f75717f49de29b7b0386cf
from django.core import checks from django.core.checks import Error, Warning from django.db import models from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature from django.test.utils import ( isolate_apps, modify_settings, override_settings, override_system_checks, ) class EmptyRouter: pass @isolate_apps('check_framework', attr_name='apps') @override_system_checks([checks.model_checks.check_all_models]) class DuplicateDBTableTests(SimpleTestCase): def test_collision_in_same_app(self): class Model1(models.Model): class Meta: db_table = 'test_table' class Model2(models.Model): class Meta: db_table = 'test_table' self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Error( "db_table 'test_table' is used by multiple models: " "check_framework.Model1, check_framework.Model2.", obj='test_table', id='models.E028', ) ]) @override_settings(DATABASE_ROUTERS=['check_framework.test_model_checks.EmptyRouter']) def test_collision_in_same_app_database_routers_installed(self): class Model1(models.Model): class Meta: db_table = 'test_table' class Model2(models.Model): class Meta: db_table = 'test_table' self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Warning( "db_table 'test_table' is used by multiple models: " "check_framework.Model1, check_framework.Model2.", hint=( 'You have configured settings.DATABASE_ROUTERS. Verify ' 'that check_framework.Model1, check_framework.Model2 are ' 'correctly routed to separate databases.' ), obj='test_table', id='models.W035', ) ]) @modify_settings(INSTALLED_APPS={'append': 'basic'}) @isolate_apps('basic', 'check_framework', kwarg_name='apps') def test_collision_across_apps(self, apps): class Model1(models.Model): class Meta: app_label = 'basic' db_table = 'test_table' class Model2(models.Model): class Meta: app_label = 'check_framework' db_table = 'test_table' self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [ Error( "db_table 'test_table' is used by multiple models: " "basic.Model1, check_framework.Model2.", obj='test_table', id='models.E028', ) ]) @modify_settings(INSTALLED_APPS={'append': 'basic'}) @override_settings(DATABASE_ROUTERS=['check_framework.test_model_checks.EmptyRouter']) @isolate_apps('basic', 'check_framework', kwarg_name='apps') def test_collision_across_apps_database_routers_installed(self, apps): class Model1(models.Model): class Meta: app_label = 'basic' db_table = 'test_table' class Model2(models.Model): class Meta: app_label = 'check_framework' db_table = 'test_table' self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [ Warning( "db_table 'test_table' is used by multiple models: " "basic.Model1, check_framework.Model2.", hint=( 'You have configured settings.DATABASE_ROUTERS. Verify ' 'that basic.Model1, check_framework.Model2 are correctly ' 'routed to separate databases.' 
                ),
                obj='test_table',
                id='models.W035',
            )
        ])

    def test_no_collision_for_unmanaged_models(self):
        class Unmanaged(models.Model):
            class Meta:
                db_table = 'test_table'
                managed = False

        class Managed(models.Model):
            class Meta:
                db_table = 'test_table'

        self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [])

    def test_no_collision_for_proxy_models(self):
        class Model(models.Model):
            class Meta:
                db_table = 'test_table'

        class ProxyModel(Model):
            class Meta:
                proxy = True

        self.assertEqual(Model._meta.db_table, ProxyModel._meta.db_table)
        self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [])


@isolate_apps('check_framework', attr_name='apps')
@override_system_checks([checks.model_checks.check_all_models])
class IndexNameTests(SimpleTestCase):
    def test_collision_in_same_model(self):
        index = models.Index(fields=['id'], name='foo')

        class Model(models.Model):
            class Meta:
                indexes = [index, index]

        self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [
            Error(
                "index name 'foo' is not unique for model check_framework.Model.",
                id='models.E029',
            ),
        ])

    def test_collision_in_different_models(self):
        index = models.Index(fields=['id'], name='foo')

        class Model1(models.Model):
            class Meta:
                indexes = [index]

        class Model2(models.Model):
            class Meta:
                indexes = [index]

        self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [
            Error(
                "index name 'foo' is not unique among models: "
                "check_framework.Model1, check_framework.Model2.",
                id='models.E030',
            ),
        ])

    def test_collision_abstract_model(self):
        class AbstractModel(models.Model):
            class Meta:
                indexes = [models.Index(fields=['id'], name='foo')]
                abstract = True

        class Model1(AbstractModel):
            pass

        class Model2(AbstractModel):
            pass

        self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [
            Error(
                "index name 'foo' is not unique among models: "
                "check_framework.Model1, check_framework.Model2.",
                id='models.E030',
            ),
        ])

    def test_no_collision_abstract_model_interpolation(self):
        class AbstractModel(models.Model):
            name = models.CharField(max_length=20)

            class Meta:
                indexes = [models.Index(fields=['name'], name='%(app_label)s_%(class)s_foo')]
                abstract = True

        class Model1(AbstractModel):
            pass

        class Model2(AbstractModel):
            pass

        self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [])

    @modify_settings(INSTALLED_APPS={'append': 'basic'})
    @isolate_apps('basic', 'check_framework', kwarg_name='apps')
    def test_collision_across_apps(self, apps):
        index = models.Index(fields=['id'], name='foo')

        class Model1(models.Model):
            class Meta:
                app_label = 'basic'
                indexes = [index]

        class Model2(models.Model):
            class Meta:
                app_label = 'check_framework'
                indexes = [index]

        self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [
            Error(
                "index name 'foo' is not unique among models: basic.Model1, "
                "check_framework.Model2.",
                id='models.E030',
            ),
        ])

    @modify_settings(INSTALLED_APPS={'append': 'basic'})
    @isolate_apps('basic', 'check_framework', kwarg_name='apps')
    def test_no_collision_across_apps_interpolation(self, apps):
        index = models.Index(fields=['id'], name='%(app_label)s_%(class)s_foo')

        class Model1(models.Model):
            class Meta:
                app_label = 'basic'
                indexes = [index]

        class Model2(models.Model):
            class Meta:
                app_label = 'check_framework'
                indexes = [index]

        self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [])


@isolate_apps('check_framework', attr_name='apps')
@override_system_checks([checks.model_checks.check_all_models])
@skipUnlessDBFeature('supports_table_check_constraints') class ConstraintNameTests(TestCase): def test_collision_in_same_model(self): class Model(models.Model): class Meta: constraints = [ models.CheckConstraint(check=models.Q(id__gt=0), name='foo'), models.CheckConstraint(check=models.Q(id__lt=100), name='foo'), ] self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Error( "constraint name 'foo' is not unique for model " "check_framework.Model.", id='models.E031', ), ]) def test_collision_in_different_models(self): constraint = models.CheckConstraint(check=models.Q(id__gt=0), name='foo') class Model1(models.Model): class Meta: constraints = [constraint] class Model2(models.Model): class Meta: constraints = [constraint] self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Error( "constraint name 'foo' is not unique among models: " "check_framework.Model1, check_framework.Model2.", id='models.E032', ), ]) def test_collision_abstract_model(self): class AbstractModel(models.Model): class Meta: constraints = [models.CheckConstraint(check=models.Q(id__gt=0), name='foo')] abstract = True class Model1(AbstractModel): pass class Model2(AbstractModel): pass self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [ Error( "constraint name 'foo' is not unique among models: " "check_framework.Model1, check_framework.Model2.", id='models.E032', ), ]) def test_no_collision_abstract_model_interpolation(self): class AbstractModel(models.Model): class Meta: constraints = [ models.CheckConstraint(check=models.Q(id__gt=0), name='%(app_label)s_%(class)s_foo'), ] abstract = True class Model1(AbstractModel): pass class Model2(AbstractModel): pass self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), []) @modify_settings(INSTALLED_APPS={'append': 'basic'}) @isolate_apps('basic', 'check_framework', kwarg_name='apps') def test_collision_across_apps(self, apps): constraint = models.CheckConstraint(check=models.Q(id__gt=0), name='foo') class Model1(models.Model): class Meta: app_label = 'basic' constraints = [constraint] class Model2(models.Model): class Meta: app_label = 'check_framework' constraints = [constraint] self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [ Error( "constraint name 'foo' is not unique among models: " "basic.Model1, check_framework.Model2.", id='models.E032', ), ]) @modify_settings(INSTALLED_APPS={'append': 'basic'}) @isolate_apps('basic', 'check_framework', kwarg_name='apps') def test_no_collision_across_apps_interpolation(self, apps): constraint = models.CheckConstraint(check=models.Q(id__gt=0), name='%(app_label)s_%(class)s_foo') class Model1(models.Model): class Meta: app_label = 'basic' constraints = [constraint] class Model2(models.Model): class Meta: app_label = 'check_framework' constraints = [constraint] self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [])
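# Illustrative sketch (hypothetical model names): the pattern verified by the
# *_interpolation tests above. Embedding %(app_label)s and %(class)s in an
# index or constraint name on an abstract base gives every concrete subclass
# a distinct name, avoiding models.E030/models.E032 collisions.
from django.db import models


class SketchTimestampedBase(models.Model):
    created = models.DateTimeField(auto_now_add=True)

    class Meta:
        abstract = True
        indexes = [
            models.Index(fields=['created'], name='%(app_label)s_%(class)s_crtd'),
        ]
        constraints = [
            models.CheckConstraint(
                check=models.Q(created__isnull=False),
                name='%(app_label)s_%(class)s_set',
            ),
        ]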
2b6e4caa4c08aaabe3a62fdfc7d8c94c782b392d0dc91709ccabee9a1a29e82f
import unittest from django.core.checks import Error, Warning from django.core.checks.model_checks import _check_lazy_references from django.db import connection, connections, models from django.db.models.functions import Lower from django.db.models.signals import post_init from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature from django.test.utils import isolate_apps, override_settings, register_lookup class EmptyRouter: pass def get_max_column_name_length(): allowed_len = None db_alias = None for db in ('default', 'other'): connection = connections[db] max_name_length = connection.ops.max_name_length() if max_name_length is not None and not connection.features.truncates_names: if allowed_len is None or max_name_length < allowed_len: allowed_len = max_name_length db_alias = db return (allowed_len, db_alias) @isolate_apps('invalid_models_tests') class IndexTogetherTests(SimpleTestCase): def test_non_iterable(self): class Model(models.Model): class Meta: index_together = 42 self.assertEqual(Model.check(), [ Error( "'index_together' must be a list or tuple.", obj=Model, id='models.E008', ), ]) def test_non_list(self): class Model(models.Model): class Meta: index_together = 'not-a-list' self.assertEqual(Model.check(), [ Error( "'index_together' must be a list or tuple.", obj=Model, id='models.E008', ), ]) def test_list_containing_non_iterable(self): class Model(models.Model): class Meta: index_together = [('a', 'b'), 42] self.assertEqual(Model.check(), [ Error( "All 'index_together' elements must be lists or tuples.", obj=Model, id='models.E009', ), ]) def test_pointing_to_missing_field(self): class Model(models.Model): class Meta: index_together = [['missing_field']] self.assertEqual(Model.check(), [ Error( "'index_together' refers to the nonexistent field 'missing_field'.", obj=Model, id='models.E012', ), ]) def test_pointing_to_non_local_field(self): class Foo(models.Model): field1 = models.IntegerField() class Bar(Foo): field2 = models.IntegerField() class Meta: index_together = [['field2', 'field1']] self.assertEqual(Bar.check(), [ Error( "'index_together' refers to field 'field1' which is not " "local to model 'Bar'.", hint='This issue may be caused by multi-table inheritance.', obj=Bar, id='models.E016', ), ]) def test_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: index_together = [['m2m']] self.assertEqual(Model.check(), [ Error( "'index_together' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'index_together'.", obj=Model, id='models.E013', ), ]) def test_pointing_to_fk(self): class Foo(models.Model): pass class Bar(models.Model): foo_1 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_1') foo_2 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_2') class Meta: index_together = [['foo_1_id', 'foo_2']] self.assertEqual(Bar.check(), []) # unique_together tests are very similar to index_together tests. 
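# Illustrative sketch (hypothetical model): as test_pointing_to_fk above
# shows, index_together accepts either a ForeignKey's field name or the
# attname of its generated column ('<name>_id'); both forms pass checks.
from django.db import models


class SketchOrder(models.Model):
    customer = models.ForeignKey('self', models.CASCADE)

    class Meta:
        app_label = 'invalid_models_tests'
        # attname form; equivalent to index_together = [['customer']]
        index_together = [['customer_id']]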
@isolate_apps('invalid_models_tests') class UniqueTogetherTests(SimpleTestCase): def test_non_iterable(self): class Model(models.Model): class Meta: unique_together = 42 self.assertEqual(Model.check(), [ Error( "'unique_together' must be a list or tuple.", obj=Model, id='models.E010', ), ]) def test_list_containing_non_iterable(self): class Model(models.Model): one = models.IntegerField() two = models.IntegerField() class Meta: unique_together = [('a', 'b'), 42] self.assertEqual(Model.check(), [ Error( "All 'unique_together' elements must be lists or tuples.", obj=Model, id='models.E011', ), ]) def test_non_list(self): class Model(models.Model): class Meta: unique_together = 'not-a-list' self.assertEqual(Model.check(), [ Error( "'unique_together' must be a list or tuple.", obj=Model, id='models.E010', ), ]) def test_valid_model(self): class Model(models.Model): one = models.IntegerField() two = models.IntegerField() class Meta: # unique_together can be a simple tuple unique_together = ('one', 'two') self.assertEqual(Model.check(), []) def test_pointing_to_missing_field(self): class Model(models.Model): class Meta: unique_together = [['missing_field']] self.assertEqual(Model.check(), [ Error( "'unique_together' refers to the nonexistent field 'missing_field'.", obj=Model, id='models.E012', ), ]) def test_pointing_to_m2m(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: unique_together = [['m2m']] self.assertEqual(Model.check(), [ Error( "'unique_together' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'unique_together'.", obj=Model, id='models.E013', ), ]) def test_pointing_to_fk(self): class Foo(models.Model): pass class Bar(models.Model): foo_1 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_1') foo_2 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_2') class Meta: unique_together = [['foo_1_id', 'foo_2']] self.assertEqual(Bar.check(), []) @isolate_apps('invalid_models_tests') class IndexesTests(TestCase): def test_pointing_to_missing_field(self): class Model(models.Model): class Meta: indexes = [models.Index(fields=['missing_field'], name='name')] self.assertEqual(Model.check(), [ Error( "'indexes' refers to the nonexistent field 'missing_field'.", obj=Model, id='models.E012', ), ]) def test_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: indexes = [models.Index(fields=['m2m'], name='name')] self.assertEqual(Model.check(), [ Error( "'indexes' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'indexes'.", obj=Model, id='models.E013', ), ]) def test_pointing_to_non_local_field(self): class Foo(models.Model): field1 = models.IntegerField() class Bar(Foo): field2 = models.IntegerField() class Meta: indexes = [models.Index(fields=['field2', 'field1'], name='name')] self.assertEqual(Bar.check(), [ Error( "'indexes' refers to field 'field1' which is not local to " "model 'Bar'.", hint='This issue may be caused by multi-table inheritance.', obj=Bar, id='models.E016', ), ]) def test_pointing_to_fk(self): class Foo(models.Model): pass class Bar(models.Model): foo_1 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_1') foo_2 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_2') class Meta: indexes = [models.Index(fields=['foo_1_id', 'foo_2'], name='index_name')] self.assertEqual(Bar.check(), []) def test_name_constraints(self): class Model(models.Model): class Meta: 
indexes = [ models.Index(fields=['id'], name='_index_name'), models.Index(fields=['id'], name='5index_name'), ] self.assertEqual(Model.check(), [ Error( "The index name '%sindex_name' cannot start with an " "underscore or a number." % prefix, obj=Model, id='models.E033', ) for prefix in ('_', '5') ]) def test_max_name_length(self): index_name = 'x' * 31 class Model(models.Model): class Meta: indexes = [models.Index(fields=['id'], name=index_name)] self.assertEqual(Model.check(), [ Error( "The index name '%s' cannot be longer than 30 characters." % index_name, obj=Model, id='models.E034', ), ]) def test_index_with_condition(self): class Model(models.Model): age = models.IntegerField() class Meta: indexes = [ models.Index( fields=['age'], name='index_age_gte_10', condition=models.Q(age__gte=10), ), ] errors = Model.check(databases=self.databases) expected = [] if connection.features.supports_partial_indexes else [ Warning( '%s does not support indexes with conditions.' % connection.display_name, hint=( "Conditions will be ignored. Silence this warning if you " "don't care about it." ), obj=Model, id='models.W037', ) ] self.assertEqual(errors, expected) def test_index_with_condition_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {'supports_partial_indexes'} indexes = [ models.Index( fields=['age'], name='index_age_gte_10', condition=models.Q(age__gte=10), ), ] self.assertEqual(Model.check(databases=self.databases), []) def test_index_with_include(self): class Model(models.Model): age = models.IntegerField() class Meta: indexes = [ models.Index( fields=['age'], name='index_age_include_id', include=['id'], ), ] errors = Model.check(databases=self.databases) expected = [] if connection.features.supports_covering_indexes else [ Warning( '%s does not support indexes with non-key columns.' % connection.display_name, hint=( "Non-key columns will be ignored. Silence this warning if " "you don't care about it." 
                ),
                obj=Model,
                id='models.W040',
            )
        ]
        self.assertEqual(errors, expected)

    def test_index_with_include_required_db_features(self):
        class Model(models.Model):
            age = models.IntegerField()

            class Meta:
                required_db_features = {'supports_covering_indexes'}
                indexes = [
                    models.Index(
                        fields=['age'],
                        name='index_age_include_id',
                        include=['id'],
                    ),
                ]

        self.assertEqual(Model.check(databases=self.databases), [])

    @skipUnlessDBFeature('supports_covering_indexes')
    def test_index_include_pointing_to_missing_field(self):
        class Model(models.Model):
            class Meta:
                indexes = [
                    models.Index(fields=['id'], include=['missing_field'], name='name'),
                ]

        self.assertEqual(Model.check(databases=self.databases), [
            Error(
                "'indexes' refers to the nonexistent field 'missing_field'.",
                obj=Model,
                id='models.E012',
            ),
        ])

    @skipUnlessDBFeature('supports_covering_indexes')
    def test_index_include_pointing_to_m2m_field(self):
        class Model(models.Model):
            m2m = models.ManyToManyField('self')

            class Meta:
                indexes = [models.Index(fields=['id'], include=['m2m'], name='name')]

        self.assertEqual(Model.check(databases=self.databases), [
            Error(
                "'indexes' refers to a ManyToManyField 'm2m', but "
                "ManyToManyFields are not permitted in 'indexes'.",
                obj=Model,
                id='models.E013',
            ),
        ])

    @skipUnlessDBFeature('supports_covering_indexes')
    def test_index_include_pointing_to_non_local_field(self):
        class Parent(models.Model):
            field1 = models.IntegerField()

        class Child(Parent):
            field2 = models.IntegerField()

            class Meta:
                indexes = [
                    models.Index(fields=['field2'], include=['field1'], name='name'),
                ]

        self.assertEqual(Child.check(databases=self.databases), [
            Error(
                "'indexes' refers to field 'field1' which is not local to "
                "model 'Child'.",
                hint='This issue may be caused by multi-table inheritance.',
                obj=Child,
                id='models.E016',
            ),
        ])

    @skipUnlessDBFeature('supports_covering_indexes')
    def test_index_include_pointing_to_fk(self):
        class Target(models.Model):
            pass

        class Model(models.Model):
            fk_1 = models.ForeignKey(Target, models.CASCADE, related_name='target_1')
            fk_2 = models.ForeignKey(Target, models.CASCADE, related_name='target_2')

            class Meta:
                indexes = [
                    models.Index(
                        fields=['id'],
                        include=['fk_1_id', 'fk_2'],
                        name='name',
                    ),
                ]

        self.assertEqual(Model.check(databases=self.databases), [])


@isolate_apps('invalid_models_tests')
class FieldNamesTests(TestCase):
    databases = {'default', 'other'}

    def test_ending_with_underscore(self):
        class Model(models.Model):
            field_ = models.CharField(max_length=10)
            m2m_ = models.ManyToManyField('self')

        self.assertEqual(Model.check(), [
            Error(
                'Field names must not end with an underscore.',
                obj=Model._meta.get_field('field_'),
                id='fields.E001',
            ),
            Error(
                'Field names must not end with an underscore.',
                obj=Model._meta.get_field('m2m_'),
                id='fields.E001',
            ),
        ])

    max_column_name_length, column_limit_db_alias = get_max_column_name_length()

    @unittest.skipIf(max_column_name_length is None, "The database doesn't have a column name length limit.")
    def test_M2M_long_column_name(self):
        """
        #13711 -- Model check for long M2M column names when database has
        column name length limits.
        """
        # A model with very long name which will be used to set relations to.
        class VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz(models.Model):
            title = models.CharField(max_length=11)

        # Main model for which checks will be performed.
class ModelWithLongField(models.Model): m2m_field = models.ManyToManyField( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, related_name='rn1', ) m2m_field2 = models.ManyToManyField( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, related_name='rn2', through='m2msimple', ) m2m_field3 = models.ManyToManyField( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, related_name='rn3', through='m2mcomplex', ) fk = models.ForeignKey( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, models.CASCADE, related_name='rn4', ) # Models used for setting `through` in M2M field. class m2msimple(models.Model): id2 = models.ForeignKey(ModelWithLongField, models.CASCADE) class m2mcomplex(models.Model): id2 = models.ForeignKey(ModelWithLongField, models.CASCADE) long_field_name = 'a' * (self.max_column_name_length + 1) models.ForeignKey( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, models.CASCADE, ).contribute_to_class(m2msimple, long_field_name) models.ForeignKey( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, models.CASCADE, db_column=long_field_name ).contribute_to_class(m2mcomplex, long_field_name) errors = ModelWithLongField.check(databases=('default', 'other')) # First error because of M2M field set on the model with long name. m2m_long_name = "verylongmodelnamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz_id" if self.max_column_name_length > len(m2m_long_name): # Some databases support names longer than the test name. expected = [] else: expected = [ Error( 'Autogenerated column name too long for M2M field "%s". ' 'Maximum length is "%s" for database "%s".' % (m2m_long_name, self.max_column_name_length, self.column_limit_db_alias), hint="Use 'through' to create a separate model for " "M2M and then set column_name using 'db_column'.", obj=ModelWithLongField, id='models.E019', ) ] # Second error because the FK specified in the `through` model # `m2msimple` has auto-generated name longer than allowed. # There will be no check errors in the other M2M because it # specifies db_column for the FK in `through` model even if the actual # name is longer than the limits of the database. expected.append( Error( 'Autogenerated column name too long for M2M field "%s_id". ' 'Maximum length is "%s" for database "%s".' % (long_field_name, self.max_column_name_length, self.column_limit_db_alias), hint="Use 'through' to create a separate model for " "M2M and then set column_name using 'db_column'.", obj=ModelWithLongField, id='models.E019', ) ) self.assertEqual(errors, expected) # Check for long column names is called only for specified database # aliases. self.assertEqual(ModelWithLongField.check(databases=None), []) @unittest.skipIf(max_column_name_length is None, "The database doesn't have a column name length limit.") def test_local_field_long_column_name(self): """ #13711 -- Model check for long column names when database does not support long names. 
""" class ModelWithLongField(models.Model): title = models.CharField(max_length=11) long_field_name = 'a' * (self.max_column_name_length + 1) long_field_name2 = 'b' * (self.max_column_name_length + 1) models.CharField(max_length=11).contribute_to_class(ModelWithLongField, long_field_name) models.CharField(max_length=11, db_column='vlmn').contribute_to_class(ModelWithLongField, long_field_name2) self.assertEqual(ModelWithLongField.check(databases=('default', 'other')), [ Error( 'Autogenerated column name too long for field "%s". ' 'Maximum length is "%s" for database "%s".' % (long_field_name, self.max_column_name_length, self.column_limit_db_alias), hint="Set the column name manually using 'db_column'.", obj=ModelWithLongField, id='models.E018', ) ]) # Check for long column names is called only for specified database # aliases. self.assertEqual(ModelWithLongField.check(databases=None), []) def test_including_separator(self): class Model(models.Model): some__field = models.IntegerField() self.assertEqual(Model.check(), [ Error( 'Field names must not contain "__".', obj=Model._meta.get_field('some__field'), id='fields.E002', ) ]) def test_pk(self): class Model(models.Model): pk = models.IntegerField() self.assertEqual(Model.check(), [ Error( "'pk' is a reserved word that cannot be used as a field name.", obj=Model._meta.get_field('pk'), id='fields.E003', ) ]) def test_db_column_clash(self): class Model(models.Model): foo = models.IntegerField() bar = models.IntegerField(db_column='foo') self.assertEqual(Model.check(), [ Error( "Field 'bar' has column name 'foo' that is used by " "another field.", hint="Specify a 'db_column' for the field.", obj=Model, id='models.E007', ) ]) @isolate_apps('invalid_models_tests') class ShadowingFieldsTests(SimpleTestCase): def test_field_name_clash_with_child_accessor(self): class Parent(models.Model): pass class Child(Parent): child = models.CharField(max_length=100) self.assertEqual(Child.check(), [ Error( "The field 'child' clashes with the field " "'child' from model 'invalid_models_tests.parent'.", obj=Child._meta.get_field('child'), id='models.E006', ) ]) def test_multiinheritance_clash(self): class Mother(models.Model): clash = models.IntegerField() class Father(models.Model): clash = models.IntegerField() class Child(Mother, Father): # Here we have two clashed: id (automatic field) and clash, because # both parents define these fields. pass self.assertEqual(Child.check(), [ Error( "The field 'id' from parent model " "'invalid_models_tests.mother' clashes with the field 'id' " "from parent model 'invalid_models_tests.father'.", obj=Child, id='models.E005', ), Error( "The field 'clash' from parent model " "'invalid_models_tests.mother' clashes with the field 'clash' " "from parent model 'invalid_models_tests.father'.", obj=Child, id='models.E005', ) ]) def test_inheritance_clash(self): class Parent(models.Model): f_id = models.IntegerField() class Target(models.Model): # This field doesn't result in a clash. f_id = models.IntegerField() class Child(Parent): # This field clashes with parent "f_id" field. 
f = models.ForeignKey(Target, models.CASCADE) self.assertEqual(Child.check(), [ Error( "The field 'f' clashes with the field 'f_id' " "from model 'invalid_models_tests.parent'.", obj=Child._meta.get_field('f'), id='models.E006', ) ]) def test_multigeneration_inheritance(self): class GrandParent(models.Model): clash = models.IntegerField() class Parent(GrandParent): pass class Child(Parent): pass class GrandChild(Child): clash = models.IntegerField() self.assertEqual(GrandChild.check(), [ Error( "The field 'clash' clashes with the field 'clash' " "from model 'invalid_models_tests.grandparent'.", obj=GrandChild._meta.get_field('clash'), id='models.E006', ) ]) def test_id_clash(self): class Target(models.Model): pass class Model(models.Model): fk = models.ForeignKey(Target, models.CASCADE) fk_id = models.IntegerField() self.assertEqual(Model.check(), [ Error( "The field 'fk_id' clashes with the field 'fk' from model " "'invalid_models_tests.model'.", obj=Model._meta.get_field('fk_id'), id='models.E006', ) ]) @isolate_apps('invalid_models_tests') class OtherModelTests(SimpleTestCase): def test_unique_primary_key(self): invalid_id = models.IntegerField(primary_key=False) class Model(models.Model): id = invalid_id self.assertEqual(Model.check(), [ Error( "'id' can only be used as a field name if the field also sets " "'primary_key=True'.", obj=Model, id='models.E004', ), ]) def test_ordering_non_iterable(self): class Model(models.Model): class Meta: ordering = 'missing_field' self.assertEqual(Model.check(), [ Error( "'ordering' must be a tuple or list " "(even if you want to order by only one field).", obj=Model, id='models.E014', ), ]) def test_just_ordering_no_errors(self): class Model(models.Model): order = models.PositiveIntegerField() class Meta: ordering = ['order'] self.assertEqual(Model.check(), []) def test_just_order_with_respect_to_no_errors(self): class Question(models.Model): pass class Answer(models.Model): question = models.ForeignKey(Question, models.CASCADE) class Meta: order_with_respect_to = 'question' self.assertEqual(Answer.check(), []) def test_ordering_with_order_with_respect_to(self): class Question(models.Model): pass class Answer(models.Model): question = models.ForeignKey(Question, models.CASCADE) order = models.IntegerField() class Meta: order_with_respect_to = 'question' ordering = ['order'] self.assertEqual(Answer.check(), [ Error( "'ordering' and 'order_with_respect_to' cannot be used together.", obj=Answer, id='models.E021', ), ]) def test_non_valid(self): class RelationModel(models.Model): pass class Model(models.Model): relation = models.ManyToManyField(RelationModel) class Meta: ordering = ['relation'] self.assertEqual(Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'relation'.", obj=Model, id='models.E015', ), ]) def test_ordering_pointing_to_missing_field(self): class Model(models.Model): class Meta: ordering = ('missing_field',) self.assertEqual(Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'missing_field'.", obj=Model, id='models.E015', ) ]) def test_ordering_pointing_to_missing_foreignkey_field(self): class Model(models.Model): missing_fk_field = models.IntegerField() class Meta: ordering = ('missing_fk_field_id',) self.assertEqual(Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'missing_fk_field_id'.", obj=Model, id='models.E015', ) ]) def test_ordering_pointing_to_missing_related_field(self): class 
Model(models.Model): test = models.IntegerField() class Meta: ordering = ('missing_related__id',) self.assertEqual(Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'missing_related__id'.", obj=Model, id='models.E015', ) ]) def test_ordering_pointing_to_missing_related_model_field(self): class Parent(models.Model): pass class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) class Meta: ordering = ('parent__missing_field',) self.assertEqual(Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent__missing_field'.", obj=Child, id='models.E015', ) ]) def test_ordering_pointing_to_non_related_field(self): class Child(models.Model): parent = models.IntegerField() class Meta: ordering = ('parent__missing_field',) self.assertEqual(Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent__missing_field'.", obj=Child, id='models.E015', ) ]) def test_ordering_pointing_to_two_related_model_field(self): class Parent2(models.Model): pass class Parent1(models.Model): parent2 = models.ForeignKey(Parent2, models.CASCADE) class Child(models.Model): parent1 = models.ForeignKey(Parent1, models.CASCADE) class Meta: ordering = ('parent1__parent2__missing_field',) self.assertEqual(Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent1__parent2__missing_field'.", obj=Child, id='models.E015', ) ]) def test_ordering_pointing_multiple_times_to_model_fields(self): class Parent(models.Model): field1 = models.CharField(max_length=100) field2 = models.CharField(max_length=100) class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) class Meta: ordering = ('parent__field1__field2',) self.assertEqual(Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent__field1__field2'.", obj=Child, id='models.E015', ) ]) def test_ordering_allows_registered_lookups(self): class Model(models.Model): test = models.CharField(max_length=100) class Meta: ordering = ('test__lower',) with register_lookup(models.CharField, Lower): self.assertEqual(Model.check(), []) def test_ordering_pointing_to_lookup_not_transform(self): class Model(models.Model): test = models.CharField(max_length=100) class Meta: ordering = ('test__isnull',) self.assertEqual(Model.check(), []) def test_ordering_pointing_to_related_model_pk(self): class Parent(models.Model): pass class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) class Meta: ordering = ('parent__pk',) self.assertEqual(Child.check(), []) def test_ordering_pointing_to_foreignkey_field(self): class Parent(models.Model): pass class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) class Meta: ordering = ('parent_id',) self.assertFalse(Child.check()) def test_name_beginning_with_underscore(self): class _Model(models.Model): pass self.assertEqual(_Model.check(), [ Error( "The model name '_Model' cannot start or end with an underscore " "as it collides with the query lookup syntax.", obj=_Model, id='models.E023', ) ]) def test_name_ending_with_underscore(self): class Model_(models.Model): pass self.assertEqual(Model_.check(), [ Error( "The model name 'Model_' cannot start or end with an underscore " "as it collides with the query lookup syntax.", obj=Model_, id='models.E023', ) ]) def test_name_contains_double_underscores(self): class Test__Model(models.Model): pass 
self.assertEqual(Test__Model.check(), [ Error( "The model name 'Test__Model' cannot contain double underscores " "as it collides with the query lookup syntax.", obj=Test__Model, id='models.E024', ) ]) def test_property_and_related_field_accessor_clash(self): class Model(models.Model): fk = models.ForeignKey('self', models.CASCADE) @property def fk_id(self): pass self.assertEqual(Model.check(), [ Error( "The property 'fk_id' clashes with a related field accessor.", obj=Model, id='models.E025', ) ]) def test_single_primary_key(self): class Model(models.Model): foo = models.IntegerField(primary_key=True) bar = models.IntegerField(primary_key=True) self.assertEqual(Model.check(), [ Error( "The model cannot have more than one field with 'primary_key=True'.", obj=Model, id='models.E026', ) ]) @override_settings(TEST_SWAPPED_MODEL_BAD_VALUE='not-a-model') def test_swappable_missing_app_name(self): class Model(models.Model): class Meta: swappable = 'TEST_SWAPPED_MODEL_BAD_VALUE' self.assertEqual(Model.check(), [ Error( "'TEST_SWAPPED_MODEL_BAD_VALUE' is not of the form 'app_label.app_name'.", id='models.E001', ), ]) @override_settings(TEST_SWAPPED_MODEL_BAD_MODEL='not_an_app.Target') def test_swappable_missing_app(self): class Model(models.Model): class Meta: swappable = 'TEST_SWAPPED_MODEL_BAD_MODEL' self.assertEqual(Model.check(), [ Error( "'TEST_SWAPPED_MODEL_BAD_MODEL' references 'not_an_app.Target', " 'which has not been installed, or is abstract.', id='models.E002', ), ]) def test_two_m2m_through_same_relationship(self): class Person(models.Model): pass class Group(models.Model): primary = models.ManyToManyField(Person, through='Membership', related_name='primary') secondary = models.ManyToManyField(Person, through='Membership', related_name='secondary') class Membership(models.Model): person = models.ForeignKey(Person, models.CASCADE) group = models.ForeignKey(Group, models.CASCADE) self.assertEqual(Group.check(), [ Error( "The model has two identical many-to-many relations through " "the intermediate model 'invalid_models_tests.Membership'.", obj=Group, id='models.E003', ) ]) def test_two_m2m_through_same_model_with_different_through_fields(self): class Country(models.Model): pass class ShippingMethod(models.Model): to_countries = models.ManyToManyField( Country, through='ShippingMethodPrice', through_fields=('method', 'to_country'), ) from_countries = models.ManyToManyField( Country, through='ShippingMethodPrice', through_fields=('method', 'from_country'), related_name='+', ) class ShippingMethodPrice(models.Model): method = models.ForeignKey(ShippingMethod, models.CASCADE) to_country = models.ForeignKey(Country, models.CASCADE) from_country = models.ForeignKey(Country, models.CASCADE) self.assertEqual(ShippingMethod.check(), []) def test_onetoone_with_parent_model(self): class Place(models.Model): pass class ParkingLot(Place): other_place = models.OneToOneField(Place, models.CASCADE, related_name='other_parking') self.assertEqual(ParkingLot.check(), []) def test_onetoone_with_explicit_parent_link_parent_model(self): class Place(models.Model): pass class ParkingLot(Place): place = models.OneToOneField(Place, models.CASCADE, parent_link=True, primary_key=True) other_place = models.OneToOneField(Place, models.CASCADE, related_name='other_parking') self.assertEqual(ParkingLot.check(), []) def test_m2m_table_name_clash(self): class Foo(models.Model): bar = models.ManyToManyField('Bar', db_table='myapp_bar') class Meta: db_table = 'myapp_foo' class Bar(models.Model): class Meta: db_table = 
'myapp_bar' self.assertEqual(Foo.check(), [ Error( "The field's intermediary table 'myapp_bar' clashes with the " "table name of 'invalid_models_tests.Bar'.", obj=Foo._meta.get_field('bar'), id='fields.E340', ) ]) @override_settings(DATABASE_ROUTERS=['invalid_models_tests.test_models.EmptyRouter']) def test_m2m_table_name_clash_database_routers_installed(self): class Foo(models.Model): bar = models.ManyToManyField('Bar', db_table='myapp_bar') class Meta: db_table = 'myapp_foo' class Bar(models.Model): class Meta: db_table = 'myapp_bar' self.assertEqual(Foo.check(), [ Warning( "The field's intermediary table 'myapp_bar' clashes with the " "table name of 'invalid_models_tests.Bar'.", obj=Foo._meta.get_field('bar'), hint=( "You have configured settings.DATABASE_ROUTERS. Verify " "that the table of 'invalid_models_tests.Bar' is " "correctly routed to a separate database." ), id='fields.W344', ), ]) def test_m2m_field_table_name_clash(self): class Foo(models.Model): pass class Bar(models.Model): foos = models.ManyToManyField(Foo, db_table='clash') class Baz(models.Model): foos = models.ManyToManyField(Foo, db_table='clash') self.assertEqual(Bar.check() + Baz.check(), [ Error( "The field's intermediary table 'clash' clashes with the " "table name of 'invalid_models_tests.Baz.foos'.", obj=Bar._meta.get_field('foos'), id='fields.E340', ), Error( "The field's intermediary table 'clash' clashes with the " "table name of 'invalid_models_tests.Bar.foos'.", obj=Baz._meta.get_field('foos'), id='fields.E340', ) ]) @override_settings(DATABASE_ROUTERS=['invalid_models_tests.test_models.EmptyRouter']) def test_m2m_field_table_name_clash_database_routers_installed(self): class Foo(models.Model): pass class Bar(models.Model): foos = models.ManyToManyField(Foo, db_table='clash') class Baz(models.Model): foos = models.ManyToManyField(Foo, db_table='clash') self.assertEqual(Bar.check() + Baz.check(), [ Warning( "The field's intermediary table 'clash' clashes with the " "table name of 'invalid_models_tests.%s.foos'." % clashing_model, obj=model_cls._meta.get_field('foos'), hint=( "You have configured settings.DATABASE_ROUTERS. Verify " "that the table of 'invalid_models_tests.%s.foos' is " "correctly routed to a separate database." % clashing_model ), id='fields.W344', ) for model_cls, clashing_model in [(Bar, 'Baz'), (Baz, 'Bar')] ]) def test_m2m_autogenerated_table_name_clash(self): class Foo(models.Model): class Meta: db_table = 'bar_foos' class Bar(models.Model): # The autogenerated `db_table` will be bar_foos. foos = models.ManyToManyField(Foo) class Meta: db_table = 'bar' self.assertEqual(Bar.check(), [ Error( "The field's intermediary table 'bar_foos' clashes with the " "table name of 'invalid_models_tests.Foo'.", obj=Bar._meta.get_field('foos'), id='fields.E340', ) ]) @override_settings(DATABASE_ROUTERS=['invalid_models_tests.test_models.EmptyRouter']) def test_m2m_autogenerated_table_name_clash_database_routers_installed(self): class Foo(models.Model): class Meta: db_table = 'bar_foos' class Bar(models.Model): # The autogenerated db_table is bar_foos. foos = models.ManyToManyField(Foo) class Meta: db_table = 'bar' self.assertEqual(Bar.check(), [ Warning( "The field's intermediary table 'bar_foos' clashes with the " "table name of 'invalid_models_tests.Foo'.", obj=Bar._meta.get_field('foos'), hint=( "You have configured settings.DATABASE_ROUTERS. Verify " "that the table of 'invalid_models_tests.Foo' is " "correctly routed to a separate database." 
), id='fields.W344', ), ]) def test_m2m_unmanaged_shadow_models_not_checked(self): class A1(models.Model): pass class C1(models.Model): mm_a = models.ManyToManyField(A1, db_table='d1') # Unmanaged models that shadow the above models. Reused table names # shouldn't be flagged by any checks. class A2(models.Model): class Meta: managed = False class C2(models.Model): mm_a = models.ManyToManyField(A2, through='Intermediate') class Meta: managed = False class Intermediate(models.Model): a2 = models.ForeignKey(A2, models.CASCADE, db_column='a1_id') c2 = models.ForeignKey(C2, models.CASCADE, db_column='c1_id') class Meta: db_table = 'd1' managed = False self.assertEqual(C1.check(), []) self.assertEqual(C2.check(), []) def test_m2m_to_concrete_and_proxy_allowed(self): class A(models.Model): pass class Through(models.Model): a = models.ForeignKey('A', models.CASCADE) c = models.ForeignKey('C', models.CASCADE) class ThroughProxy(Through): class Meta: proxy = True class C(models.Model): mm_a = models.ManyToManyField(A, through=Through) mm_aproxy = models.ManyToManyField(A, through=ThroughProxy, related_name='proxied_m2m') self.assertEqual(C.check(), []) @isolate_apps('django.contrib.auth', kwarg_name='apps') def test_lazy_reference_checks(self, apps): class DummyModel(models.Model): author = models.ForeignKey('Author', models.CASCADE) class Meta: app_label = 'invalid_models_tests' class DummyClass: def __call__(self, **kwargs): pass def dummy_method(self): pass def dummy_function(*args, **kwargs): pass apps.lazy_model_operation(dummy_function, ('auth', 'imaginarymodel')) apps.lazy_model_operation(dummy_function, ('fanciful_app', 'imaginarymodel')) post_init.connect(dummy_function, sender='missing-app.Model', apps=apps) post_init.connect(DummyClass(), sender='missing-app.Model', apps=apps) post_init.connect(DummyClass().dummy_method, sender='missing-app.Model', apps=apps) self.assertEqual(_check_lazy_references(apps), [ Error( "%r contains a lazy reference to auth.imaginarymodel, " "but app 'auth' doesn't provide model 'imaginarymodel'." % dummy_function, obj=dummy_function, id='models.E022', ), Error( "%r contains a lazy reference to fanciful_app.imaginarymodel, " "but app 'fanciful_app' isn't installed." 
% dummy_function, obj=dummy_function, id='models.E022', ), Error( "An instance of class 'DummyClass' was connected to " "the 'post_init' signal with a lazy reference to the sender " "'missing-app.model', but app 'missing-app' isn't installed.", hint=None, obj='invalid_models_tests.test_models', id='signals.E001', ), Error( "Bound method 'DummyClass.dummy_method' was connected to the " "'post_init' signal with a lazy reference to the sender " "'missing-app.model', but app 'missing-app' isn't installed.", hint=None, obj='invalid_models_tests.test_models', id='signals.E001', ), Error( "The field invalid_models_tests.DummyModel.author was declared " "with a lazy reference to 'invalid_models_tests.author', but app " "'invalid_models_tests' isn't installed.", hint=None, obj=DummyModel.author.field, id='fields.E307', ), Error( "The function 'dummy_function' was connected to the 'post_init' " "signal with a lazy reference to the sender " "'missing-app.model', but app 'missing-app' isn't installed.", hint=None, obj='invalid_models_tests.test_models', id='signals.E001', ), ]) @isolate_apps('invalid_models_tests') class JSONFieldTests(TestCase): @skipUnlessDBFeature('supports_json_field') def test_ordering_pointing_to_json_field_value(self): class Model(models.Model): field = models.JSONField() class Meta: ordering = ['field__value'] self.assertEqual(Model.check(databases=self.databases), []) def test_check_jsonfield(self): class Model(models.Model): field = models.JSONField() error = Error( '%s does not support JSONFields.' % connection.display_name, obj=Model, id='fields.E180', ) expected = [] if connection.features.supports_json_field else [error] self.assertEqual(Model.check(databases=self.databases), expected) def test_check_jsonfield_required_db_features(self): class Model(models.Model): field = models.JSONField() class Meta: required_db_features = {'supports_json_field'} self.assertEqual(Model.check(databases=self.databases), []) @isolate_apps('invalid_models_tests') class ConstraintsTests(TestCase): def test_check_constraints(self): class Model(models.Model): age = models.IntegerField() class Meta: constraints = [models.CheckConstraint(check=models.Q(age__gte=18), name='is_adult')] errors = Model.check(databases=self.databases) warn = Warning( '%s does not support check constraints.' % connection.display_name, hint=( "A constraint won't be created. Silence this warning if you " "don't care about it." ), obj=Model, id='models.W027', ) expected = [] if connection.features.supports_table_check_constraints else [warn] self.assertCountEqual(errors, expected) def test_check_constraints_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {'supports_table_check_constraints'} constraints = [models.CheckConstraint(check=models.Q(age__gte=18), name='is_adult')] self.assertEqual(Model.check(databases=self.databases), []) def test_unique_constraint_with_condition(self): class Model(models.Model): age = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_gte_100', condition=models.Q(age__gte=100), ), ] errors = Model.check(databases=self.databases) expected = [] if connection.features.supports_partial_indexes else [ Warning( '%s does not support unique constraints with conditions.' % connection.display_name, hint=( "A constraint won't be created. Silence this warning if " "you don't care about it." 
), obj=Model, id='models.W036', ), ] self.assertEqual(errors, expected) def test_unique_constraint_with_condition_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {'supports_partial_indexes'} constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_gte_100', condition=models.Q(age__gte=100), ), ] self.assertEqual(Model.check(databases=self.databases), []) def test_deferrable_unique_constraint(self): class Model(models.Model): age = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_deferrable', deferrable=models.Deferrable.DEFERRED, ), ] errors = Model.check(databases=self.databases) expected = [] if connection.features.supports_deferrable_unique_constraints else [ Warning( '%s does not support deferrable unique constraints.' % connection.display_name, hint=( "A constraint won't be created. Silence this warning if " "you don't care about it." ), obj=Model, id='models.W038', ), ] self.assertEqual(errors, expected) def test_deferrable_unique_constraint_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {'supports_deferrable_unique_constraints'} constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_deferrable', deferrable=models.Deferrable.IMMEDIATE, ), ] self.assertEqual(Model.check(databases=self.databases), []) def test_unique_constraint_pointing_to_missing_field(self): class Model(models.Model): class Meta: constraints = [models.UniqueConstraint(fields=['missing_field'], name='name')] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field " "'missing_field'.", obj=Model, id='models.E012', ), ]) def test_unique_constraint_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: constraints = [models.UniqueConstraint(fields=['m2m'], name='name')] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'constraints'.", obj=Model, id='models.E013', ), ]) def test_unique_constraint_pointing_to_non_local_field(self): class Parent(models.Model): field1 = models.IntegerField() class Child(Parent): field2 = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint(fields=['field2', 'field1'], name='name'), ] self.assertEqual(Child.check(databases=self.databases), [ Error( "'constraints' refers to field 'field1' which is not local to " "model 'Child'.", hint='This issue may be caused by multi-table inheritance.', obj=Child, id='models.E016', ), ]) def test_unique_constraint_pointing_to_fk(self): class Target(models.Model): pass class Model(models.Model): fk_1 = models.ForeignKey(Target, models.CASCADE, related_name='target_1') fk_2 = models.ForeignKey(Target, models.CASCADE, related_name='target_2') class Meta: constraints = [ models.UniqueConstraint(fields=['fk_1_id', 'fk_2'], name='name'), ] self.assertEqual(Model.check(databases=self.databases), []) def test_unique_constraint_with_include(self): class Model(models.Model): age = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_include_id', include=['id'], ), ] errors = Model.check(databases=self.databases) expected = [] if connection.features.supports_covering_indexes else [ Warning( '%s does not support unique constraints with non-key 
columns.' % connection.display_name, hint=( "A constraint won't be created. Silence this warning if " "you don't care about it." ), obj=Model, id='models.W039', ), ] self.assertEqual(errors, expected) def test_unique_constraint_with_include_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {'supports_covering_indexes'} constraints = [ models.UniqueConstraint( fields=['age'], name='unique_age_include_id', include=['id'], ), ] self.assertEqual(Model.check(databases=self.databases), []) @skipUnlessDBFeature('supports_covering_indexes') def test_unique_constraint_include_pointing_to_missing_field(self): class Model(models.Model): class Meta: constraints = [ models.UniqueConstraint( fields=['id'], include=['missing_field'], name='name', ), ] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field " "'missing_field'.", obj=Model, id='models.E012', ), ]) @skipUnlessDBFeature('supports_covering_indexes') def test_unique_constraint_include_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField('self') class Meta: constraints = [ models.UniqueConstraint( fields=['id'], include=['m2m'], name='name', ), ] self.assertEqual(Model.check(databases=self.databases), [ Error( "'constraints' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'constraints'.", obj=Model, id='models.E013', ), ]) @skipUnlessDBFeature('supports_covering_indexes') def test_unique_constraint_include_pointing_to_non_local_field(self): class Parent(models.Model): field1 = models.IntegerField() class Child(Parent): field2 = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=['field2'], include=['field1'], name='name', ), ] self.assertEqual(Child.check(databases=self.databases), [ Error( "'constraints' refers to field 'field1' which is not local to " "model 'Child'.", hint='This issue may be caused by multi-table inheritance.', obj=Child, id='models.E016', ), ]) @skipUnlessDBFeature('supports_covering_indexes') def test_unique_constraint_include_pointing_to_fk(self): class Target(models.Model): pass class Model(models.Model): fk_1 = models.ForeignKey(Target, models.CASCADE, related_name='target_1') fk_2 = models.ForeignKey(Target, models.CASCADE, related_name='target_2') class Meta: constraints = [ models.UniqueConstraint( fields=['id'], include=['fk_1_id', 'fk_2'], name='name', ), ] self.assertEqual(Model.check(databases=self.databases), [])
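# Illustrative sketch (hypothetical model): the required_db_features pattern
# used throughout the tests above. Declaring the feature a constraint depends
# on suppresses the backend-capability warnings (models.W036/W038/W039) on
# databases that lack the feature, marking the constraint as intentionally
# backend-specific.
from django.db import models


class SketchAgeRecord(models.Model):
    age = models.IntegerField()

    class Meta:
        app_label = 'invalid_models_tests'
        required_db_features = {'supports_partial_indexes'}
        constraints = [
            models.UniqueConstraint(
                fields=['age'],
                name='sketch_age_gte_100',
                condition=models.Q(age__gte=100),
            ),
        ]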
4148a6fa5ff921a9159f403b9a386bfd8e4efac743980f34a4d9d3038f6ed167
from unittest import mock from django.core.exceptions import ValidationError from django.db import IntegrityError, connection, models from django.db.models.constraints import BaseConstraint from django.db.transaction import atomic from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature from .models import ( ChildModel, Product, UniqueConstraintConditionProduct, UniqueConstraintDeferrable, UniqueConstraintInclude, UniqueConstraintProduct, ) def get_constraints(table): with connection.cursor() as cursor: return connection.introspection.get_constraints(cursor, table) class BaseConstraintTests(SimpleTestCase): def test_constraint_sql(self): c = BaseConstraint('name') msg = 'This method must be implemented by a subclass.' with self.assertRaisesMessage(NotImplementedError, msg): c.constraint_sql(None, None) def test_create_sql(self): c = BaseConstraint('name') msg = 'This method must be implemented by a subclass.' with self.assertRaisesMessage(NotImplementedError, msg): c.create_sql(None, None) def test_remove_sql(self): c = BaseConstraint('name') msg = 'This method must be implemented by a subclass.' with self.assertRaisesMessage(NotImplementedError, msg): c.remove_sql(None, None) class CheckConstraintTests(TestCase): def test_eq(self): check1 = models.Q(price__gt=models.F('discounted_price')) check2 = models.Q(price__lt=models.F('discounted_price')) self.assertEqual( models.CheckConstraint(check=check1, name='price'), models.CheckConstraint(check=check1, name='price'), ) self.assertEqual(models.CheckConstraint(check=check1, name='price'), mock.ANY) self.assertNotEqual( models.CheckConstraint(check=check1, name='price'), models.CheckConstraint(check=check1, name='price2'), ) self.assertNotEqual( models.CheckConstraint(check=check1, name='price'), models.CheckConstraint(check=check2, name='price'), ) self.assertNotEqual(models.CheckConstraint(check=check1, name='price'), 1) def test_repr(self): check = models.Q(price__gt=models.F('discounted_price')) name = 'price_gt_discounted_price' constraint = models.CheckConstraint(check=check, name=name) self.assertEqual( repr(constraint), "<CheckConstraint: check='{}' name='{}'>".format(check, name), ) def test_invalid_check_types(self): msg = ( 'CheckConstraint.check must be a Q instance or boolean expression.' 
) with self.assertRaisesMessage(TypeError, msg): models.CheckConstraint(check=models.F('discounted_price'), name='check') def test_deconstruction(self): check = models.Q(price__gt=models.F('discounted_price')) name = 'price_gt_discounted_price' constraint = models.CheckConstraint(check=check, name=name) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, 'django.db.models.CheckConstraint') self.assertEqual(args, ()) self.assertEqual(kwargs, {'check': check, 'name': name}) @skipUnlessDBFeature('supports_table_check_constraints') def test_database_constraint(self): Product.objects.create(price=10, discounted_price=5) with self.assertRaises(IntegrityError): Product.objects.create(price=10, discounted_price=20) @skipUnlessDBFeature('supports_table_check_constraints') def test_database_constraint_expression(self): Product.objects.create(price=999, discounted_price=5) with self.assertRaises(IntegrityError): Product.objects.create(price=1000, discounted_price=5) @skipUnlessDBFeature('supports_table_check_constraints') def test_database_constraint_expressionwrapper(self): Product.objects.create(price=499, discounted_price=5) with self.assertRaises(IntegrityError): Product.objects.create(price=500, discounted_price=5) @skipUnlessDBFeature('supports_table_check_constraints', 'can_introspect_check_constraints') def test_name(self): constraints = get_constraints(Product._meta.db_table) for expected_name in ( 'price_gt_discounted_price', 'constraints_price_lt_1000_raw', 'constraints_price_neq_500_wrap', 'constraints_product_price_gt_0', ): with self.subTest(expected_name): self.assertIn(expected_name, constraints) @skipUnlessDBFeature('supports_table_check_constraints', 'can_introspect_check_constraints') def test_abstract_name(self): constraints = get_constraints(ChildModel._meta.db_table) self.assertIn('constraints_childmodel_adult', constraints) class UniqueConstraintTests(TestCase): @classmethod def setUpTestData(cls): cls.p1, cls.p2 = UniqueConstraintProduct.objects.bulk_create([ UniqueConstraintProduct(name='p1', color='red'), UniqueConstraintProduct(name='p2'), ]) def test_eq(self): self.assertEqual( models.UniqueConstraint(fields=['foo', 'bar'], name='unique'), models.UniqueConstraint(fields=['foo', 'bar'], name='unique'), ) self.assertEqual( models.UniqueConstraint(fields=['foo', 'bar'], name='unique'), mock.ANY, ) self.assertNotEqual( models.UniqueConstraint(fields=['foo', 'bar'], name='unique'), models.UniqueConstraint(fields=['foo', 'bar'], name='unique2'), ) self.assertNotEqual( models.UniqueConstraint(fields=['foo', 'bar'], name='unique'), models.UniqueConstraint(fields=['foo', 'baz'], name='unique'), ) self.assertNotEqual(models.UniqueConstraint(fields=['foo', 'bar'], name='unique'), 1) def test_eq_with_condition(self): self.assertEqual( models.UniqueConstraint( fields=['foo', 'bar'], name='unique', condition=models.Q(foo=models.F('bar')) ), models.UniqueConstraint( fields=['foo', 'bar'], name='unique', condition=models.Q(foo=models.F('bar'))), ) self.assertNotEqual( models.UniqueConstraint( fields=['foo', 'bar'], name='unique', condition=models.Q(foo=models.F('bar')) ), models.UniqueConstraint( fields=['foo', 'bar'], name='unique', condition=models.Q(foo=models.F('baz')) ), ) def test_eq_with_deferrable(self): constraint_1 = models.UniqueConstraint( fields=['foo', 'bar'], name='unique', deferrable=models.Deferrable.DEFERRED, ) constraint_2 = models.UniqueConstraint( fields=['foo', 'bar'], name='unique', deferrable=models.Deferrable.IMMEDIATE, ) 
self.assertEqual(constraint_1, constraint_1) self.assertNotEqual(constraint_1, constraint_2) def test_eq_with_include(self): constraint_1 = models.UniqueConstraint( fields=['foo', 'bar'], name='include', include=['baz_1'], ) constraint_2 = models.UniqueConstraint( fields=['foo', 'bar'], name='include', include=['baz_2'], ) self.assertEqual(constraint_1, constraint_1) self.assertNotEqual(constraint_1, constraint_2) def test_repr(self): fields = ['foo', 'bar'] name = 'unique_fields' constraint = models.UniqueConstraint(fields=fields, name=name) self.assertEqual( repr(constraint), "<UniqueConstraint: fields=('foo', 'bar') name='unique_fields'>", ) def test_repr_with_condition(self): constraint = models.UniqueConstraint( fields=['foo', 'bar'], name='unique_fields', condition=models.Q(foo=models.F('bar')), ) self.assertEqual( repr(constraint), "<UniqueConstraint: fields=('foo', 'bar') name='unique_fields' " "condition=(AND: ('foo', F(bar)))>", ) def test_repr_with_deferrable(self): constraint = models.UniqueConstraint( fields=['foo', 'bar'], name='unique_fields', deferrable=models.Deferrable.IMMEDIATE, ) self.assertEqual( repr(constraint), "<UniqueConstraint: fields=('foo', 'bar') name='unique_fields' " "deferrable=Deferrable.IMMEDIATE>", ) def test_repr_with_include(self): constraint = models.UniqueConstraint( fields=['foo', 'bar'], name='include_fields', include=['baz_1', 'baz_2'], ) self.assertEqual( repr(constraint), "<UniqueConstraint: fields=('foo', 'bar') name='include_fields' " "include=('baz_1', 'baz_2')>", ) def test_deconstruction(self): fields = ['foo', 'bar'] name = 'unique_fields' constraint = models.UniqueConstraint(fields=fields, name=name) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, 'django.db.models.UniqueConstraint') self.assertEqual(args, ()) self.assertEqual(kwargs, {'fields': tuple(fields), 'name': name}) def test_deconstruction_with_condition(self): fields = ['foo', 'bar'] name = 'unique_fields' condition = models.Q(foo=models.F('bar')) constraint = models.UniqueConstraint(fields=fields, name=name, condition=condition) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, 'django.db.models.UniqueConstraint') self.assertEqual(args, ()) self.assertEqual(kwargs, {'fields': tuple(fields), 'name': name, 'condition': condition}) def test_deconstruction_with_deferrable(self): fields = ['foo'] name = 'unique_fields' constraint = models.UniqueConstraint( fields=fields, name=name, deferrable=models.Deferrable.DEFERRED, ) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, 'django.db.models.UniqueConstraint') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'fields': tuple(fields), 'name': name, 'deferrable': models.Deferrable.DEFERRED, }) def test_deconstruction_with_include(self): fields = ['foo', 'bar'] name = 'unique_fields' include = ['baz_1', 'baz_2'] constraint = models.UniqueConstraint(fields=fields, name=name, include=include) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, 'django.db.models.UniqueConstraint') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'fields': tuple(fields), 'name': name, 'include': tuple(include), }) def test_database_constraint(self): with self.assertRaises(IntegrityError): UniqueConstraintProduct.objects.create(name=self.p1.name, color=self.p1.color) @skipUnlessDBFeature('supports_partial_indexes') def test_database_constraint_with_condition(self): UniqueConstraintConditionProduct.objects.create(name='p1') 
        UniqueConstraintConditionProduct.objects.create(name='p2')
        with self.assertRaises(IntegrityError):
            UniqueConstraintConditionProduct.objects.create(name='p1')

    def test_model_validation(self):
        msg = 'Unique constraint product with this Name and Color already exists.'
        with self.assertRaisesMessage(ValidationError, msg):
            UniqueConstraintProduct(name=self.p1.name, color=self.p1.color).validate_unique()

    @skipUnlessDBFeature('supports_partial_indexes')
    def test_model_validation_with_condition(self):
        """Partial unique constraints are ignored by Model.validate_unique()."""
        obj1 = UniqueConstraintConditionProduct.objects.create(name='p1', color='red')
        obj2 = UniqueConstraintConditionProduct.objects.create(name='p2')
        UniqueConstraintConditionProduct(name=obj1.name, color='blue').validate_unique()
        UniqueConstraintConditionProduct(name=obj2.name).validate_unique()

    def test_name(self):
        constraints = get_constraints(UniqueConstraintProduct._meta.db_table)
        expected_name = 'name_color_uniq'
        self.assertIn(expected_name, constraints)

    def test_condition_must_be_q(self):
        with self.assertRaisesMessage(ValueError, 'UniqueConstraint.condition must be a Q instance.'):
            models.UniqueConstraint(name='uniq', fields=['name'], condition='invalid')

    @skipUnlessDBFeature('supports_deferrable_unique_constraints')
    def test_initially_deferred_database_constraint(self):
        obj_1 = UniqueConstraintDeferrable.objects.create(name='p1', shelf='front')
        obj_2 = UniqueConstraintDeferrable.objects.create(name='p2', shelf='back')

        def swap():
            obj_1.name, obj_2.name = obj_2.name, obj_1.name
            obj_1.save()
            obj_2.save()

        swap()
        # Behavior can be changed with SET CONSTRAINTS.
        with self.assertRaises(IntegrityError):
            with atomic(), connection.cursor() as cursor:
                constraint_name = connection.ops.quote_name('name_init_deferred_uniq')
                cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % constraint_name)
                swap()

    @skipUnlessDBFeature('supports_deferrable_unique_constraints')
    def test_initially_immediate_database_constraint(self):
        obj_1 = UniqueConstraintDeferrable.objects.create(name='p1', shelf='front')
        obj_2 = UniqueConstraintDeferrable.objects.create(name='p2', shelf='back')
        obj_1.shelf, obj_2.shelf = obj_2.shelf, obj_1.shelf
        with self.assertRaises(IntegrityError), atomic():
            obj_1.save()
        # Behavior can be changed with SET CONSTRAINTS.
        with connection.cursor() as cursor:
            constraint_name = connection.ops.quote_name('shelf_init_immediate_uniq')
            cursor.execute('SET CONSTRAINTS %s DEFERRED' % constraint_name)
            obj_1.save()
            obj_2.save()

    def test_deferrable_with_condition(self):
        message = 'UniqueConstraint with conditions cannot be deferred.'
        with self.assertRaisesMessage(ValueError, message):
            models.UniqueConstraint(
                fields=['name'],
                name='name_without_color_unique',
                condition=models.Q(color__isnull=True),
                deferrable=models.Deferrable.DEFERRED,
            )

    def test_invalid_defer_argument(self):
        message = 'UniqueConstraint.deferrable must be a Deferrable instance.'
        with self.assertRaisesMessage(ValueError, message):
            models.UniqueConstraint(
                fields=['name'],
                name='name_invalid',
                deferrable='invalid',
            )

    @skipUnlessDBFeature(
        'supports_table_check_constraints',
        'supports_covering_indexes',
    )
    def test_include_database_constraint(self):
        UniqueConstraintInclude.objects.create(name='p1', color='red')
        with self.assertRaises(IntegrityError):
            UniqueConstraintInclude.objects.create(name='p1', color='blue')

    def test_invalid_include_argument(self):
        msg = 'UniqueConstraint.include must be a list or tuple.'
with self.assertRaisesMessage(ValueError, msg): models.UniqueConstraint( name='uniq_include', fields=['field'], include='other', )
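

# The deferrable tests above depend on PostgreSQL-style deferrable unique
# constraints. A sketch of the DDL the two UniqueConstraintDeferrable
# constraints map to, assuming the default table name (quoting and exact
# syntax vary by backend):
#
#   ALTER TABLE constraints_uniqueconstraintdeferrable
#       ADD CONSTRAINT name_init_deferred_uniq
#       UNIQUE (name) DEFERRABLE INITIALLY DEFERRED;
#
#   ALTER TABLE constraints_uniqueconstraintdeferrable
#       ADD CONSTRAINT shelf_init_immediate_uniq
#       UNIQUE (shelf) DEFERRABLE INITIALLY IMMEDIATE;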
16a050fe3cd61c11727008aa4be90c72be43cf70c0e8d05adfcb017ed79442f7
from django.db import models


class Product(models.Model):
    price = models.IntegerField(null=True)
    discounted_price = models.IntegerField(null=True)

    class Meta:
        required_db_features = {
            'supports_table_check_constraints',
        }
        constraints = [
            models.CheckConstraint(
                check=models.Q(price__gt=models.F('discounted_price')),
                name='price_gt_discounted_price',
            ),
            models.CheckConstraint(
                check=models.Q(price__gt=0),
                name='%(app_label)s_%(class)s_price_gt_0',
            ),
            models.CheckConstraint(
                check=models.expressions.RawSQL(
                    'price < %s', (1000,), output_field=models.BooleanField()
                ),
                name='%(app_label)s_price_lt_1000_raw',
            ),
            models.CheckConstraint(
                check=models.expressions.ExpressionWrapper(
                    models.Q(price__gt=500) | models.Q(price__lt=500),
                    output_field=models.BooleanField()
                ),
                name='%(app_label)s_price_neq_500_wrap',
            ),
        ]


class UniqueConstraintProduct(models.Model):
    name = models.CharField(max_length=255)
    color = models.CharField(max_length=32, null=True)

    class Meta:
        constraints = [
            models.UniqueConstraint(fields=['name', 'color'], name='name_color_uniq'),
        ]


class UniqueConstraintConditionProduct(models.Model):
    name = models.CharField(max_length=255)
    color = models.CharField(max_length=32, null=True)

    class Meta:
        required_db_features = {'supports_partial_indexes'}
        constraints = [
            models.UniqueConstraint(
                fields=['name'],
                name='name_without_color_uniq',
                condition=models.Q(color__isnull=True),
            ),
        ]


class UniqueConstraintDeferrable(models.Model):
    name = models.CharField(max_length=255)
    shelf = models.CharField(max_length=31)

    class Meta:
        required_db_features = {
            'supports_deferrable_unique_constraints',
        }
        constraints = [
            models.UniqueConstraint(
                fields=['name'],
                name='name_init_deferred_uniq',
                deferrable=models.Deferrable.DEFERRED,
            ),
            models.UniqueConstraint(
                fields=['shelf'],
                name='shelf_init_immediate_uniq',
                deferrable=models.Deferrable.IMMEDIATE,
            ),
        ]


class UniqueConstraintInclude(models.Model):
    name = models.CharField(max_length=255)
    color = models.CharField(max_length=32, null=True)

    class Meta:
        required_db_features = {
            'supports_table_check_constraints',
            'supports_covering_indexes',
        }
        constraints = [
            models.UniqueConstraint(
                fields=['name'],
                name='name_include_color_uniq',
                include=['color'],
            ),
        ]


class AbstractModel(models.Model):
    age = models.IntegerField()

    class Meta:
        abstract = True
        required_db_features = {
            'supports_table_check_constraints',
        }
        constraints = [
            models.CheckConstraint(
                check=models.Q(age__gte=18),
                name='%(app_label)s_%(class)s_adult',
            ),
        ]


class ChildModel(AbstractModel):
    pass
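

# A sketch of how the conditional constraint on
# UniqueConstraintConditionProduct renders on a backend with partial-index
# support such as PostgreSQL, assuming the default table name:
#
#   CREATE UNIQUE INDEX name_without_color_uniq
#       ON constraints_uniqueconstraintconditionproduct (name)
#       WHERE color IS NULL;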
ba11d9817a2f4f7fa77bde80a3606b4cb616eefac14d1fcfc9baff75c18915f9
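# The building blocks exercised throughout this module: F('field') compiles
# to a column reference, Value(x) wraps a literal, and Subquery/OuterRef
# correlate an inner queryset with the outer query. A minimal sketch with
# hypothetical Post/Comment models (adapted from the Django documentation):
#
#   from django.db.models import OuterRef, Subquery
#
#   newest = Comment.objects.filter(post=OuterRef('pk')).order_by('-created')
#   Post.objects.annotate(newest_email=Subquery(newest.values('email')[:1]))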
import datetime import pickle import unittest import uuid from copy import deepcopy from unittest import mock from django.core.exceptions import FieldError from django.db import DatabaseError, NotSupportedError, connection from django.db.models import ( Avg, BooleanField, Case, CharField, Count, DateField, DateTimeField, DurationField, Exists, Expression, ExpressionList, ExpressionWrapper, F, Func, IntegerField, Max, Min, Model, OrderBy, OuterRef, Q, StdDev, Subquery, Sum, TimeField, UUIDField, Value, Variance, When, ) from django.db.models.expressions import Col, Combinable, Random, RawSQL, Ref from django.db.models.functions import ( Coalesce, Concat, Left, Length, Lower, Substr, Upper, ) from django.db.models.sql import constants from django.db.models.sql.datastructures import Join from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature from django.test.utils import Approximate, isolate_apps from django.utils.functional import SimpleLazyObject from .models import ( UUID, UUIDPK, Company, Employee, Experiment, Manager, Number, RemoteEmployee, Result, SimulationRun, Time, ) class BasicExpressionsTests(TestCase): @classmethod def setUpTestData(cls): cls.example_inc = Company.objects.create( name="Example Inc.", num_employees=2300, num_chairs=5, ceo=Employee.objects.create(firstname="Joe", lastname="Smith", salary=10) ) cls.foobar_ltd = Company.objects.create( name="Foobar Ltd.", num_employees=3, num_chairs=4, based_in_eu=True, ceo=Employee.objects.create(firstname="Frank", lastname="Meyer", salary=20) ) cls.max = Employee.objects.create(firstname='Max', lastname='Mustermann', salary=30) cls.gmbh = Company.objects.create(name='Test GmbH', num_employees=32, num_chairs=1, ceo=cls.max) def setUp(self): self.company_query = Company.objects.values( "name", "num_employees", "num_chairs" ).order_by( "name", "num_employees", "num_chairs" ) def test_annotate_values_aggregate(self): companies = Company.objects.annotate( salaries=F('ceo__salary'), ).values('num_employees', 'salaries').aggregate( result=Sum( F('salaries') + F('num_employees'), output_field=IntegerField() ), ) self.assertEqual(companies['result'], 2395) def test_annotate_values_filter(self): companies = Company.objects.annotate( foo=RawSQL('%s', ['value']), ).filter(foo='value').order_by('name') self.assertQuerysetEqual( companies, ['<Company: Example Inc.>', '<Company: Foobar Ltd.>', '<Company: Test GmbH>'], ) def test_annotate_values_count(self): companies = Company.objects.annotate(foo=RawSQL('%s', ['value'])) self.assertEqual(companies.count(), 3) @skipUnlessDBFeature('supports_boolean_expr_in_select_clause') def test_filtering_on_annotate_that_uses_q(self): self.assertEqual( Company.objects.annotate( num_employees_check=ExpressionWrapper(Q(num_employees__gt=3), output_field=BooleanField()) ).filter(num_employees_check=True).count(), 2, ) def test_filtering_on_q_that_is_boolean(self): self.assertEqual( Company.objects.filter( ExpressionWrapper(Q(num_employees__gt=3), output_field=BooleanField()) ).count(), 2, ) def test_filtering_on_rawsql_that_is_boolean(self): self.assertEqual( Company.objects.filter( RawSQL('num_employees > %s', (3,), output_field=BooleanField()), ).count(), 2, ) def test_filter_inter_attribute(self): # We can filter on attribute relationships on same model obj, e.g. # find companies where the number of employees is greater # than the number of chairs. 
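        # Roughly: SELECT ... FROM expressions_company
        # WHERE num_employees > num_chairs -- the F() on the right-hand side
        # compiles to a column reference rather than a query parameter.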
        self.assertSequenceEqual(
            self.company_query.filter(num_employees__gt=F("num_chairs")),
            [
                {
                    "num_chairs": 5,
                    "name": "Example Inc.",
                    "num_employees": 2300,
                },
                {
                    "num_chairs": 1,
                    "name": "Test GmbH",
                    "num_employees": 32
                },
            ],
        )

    def test_update(self):
        # We can set one field to have the value of another field
        # Make sure we have enough chairs
        self.company_query.update(num_chairs=F("num_employees"))
        self.assertSequenceEqual(
            self.company_query, [
                {
                    "num_chairs": 2300,
                    "name": "Example Inc.",
                    "num_employees": 2300
                },
                {
                    "num_chairs": 3,
                    "name": "Foobar Ltd.",
                    "num_employees": 3
                },
                {
                    "num_chairs": 32,
                    "name": "Test GmbH",
                    "num_employees": 32
                }
            ],
        )

    def test_arithmetic(self):
        # We can perform arithmetic operations in expressions
        # Make sure we have 2 spare chairs
        self.company_query.update(num_chairs=F("num_employees") + 2)
        self.assertSequenceEqual(
            self.company_query, [
                {
                    'num_chairs': 2302,
                    'name': 'Example Inc.',
                    'num_employees': 2300
                },
                {
                    'num_chairs': 5,
                    'name': 'Foobar Ltd.',
                    'num_employees': 3
                },
                {
                    'num_chairs': 34,
                    'name': 'Test GmbH',
                    'num_employees': 32
                }
            ],
        )

    def test_order_of_operations(self):
        # The standard order of operations is followed.
        self.company_query.update(num_chairs=F('num_employees') + 2 * F('num_employees'))
        self.assertSequenceEqual(
            self.company_query, [
                {
                    'num_chairs': 6900,
                    'name': 'Example Inc.',
                    'num_employees': 2300
                },
                {
                    'num_chairs': 9,
                    'name': 'Foobar Ltd.',
                    'num_employees': 3
                },
                {
                    'num_chairs': 96,
                    'name': 'Test GmbH',
                    'num_employees': 32
                }
            ],
        )

    def test_parenthesis_priority(self):
        # The order of operations can be overridden with parentheses.
        self.company_query.update(num_chairs=(F('num_employees') + 2) * F('num_employees'))
        self.assertSequenceEqual(
            self.company_query, [
                {
                    'num_chairs': 5294600,
                    'name': 'Example Inc.',
                    'num_employees': 2300
                },
                {
                    'num_chairs': 15,
                    'name': 'Foobar Ltd.',
                    'num_employees': 3
                },
                {
                    'num_chairs': 1088,
                    'name': 'Test GmbH',
                    'num_employees': 32
                }
            ],
        )

    def test_update_with_fk(self):
        # A ForeignKey can be updated with the value of another ForeignKey.
self.assertEqual(Company.objects.update(point_of_contact=F('ceo')), 3) self.assertQuerysetEqual( Company.objects.all(), ['Joe Smith', 'Frank Meyer', 'Max Mustermann'], lambda c: str(c.point_of_contact), ordered=False ) def test_update_with_none(self): Number.objects.create(integer=1, float=1.0) Number.objects.create(integer=2) Number.objects.filter(float__isnull=False).update(float=Value(None)) self.assertQuerysetEqual( Number.objects.all(), [None, None], lambda n: n.float, ordered=False ) def test_filter_with_join(self): # F Expressions can also span joins Company.objects.update(point_of_contact=F('ceo')) c = Company.objects.first() c.point_of_contact = Employee.objects.create(firstname="Guido", lastname="van Rossum") c.save() self.assertQuerysetEqual( Company.objects.filter(ceo__firstname=F('point_of_contact__firstname')), ['Foobar Ltd.', 'Test GmbH'], lambda c: c.name, ordered=False ) Company.objects.exclude( ceo__firstname=F("point_of_contact__firstname") ).update(name="foo") self.assertEqual( Company.objects.exclude( ceo__firstname=F('point_of_contact__firstname') ).get().name, "foo", ) msg = "Joined field references are not permitted in this query" with self.assertRaisesMessage(FieldError, msg): Company.objects.exclude( ceo__firstname=F('point_of_contact__firstname') ).update(name=F('point_of_contact__lastname')) def test_object_update(self): # F expressions can be used to update attributes on single objects self.gmbh.num_employees = F('num_employees') + 4 self.gmbh.save() self.gmbh.refresh_from_db() self.assertEqual(self.gmbh.num_employees, 36) def test_new_object_save(self): # We should be able to use Funcs when inserting new data test_co = Company(name=Lower(Value('UPPER')), num_employees=32, num_chairs=1, ceo=self.max) test_co.save() test_co.refresh_from_db() self.assertEqual(test_co.name, "upper") def test_new_object_create(self): test_co = Company.objects.create(name=Lower(Value('UPPER')), num_employees=32, num_chairs=1, ceo=self.max) test_co.refresh_from_db() self.assertEqual(test_co.name, "upper") def test_object_create_with_aggregate(self): # Aggregates are not allowed when inserting new data msg = 'Aggregate functions are not allowed in this query (num_employees=Max(Value(1))).' with self.assertRaisesMessage(FieldError, msg): Company.objects.create( name='Company', num_employees=Max(Value(1)), num_chairs=1, ceo=Employee.objects.create(firstname="Just", lastname="Doit", salary=30), ) def test_object_update_fk(self): # F expressions cannot be used to update attributes which are foreign # keys, or attributes which involve joins. test_gmbh = Company.objects.get(pk=self.gmbh.pk) msg = 'F(ceo)": "Company.point_of_contact" must be a "Employee" instance.' 
with self.assertRaisesMessage(ValueError, msg): test_gmbh.point_of_contact = F('ceo') test_gmbh.point_of_contact = self.gmbh.ceo test_gmbh.save() test_gmbh.name = F('ceo__lastname') msg = 'Joined field references are not permitted in this query' with self.assertRaisesMessage(FieldError, msg): test_gmbh.save() def test_update_inherited_field_value(self): msg = 'Joined field references are not permitted in this query' with self.assertRaisesMessage(FieldError, msg): RemoteEmployee.objects.update(adjusted_salary=F('salary') * 5) def test_object_update_unsaved_objects(self): # F expressions cannot be used to update attributes on objects which do # not yet exist in the database acme = Company(name='The Acme Widget Co.', num_employees=12, num_chairs=5, ceo=self.max) acme.num_employees = F("num_employees") + 16 msg = ( 'Failed to insert expression "Col(expressions_company, ' 'expressions.Company.num_employees) + Value(16)" on ' 'expressions.Company.num_employees. F() expressions can only be ' 'used to update, not to insert.' ) with self.assertRaisesMessage(ValueError, msg): acme.save() acme.num_employees = 12 acme.name = Lower(F('name')) msg = ( 'Failed to insert expression "Lower(Col(expressions_company, ' 'expressions.Company.name))" on expressions.Company.name. F() ' 'expressions can only be used to update, not to insert.' ) with self.assertRaisesMessage(ValueError, msg): acme.save() def test_ticket_11722_iexact_lookup(self): Employee.objects.create(firstname="John", lastname="Doe") Employee.objects.create(firstname="Test", lastname="test") queryset = Employee.objects.filter(firstname__iexact=F('lastname')) self.assertQuerysetEqual(queryset, ["<Employee: Test test>"]) def test_ticket_16731_startswith_lookup(self): Employee.objects.create(firstname="John", lastname="Doe") e2 = Employee.objects.create(firstname="Jack", lastname="Jackson") e3 = Employee.objects.create(firstname="Jack", lastname="jackson") self.assertSequenceEqual( Employee.objects.filter(lastname__startswith=F('firstname')), [e2, e3] if connection.features.has_case_insensitive_like else [e2] ) qs = Employee.objects.filter(lastname__istartswith=F('firstname')).order_by('pk') self.assertSequenceEqual(qs, [e2, e3]) def test_ticket_18375_join_reuse(self): # Reverse multijoin F() references and the lookup target the same join. # Pre #18375 the F() join was generated first and the lookup couldn't # reuse that join. qs = Employee.objects.filter(company_ceo_set__num_chairs=F('company_ceo_set__num_employees')) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_ticket_18375_kwarg_ordering(self): # The next query was dict-randomization dependent - if the "gte=1" # was seen first, then the F() will reuse the join generated by the # gte lookup, if F() was seen first, then it generated a join the # other lookups could not reuse. qs = Employee.objects.filter( company_ceo_set__num_chairs=F('company_ceo_set__num_employees'), company_ceo_set__num_chairs__gte=1, ) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_ticket_18375_kwarg_ordering_2(self): # Another similar case for F() than above. Now we have the same join # in two filter kwargs, one in the lhs lookup, one in F. Here pre # #18375 the amount of joins generated was random if dict # randomization was enabled, that is the generated query dependent # on which clause was seen first. 
qs = Employee.objects.filter( company_ceo_set__num_employees=F('pk'), pk=F('company_ceo_set__num_employees') ) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_ticket_18375_chained_filters(self): # F() expressions do not reuse joins from previous filter. qs = Employee.objects.filter( company_ceo_set__num_employees=F('pk') ).filter( company_ceo_set__num_employees=F('company_ceo_set__num_employees') ) self.assertEqual(str(qs.query).count('JOIN'), 2) def test_order_by_exists(self): mary = Employee.objects.create(firstname='Mary', lastname='Mustermann', salary=20) mustermanns_by_seniority = Employee.objects.filter(lastname='Mustermann').order_by( # Order by whether the employee is the CEO of a company Exists(Company.objects.filter(ceo=OuterRef('pk'))).desc() ) self.assertSequenceEqual(mustermanns_by_seniority, [self.max, mary]) def test_order_by_multiline_sql(self): raw_order_by = ( RawSQL(''' CASE WHEN num_employees > 1000 THEN num_chairs ELSE 0 END ''', []).desc(), RawSQL(''' CASE WHEN num_chairs > 1 THEN 1 ELSE 0 END ''', []).asc() ) for qs in ( Company.objects.all(), Company.objects.distinct(), ): with self.subTest(qs=qs): self.assertSequenceEqual( qs.order_by(*raw_order_by), [self.example_inc, self.gmbh, self.foobar_ltd], ) def test_outerref(self): inner = Company.objects.filter(point_of_contact=OuterRef('pk')) msg = ( 'This queryset contains a reference to an outer query and may only ' 'be used in a subquery.' ) with self.assertRaisesMessage(ValueError, msg): inner.exists() outer = Employee.objects.annotate(is_point_of_contact=Exists(inner)) self.assertIs(outer.exists(), True) def test_exist_single_field_output_field(self): queryset = Company.objects.values('pk') self.assertIsInstance(Exists(queryset).output_field, BooleanField) def test_subquery(self): Company.objects.filter(name='Example Inc.').update( point_of_contact=Employee.objects.get(firstname='Joe', lastname='Smith'), ceo=self.max, ) Employee.objects.create(firstname='Bob', lastname='Brown', salary=40) qs = Employee.objects.annotate( is_point_of_contact=Exists(Company.objects.filter(point_of_contact=OuterRef('pk'))), is_not_point_of_contact=~Exists(Company.objects.filter(point_of_contact=OuterRef('pk'))), is_ceo_of_small_company=Exists(Company.objects.filter(num_employees__lt=200, ceo=OuterRef('pk'))), is_ceo_small_2=~~Exists(Company.objects.filter(num_employees__lt=200, ceo=OuterRef('pk'))), largest_company=Subquery(Company.objects.order_by('-num_employees').filter( Q(ceo=OuterRef('pk')) | Q(point_of_contact=OuterRef('pk')) ).values('name')[:1], output_field=CharField()) ).values( 'firstname', 'is_point_of_contact', 'is_not_point_of_contact', 'is_ceo_of_small_company', 'is_ceo_small_2', 'largest_company', ).order_by('firstname') results = list(qs) # Could use Coalesce(subq, Value('')) instead except for the bug in # cx_Oracle mentioned in #23843. 
bob = results[0] if bob['largest_company'] == '' and connection.features.interprets_empty_strings_as_nulls: bob['largest_company'] = None self.assertEqual(results, [ { 'firstname': 'Bob', 'is_point_of_contact': False, 'is_not_point_of_contact': True, 'is_ceo_of_small_company': False, 'is_ceo_small_2': False, 'largest_company': None, }, { 'firstname': 'Frank', 'is_point_of_contact': False, 'is_not_point_of_contact': True, 'is_ceo_of_small_company': True, 'is_ceo_small_2': True, 'largest_company': 'Foobar Ltd.', }, { 'firstname': 'Joe', 'is_point_of_contact': True, 'is_not_point_of_contact': False, 'is_ceo_of_small_company': False, 'is_ceo_small_2': False, 'largest_company': 'Example Inc.', }, { 'firstname': 'Max', 'is_point_of_contact': False, 'is_not_point_of_contact': True, 'is_ceo_of_small_company': True, 'is_ceo_small_2': True, 'largest_company': 'Example Inc.' } ]) # A less elegant way to write the same query: this uses a LEFT OUTER # JOIN and an IS NULL, inside a WHERE NOT IN which is probably less # efficient than EXISTS. self.assertCountEqual( qs.filter(is_point_of_contact=True).values('pk'), Employee.objects.exclude(company_point_of_contact_set=None).values('pk') ) def test_subquery_eq(self): qs = Employee.objects.annotate( is_ceo=Exists(Company.objects.filter(ceo=OuterRef('pk'))), is_point_of_contact=Exists( Company.objects.filter(point_of_contact=OuterRef('pk')), ), small_company=Exists( queryset=Company.objects.filter(num_employees__lt=200), ), ).filter(is_ceo=True, is_point_of_contact=False, small_company=True) self.assertNotEqual( qs.query.annotations['is_ceo'], qs.query.annotations['is_point_of_contact'], ) self.assertNotEqual( qs.query.annotations['is_ceo'], qs.query.annotations['small_company'], ) def test_in_subquery(self): # This is a contrived test (and you really wouldn't write this query), # but it is a succinct way to test the __in=Subquery() construct. 
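        # The pk__in filter below compiles to roughly:
        #   SELECT ... FROM expressions_company
        #   WHERE id IN (SELECT id FROM expressions_company
        #                WHERE num_employees < 200)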
small_companies = Company.objects.filter(num_employees__lt=200).values('pk') subquery_test = Company.objects.filter(pk__in=Subquery(small_companies)) self.assertCountEqual(subquery_test, [self.foobar_ltd, self.gmbh]) subquery_test2 = Company.objects.filter(pk=Subquery(small_companies.filter(num_employees=3))) self.assertCountEqual(subquery_test2, [self.foobar_ltd]) def test_uuid_pk_subquery(self): u = UUIDPK.objects.create() UUID.objects.create(uuid_fk=u) qs = UUIDPK.objects.filter(id__in=Subquery(UUID.objects.values('uuid_fk__id'))) self.assertCountEqual(qs, [u]) def test_nested_subquery(self): inner = Company.objects.filter(point_of_contact=OuterRef('pk')) outer = Employee.objects.annotate(is_point_of_contact=Exists(inner)) contrived = Employee.objects.annotate( is_point_of_contact=Subquery( outer.filter(pk=OuterRef('pk')).values('is_point_of_contact'), output_field=BooleanField(), ), ) self.assertCountEqual(contrived.values_list(), outer.values_list()) def test_nested_subquery_join_outer_ref(self): inner = Employee.objects.filter(pk=OuterRef('ceo__pk')).values('pk') qs = Employee.objects.annotate( ceo_company=Subquery( Company.objects.filter( ceo__in=inner, ceo__pk=OuterRef('pk'), ).values('pk'), ), ) self.assertSequenceEqual( qs.values_list('ceo_company', flat=True), [self.example_inc.pk, self.foobar_ltd.pk, self.gmbh.pk], ) def test_nested_subquery_outer_ref_2(self): first = Time.objects.create(time='09:00') second = Time.objects.create(time='17:00') third = Time.objects.create(time='21:00') SimulationRun.objects.bulk_create([ SimulationRun(start=first, end=second, midpoint='12:00'), SimulationRun(start=first, end=third, midpoint='15:00'), SimulationRun(start=second, end=first, midpoint='00:00'), ]) inner = Time.objects.filter(time=OuterRef(OuterRef('time')), pk=OuterRef('start')).values('time') middle = SimulationRun.objects.annotate(other=Subquery(inner)).values('other')[:1] outer = Time.objects.annotate(other=Subquery(middle, output_field=TimeField())) # This is a contrived example. It exercises the double OuterRef form. self.assertCountEqual(outer, [first, second, third]) def test_nested_subquery_outer_ref_with_autofield(self): first = Time.objects.create(time='09:00') second = Time.objects.create(time='17:00') SimulationRun.objects.create(start=first, end=second, midpoint='12:00') inner = SimulationRun.objects.filter(start=OuterRef(OuterRef('pk'))).values('start') middle = Time.objects.annotate(other=Subquery(inner)).values('other')[:1] outer = Time.objects.annotate(other=Subquery(middle, output_field=IntegerField())) # This exercises the double OuterRef form with AutoField as pk. 
self.assertCountEqual(outer, [first, second]) def test_annotations_within_subquery(self): Company.objects.filter(num_employees__lt=50).update(ceo=Employee.objects.get(firstname='Frank')) inner = Company.objects.filter( ceo=OuterRef('pk') ).values('ceo').annotate(total_employees=Sum('num_employees')).values('total_employees') outer = Employee.objects.annotate(total_employees=Subquery(inner)).filter(salary__lte=Subquery(inner)) self.assertSequenceEqual( outer.order_by('-total_employees').values('salary', 'total_employees'), [{'salary': 10, 'total_employees': 2300}, {'salary': 20, 'total_employees': 35}], ) def test_subquery_references_joined_table_twice(self): inner = Company.objects.filter( num_chairs__gte=OuterRef('ceo__salary'), num_employees__gte=OuterRef('point_of_contact__salary'), ) # Another contrived example (there is no need to have a subquery here) outer = Company.objects.filter(pk__in=Subquery(inner.values('pk'))) self.assertFalse(outer.exists()) def test_subquery_filter_by_aggregate(self): Number.objects.create(integer=1000, float=1.2) Employee.objects.create(salary=1000) qs = Number.objects.annotate( min_valuable_count=Subquery( Employee.objects.filter( salary=OuterRef('integer'), ).annotate(cnt=Count('salary')).filter(cnt__gt=0).values('cnt')[:1] ), ) self.assertEqual(qs.get().float, 1.2) def test_subquery_filter_by_lazy(self): self.max.manager = Manager.objects.create(name='Manager') self.max.save() max_manager = SimpleLazyObject( lambda: Manager.objects.get(pk=self.max.manager.pk) ) qs = Company.objects.annotate( ceo_manager=Subquery( Employee.objects.filter( lastname=OuterRef('ceo__lastname'), ).values('manager'), ), ).filter(ceo_manager=max_manager) self.assertEqual(qs.get(), self.gmbh) def test_aggregate_subquery_annotation(self): with self.assertNumQueries(1) as ctx: aggregate = Company.objects.annotate( ceo_salary=Subquery( Employee.objects.filter( id=OuterRef('ceo_id'), ).values('salary') ), ).aggregate( ceo_salary_gt_20=Count('pk', filter=Q(ceo_salary__gt=20)), ) self.assertEqual(aggregate, {'ceo_salary_gt_20': 1}) # Aggregation over a subquery annotation doesn't annotate the subquery # twice in the inner query. sql = ctx.captured_queries[0]['sql'] self.assertLessEqual(sql.count('SELECT'), 3) # GROUP BY isn't required to aggregate over a query that doesn't # contain nested aggregates. 
self.assertNotIn('GROUP BY', sql) def test_explicit_output_field(self): class FuncA(Func): output_field = CharField() class FuncB(Func): pass expr = FuncB(FuncA()) self.assertEqual(expr.output_field, FuncA.output_field) def test_outerref_mixed_case_table_name(self): inner = Result.objects.filter(result_time__gte=OuterRef('experiment__assigned')) outer = Result.objects.filter(pk__in=Subquery(inner.values('pk'))) self.assertFalse(outer.exists()) def test_outerref_with_operator(self): inner = Company.objects.filter(num_employees=OuterRef('ceo__salary') + 2) outer = Company.objects.filter(pk__in=Subquery(inner.values('pk'))) self.assertEqual(outer.get().name, 'Test GmbH') def test_nested_outerref_with_function(self): self.gmbh.point_of_contact = Employee.objects.get(lastname='Meyer') self.gmbh.save() inner = Employee.objects.filter( lastname__startswith=Left(OuterRef(OuterRef('lastname')), 1), ) qs = Employee.objects.annotate( ceo_company=Subquery( Company.objects.filter( point_of_contact__in=inner, ceo__pk=OuterRef('pk'), ).values('name'), ), ).filter(ceo_company__isnull=False) self.assertEqual(qs.get().ceo_company, 'Test GmbH') def test_annotation_with_outerref(self): gmbh_salary = Company.objects.annotate( max_ceo_salary_raise=Subquery( Company.objects.annotate( salary_raise=OuterRef('num_employees') + F('num_employees'), ).order_by('-salary_raise').values('salary_raise')[:1], output_field=IntegerField(), ), ).get(pk=self.gmbh.pk) self.assertEqual(gmbh_salary.max_ceo_salary_raise, 2332) def test_pickle_expression(self): expr = Value(1, output_field=IntegerField()) expr.convert_value # populate cached property self.assertEqual(pickle.loads(pickle.dumps(expr)), expr) def test_incorrect_field_in_F_expression(self): with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."): list(Employee.objects.filter(firstname=F('nope'))) def test_incorrect_joined_field_in_F_expression(self): with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."): list(Company.objects.filter(ceo__pk=F('point_of_contact__nope'))) def test_exists_in_filter(self): inner = Company.objects.filter(ceo=OuterRef('pk')).values('pk') qs1 = Employee.objects.filter(Exists(inner)) qs2 = Employee.objects.annotate(found=Exists(inner)).filter(found=True) self.assertCountEqual(qs1, qs2) self.assertFalse(Employee.objects.exclude(Exists(inner)).exists()) self.assertCountEqual(qs2, Employee.objects.exclude(~Exists(inner))) def test_subquery_in_filter(self): inner = Company.objects.filter(ceo=OuterRef('pk')).values('based_in_eu') self.assertSequenceEqual( Employee.objects.filter(Subquery(inner)), [self.foobar_ltd.ceo], ) def test_subquery_group_by_outerref_in_filter(self): inner = Company.objects.annotate( employee=OuterRef('pk'), ).values('employee').annotate( min_num_chairs=Min('num_chairs'), ).values('ceo') self.assertIs(Employee.objects.filter(pk__in=Subquery(inner)).exists(), True) def test_case_in_filter_if_boolean_output_field(self): is_ceo = Company.objects.filter(ceo=OuterRef('pk')) is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) qs = Employee.objects.filter( Case( When(Exists(is_ceo), then=True), When(Exists(is_poc), then=True), default=False, output_field=BooleanField(), ), ) self.assertSequenceEqual(qs, [self.example_inc.ceo, self.foobar_ltd.ceo, self.max]) def test_boolean_expression_combined(self): is_ceo = Company.objects.filter(ceo=OuterRef('pk')) is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) self.gmbh.point_of_contact = self.max 
self.gmbh.save() self.assertSequenceEqual( Employee.objects.filter(Exists(is_ceo) | Exists(is_poc)), [self.example_inc.ceo, self.foobar_ltd.ceo, self.max], ) self.assertSequenceEqual( Employee.objects.filter(Exists(is_ceo) & Exists(is_poc)), [self.max], ) self.assertSequenceEqual( Employee.objects.filter(Exists(is_ceo) & Q(salary__gte=30)), [self.max], ) self.assertSequenceEqual( Employee.objects.filter(Exists(is_poc) | Q(salary__lt=15)), [self.example_inc.ceo, self.max], ) class IterableLookupInnerExpressionsTests(TestCase): @classmethod def setUpTestData(cls): ceo = Employee.objects.create(firstname='Just', lastname='Doit', salary=30) # MySQL requires that the values calculated for expressions don't pass # outside of the field's range, so it's inconvenient to use the values # in the more general tests. Company.objects.create(name='5020 Ltd', num_employees=50, num_chairs=20, ceo=ceo) Company.objects.create(name='5040 Ltd', num_employees=50, num_chairs=40, ceo=ceo) Company.objects.create(name='5050 Ltd', num_employees=50, num_chairs=50, ceo=ceo) Company.objects.create(name='5060 Ltd', num_employees=50, num_chairs=60, ceo=ceo) Company.objects.create(name='99300 Ltd', num_employees=99, num_chairs=300, ceo=ceo) def test_in_lookup_allows_F_expressions_and_expressions_for_integers(self): # __in lookups can use F() expressions for integers. queryset = Company.objects.filter(num_employees__in=([F('num_chairs') - 10])) self.assertQuerysetEqual(queryset, ['<Company: 5060 Ltd>'], ordered=False) self.assertQuerysetEqual( Company.objects.filter(num_employees__in=([F('num_chairs') - 10, F('num_chairs') + 10])), ['<Company: 5040 Ltd>', '<Company: 5060 Ltd>'], ordered=False ) self.assertQuerysetEqual( Company.objects.filter( num_employees__in=([F('num_chairs') - 10, F('num_chairs'), F('num_chairs') + 10]) ), ['<Company: 5040 Ltd>', '<Company: 5050 Ltd>', '<Company: 5060 Ltd>'], ordered=False ) def test_expressions_in_lookups_join_choice(self): midpoint = datetime.time(13, 0) t1 = Time.objects.create(time=datetime.time(12, 0)) t2 = Time.objects.create(time=datetime.time(14, 0)) SimulationRun.objects.create(start=t1, end=t2, midpoint=midpoint) SimulationRun.objects.create(start=t1, end=None, midpoint=midpoint) SimulationRun.objects.create(start=None, end=t2, midpoint=midpoint) SimulationRun.objects.create(start=None, end=None, midpoint=midpoint) queryset = SimulationRun.objects.filter(midpoint__range=[F('start__time'), F('end__time')]) self.assertQuerysetEqual( queryset, ['<SimulationRun: 13:00:00 (12:00:00 to 14:00:00)>'], ordered=False ) for alias in queryset.query.alias_map.values(): if isinstance(alias, Join): self.assertEqual(alias.join_type, constants.INNER) queryset = SimulationRun.objects.exclude(midpoint__range=[F('start__time'), F('end__time')]) self.assertQuerysetEqual(queryset, [], ordered=False) for alias in queryset.query.alias_map.values(): if isinstance(alias, Join): self.assertEqual(alias.join_type, constants.LOUTER) def test_range_lookup_allows_F_expressions_and_expressions_for_integers(self): # Range lookups can use F() expressions for integers. 
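        # The exact lookup below only verifies that an F() expression is
        # accepted when the queryset is constructed; the lazy queryset is
        # never evaluated.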
        Company.objects.filter(num_employees__exact=F("num_chairs"))
        self.assertQuerysetEqual(
            Company.objects.filter(num_employees__range=(F('num_chairs'), 100)),
            ['<Company: 5020 Ltd>', '<Company: 5040 Ltd>', '<Company: 5050 Ltd>'],
            ordered=False
        )
        self.assertQuerysetEqual(
            Company.objects.filter(num_employees__range=(F('num_chairs') - 10, F('num_chairs') + 10)),
            ['<Company: 5040 Ltd>', '<Company: 5050 Ltd>', '<Company: 5060 Ltd>'],
            ordered=False
        )
        self.assertQuerysetEqual(
            Company.objects.filter(num_employees__range=(F('num_chairs') - 10, 100)),
            ['<Company: 5020 Ltd>', '<Company: 5040 Ltd>', '<Company: 5050 Ltd>', '<Company: 5060 Ltd>'],
            ordered=False
        )
        self.assertQuerysetEqual(
            Company.objects.filter(num_employees__range=(1, 100)),
            [
                '<Company: 5020 Ltd>', '<Company: 5040 Ltd>', '<Company: 5050 Ltd>',
                '<Company: 5060 Ltd>', '<Company: 99300 Ltd>',
            ],
            ordered=False
        )

    @unittest.skipUnless(connection.vendor == 'sqlite',
                         "This defensive test only works on databases that don't validate parameter types")
    def test_complex_expressions_do_not_introduce_sql_injection_via_untrusted_string_inclusion(self):
        """
        This tests that SQL injection isn't possible using compilation of
        expressions in iterable filters, as their compilation happens before
        the main query compilation. It's limited to SQLite, as PostgreSQL,
        Oracle and other vendors have defense in depth against this by
        type checking. Testing against SQLite (the most permissive of the
        built-in databases) demonstrates that the problem doesn't exist while
        keeping the test simple.
        """
        queryset = Company.objects.filter(name__in=[F('num_chairs') + '1)) OR ((1==1'])
        self.assertQuerysetEqual(queryset, [], ordered=False)

    def test_in_lookup_allows_F_expressions_and_expressions_for_datetimes(self):
        start = datetime.datetime(2016, 2, 3, 15, 0, 0)
        end = datetime.datetime(2016, 2, 5, 15, 0, 0)
        experiment_1 = Experiment.objects.create(
            name='Integrity testing',
            assigned=start.date(),
            start=start,
            end=end,
            completed=end.date(),
            estimated_time=end - start,
        )
        experiment_2 = Experiment.objects.create(
            name='Taste testing',
            assigned=start.date(),
            start=start,
            end=end,
            completed=end.date(),
            estimated_time=end - start,
        )
        Result.objects.create(
            experiment=experiment_1,
            result_time=datetime.datetime(2016, 2, 4, 15, 0, 0),
        )
        Result.objects.create(
            experiment=experiment_1,
            result_time=datetime.datetime(2016, 3, 10, 2, 0, 0),
        )
        Result.objects.create(
            experiment=experiment_2,
            result_time=datetime.datetime(2016, 1, 8, 5, 0, 0),
        )
        within_experiment_time = [F('experiment__start'), F('experiment__end')]
        queryset = Result.objects.filter(result_time__range=within_experiment_time)
        self.assertQuerysetEqual(queryset, ["<Result: Result at 2016-02-04 15:00:00>"])


class FTests(SimpleTestCase):

    def test_deepcopy(self):
        f = F("foo")
        g = deepcopy(f)
        self.assertEqual(f.name, g.name)

    def test_deconstruct(self):
        f = F('name')
        path, args, kwargs = f.deconstruct()
        self.assertEqual(path, 'django.db.models.expressions.F')
        self.assertEqual(args, (f.name,))
        self.assertEqual(kwargs, {})

    def test_equal(self):
        f = F('name')
        same_f = F('name')
        other_f = F('username')
        self.assertEqual(f, same_f)
        self.assertNotEqual(f, other_f)

    def test_hash(self):
        d = {F('name'): 'Bob'}
        self.assertIn(F('name'), d)
        self.assertEqual(d[F('name')], 'Bob')

    def test_not_equal_Value(self):
        f = F('name')
        value = Value('name')
        self.assertNotEqual(f, value)
        self.assertNotEqual(value, f)


class ExpressionsTests(TestCase):

    def test_F_reuse(self):
        f = F('id')
        n = Number.objects.create(integer=-1)
        c = Company.objects.create(
            name="Example Inc.", num_employees=2300, num_chairs=5,
            ceo=Employee.objects.create(firstname="Joe", lastname="Smith")
        )
        c_qs = Company.objects.filter(id=f)
        self.assertEqual(c_qs.get(), c)
        # Reuse the same F-object for another queryset
        n_qs = Number.objects.filter(id=f)
        self.assertEqual(n_qs.get(), n)
        # The original query still works correctly
        self.assertEqual(c_qs.get(), c)

    def test_patterns_escape(self):
        r"""
        Special characters (e.g. %, _ and \) stored in the database are
        properly escaped when using a pattern lookup with an expression --
        refs #16731.
        """
        Employee.objects.bulk_create([
            Employee(firstname="%Joh\\nny", lastname="%Joh\\n"),
            Employee(firstname="Johnny", lastname="%John"),
            Employee(firstname="Jean-Claude", lastname="Claud_"),
            Employee(firstname="Jean-Claude", lastname="Claude"),
            Employee(firstname="Jean-Claude", lastname="Claude%"),
            Employee(firstname="Johnny", lastname="Joh\\n"),
            Employee(firstname="Johnny", lastname="John"),
            Employee(firstname="Johnny", lastname="_ohn"),
        ])
        self.assertQuerysetEqual(
            Employee.objects.filter(firstname__contains=F('lastname')),
            ["<Employee: %Joh\\nny %Joh\\n>", "<Employee: Jean-Claude Claude>", "<Employee: Johnny John>"],
            ordered=False,
        )
        self.assertQuerysetEqual(
            Employee.objects.filter(firstname__startswith=F('lastname')),
            ["<Employee: %Joh\\nny %Joh\\n>", "<Employee: Johnny John>"],
            ordered=False,
        )
        self.assertQuerysetEqual(
            Employee.objects.filter(firstname__endswith=F('lastname')),
            ["<Employee: Jean-Claude Claude>"],
            ordered=False,
        )

    def test_insensitive_patterns_escape(self):
        r"""
        Special characters (e.g. %, _ and \) stored in the database are
        properly escaped when using a case-insensitive pattern lookup with an
        expression -- refs #16731.
        """
        Employee.objects.bulk_create([
            Employee(firstname="%Joh\\nny", lastname="%joh\\n"),
            Employee(firstname="Johnny", lastname="%john"),
            Employee(firstname="Jean-Claude", lastname="claud_"),
            Employee(firstname="Jean-Claude", lastname="claude"),
            Employee(firstname="Jean-Claude", lastname="claude%"),
            Employee(firstname="Johnny", lastname="joh\\n"),
            Employee(firstname="Johnny", lastname="john"),
            Employee(firstname="Johnny", lastname="_ohn"),
        ])
        self.assertQuerysetEqual(
            Employee.objects.filter(firstname__icontains=F('lastname')),
            ["<Employee: %Joh\\nny %joh\\n>", "<Employee: Jean-Claude claude>", "<Employee: Johnny john>"],
            ordered=False,
        )
        self.assertQuerysetEqual(
            Employee.objects.filter(firstname__istartswith=F('lastname')),
            ["<Employee: %Joh\\nny %joh\\n>", "<Employee: Johnny john>"],
            ordered=False,
        )
        self.assertQuerysetEqual(
            Employee.objects.filter(firstname__iendswith=F('lastname')),
            ["<Employee: Jean-Claude claude>"],
            ordered=False,
        )


@isolate_apps('expressions')
class SimpleExpressionTests(SimpleTestCase):

    def test_equal(self):
        self.assertEqual(Expression(), Expression())
        self.assertEqual(
            Expression(IntegerField()),
            Expression(output_field=IntegerField())
        )
        self.assertEqual(Expression(IntegerField()), mock.ANY)
        self.assertNotEqual(
            Expression(IntegerField()),
            Expression(CharField())
        )

        class TestModel(Model):
            field = IntegerField()
            other_field = IntegerField()

        self.assertNotEqual(
            Expression(TestModel._meta.get_field('field')),
            Expression(TestModel._meta.get_field('other_field')),
        )

    def test_hash(self):
        self.assertEqual(hash(Expression()), hash(Expression()))
        self.assertEqual(
            hash(Expression(IntegerField())),
            hash(Expression(output_field=IntegerField()))
        )
        self.assertNotEqual(
            hash(Expression(IntegerField())),
            hash(Expression(CharField())),
        )

        class TestModel(Model):
            field = IntegerField()
            other_field = IntegerField()

        self.assertNotEqual(
            hash(Expression(TestModel._meta.get_field('field'))),
            hash(Expression(TestModel._meta.get_field('other_field'))),
        )


class ExpressionsNumericTests(TestCase):

    @classmethod
    def setUpTestData(cls):
        Number(integer=-1).save()
        Number(integer=42).save()
        Number(integer=1337).save()
        Number.objects.update(float=F('integer'))

    def test_fill_with_value_from_same_object(self):
        """
        We can populate a field on all objects with the value of another
        field of the same object.
        """
        self.assertQuerysetEqual(
            Number.objects.all(),
            ['<Number: -1, -1.000>', '<Number: 42, 42.000>', '<Number: 1337, 1337.000>'],
            ordered=False
        )

    def test_increment_value(self):
        """
        We can increment a field of all objects in a queryset.
        """
        self.assertEqual(Number.objects.filter(integer__gt=0).update(integer=F('integer') + 1), 2)
        self.assertQuerysetEqual(
            Number.objects.all(),
            ['<Number: -1, -1.000>', '<Number: 43, 42.000>', '<Number: 1338, 1337.000>'],
            ordered=False
        )

    def test_filter_not_equals_other_field(self):
        """
        We can filter for objects where one field does not equal the value
        of another field.
        """
        self.assertEqual(Number.objects.filter(integer__gt=0).update(integer=F('integer') + 1), 2)
        self.assertQuerysetEqual(
            Number.objects.exclude(float=F('integer')),
            ['<Number: 43, 42.000>', '<Number: 1338, 1337.000>'],
            ordered=False
        )

    def test_complex_expressions(self):
        """
        Complex expressions of different connection types are possible.
""" n = Number.objects.create(integer=10, float=123.45) self.assertEqual(Number.objects.filter(pk=n.pk).update( float=F('integer') + F('float') * 2), 1) self.assertEqual(Number.objects.get(pk=n.pk).integer, 10) self.assertEqual(Number.objects.get(pk=n.pk).float, Approximate(256.900, places=3)) class ExpressionOperatorTests(TestCase): @classmethod def setUpTestData(cls): cls.n = Number.objects.create(integer=42, float=15.5) cls.n1 = Number.objects.create(integer=-42, float=-15.5) def test_lefthand_addition(self): # LH Addition of floats and integers Number.objects.filter(pk=self.n.pk).update( integer=F('integer') + 15, float=F('float') + 42.7 ) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3)) def test_lefthand_subtraction(self): # LH Subtraction of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') - 15, float=F('float') - 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(-27.200, places=3)) def test_lefthand_multiplication(self): # Multiplication of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') * 15, float=F('float') * 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3)) def test_lefthand_division(self): # LH Division of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') / 2, float=F('float') / 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 21) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(0.363, places=3)) def test_lefthand_modulo(self): # LH Modulo arithmetic on integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') % 20) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 2) def test_lefthand_bitwise_and(self): # LH Bitwise ands on integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer').bitand(56)) Number.objects.filter(pk=self.n1.pk).update(integer=F('integer').bitand(-56)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 40) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -64) def test_lefthand_bitwise_left_shift_operator(self): Number.objects.update(integer=F('integer').bitleftshift(2)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 168) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -168) def test_lefthand_bitwise_right_shift_operator(self): Number.objects.update(integer=F('integer').bitrightshift(2)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 10) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -11) def test_lefthand_bitwise_or(self): # LH Bitwise or on integers Number.objects.update(integer=F('integer').bitor(48)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 58) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -10) def test_lefthand_power(self): # LH Power arithmetic operation on floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') ** 2, float=F('float') ** 1.5) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 1764) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(61.02, places=2)) @unittest.skipIf(connection.vendor == 'oracle', "Oracle doesn't support bitwise XOR.") def test_lefthand_bitwise_xor(self): 
Number.objects.update(integer=F('integer').bitxor(48)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 26) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -26) @unittest.skipIf(connection.vendor == 'oracle', "Oracle doesn't support bitwise XOR.") def test_lefthand_bitwise_xor_null(self): employee = Employee.objects.create(firstname='John', lastname='Doe') Employee.objects.update(salary=F('salary').bitxor(48)) employee.refresh_from_db() self.assertIsNone(employee.salary) @unittest.skipUnless(connection.vendor == 'oracle', "Oracle doesn't support bitwise XOR.") def test_lefthand_bitwise_xor_not_supported(self): msg = 'Bitwise XOR is not supported in Oracle.' with self.assertRaisesMessage(NotSupportedError, msg): Number.objects.update(integer=F('integer').bitxor(48)) def test_right_hand_addition(self): # Right hand operators Number.objects.filter(pk=self.n.pk).update(integer=15 + F('integer'), float=42.7 + F('float')) # RH Addition of floats and integers self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3)) def test_right_hand_subtraction(self): Number.objects.filter(pk=self.n.pk).update(integer=15 - F('integer'), float=42.7 - F('float')) # RH Subtraction of floats and integers self.assertEqual(Number.objects.get(pk=self.n.pk).integer, -27) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(27.200, places=3)) def test_right_hand_multiplication(self): # RH Multiplication of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=15 * F('integer'), float=42.7 * F('float')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3)) def test_right_hand_division(self): # RH Division of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=640 / F('integer'), float=42.7 / F('float')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 15) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(2.755, places=3)) def test_right_hand_modulo(self): # RH Modulo arithmetic on integers Number.objects.filter(pk=self.n.pk).update(integer=69 % F('integer')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27) def test_righthand_power(self): # RH Power arithmetic operation on floats and integers Number.objects.filter(pk=self.n.pk).update(integer=2 ** F('integer'), float=1.5 ** F('float')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 4398046511104) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(536.308, places=3)) class FTimeDeltaTests(TestCase): @classmethod def setUpTestData(cls): cls.sday = sday = datetime.date(2010, 6, 25) cls.stime = stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000) midnight = datetime.time(0) delta0 = datetime.timedelta(0) delta1 = datetime.timedelta(microseconds=253000) delta2 = datetime.timedelta(seconds=44) delta3 = datetime.timedelta(hours=21, minutes=8) delta4 = datetime.timedelta(days=10) delta5 = datetime.timedelta(days=90) # Test data is set so that deltas and delays will be # strictly increasing. 
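        # For each experiment: deltas holds end - start, delays holds
        # start minus midnight of the assigned day, and days_long holds
        # completed - assigned; the filtering tests below iterate these
        # lists in increasing order.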
cls.deltas = [] cls.delays = [] cls.days_long = [] # e0: started same day as assigned, zero duration end = stime + delta0 cls.e0 = Experiment.objects.create( name='e0', assigned=sday, start=stime, end=end, completed=end.date(), estimated_time=delta0, ) cls.deltas.append(delta0) cls.delays.append(cls.e0.start - datetime.datetime.combine(cls.e0.assigned, midnight)) cls.days_long.append(cls.e0.completed - cls.e0.assigned) # e1: started one day after assigned, tiny duration, data # set so that end time has no fractional seconds, which # tests an edge case on sqlite. delay = datetime.timedelta(1) end = stime + delay + delta1 e1 = Experiment.objects.create( name='e1', assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta1, ) cls.deltas.append(delta1) cls.delays.append(e1.start - datetime.datetime.combine(e1.assigned, midnight)) cls.days_long.append(e1.completed - e1.assigned) # e2: started three days after assigned, small duration end = stime + delta2 e2 = Experiment.objects.create( name='e2', assigned=sday - datetime.timedelta(3), start=stime, end=end, completed=end.date(), estimated_time=datetime.timedelta(hours=1), ) cls.deltas.append(delta2) cls.delays.append(e2.start - datetime.datetime.combine(e2.assigned, midnight)) cls.days_long.append(e2.completed - e2.assigned) # e3: started four days after assigned, medium duration delay = datetime.timedelta(4) end = stime + delay + delta3 e3 = Experiment.objects.create( name='e3', assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta3, ) cls.deltas.append(delta3) cls.delays.append(e3.start - datetime.datetime.combine(e3.assigned, midnight)) cls.days_long.append(e3.completed - e3.assigned) # e4: started 10 days after assignment, long duration end = stime + delta4 e4 = Experiment.objects.create( name='e4', assigned=sday - datetime.timedelta(10), start=stime, end=end, completed=end.date(), estimated_time=delta4 - datetime.timedelta(1), ) cls.deltas.append(delta4) cls.delays.append(e4.start - datetime.datetime.combine(e4.assigned, midnight)) cls.days_long.append(e4.completed - e4.assigned) # e5: started a month after assignment, very long duration delay = datetime.timedelta(30) end = stime + delay + delta5 e5 = Experiment.objects.create( name='e5', assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta5, ) cls.deltas.append(delta5) cls.delays.append(e5.start - datetime.datetime.combine(e5.assigned, midnight)) cls.days_long.append(e5.completed - e5.assigned) cls.expnames = [e.name for e in Experiment.objects.all()] def test_multiple_query_compilation(self): # Ticket #21643 queryset = Experiment.objects.filter(end__lt=F('start') + datetime.timedelta(hours=1)) q1 = str(queryset.query) q2 = str(queryset.query) self.assertEqual(q1, q2) def test_query_clone(self): # Ticket #21643 - Crash when compiling query more than once qs = Experiment.objects.filter(end__lt=F('start') + datetime.timedelta(hours=1)) qs2 = qs.all() list(qs) list(qs2) # Intentionally no assert def test_delta_add(self): for i, delta in enumerate(self.deltas): test_set = [e.name for e in Experiment.objects.filter(end__lt=F('start') + delta)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(end__lt=delta + F('start'))] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(end__lte=F('start') + delta)] self.assertEqual(test_set, self.expnames[:i + 1]) def test_delta_subtract(self): for i, delta in 
enumerate(self.deltas): test_set = [e.name for e in Experiment.objects.filter(start__gt=F('end') - delta)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(start__gte=F('end') - delta)] self.assertEqual(test_set, self.expnames[:i + 1]) def test_exclude(self): for i, delta in enumerate(self.deltas): test_set = [e.name for e in Experiment.objects.exclude(end__lt=F('start') + delta)] self.assertEqual(test_set, self.expnames[i:]) test_set = [e.name for e in Experiment.objects.exclude(end__lte=F('start') + delta)] self.assertEqual(test_set, self.expnames[i + 1:]) def test_date_comparison(self): for i, days in enumerate(self.days_long): test_set = [e.name for e in Experiment.objects.filter(completed__lt=F('assigned') + days)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(completed__lte=F('assigned') + days)] self.assertEqual(test_set, self.expnames[:i + 1]) @skipUnlessDBFeature("supports_mixed_date_datetime_comparisons") def test_mixed_comparisons1(self): for i, delay in enumerate(self.delays): test_set = [e.name for e in Experiment.objects.filter(assigned__gt=F('start') - delay)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(assigned__gte=F('start') - delay)] self.assertEqual(test_set, self.expnames[:i + 1]) def test_mixed_comparisons2(self): for i, delay in enumerate(self.delays): delay = datetime.timedelta(delay.days) test_set = [e.name for e in Experiment.objects.filter(start__lt=F('assigned') + delay)] self.assertEqual(test_set, self.expnames[:i]) test_set = [ e.name for e in Experiment.objects.filter(start__lte=F('assigned') + delay + datetime.timedelta(1)) ] self.assertEqual(test_set, self.expnames[:i + 1]) def test_delta_update(self): for delta in self.deltas: exps = Experiment.objects.all() expected_durations = [e.duration() for e in exps] expected_starts = [e.start + delta for e in exps] expected_ends = [e.end + delta for e in exps] Experiment.objects.update(start=F('start') + delta, end=F('end') + delta) exps = Experiment.objects.all() new_starts = [e.start for e in exps] new_ends = [e.end for e in exps] new_durations = [e.duration() for e in exps] self.assertEqual(expected_starts, new_starts) self.assertEqual(expected_ends, new_ends) self.assertEqual(expected_durations, new_durations) def test_invalid_operator(self): with self.assertRaises(DatabaseError): list(Experiment.objects.filter(start=F('start') * datetime.timedelta(0))) def test_durationfield_add(self): zeros = [e.name for e in Experiment.objects.filter(start=F('start') + F('estimated_time'))] self.assertEqual(zeros, ['e0']) end_less = [e.name for e in Experiment.objects.filter(end__lt=F('start') + F('estimated_time'))] self.assertEqual(end_less, ['e2']) delta_math = [ e.name for e in Experiment.objects.filter(end__gte=F('start') + F('estimated_time') + datetime.timedelta(hours=1)) ] self.assertEqual(delta_math, ['e4']) queryset = Experiment.objects.annotate(shifted=ExpressionWrapper( F('start') + Value(None, output_field=DurationField()), output_field=DateTimeField(), )) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_date_subtraction(self): queryset = Experiment.objects.annotate( completion_duration=ExpressionWrapper( F('completed') - F('assigned'), output_field=DurationField() ) ) at_least_5_days = {e.name for e in queryset.filter(completion_duration__gte=datetime.timedelta(days=5))} 
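        # From setUpTestData, completed - assigned is 0, 1, 3, 5, 20, and 120
        # days for e0 through e5, so the 5-day cutoff selects e3, e4, and e5.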
self.assertEqual(at_least_5_days, {'e3', 'e4', 'e5'}) at_least_120_days = {e.name for e in queryset.filter(completion_duration__gte=datetime.timedelta(days=120))} self.assertEqual(at_least_120_days, {'e5'}) less_than_5_days = {e.name for e in queryset.filter(completion_duration__lt=datetime.timedelta(days=5))} self.assertEqual(less_than_5_days, {'e0', 'e1', 'e2'}) queryset = Experiment.objects.annotate(difference=ExpressionWrapper( F('completed') - Value(None, output_field=DateField()), output_field=DurationField(), )) self.assertIsNone(queryset.first().difference) queryset = Experiment.objects.annotate(shifted=ExpressionWrapper( F('completed') - Value(None, output_field=DurationField()), output_field=DateField(), )) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_date_subquery_subtraction(self): subquery = Experiment.objects.filter(pk=OuterRef('pk')).values('completed') queryset = Experiment.objects.annotate( difference=ExpressionWrapper( subquery - F('completed'), output_field=DurationField(), ), ).filter(difference=datetime.timedelta()) self.assertTrue(queryset.exists()) @skipUnlessDBFeature('supports_temporal_subtraction') def test_date_case_subtraction(self): queryset = Experiment.objects.annotate( date_case=Case( When(Q(name='e0'), then=F('completed')), output_field=DateField(), ), completed_value=Value( self.e0.completed, output_field=DateField(), ), difference=ExpressionWrapper( F('date_case') - F('completed_value'), output_field=DurationField(), ), ).filter(difference=datetime.timedelta()) self.assertEqual(queryset.get(), self.e0) @skipUnlessDBFeature('supports_temporal_subtraction') def test_time_subtraction(self): Time.objects.create(time=datetime.time(12, 30, 15, 2345)) queryset = Time.objects.annotate( difference=ExpressionWrapper( F('time') - Value(datetime.time(11, 15, 0), output_field=TimeField()), output_field=DurationField(), ) ) self.assertEqual( queryset.get().difference, datetime.timedelta(hours=1, minutes=15, seconds=15, microseconds=2345) ) queryset = Time.objects.annotate(difference=ExpressionWrapper( F('time') - Value(None, output_field=TimeField()), output_field=DurationField(), )) self.assertIsNone(queryset.first().difference) queryset = Time.objects.annotate(shifted=ExpressionWrapper( F('time') - Value(None, output_field=DurationField()), output_field=TimeField(), )) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_time_subquery_subtraction(self): Time.objects.create(time=datetime.time(12, 30, 15, 2345)) subquery = Time.objects.filter(pk=OuterRef('pk')).values('time') queryset = Time.objects.annotate( difference=ExpressionWrapper( subquery - F('time'), output_field=DurationField(), ), ).filter(difference=datetime.timedelta()) self.assertTrue(queryset.exists()) @skipUnlessDBFeature('supports_temporal_subtraction') def test_datetime_subtraction(self): under_estimate = [ e.name for e in Experiment.objects.filter(estimated_time__gt=F('end') - F('start')) ] self.assertEqual(under_estimate, ['e2']) over_estimate = [ e.name for e in Experiment.objects.filter(estimated_time__lt=F('end') - F('start')) ] self.assertEqual(over_estimate, ['e4']) queryset = Experiment.objects.annotate(difference=ExpressionWrapper( F('start') - Value(None, output_field=DateTimeField()), output_field=DurationField(), )) self.assertIsNone(queryset.first().difference) queryset = Experiment.objects.annotate(shifted=ExpressionWrapper( F('start') - Value(None, 
output_field=DurationField()), output_field=DateTimeField(), )) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_datetime_subquery_subtraction(self): subquery = Experiment.objects.filter(pk=OuterRef('pk')).values('start') queryset = Experiment.objects.annotate( difference=ExpressionWrapper( subquery - F('start'), output_field=DurationField(), ), ).filter(difference=datetime.timedelta()) self.assertTrue(queryset.exists()) @skipUnlessDBFeature('supports_temporal_subtraction') def test_datetime_subtraction_microseconds(self): delta = datetime.timedelta(microseconds=8999999999999999) Experiment.objects.update(end=F('start') + delta) qs = Experiment.objects.annotate( delta=ExpressionWrapper(F('end') - F('start'), output_field=DurationField()) ) for e in qs: self.assertEqual(e.delta, delta) def test_duration_with_datetime(self): # Exclude e1 which has very high precision so we can test this on all # backends regardless of whether or not it supports # microsecond_precision. over_estimate = Experiment.objects.exclude(name='e1').filter( completed__gt=self.stime + F('estimated_time'), ).order_by('name') self.assertQuerysetEqual(over_estimate, ['e3', 'e4', 'e5'], lambda e: e.name) def test_duration_with_datetime_microseconds(self): delta = datetime.timedelta(microseconds=8999999999999999) qs = Experiment.objects.annotate(dt=ExpressionWrapper( F('start') + delta, output_field=DateTimeField(), )) for e in qs: self.assertEqual(e.dt, e.start + delta) def test_date_minus_duration(self): more_than_4_days = Experiment.objects.filter( assigned__lt=F('completed') - Value(datetime.timedelta(days=4), output_field=DurationField()) ) self.assertQuerysetEqual(more_than_4_days, ['e3', 'e4', 'e5'], lambda e: e.name) def test_negative_timedelta_update(self): # subtract 30 seconds, 30 minutes, 2 hours and 2 days experiments = Experiment.objects.filter(name='e0').annotate( start_sub_seconds=F('start') + datetime.timedelta(seconds=-30), ).annotate( start_sub_minutes=F('start_sub_seconds') + datetime.timedelta(minutes=-30), ).annotate( start_sub_hours=F('start_sub_minutes') + datetime.timedelta(hours=-2), ).annotate( new_start=F('start_sub_hours') + datetime.timedelta(days=-2), ) expected_start = datetime.datetime(2010, 6, 23, 9, 45, 0) # subtract 30 microseconds experiments = experiments.annotate(new_start=F('new_start') + datetime.timedelta(microseconds=-30)) expected_start += datetime.timedelta(microseconds=+746970) experiments.update(start=F('new_start')) e0 = Experiment.objects.get(name='e0') self.assertEqual(e0.start, expected_start) class ValueTests(TestCase): def test_update_TimeField_using_Value(self): Time.objects.create() Time.objects.update(time=Value(datetime.time(1), output_field=TimeField())) self.assertEqual(Time.objects.get().time, datetime.time(1)) def test_update_UUIDField_using_Value(self): UUID.objects.create() UUID.objects.update(uuid=Value(uuid.UUID('12345678901234567890123456789012'), output_field=UUIDField())) self.assertEqual(UUID.objects.get().uuid, uuid.UUID('12345678901234567890123456789012')) def test_deconstruct(self): value = Value('name') path, args, kwargs = value.deconstruct() self.assertEqual(path, 'django.db.models.expressions.Value') self.assertEqual(args, (value.value,)) self.assertEqual(kwargs, {}) def test_deconstruct_output_field(self): value = Value('name', output_field=CharField()) path, args, kwargs = value.deconstruct() self.assertEqual(path, 'django.db.models.expressions.Value') self.assertEqual(args, (value.value,)) 
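        # Field instances don't compare equal by value, so the output_field
        # kwarg is verified through its own deconstruct() output instead.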
self.assertEqual(len(kwargs), 1) self.assertEqual(kwargs['output_field'].deconstruct(), CharField().deconstruct()) def test_equal(self): value = Value('name') self.assertEqual(value, Value('name')) self.assertNotEqual(value, Value('username')) def test_hash(self): d = {Value('name'): 'Bob'} self.assertIn(Value('name'), d) self.assertEqual(d[Value('name')], 'Bob') def test_equal_output_field(self): value = Value('name', output_field=CharField()) same_value = Value('name', output_field=CharField()) other_value = Value('name', output_field=TimeField()) no_output_field = Value('name') self.assertEqual(value, same_value) self.assertNotEqual(value, other_value) self.assertNotEqual(value, no_output_field) def test_raise_empty_expressionlist(self): msg = 'ExpressionList requires at least one expression' with self.assertRaisesMessage(ValueError, msg): ExpressionList() class FieldTransformTests(TestCase): @classmethod def setUpTestData(cls): cls.sday = sday = datetime.date(2010, 6, 25) cls.stime = stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000) cls.ex1 = Experiment.objects.create( name='Experiment 1', assigned=sday, completed=sday + datetime.timedelta(2), estimated_time=datetime.timedelta(2), start=stime, end=stime + datetime.timedelta(2), ) def test_month_aggregation(self): self.assertEqual( Experiment.objects.aggregate(month_count=Count('assigned__month')), {'month_count': 1} ) def test_transform_in_values(self): self.assertQuerysetEqual( Experiment.objects.values('assigned__month'), ["{'assigned__month': 6}"] ) def test_multiple_transforms_in_values(self): self.assertQuerysetEqual( Experiment.objects.values('end__date__month'), ["{'end__date__month': 6}"] ) class ReprTests(SimpleTestCase): def test_expressions(self): self.assertEqual( repr(Case(When(a=1))), "<Case: CASE WHEN <Q: (AND: ('a', 1))> THEN Value(None), ELSE Value(None)>" ) self.assertEqual( repr(When(Q(age__gte=18), then=Value('legal'))), "<When: WHEN <Q: (AND: ('age__gte', 18))> THEN Value(legal)>" ) self.assertEqual(repr(Col('alias', 'field')), "Col(alias, field)") self.assertEqual(repr(F('published')), "F(published)") self.assertEqual(repr(F('cost') + F('tax')), "<CombinedExpression: F(cost) + F(tax)>") self.assertEqual( repr(ExpressionWrapper(F('cost') + F('tax'), IntegerField())), "ExpressionWrapper(F(cost) + F(tax))" ) self.assertEqual(repr(Func('published', function='TO_CHAR')), "Func(F(published), function=TO_CHAR)") self.assertEqual(repr(OrderBy(Value(1))), 'OrderBy(Value(1), descending=False)') self.assertEqual(repr(Random()), "Random()") self.assertEqual(repr(RawSQL('table.col', [])), "RawSQL(table.col, [])") self.assertEqual(repr(Ref('sum_cost', Sum('cost'))), "Ref(sum_cost, Sum(F(cost)))") self.assertEqual(repr(Value(1)), "Value(1)") self.assertEqual( repr(ExpressionList(F('col'), F('anothercol'))), 'ExpressionList(F(col), F(anothercol))' ) self.assertEqual( repr(ExpressionList(OrderBy(F('col'), descending=False))), 'ExpressionList(OrderBy(F(col), descending=False))' ) def test_functions(self): self.assertEqual(repr(Coalesce('a', 'b')), "Coalesce(F(a), F(b))") self.assertEqual(repr(Concat('a', 'b')), "Concat(ConcatPair(F(a), F(b)))") self.assertEqual(repr(Length('a')), "Length(F(a))") self.assertEqual(repr(Lower('a')), "Lower(F(a))") self.assertEqual(repr(Substr('a', 1, 3)), "Substr(F(a), Value(1), Value(3))") self.assertEqual(repr(Upper('a')), "Upper(F(a))") def test_aggregates(self): self.assertEqual(repr(Avg('a')), "Avg(F(a))") self.assertEqual(repr(Count('a')), "Count(F(a))") 
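        # Count('*') keeps the raw '*' in its repr rather than wrapping it in
        # an F() expression, as the assertion below shows.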
self.assertEqual(repr(Count('*')), "Count('*')") self.assertEqual(repr(Max('a')), "Max(F(a))") self.assertEqual(repr(Min('a')), "Min(F(a))") self.assertEqual(repr(StdDev('a')), "StdDev(F(a), sample=False)") self.assertEqual(repr(Sum('a')), "Sum(F(a))") self.assertEqual(repr(Variance('a', sample=True)), "Variance(F(a), sample=True)") def test_distinct_aggregates(self): self.assertEqual(repr(Count('a', distinct=True)), "Count(F(a), distinct=True)") self.assertEqual(repr(Count('*', distinct=True)), "Count('*', distinct=True)") def test_filtered_aggregates(self): filter = Q(a=1) self.assertEqual(repr(Avg('a', filter=filter)), "Avg(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(Count('a', filter=filter)), "Count(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(Max('a', filter=filter)), "Max(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(Min('a', filter=filter)), "Min(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(StdDev('a', filter=filter)), "StdDev(F(a), filter=(AND: ('a', 1)), sample=False)") self.assertEqual(repr(Sum('a', filter=filter)), "Sum(F(a), filter=(AND: ('a', 1)))") self.assertEqual( repr(Variance('a', sample=True, filter=filter)), "Variance(F(a), filter=(AND: ('a', 1)), sample=True)" ) self.assertEqual( repr(Count('a', filter=filter, distinct=True)), "Count(F(a), distinct=True, filter=(AND: ('a', 1)))" ) class CombinableTests(SimpleTestCase): bitwise_msg = 'Use .bitand() and .bitor() for bitwise logical operations.' def test_negation(self): c = Combinable() self.assertEqual(-c, c * -1) def test_and(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): Combinable() & Combinable() def test_or(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): Combinable() | Combinable() def test_reversed_and(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): object() & Combinable() def test_reversed_or(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): object() | Combinable() class ExpressionWrapperTests(SimpleTestCase): def test_empty_group_by(self): expr = ExpressionWrapper(Value(3), output_field=IntegerField()) self.assertEqual(expr.get_group_by_cols(alias=None), []) def test_non_empty_group_by(self): expr = ExpressionWrapper(Lower(Value('f')), output_field=IntegerField()) self.assertEqual(expr.get_group_by_cols(alias=None), [expr.expression])
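

# A minimal sketch, not part of the original suite: it restates the pattern
# the temporal-subtraction tests above rely on, namely wrapping F() arithmetic
# in ExpressionWrapper with an explicit output_field so the ORM knows the type
# of the combined expression. The class and method names are illustrative; it
# reuses this module's Experiment model and imports.
@skipUnlessDBFeature('supports_temporal_subtraction')
class TemporalArithmeticSketchTests(TestCase):
    def test_duration_annotation_roundtrip(self):
        start = datetime.datetime(2010, 6, 25, 12, 0)
        # Create a two-hour experiment, then recover its duration in SQL.
        Experiment.objects.create(
            name='sketch', assigned=start.date(), start=start,
            end=start + datetime.timedelta(hours=2),
            completed=start.date(), estimated_time=datetime.timedelta(hours=2),
        )
        runtime = ExpressionWrapper(F('end') - F('start'), output_field=DurationField())
        qs = Experiment.objects.annotate(runtime=runtime).filter(name='sketch')
        self.assertEqual(qs.get().runtime, datetime.timedelta(hours=2))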
eb023f9491e01598d7303c4622c56e22f97c2241854f870c4823814644ef2ec4
from unittest import mock, skipUnless from django.db import DatabaseError, connection from django.db.models import Index from django.test import TransactionTestCase, skipUnlessDBFeature from .models import ( Article, ArticleReporter, CheckConstraintModel, City, Comment, Country, District, Reporter, ) class IntrospectionTests(TransactionTestCase): available_apps = ['introspection'] def test_table_names(self): tl = connection.introspection.table_names() self.assertEqual(tl, sorted(tl)) self.assertIn(Reporter._meta.db_table, tl, "'%s' isn't in table_list()." % Reporter._meta.db_table) self.assertIn(Article._meta.db_table, tl, "'%s' isn't in table_list()." % Article._meta.db_table) def test_django_table_names(self): with connection.cursor() as cursor: cursor.execute('CREATE TABLE django_ixn_test_table (id INTEGER);') tl = connection.introspection.django_table_names() cursor.execute("DROP TABLE django_ixn_test_table;") self.assertNotIn('django_ixn_test_table', tl, "django_table_names() returned a non-Django table") def test_django_table_names_retval_type(self): # Table name is a list #15216 tl = connection.introspection.django_table_names(only_existing=True) self.assertIs(type(tl), list) tl = connection.introspection.django_table_names(only_existing=False) self.assertIs(type(tl), list) def test_table_names_with_views(self): with connection.cursor() as cursor: try: cursor.execute( 'CREATE VIEW introspection_article_view AS SELECT headline ' 'from introspection_article;') except DatabaseError as e: if 'insufficient privileges' in str(e): self.fail("The test user has no CREATE VIEW privileges") else: raise try: self.assertIn('introspection_article_view', connection.introspection.table_names(include_views=True)) self.assertNotIn('introspection_article_view', connection.introspection.table_names()) finally: with connection.cursor() as cursor: cursor.execute('DROP VIEW introspection_article_view') def test_unmanaged_through_model(self): tables = connection.introspection.django_table_names() self.assertNotIn(ArticleReporter._meta.db_table, tables) def test_installed_models(self): tables = [Article._meta.db_table, Reporter._meta.db_table] models = connection.introspection.installed_models(tables) self.assertEqual(models, {Article, Reporter}) def test_sequence_list(self): sequences = connection.introspection.sequence_list() reporter_seqs = [seq for seq in sequences if seq['table'] == Reporter._meta.db_table] self.assertEqual(len(reporter_seqs), 1, 'Reporter sequence not found in sequence_list()') self.assertEqual(reporter_seqs[0]['column'], 'id') def test_get_table_description_names(self): with connection.cursor() as cursor: desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table) self.assertEqual([r[0] for r in desc], [f.column for f in Reporter._meta.fields]) def test_get_table_description_types(self): with connection.cursor() as cursor: desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table) self.assertEqual( [connection.introspection.get_field_type(r[1], r) for r in desc], [ connection.features.introspected_field_types[field] for field in ( 'AutoField', 'CharField', 'CharField', 'CharField', 'BigIntegerField', 'BinaryField', 'SmallIntegerField', 'DurationField', ) ], ) def test_get_table_description_col_lengths(self): with connection.cursor() as cursor: desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table) self.assertEqual( [r[3] for r in desc if connection.introspection.get_field_type(r[1], r) == 
'CharField'], [30, 30, 254] ) def test_get_table_description_nullable(self): with connection.cursor() as cursor: desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table) nullable_by_backend = connection.features.interprets_empty_strings_as_nulls self.assertEqual( [r[6] for r in desc], [False, nullable_by_backend, nullable_by_backend, nullable_by_backend, True, True, False, False] ) def test_bigautofield(self): with connection.cursor() as cursor: desc = connection.introspection.get_table_description(cursor, City._meta.db_table) self.assertIn( connection.features.introspected_field_types['BigAutoField'], [connection.introspection.get_field_type(r[1], r) for r in desc], ) def test_smallautofield(self): with connection.cursor() as cursor: desc = connection.introspection.get_table_description(cursor, Country._meta.db_table) self.assertIn( connection.features.introspected_field_types['SmallAutoField'], [connection.introspection.get_field_type(r[1], r) for r in desc], ) # Regression test for #9991 - 'real' types in postgres @skipUnlessDBFeature('has_real_datatype') def test_postgresql_real_type(self): with connection.cursor() as cursor: cursor.execute("CREATE TABLE django_ixn_real_test_table (number REAL);") desc = connection.introspection.get_table_description(cursor, 'django_ixn_real_test_table') cursor.execute('DROP TABLE django_ixn_real_test_table;') self.assertEqual(connection.introspection.get_field_type(desc[0][1], desc[0]), 'FloatField') @skipUnlessDBFeature('can_introspect_foreign_keys') def test_get_relations(self): with connection.cursor() as cursor: relations = connection.introspection.get_relations(cursor, Article._meta.db_table) # That's {field_name: (field_name_other_table, other_table)} expected_relations = { 'reporter_id': ('id', Reporter._meta.db_table), 'response_to_id': ('id', Article._meta.db_table), } self.assertEqual(relations, expected_relations) # Removing a field shouldn't disturb get_relations (#17785) body = Article._meta.get_field('body') with connection.schema_editor() as editor: editor.remove_field(Article, body) with connection.cursor() as cursor: relations = connection.introspection.get_relations(cursor, Article._meta.db_table) with connection.schema_editor() as editor: editor.add_field(Article, body) self.assertEqual(relations, expected_relations) @skipUnless(connection.vendor == 'sqlite', "This is an sqlite-specific issue") def test_get_relations_alt_format(self): """ With SQLite, foreign keys can be added with different syntaxes and formatting. 
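        The two statements below differ only in whether a space separates
        FOREIGN KEY from the column list; both must yield the same relations.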
""" create_table_statements = [ "CREATE TABLE track(id, art_id INTEGER, FOREIGN KEY(art_id) REFERENCES {}(id));", "CREATE TABLE track(id, art_id INTEGER, FOREIGN KEY (art_id) REFERENCES {}(id));" ] for statement in create_table_statements: with connection.cursor() as cursor: cursor.fetchone = mock.Mock(return_value=[statement.format(Article._meta.db_table), 'table']) relations = connection.introspection.get_relations(cursor, 'mocked_table') self.assertEqual(relations, {'art_id': ('id', Article._meta.db_table)}) @skipUnlessDBFeature('can_introspect_foreign_keys') def test_get_key_columns(self): with connection.cursor() as cursor: key_columns = connection.introspection.get_key_columns(cursor, Article._meta.db_table) self.assertEqual(set(key_columns), { ('reporter_id', Reporter._meta.db_table, 'id'), ('response_to_id', Article._meta.db_table, 'id'), }) def test_get_primary_key_column(self): with connection.cursor() as cursor: primary_key_column = connection.introspection.get_primary_key_column(cursor, Article._meta.db_table) pk_fk_column = connection.introspection.get_primary_key_column(cursor, District._meta.db_table) self.assertEqual(primary_key_column, 'id') self.assertEqual(pk_fk_column, 'city_id') def test_get_constraints_index_types(self): with connection.cursor() as cursor: constraints = connection.introspection.get_constraints(cursor, Article._meta.db_table) index = {} index2 = {} for val in constraints.values(): if val['columns'] == ['headline', 'pub_date']: index = val if val['columns'] == ['headline', 'response_to_id', 'pub_date', 'reporter_id']: index2 = val self.assertEqual(index['type'], Index.suffix) self.assertEqual(index2['type'], Index.suffix) @skipUnlessDBFeature('supports_index_column_ordering') def test_get_constraints_indexes_orders(self): """ Indexes have the 'orders' key with a list of 'ASC'/'DESC' values. 
""" with connection.cursor() as cursor: constraints = connection.introspection.get_constraints(cursor, Article._meta.db_table) indexes_verified = 0 expected_columns = [ ['reporter_id'], ['headline', 'pub_date'], ['response_to_id'], ['headline', 'response_to_id', 'pub_date', 'reporter_id'], ] for val in constraints.values(): if val['index'] and not (val['primary_key'] or val['unique']): self.assertIn(val['columns'], expected_columns) self.assertEqual(val['orders'], ['ASC'] * len(val['columns'])) indexes_verified += 1 self.assertEqual(indexes_verified, 4) def test_get_constraints(self): def assertDetails(details, cols, primary_key=False, unique=False, index=False, check=False, foreign_key=None): # Different backends have different values for same constraints: # PRIMARY KEY UNIQUE CONSTRAINT UNIQUE INDEX # MySQL pk=1 uniq=1 idx=1 pk=0 uniq=1 idx=1 pk=0 uniq=1 idx=1 # PostgreSQL pk=1 uniq=1 idx=0 pk=0 uniq=1 idx=0 pk=0 uniq=1 idx=1 # SQLite pk=1 uniq=0 idx=0 pk=0 uniq=1 idx=0 pk=0 uniq=1 idx=1 if details['primary_key']: details['unique'] = True if details['unique']: details['index'] = False self.assertEqual(details['columns'], cols) self.assertEqual(details['primary_key'], primary_key) self.assertEqual(details['unique'], unique) self.assertEqual(details['index'], index) self.assertEqual(details['check'], check) self.assertEqual(details['foreign_key'], foreign_key) # Test custom constraints custom_constraints = { 'article_email_pub_date_uniq', 'email_pub_date_idx', } with connection.cursor() as cursor: constraints = connection.introspection.get_constraints(cursor, Comment._meta.db_table) if ( connection.features.supports_column_check_constraints and connection.features.can_introspect_check_constraints ): constraints.update( connection.introspection.get_constraints(cursor, CheckConstraintModel._meta.db_table) ) custom_constraints.add('up_votes_gte_0_check') assertDetails(constraints['up_votes_gte_0_check'], ['up_votes'], check=True) assertDetails(constraints['article_email_pub_date_uniq'], ['article_id', 'email', 'pub_date'], unique=True) assertDetails(constraints['email_pub_date_idx'], ['email', 'pub_date'], index=True) # Test field constraints field_constraints = set() for name, details in constraints.items(): if name in custom_constraints: continue elif details['columns'] == ['up_votes'] and details['check']: assertDetails(details, ['up_votes'], check=True) field_constraints.add(name) elif details['columns'] == ['voting_number'] and details['check']: assertDetails(details, ['voting_number'], check=True) field_constraints.add(name) elif details['columns'] == ['ref'] and details['unique']: assertDetails(details, ['ref'], unique=True) field_constraints.add(name) elif details['columns'] == ['voting_number'] and details['unique']: assertDetails(details, ['voting_number'], unique=True) field_constraints.add(name) elif details['columns'] == ['article_id'] and details['index']: assertDetails(details, ['article_id'], index=True) field_constraints.add(name) elif details['columns'] == ['id'] and details['primary_key']: assertDetails(details, ['id'], primary_key=True, unique=True) field_constraints.add(name) elif details['columns'] == ['article_id'] and details['foreign_key']: assertDetails(details, ['article_id'], foreign_key=('introspection_article', 'id')) field_constraints.add(name) elif details['check']: # Some databases (e.g. Oracle) include additional check # constraints. field_constraints.add(name) # All constraints are accounted for. 
self.assertEqual(constraints.keys() ^ (custom_constraints | field_constraints), set())
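

# A minimal sketch, not part of the original suite, of the basic introspection
# calls exercised above: open a cursor, describe a table, and read the column
# names back. The class and method names are illustrative; it reuses this
# module's Reporter model.
class IntrospectionSketchTests(TransactionTestCase):
    available_apps = ['introspection']

    def test_describe_reporter_table(self):
        with connection.cursor() as cursor:
            desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table)
        # Each description row is a sequence whose first item is the column
        # name, as the assertions above also assume.
        self.assertIn('id', [row[0] for row in desc])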
2409edee98b2d8842c8bd0adc775d126ba349c2b6704ac0b4dee3314770f7100
import datetime import decimal import gettext as gettext_module import os import pickle import re import tempfile from contextlib import contextmanager from importlib import import_module from pathlib import Path from unittest import mock from asgiref.local import Local from django import forms from django.apps import AppConfig from django.conf import settings from django.conf.locale import LANG_INFO from django.conf.urls.i18n import i18n_patterns from django.template import Context, Template from django.test import ( RequestFactory, SimpleTestCase, TestCase, override_settings, ) from django.utils import translation from django.utils.deprecation import RemovedInDjango40Warning from django.utils.formats import ( date_format, get_format, get_format_modules, iter_format_modules, localize, localize_input, reset_format_cache, sanitize_separators, time_format, ) from django.utils.numberformat import format as nformat from django.utils.safestring import SafeString, mark_safe from django.utils.translation import ( LANGUAGE_SESSION_KEY, activate, check_for_language, deactivate, get_language, get_language_bidi, get_language_from_request, get_language_info, gettext, gettext_lazy, ngettext, ngettext_lazy, npgettext, npgettext_lazy, pgettext, round_away_from_one, to_language, to_locale, trans_null, trans_real, ugettext, ugettext_lazy, ugettext_noop, ungettext, ungettext_lazy, ) from django.utils.translation.reloader import ( translation_file_changed, watch_for_translation_changes, ) from .forms import CompanyForm, I18nForm, SelectDateForm from .models import Company, TestModel here = os.path.dirname(os.path.abspath(__file__)) extended_locale_paths = settings.LOCALE_PATHS + [ os.path.join(here, 'other', 'locale'), ] class AppModuleStub: def __init__(self, **kwargs): self.__dict__.update(kwargs) @contextmanager def patch_formats(lang, **settings): from django.utils.formats import _format_cache # Populate _format_cache with temporary values for key, value in settings.items(): _format_cache[(key, lang)] = value try: yield finally: reset_format_cache() class TranslationTests(SimpleTestCase): @translation.override('de') def test_legacy_aliases(self): """ Pre-Django 2.0 aliases with u prefix are still available. """ msg = ( 'django.utils.translation.ugettext_noop() is deprecated in favor ' 'of django.utils.translation.gettext_noop().' ) with self.assertWarnsMessage(RemovedInDjango40Warning, msg): self.assertEqual(ugettext_noop("Image"), "Image") msg = ( 'django.utils.translation.ugettext() is deprecated in favor of ' 'django.utils.translation.gettext().' ) with self.assertWarnsMessage(RemovedInDjango40Warning, msg): self.assertEqual(ugettext("Image"), "Bild") msg = ( 'django.utils.translation.ugettext_lazy() is deprecated in favor ' 'of django.utils.translation.gettext_lazy().' ) with self.assertWarnsMessage(RemovedInDjango40Warning, msg): self.assertEqual(ugettext_lazy("Image"), gettext_lazy("Image")) msg = ( 'django.utils.translation.ungettext() is deprecated in favor of ' 'django.utils.translation.ngettext().' ) with self.assertWarnsMessage(RemovedInDjango40Warning, msg): self.assertEqual(ungettext("%d year", "%d years", 0) % 0, "0 Jahre") msg = ( 'django.utils.translation.ungettext_lazy() is deprecated in favor ' 'of django.utils.translation.ngettext_lazy().' 
) with self.assertWarnsMessage(RemovedInDjango40Warning, msg): self.assertEqual( ungettext_lazy("%d year", "%d years", 0) % 0, ngettext_lazy("%d year", "%d years", 0) % 0, ) @translation.override('fr') def test_plural(self): """ Test plurals with ngettext. French differs from English in that 0 is singular. """ self.assertEqual(ngettext("%d year", "%d years", 0) % 0, "0 année") self.assertEqual(ngettext("%d year", "%d years", 2) % 2, "2 années") self.assertEqual(ngettext("%(size)d byte", "%(size)d bytes", 0) % {'size': 0}, "0 octet") self.assertEqual(ngettext("%(size)d byte", "%(size)d bytes", 2) % {'size': 2}, "2 octets") def test_plural_null(self): g = trans_null.ngettext self.assertEqual(g('%d year', '%d years', 0) % 0, '0 years') self.assertEqual(g('%d year', '%d years', 1) % 1, '1 year') self.assertEqual(g('%d year', '%d years', 2) % 2, '2 years') @override_settings(LOCALE_PATHS=extended_locale_paths) @translation.override('fr') def test_multiple_plurals_per_language(self): """ Normally, French has 2 plurals. As other/locale/fr/LC_MESSAGES/django.po has a different plural equation with 3 plurals, this tests if those plural are honored. """ self.assertEqual(ngettext("%d singular", "%d plural", 0) % 0, "0 pluriel1") self.assertEqual(ngettext("%d singular", "%d plural", 1) % 1, "1 singulier") self.assertEqual(ngettext("%d singular", "%d plural", 2) % 2, "2 pluriel2") french = trans_real.catalog() # Internal _catalog can query subcatalogs (from different po files). self.assertEqual(french._catalog[('%d singular', 0)], '%d singulier') self.assertEqual(french._catalog[('%d hour', 0)], '%d heure') def test_override(self): activate('de') try: with translation.override('pl'): self.assertEqual(get_language(), 'pl') self.assertEqual(get_language(), 'de') with translation.override(None): self.assertIsNone(get_language()) with translation.override('pl'): pass self.assertIsNone(get_language()) self.assertEqual(get_language(), 'de') finally: deactivate() def test_override_decorator(self): @translation.override('pl') def func_pl(): self.assertEqual(get_language(), 'pl') @translation.override(None) def func_none(): self.assertIsNone(get_language()) try: activate('de') func_pl() self.assertEqual(get_language(), 'de') func_none() self.assertEqual(get_language(), 'de') finally: deactivate() def test_override_exit(self): """ The language restored is the one used when the function was called, not the one used when the decorator was initialized (#23381). """ activate('fr') @translation.override('pl') def func_pl(): pass deactivate() try: activate('en') func_pl() self.assertEqual(get_language(), 'en') finally: deactivate() def test_lazy_objects(self): """ Format string interpolation should work with *_lazy objects. """ s = gettext_lazy('Add %(name)s') d = {'name': 'Ringo'} self.assertEqual('Add Ringo', s % d) with translation.override('de', deactivate=True): self.assertEqual('Ringo hinzuf\xfcgen', s % d) with translation.override('pl'): self.assertEqual('Dodaj Ringo', s % d) # It should be possible to compare *_lazy objects. 
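        # Lazy proxies resolve to the underlying text when compared, so two
        # gettext_lazy() calls with the same source string compare equal.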
s1 = gettext_lazy('Add %(name)s') self.assertEqual(s, s1) s2 = gettext_lazy('Add %(name)s') s3 = gettext_lazy('Add %(name)s') self.assertEqual(s2, s3) self.assertEqual(s, s2) s4 = gettext_lazy('Some other string') self.assertNotEqual(s, s4) def test_lazy_pickle(self): s1 = gettext_lazy("test") self.assertEqual(str(s1), "test") s2 = pickle.loads(pickle.dumps(s1)) self.assertEqual(str(s2), "test") @override_settings(LOCALE_PATHS=extended_locale_paths) def test_ngettext_lazy(self): simple_with_format = ngettext_lazy('%d good result', '%d good results') simple_context_with_format = npgettext_lazy('Exclamation', '%d good result', '%d good results') simple_without_format = ngettext_lazy('good result', 'good results') with translation.override('de'): self.assertEqual(simple_with_format % 1, '1 gutes Resultat') self.assertEqual(simple_with_format % 4, '4 guten Resultate') self.assertEqual(simple_context_with_format % 1, '1 gutes Resultat!') self.assertEqual(simple_context_with_format % 4, '4 guten Resultate!') self.assertEqual(simple_without_format % 1, 'gutes Resultat') self.assertEqual(simple_without_format % 4, 'guten Resultate') complex_nonlazy = ngettext_lazy('Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 4) complex_deferred = ngettext_lazy( 'Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 'num' ) complex_context_nonlazy = npgettext_lazy( 'Greeting', 'Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 4 ) complex_context_deferred = npgettext_lazy( 'Greeting', 'Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 'num' ) with translation.override('de'): self.assertEqual(complex_nonlazy % {'num': 4, 'name': 'Jim'}, 'Hallo Jim, 4 guten Resultate') self.assertEqual(complex_deferred % {'name': 'Jim', 'num': 1}, 'Hallo Jim, 1 gutes Resultat') self.assertEqual(complex_deferred % {'name': 'Jim', 'num': 5}, 'Hallo Jim, 5 guten Resultate') with self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_deferred % {'name': 'Jim'} self.assertEqual(complex_context_nonlazy % {'num': 4, 'name': 'Jim'}, 'Willkommen Jim, 4 guten Resultate') self.assertEqual(complex_context_deferred % {'name': 'Jim', 'num': 1}, 'Willkommen Jim, 1 gutes Resultat') self.assertEqual(complex_context_deferred % {'name': 'Jim', 'num': 5}, 'Willkommen Jim, 5 guten Resultate') with self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_context_deferred % {'name': 'Jim'} @override_settings(LOCALE_PATHS=extended_locale_paths) def test_ngettext_lazy_format_style(self): simple_with_format = ngettext_lazy('{} good result', '{} good results') simple_context_with_format = npgettext_lazy('Exclamation', '{} good result', '{} good results') with translation.override('de'): self.assertEqual(simple_with_format.format(1), '1 gutes Resultat') self.assertEqual(simple_with_format.format(4), '4 guten Resultate') self.assertEqual(simple_context_with_format.format(1), '1 gutes Resultat!') self.assertEqual(simple_context_with_format.format(4), '4 guten Resultate!') complex_nonlazy = ngettext_lazy('Hi {name}, {num} good result', 'Hi {name}, {num} good results', 4) complex_deferred = ngettext_lazy( 'Hi {name}, {num} good result', 'Hi {name}, {num} good results', 'num' ) complex_context_nonlazy = npgettext_lazy( 'Greeting', 'Hi {name}, {num} good result', 'Hi {name}, {num} good results', 4 ) complex_context_deferred = npgettext_lazy( 'Greeting', 'Hi {name}, {num} good result', 'Hi {name}, {num} good results', 'num' ) with 
translation.override('de'): self.assertEqual(complex_nonlazy.format(num=4, name='Jim'), 'Hallo Jim, 4 guten Resultate') self.assertEqual(complex_deferred.format(name='Jim', num=1), 'Hallo Jim, 1 gutes Resultat') self.assertEqual(complex_deferred.format(name='Jim', num=5), 'Hallo Jim, 5 guten Resultate') with self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_deferred.format(name='Jim') self.assertEqual(complex_context_nonlazy.format(num=4, name='Jim'), 'Willkommen Jim, 4 guten Resultate') self.assertEqual(complex_context_deferred.format(name='Jim', num=1), 'Willkommen Jim, 1 gutes Resultat') self.assertEqual(complex_context_deferred.format(name='Jim', num=5), 'Willkommen Jim, 5 guten Resultate') with self.assertRaisesMessage(KeyError, 'Your dictionary lacks key'): complex_context_deferred.format(name='Jim') def test_ngettext_lazy_bool(self): self.assertTrue(ngettext_lazy('%d good result', '%d good results')) self.assertFalse(ngettext_lazy('', '')) def test_ngettext_lazy_pickle(self): s1 = ngettext_lazy('%d good result', '%d good results') self.assertEqual(s1 % 1, '1 good result') self.assertEqual(s1 % 8, '8 good results') s2 = pickle.loads(pickle.dumps(s1)) self.assertEqual(s2 % 1, '1 good result') self.assertEqual(s2 % 8, '8 good results') @override_settings(LOCALE_PATHS=extended_locale_paths) def test_pgettext(self): trans_real._active = Local() trans_real._translations = {} with translation.override('de'): self.assertEqual(pgettext("unexisting", "May"), "May") self.assertEqual(pgettext("month name", "May"), "Mai") self.assertEqual(pgettext("verb", "May"), "Kann") self.assertEqual(npgettext("search", "%d result", "%d results", 4) % 4, "4 Resultate") def test_empty_value(self): """Empty value must stay empty after being translated (#23196).""" with translation.override('de'): self.assertEqual('', gettext('')) s = mark_safe('') self.assertEqual(s, gettext(s)) @override_settings(LOCALE_PATHS=extended_locale_paths) def test_safe_status(self): """ Translating a string requiring no auto-escaping with gettext or pgettext shouldn't change the "safe" status. """ trans_real._active = Local() trans_real._translations = {} s1 = mark_safe('Password') s2 = mark_safe('May') with translation.override('de', deactivate=True): self.assertIs(type(gettext(s1)), SafeString) self.assertIs(type(pgettext('month name', s2)), SafeString) self.assertEqual('aPassword', SafeString('a') + s1) self.assertEqual('Passworda', s1 + SafeString('a')) self.assertEqual('Passworda', s1 + mark_safe('a')) self.assertEqual('aPassword', mark_safe('a') + s1) self.assertEqual('as', mark_safe('a') + mark_safe('s')) def test_maclines(self): """ Translations on files with Mac or DOS end of lines will be converted to unix EOF in .po catalogs. """ ca_translation = trans_real.translation('ca') ca_translation._catalog['Mac\nEOF\n'] = 'Catalan Mac\nEOF\n' ca_translation._catalog['Win\nEOF\n'] = 'Catalan Win\nEOF\n' with translation.override('ca', deactivate=True): self.assertEqual('Catalan Mac\nEOF\n', gettext('Mac\rEOF\r')) self.assertEqual('Catalan Win\nEOF\n', gettext('Win\r\nEOF\r\n')) def test_to_locale(self): tests = ( ('en', 'en'), ('EN', 'en'), ('en-us', 'en_US'), ('EN-US', 'en_US'), # With > 2 characters after the dash. ('sr-latn', 'sr_Latn'), ('sr-LATN', 'sr_Latn'), # With private use subtag (x-informal). 
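            # Everything from the -x- extension onward stays lowercase, even
            # when the region or script subtag before it is recased.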
('nl-nl-x-informal', 'nl_NL-x-informal'), ('NL-NL-X-INFORMAL', 'nl_NL-x-informal'), ('sr-latn-x-informal', 'sr_Latn-x-informal'), ('SR-LATN-X-INFORMAL', 'sr_Latn-x-informal'), ) for lang, locale in tests: with self.subTest(lang=lang): self.assertEqual(to_locale(lang), locale) def test_to_language(self): self.assertEqual(to_language('en_US'), 'en-us') self.assertEqual(to_language('sr_Lat'), 'sr-lat') def test_language_bidi(self): self.assertIs(get_language_bidi(), False) with translation.override(None): self.assertIs(get_language_bidi(), False) def test_language_bidi_null(self): self.assertIs(trans_null.get_language_bidi(), False) with override_settings(LANGUAGE_CODE='he'): self.assertIs(get_language_bidi(), True) class TranslationLoadingTests(SimpleTestCase): def setUp(self): """Clear translation state.""" self._old_language = get_language() self._old_translations = trans_real._translations deactivate() trans_real._translations = {} def tearDown(self): trans_real._translations = self._old_translations activate(self._old_language) @override_settings( USE_I18N=True, LANGUAGE_CODE='en', LANGUAGES=[ ('en', 'English'), ('en-ca', 'English (Canada)'), ('en-nz', 'English (New Zealand)'), ('en-au', 'English (Australia)'), ], LOCALE_PATHS=[os.path.join(here, 'loading')], INSTALLED_APPS=['i18n.loading_app'], ) def test_translation_loading(self): """ "loading_app" does not have translations for all languages provided by "loading". Catalogs are merged correctly. """ tests = [ ('en', 'local country person'), ('en_AU', 'aussie'), ('en_NZ', 'kiwi'), ('en_CA', 'canuck'), ] # Load all relevant translations. for language, _ in tests: activate(language) # Catalogs are merged correctly. for language, nickname in tests: with self.subTest(language=language): activate(language) self.assertEqual(gettext('local country person'), nickname) class TranslationThreadSafetyTests(SimpleTestCase): def setUp(self): self._old_language = get_language() self._translations = trans_real._translations # here we rely on .split() being called inside the _fetch() # in trans_real.translation() class sideeffect_str(str): def split(self, *args, **kwargs): res = str.split(self, *args, **kwargs) trans_real._translations['en-YY'] = None return res trans_real._translations = {sideeffect_str('en-XX'): None} def tearDown(self): trans_real._translations = self._translations activate(self._old_language) def test_bug14894_translation_activate_thread_safety(self): translation_count = len(trans_real._translations) # May raise RuntimeError if translation.activate() isn't thread-safe. 
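        # setUp() seeded _translations with a key whose .split() mutates the
        # dict; activate() looks that key up via trans_real.translation(), so
        # a non-thread-safe implementation would crash on the mutation.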
translation.activate('pl') # make sure sideeffect_str actually added a new translation self.assertLess(translation_count, len(trans_real._translations)) @override_settings(USE_L10N=True) class FormattingTests(SimpleTestCase): def setUp(self): super().setUp() self.n = decimal.Decimal('66666.666') self.f = 99999.999 self.d = datetime.date(2009, 12, 31) self.dt = datetime.datetime(2009, 12, 31, 20, 50) self.t = datetime.time(10, 15, 48) self.long = 10000 self.ctxt = Context({ 'n': self.n, 't': self.t, 'd': self.d, 'dt': self.dt, 'f': self.f, 'l': self.long, }) def test_all_format_strings(self): all_locales = LANG_INFO.keys() some_date = datetime.date(2017, 10, 14) some_datetime = datetime.datetime(2017, 10, 14, 10, 23) for locale in all_locales: with self.subTest(locale=locale), translation.override(locale): self.assertIn('2017', date_format(some_date)) # Uses DATE_FORMAT by default self.assertIn('23', time_format(some_datetime)) # Uses TIME_FORMAT by default self.assertIn('2017', date_format(some_datetime, format=get_format('DATETIME_FORMAT'))) self.assertIn('2017', date_format(some_date, format=get_format('YEAR_MONTH_FORMAT'))) self.assertIn('14', date_format(some_date, format=get_format('MONTH_DAY_FORMAT'))) self.assertIn('2017', date_format(some_date, format=get_format('SHORT_DATE_FORMAT'))) self.assertIn('2017', date_format(some_datetime, format=get_format('SHORT_DATETIME_FORMAT'))) def test_locale_independent(self): """ Localization of numbers """ with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666.66', nformat(self.n, decimal_sep='.', decimal_pos=2, grouping=3, thousand_sep=',')) self.assertEqual('66666A6', nformat(self.n, decimal_sep='A', decimal_pos=1, grouping=1, thousand_sep='B')) self.assertEqual('66666', nformat(self.n, decimal_sep='X', decimal_pos=0, grouping=1, thousand_sep='Y')) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual( '66,666.66', nformat(self.n, decimal_sep='.', decimal_pos=2, grouping=3, thousand_sep=',') ) self.assertEqual( '6B6B6B6B6A6', nformat(self.n, decimal_sep='A', decimal_pos=1, grouping=1, thousand_sep='B') ) self.assertEqual('-66666.6', nformat(-66666.666, decimal_sep='.', decimal_pos=1)) self.assertEqual('-66666.0', nformat(int('-66666'), decimal_sep='.', decimal_pos=1)) self.assertEqual('10000.0', nformat(self.long, decimal_sep='.', decimal_pos=1)) self.assertEqual( '10,00,00,000.00', nformat(100000000.00, decimal_sep='.', decimal_pos=2, grouping=(3, 2, 0), thousand_sep=',') ) self.assertEqual( '1,0,00,000,0000.00', nformat(10000000000.00, decimal_sep='.', decimal_pos=2, grouping=(4, 3, 2, 1, 0), thousand_sep=',') ) self.assertEqual( '10000,00,000.00', nformat(1000000000.00, decimal_sep='.', decimal_pos=2, grouping=(3, 2, -1), thousand_sep=',') ) # This unusual grouping/force_grouping combination may be triggered by the intcomma filter (#17414) self.assertEqual( '10000', nformat(self.long, decimal_sep='.', decimal_pos=0, grouping=0, force_grouping=True) ) # date filter self.assertEqual('31.12.2009 в 20:50', Template('{{ dt|date:"d.m.Y в H:i" }}').render(self.ctxt)) self.assertEqual('⌚ 10:15', Template('{{ t|time:"⌚ H:i" }}').render(self.ctxt)) @override_settings(USE_L10N=False) def test_l10n_disabled(self): """ Catalan locale with format i18n disabled translations will be used, but not formats """ with translation.override('ca', deactivate=True): self.maxDiff = 3000 self.assertEqual('N j, Y', get_format('DATE_FORMAT')) self.assertEqual(0, get_format('FIRST_DAY_OF_WEEK')) self.assertEqual('.', 
get_format('DECIMAL_SEPARATOR')) self.assertEqual('10:15 a.m.', time_format(self.t)) self.assertEqual('des. 31, 2009', date_format(self.d)) self.assertEqual('desembre 2009', date_format(self.d, 'YEAR_MONTH_FORMAT')) self.assertEqual('12/31/2009 8:50 p.m.', date_format(self.dt, 'SHORT_DATETIME_FORMAT')) self.assertEqual('No localizable', localize('No localizable')) self.assertEqual('66666.666', localize(self.n)) self.assertEqual('99999.999', localize(self.f)) self.assertEqual('10000', localize(self.long)) self.assertEqual('des. 31, 2009', localize(self.d)) self.assertEqual('des. 31, 2009, 8:50 p.m.', localize(self.dt)) self.assertEqual('66666.666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99999.999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('des. 31, 2009', Template('{{ d }}').render(self.ctxt)) self.assertEqual('des. 31, 2009, 8:50 p.m.', Template('{{ dt }}').render(self.ctxt)) self.assertEqual('66666.67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000.0', Template('{{ f|floatformat }}').render(self.ctxt)) self.assertEqual('10:15 a.m.', Template('{{ t|time:"TIME_FORMAT" }}').render(self.ctxt)) self.assertEqual('12/31/2009', Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt)) self.assertEqual( '12/31/2009 8:50 p.m.', Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt) ) form = I18nForm({ 'decimal_field': '66666,666', 'float_field': '99999,999', 'date_field': '31/12/2009', 'datetime_field': '31/12/2009 20:50', 'time_field': '20:50', 'integer_field': '1.234', }) self.assertFalse(form.is_valid()) self.assertEqual(['Introdu\xefu un n\xfamero.'], form.errors['float_field']) self.assertEqual(['Introdu\xefu un n\xfamero.'], form.errors['decimal_field']) self.assertEqual(['Introdu\xefu una data v\xe0lida.'], form.errors['date_field']) self.assertEqual(['Introdu\xefu una data/hora v\xe0lides.'], form.errors['datetime_field']) self.assertEqual(['Introdu\xefu un n\xfamero sencer.'], form.errors['integer_field']) form2 = SelectDateForm({ 'date_field_month': '12', 'date_field_day': '31', 'date_field_year': '2009' }) self.assertTrue(form2.is_valid()) self.assertEqual(datetime.date(2009, 12, 31), form2.cleaned_data['date_field']) self.assertHTMLEqual( '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">gener</option>' '<option value="2">febrer</option>' '<option value="3">mar\xe7</option>' '<option value="4">abril</option>' '<option value="5">maig</option>' '<option value="6">juny</option>' '<option value="7">juliol</option>' '<option value="8">agost</option>' '<option value="9">setembre</option>' '<option value="10">octubre</option>' '<option value="11">novembre</option>' '<option value="12" selected>desembre</option>' '</select>' '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option 
value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31)) ) # We shouldn't change the behavior of the floatformat filter re: # thousand separator and grouping when USE_L10N is False even # if the USE_THOUSAND_SEPARATOR, NUMBER_GROUPING and # THOUSAND_SEPARATOR settings are specified with self.settings(USE_THOUSAND_SEPARATOR=True, NUMBER_GROUPING=1, THOUSAND_SEPARATOR='!'): self.assertEqual('66666.67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000.0', Template('{{ f|floatformat }}').render(self.ctxt)) def test_false_like_locale_formats(self): """ The active locale's formats take precedence over the default settings even if they would be interpreted as False in a conditional test (e.g. 0 or empty string) (#16938). """ with translation.override('fr'): with self.settings(USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR='!'): self.assertEqual('\xa0', get_format('THOUSAND_SEPARATOR')) # Even a second time (after the format has been cached)... self.assertEqual('\xa0', get_format('THOUSAND_SEPARATOR')) with self.settings(FIRST_DAY_OF_WEEK=0): self.assertEqual(1, get_format('FIRST_DAY_OF_WEEK')) # Even a second time (after the format has been cached)... 
self.assertEqual(1, get_format('FIRST_DAY_OF_WEEK')) def test_l10n_enabled(self): self.maxDiff = 3000 # Catalan locale with translation.override('ca', deactivate=True): self.assertEqual(r'j \d\e F \d\e Y', get_format('DATE_FORMAT')) self.assertEqual(1, get_format('FIRST_DAY_OF_WEEK')) self.assertEqual(',', get_format('DECIMAL_SEPARATOR')) self.assertEqual('10:15', time_format(self.t)) self.assertEqual('31 de desembre de 2009', date_format(self.d)) self.assertEqual('desembre del 2009', date_format(self.d, 'YEAR_MONTH_FORMAT')) self.assertEqual('31/12/2009 20:50', date_format(self.dt, 'SHORT_DATETIME_FORMAT')) self.assertEqual('No localizable', localize('No localizable')) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66.666,666', localize(self.n)) self.assertEqual('99.999,999', localize(self.f)) self.assertEqual('10.000', localize(self.long)) self.assertEqual('True', localize(True)) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666,666', localize(self.n)) self.assertEqual('99999,999', localize(self.f)) self.assertEqual('10000', localize(self.long)) self.assertEqual('31 de desembre de 2009', localize(self.d)) self.assertEqual('31 de desembre de 2009 a les 20:50', localize(self.dt)) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66.666,666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99.999,999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('10.000', Template('{{ l }}').render(self.ctxt)) with self.settings(USE_THOUSAND_SEPARATOR=True): form3 = I18nForm({ 'decimal_field': '66.666,666', 'float_field': '99.999,999', 'date_field': '31/12/2009', 'datetime_field': '31/12/2009 20:50', 'time_field': '20:50', 'integer_field': '1.234', }) self.assertTrue(form3.is_valid()) self.assertEqual(decimal.Decimal('66666.666'), form3.cleaned_data['decimal_field']) self.assertEqual(99999.999, form3.cleaned_data['float_field']) self.assertEqual(datetime.date(2009, 12, 31), form3.cleaned_data['date_field']) self.assertEqual(datetime.datetime(2009, 12, 31, 20, 50), form3.cleaned_data['datetime_field']) self.assertEqual(datetime.time(20, 50), form3.cleaned_data['time_field']) self.assertEqual(1234, form3.cleaned_data['integer_field']) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666,666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99999,999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('31 de desembre de 2009', Template('{{ d }}').render(self.ctxt)) self.assertEqual('31 de desembre de 2009 a les 20:50', Template('{{ dt }}').render(self.ctxt)) self.assertEqual('66666,67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000,0', Template('{{ f|floatformat }}').render(self.ctxt)) self.assertEqual('10:15', Template('{{ t|time:"TIME_FORMAT" }}').render(self.ctxt)) self.assertEqual('31/12/2009', Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt)) self.assertEqual( '31/12/2009 20:50', Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt) ) self.assertEqual(date_format(datetime.datetime.now(), "DATE_FORMAT"), Template('{% now "DATE_FORMAT" %}').render(self.ctxt)) with self.settings(USE_THOUSAND_SEPARATOR=False): form4 = I18nForm({ 'decimal_field': '66666,666', 'float_field': '99999,999', 'date_field': '31/12/2009', 'datetime_field': '31/12/2009 20:50', 'time_field': '20:50', 'integer_field': '1234', }) self.assertTrue(form4.is_valid()) self.assertEqual(decimal.Decimal('66666.666'), form4.cleaned_data['decimal_field']) 
self.assertEqual(99999.999, form4.cleaned_data['float_field']) self.assertEqual(datetime.date(2009, 12, 31), form4.cleaned_data['date_field']) self.assertEqual(datetime.datetime(2009, 12, 31, 20, 50), form4.cleaned_data['datetime_field']) self.assertEqual(datetime.time(20, 50), form4.cleaned_data['time_field']) self.assertEqual(1234, form4.cleaned_data['integer_field']) form5 = SelectDateForm({ 'date_field_month': '12', 'date_field_day': '31', 'date_field_year': '2009' }) self.assertTrue(form5.is_valid()) self.assertEqual(datetime.date(2009, 12, 31), form5.cleaned_data['date_field']) self.assertHTMLEqual( '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">gener</option>' '<option value="2">febrer</option>' '<option value="3">mar\xe7</option>' '<option value="4">abril</option>' '<option value="5">maig</option>' '<option value="6">juny</option>' '<option value="7">juliol</option>' '<option value="8">agost</option>' '<option value="9">setembre</option>' '<option value="10">octubre</option>' '<option value="11">novembre</option>' '<option value="12" selected>desembre</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31)) ) # Russian locale (with E as month) with translation.override('ru', deactivate=True): self.assertHTMLEqual( '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option 
value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">\u042f\u043d\u0432\u0430\u0440\u044c</option>' '<option value="2">\u0424\u0435\u0432\u0440\u0430\u043b\u044c</option>' '<option value="3">\u041c\u0430\u0440\u0442</option>' '<option value="4">\u0410\u043f\u0440\u0435\u043b\u044c</option>' '<option value="5">\u041c\u0430\u0439</option>' '<option value="6">\u0418\u044e\u043d\u044c</option>' '<option value="7">\u0418\u044e\u043b\u044c</option>' '<option value="8">\u0410\u0432\u0433\u0443\u0441\u0442</option>' '<option value="9">\u0421\u0435\u043d\u0442\u044f\u0431\u0440\u044c</option>' '<option value="10">\u041e\u043a\u0442\u044f\u0431\u0440\u044c</option>' '<option value="11">\u041d\u043e\u044f\u0431\u0440\u044c</option>' '<option value="12" selected>\u0414\u0435\u043a\u0430\u0431\u0440\u044c</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31)) ) # English locale with translation.override('en', deactivate=True): self.assertEqual('N j, Y', get_format('DATE_FORMAT')) self.assertEqual(0, get_format('FIRST_DAY_OF_WEEK')) self.assertEqual('.', get_format('DECIMAL_SEPARATOR')) self.assertEqual('Dec. 31, 2009', date_format(self.d)) self.assertEqual('December 2009', date_format(self.d, 'YEAR_MONTH_FORMAT')) self.assertEqual('12/31/2009 8:50 p.m.', date_format(self.dt, 'SHORT_DATETIME_FORMAT')) self.assertEqual('No localizable', localize('No localizable')) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66,666.666', localize(self.n)) self.assertEqual('99,999.999', localize(self.f)) self.assertEqual('10,000', localize(self.long)) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666.666', localize(self.n)) self.assertEqual('99999.999', localize(self.f)) self.assertEqual('10000', localize(self.long)) self.assertEqual('Dec. 31, 2009', localize(self.d)) self.assertEqual('Dec. 31, 2009, 8:50 p.m.', localize(self.dt)) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual('66,666.666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99,999.999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('10,000', Template('{{ l }}').render(self.ctxt)) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual('66666.666', Template('{{ n }}').render(self.ctxt)) self.assertEqual('99999.999', Template('{{ f }}').render(self.ctxt)) self.assertEqual('Dec. 31, 2009', Template('{{ d }}').render(self.ctxt)) self.assertEqual('Dec. 
31, 2009, 8:50 p.m.', Template('{{ dt }}').render(self.ctxt)) self.assertEqual('66666.67', Template('{{ n|floatformat:2 }}').render(self.ctxt)) self.assertEqual('100000.0', Template('{{ f|floatformat }}').render(self.ctxt)) self.assertEqual('12/31/2009', Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt)) self.assertEqual( '12/31/2009 8:50 p.m.', Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt) ) form5 = I18nForm({ 'decimal_field': '66666.666', 'float_field': '99999.999', 'date_field': '12/31/2009', 'datetime_field': '12/31/2009 20:50', 'time_field': '20:50', 'integer_field': '1234', }) self.assertTrue(form5.is_valid()) self.assertEqual(decimal.Decimal('66666.666'), form5.cleaned_data['decimal_field']) self.assertEqual(99999.999, form5.cleaned_data['float_field']) self.assertEqual(datetime.date(2009, 12, 31), form5.cleaned_data['date_field']) self.assertEqual(datetime.datetime(2009, 12, 31, 20, 50), form5.cleaned_data['datetime_field']) self.assertEqual(datetime.time(20, 50), form5.cleaned_data['time_field']) self.assertEqual(1234, form5.cleaned_data['integer_field']) form6 = SelectDateForm({ 'date_field_month': '12', 'date_field_day': '31', 'date_field_year': '2009' }) self.assertTrue(form6.is_valid()) self.assertEqual(datetime.date(2009, 12, 31), form6.cleaned_data['date_field']) self.assertHTMLEqual( '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">January</option>' '<option value="2">February</option>' '<option value="3">March</option>' '<option value="4">April</option>' '<option value="5">May</option>' '<option value="6">June</option>' '<option value="7">July</option>' '<option value="8">August</option>' '<option value="9">September</option>' '<option value="10">October</option>' '<option value="11">November</option>' '<option value="12" selected>December</option>' '</select>' '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' '</select>' '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' '</select>', forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', 
datetime.date(2009, 12, 31)) ) def test_sub_locales(self): """ Check if sublocales fall back to the main locale """ with self.settings(USE_THOUSAND_SEPARATOR=True): with translation.override('de-at', deactivate=True): self.assertEqual('66.666,666', Template('{{ n }}').render(self.ctxt)) with translation.override('es-us', deactivate=True): self.assertEqual('31 de Diciembre de 2009', date_format(self.d)) def test_localized_input(self): """ Tests if form input is correctly localized """ self.maxDiff = 1200 with translation.override('de-at', deactivate=True): form6 = CompanyForm({ 'name': 'acme', 'date_added': datetime.datetime(2009, 12, 31, 6, 0, 0), 'cents_paid': decimal.Decimal('59.47'), 'products_delivered': 12000, }) self.assertTrue(form6.is_valid()) self.assertHTMLEqual( form6.as_ul(), '<li><label for="id_name">Name:</label>' '<input id="id_name" type="text" name="name" value="acme" maxlength="50" required></li>' '<li><label for="id_date_added">Date added:</label>' '<input type="text" name="date_added" value="31.12.2009 06:00:00" id="id_date_added" required></li>' '<li><label for="id_cents_paid">Cents paid:</label>' '<input type="text" name="cents_paid" value="59,47" id="id_cents_paid" required></li>' '<li><label for="id_products_delivered">Products delivered:</label>' '<input type="text" name="products_delivered" value="12000" id="id_products_delivered" required>' '</li>' ) self.assertEqual(localize_input(datetime.datetime(2009, 12, 31, 6, 0, 0)), '31.12.2009 06:00:00') self.assertEqual(datetime.datetime(2009, 12, 31, 6, 0, 0), form6.cleaned_data['date_added']) with self.settings(USE_THOUSAND_SEPARATOR=True): # Checking for the localized "products_delivered" field self.assertInHTML( '<input type="text" name="products_delivered" ' 'value="12.000" id="id_products_delivered" required>', form6.as_ul() ) def test_localized_input_func(self): tests = ( (True, 'True'), (datetime.date(1, 1, 1), '0001-01-01'), (datetime.datetime(1, 1, 1), '0001-01-01 00:00:00'), ) with self.settings(USE_THOUSAND_SEPARATOR=True): for value, expected in tests: with self.subTest(value=value): self.assertEqual(localize_input(value), expected) def test_sanitize_separators(self): """ Tests django.utils.formats.sanitize_separators. """ # Non-strings are untouched self.assertEqual(sanitize_separators(123), 123) with translation.override('ru', deactivate=True): # Russian locale has non-breaking space (\xa0) as thousand separator # Usual space is accepted too when sanitizing inputs with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual(sanitize_separators('1\xa0234\xa0567'), '1234567') self.assertEqual(sanitize_separators('77\xa0777,777'), '77777.777') self.assertEqual(sanitize_separators('12 345'), '12345') self.assertEqual(sanitize_separators('77 777,777'), '77777.777') with self.settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=False): self.assertEqual(sanitize_separators('12\xa0345'), '12\xa0345') with self.settings(USE_THOUSAND_SEPARATOR=True): with patch_formats(get_language(), THOUSAND_SEPARATOR='.', DECIMAL_SEPARATOR=','): self.assertEqual(sanitize_separators('10.234'), '10234') # Suspicion that user entered dot as decimal separator (#22171) self.assertEqual(sanitize_separators('10.10'), '10.10') with self.settings(USE_L10N=False, DECIMAL_SEPARATOR=','): self.assertEqual(sanitize_separators('1001,10'), '1001.10') self.assertEqual(sanitize_separators('1001.10'), '1001.10') with self.settings( USE_L10N=False, DECIMAL_SEPARATOR=',', USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR='.' 
): self.assertEqual(sanitize_separators('1.001,10'), '1001.10') self.assertEqual(sanitize_separators('1001,10'), '1001.10') self.assertEqual(sanitize_separators('1001.10'), '1001.10') self.assertEqual(sanitize_separators('1,001.10'), '1.001.10') # Invalid output def test_iter_format_modules(self): """ Tests the iter_format_modules function. """ # Importing some format modules so that we can compare the returned # modules with these expected modules default_mod = import_module('django.conf.locale.de.formats') test_mod = import_module('i18n.other.locale.de.formats') test_mod2 = import_module('i18n.other2.locale.de.formats') with translation.override('de-at', deactivate=True): # Should return the correct default module when no setting is set self.assertEqual(list(iter_format_modules('de')), [default_mod]) # When the setting is a string, should return the given module and # the default module self.assertEqual( list(iter_format_modules('de', 'i18n.other.locale')), [test_mod, default_mod]) # When setting is a list of strings, should return the given # modules and the default module self.assertEqual( list(iter_format_modules('de', ['i18n.other.locale', 'i18n.other2.locale'])), [test_mod, test_mod2, default_mod]) def test_iter_format_modules_stability(self): """ Tests the iter_format_modules function always yields format modules in a stable and correct order in presence of both base ll and ll_CC formats. """ en_format_mod = import_module('django.conf.locale.en.formats') en_gb_format_mod = import_module('django.conf.locale.en_GB.formats') self.assertEqual(list(iter_format_modules('en-gb')), [en_gb_format_mod, en_format_mod]) def test_get_format_modules_lang(self): with translation.override('de', deactivate=True): self.assertEqual('.', get_format('DECIMAL_SEPARATOR', lang='en')) def test_get_format_modules_stability(self): with self.settings(FORMAT_MODULE_PATH='i18n.other.locale'): with translation.override('de', deactivate=True): old = "%r" % get_format_modules(reverse=True) new = "%r" % get_format_modules(reverse=True) # second try self.assertEqual(new, old, 'Value returned by get_formats_modules() must be preserved between calls.') def test_localize_templatetag_and_filter(self): """ Test the {% localize %} templatetag and the localize/unlocalize filters. """ context = Context({'int': 1455, 'float': 3.14, 'date': datetime.date(2016, 12, 31)}) template1 = Template( '{% load l10n %}{% localize %}{{ int }}/{{ float }}/{{ date }}{% endlocalize %}; ' '{% localize on %}{{ int }}/{{ float }}/{{ date }}{% endlocalize %}' ) template2 = Template( '{% load l10n %}{{ int }}/{{ float }}/{{ date }}; ' '{% localize off %}{{ int }}/{{ float }}/{{ date }};{% endlocalize %} ' '{{ int }}/{{ float }}/{{ date }}' ) template3 = Template( '{% load l10n %}{{ int }}/{{ float }}/{{ date }}; ' '{{ int|unlocalize }}/{{ float|unlocalize }}/{{ date|unlocalize }}' ) template4 = Template( '{% load l10n %}{{ int }}/{{ float }}/{{ date }}; ' '{{ int|localize }}/{{ float|localize }}/{{ date|localize }}' ) expected_localized = '1.455/3,14/31. Dezember 2016' expected_unlocalized = '1455/3.14/Dez. 
31, 2016' output1 = '; '.join([expected_localized, expected_localized]) output2 = '; '.join([expected_localized, expected_unlocalized, expected_localized]) output3 = '; '.join([expected_localized, expected_unlocalized]) output4 = '; '.join([expected_unlocalized, expected_localized]) with translation.override('de', deactivate=True): with self.settings(USE_L10N=False, USE_THOUSAND_SEPARATOR=True): self.assertEqual(template1.render(context), output1) self.assertEqual(template4.render(context), output4) with self.settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True): self.assertEqual(template1.render(context), output1) self.assertEqual(template2.render(context), output2) self.assertEqual(template3.render(context), output3) def test_localized_off_numbers(self): """A string representation is returned for unlocalized numbers.""" template = Template( '{% load l10n %}{% localize off %}' '{{ int }}/{{ float }}/{{ decimal }}{% endlocalize %}' ) context = Context( {'int': 1455, 'float': 3.14, 'decimal': decimal.Decimal('24.1567')} ) for use_l10n in [True, False]: with self.subTest(use_l10n=use_l10n), self.settings( USE_L10N=use_l10n, DECIMAL_SEPARATOR=',', USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR='°', NUMBER_GROUPING=2, ): self.assertEqual(template.render(context), '1455/3.14/24.1567') def test_localized_as_text_as_hidden_input(self): """ Tests if form input with 'as_hidden' or 'as_text' is correctly localized. Ticket #18777 """ self.maxDiff = 1200 with translation.override('de-at', deactivate=True): template = Template('{% load l10n %}{{ form.date_added }}; {{ form.cents_paid }}') template_as_text = Template('{% load l10n %}{{ form.date_added.as_text }}; {{ form.cents_paid.as_text }}') template_as_hidden = Template( '{% load l10n %}{{ form.date_added.as_hidden }}; {{ form.cents_paid.as_hidden }}' ) form = CompanyForm({ 'name': 'acme', 'date_added': datetime.datetime(2009, 12, 31, 6, 0, 0), 'cents_paid': decimal.Decimal('59.47'), 'products_delivered': 12000, }) context = Context({'form': form}) self.assertTrue(form.is_valid()) self.assertHTMLEqual( template.render(context), '<input id="id_date_added" name="date_added" type="text" value="31.12.2009 06:00:00" required>;' '<input id="id_cents_paid" name="cents_paid" type="text" value="59,47" required>' ) self.assertHTMLEqual( template_as_text.render(context), '<input id="id_date_added" name="date_added" type="text" value="31.12.2009 06:00:00" required>;' ' <input id="id_cents_paid" name="cents_paid" type="text" value="59,47" required>' ) self.assertHTMLEqual( template_as_hidden.render(context), '<input id="id_date_added" name="date_added" type="hidden" value="31.12.2009 06:00:00">;' '<input id="id_cents_paid" name="cents_paid" type="hidden" value="59,47">' ) def test_format_arbitrary_settings(self): self.assertEqual(get_format('DEBUG'), 'DEBUG') def test_get_custom_format(self): with self.settings(FORMAT_MODULE_PATH='i18n.other.locale'): with translation.override('fr', deactivate=True): self.assertEqual('d/m/Y CUSTOM', get_format('CUSTOM_DAY_FORMAT')) def test_admin_javascript_supported_input_formats(self): """ The first input format for DATE_INPUT_FORMATS, TIME_INPUT_FORMATS, and DATETIME_INPUT_FORMATS must not contain %f since that's unsupported by the admin's time picker widget. 
""" regex = re.compile('%([^BcdHImMpSwxXyY%])') for language_code, language_name in settings.LANGUAGES: for format_name in ('DATE_INPUT_FORMATS', 'TIME_INPUT_FORMATS', 'DATETIME_INPUT_FORMATS'): with self.subTest(language=language_code, format=format_name): formatter = get_format(format_name, lang=language_code)[0] self.assertEqual( regex.findall(formatter), [], "%s locale's %s uses an unsupported format code." % (language_code, format_name) ) class MiscTests(SimpleTestCase): rf = RequestFactory() @override_settings(LANGUAGE_CODE='de') def test_english_fallback(self): """ With a non-English LANGUAGE_CODE and if the active language is English or one of its variants, the untranslated string should be returned (instead of falling back to LANGUAGE_CODE) (See #24413). """ self.assertEqual(gettext("Image"), "Bild") with translation.override('en'): self.assertEqual(gettext("Image"), "Image") with translation.override('en-us'): self.assertEqual(gettext("Image"), "Image") with translation.override('en-ca'): self.assertEqual(gettext("Image"), "Image") def test_parse_spec_http_header(self): """ Testing HTTP header parsing. First, we test that we can parse the values according to the spec (and that we extract all the pieces in the right order). """ tests = [ # Good headers ('de', [('de', 1.0)]), ('en-AU', [('en-au', 1.0)]), ('es-419', [('es-419', 1.0)]), ('*;q=1.00', [('*', 1.0)]), ('en-AU;q=0.123', [('en-au', 0.123)]), ('en-au;q=0.5', [('en-au', 0.5)]), ('en-au;q=1.0', [('en-au', 1.0)]), ('da, en-gb;q=0.25, en;q=0.5', [('da', 1.0), ('en', 0.5), ('en-gb', 0.25)]), ('en-au-xx', [('en-au-xx', 1.0)]), ('de,en-au;q=0.75,en-us;q=0.5,en;q=0.25,es;q=0.125,fa;q=0.125', [('de', 1.0), ('en-au', 0.75), ('en-us', 0.5), ('en', 0.25), ('es', 0.125), ('fa', 0.125)]), ('*', [('*', 1.0)]), ('de;q=0.', [('de', 0.0)]), ('en; q=1,', [('en', 1.0)]), ('en; q=1.0, * ; q=0.5', [('en', 1.0), ('*', 0.5)]), # Bad headers ('en-gb;q=1.0000', []), ('en;q=0.1234', []), ('en;q=.2', []), ('abcdefghi-au', []), ('**', []), ('en,,gb', []), ('en-au;q=0.1.0', []), (('X' * 97) + 'Z,en', []), ('da, en-gb;q=0.8, en;q=0.7,#', []), ('de;q=2.0', []), ('de;q=0.a', []), ('12-345', []), ('', []), ('en;q=1e0', []), ] for value, expected in tests: with self.subTest(value=value): self.assertEqual(trans_real.parse_accept_lang_header(value), tuple(expected)) def test_parse_literal_http_header(self): """ Now test that we parse a literal HTTP header correctly. """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-br'} self.assertEqual('pt-br', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt'} self.assertEqual('pt', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'es,de'} self.assertEqual('es', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'es-ar,de'} self.assertEqual('es-ar', g(r)) # This test assumes there won't be a Django translation to a US # variation of the Spanish language, a safe assumption. When the # user sets it as the preferred language, the main 'es' # translation should be selected instead. r.META = {'HTTP_ACCEPT_LANGUAGE': 'es-us'} self.assertEqual(g(r), 'es') # This tests the following scenario: there isn't a main language (zh) # translation of Django but there is a translation to variation (zh-hans) # the user sets zh-hans as the preferred language, it should be selected # by Django without falling back nor ignoring it. 
r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-hans,de'} self.assertEqual(g(r), 'zh-hans') r.META = {'HTTP_ACCEPT_LANGUAGE': 'NL'} self.assertEqual('nl', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'fy'} self.assertEqual('fy', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'ia'} self.assertEqual('ia', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'sr-latn'} self.assertEqual('sr-latn', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-hans'} self.assertEqual('zh-hans', g(r)) r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-hant'} self.assertEqual('zh-hant', g(r)) @override_settings( LANGUAGES=[ ('en', 'English'), ('zh-hans', 'Simplified Chinese'), ('zh-hant', 'Traditional Chinese'), ] ) def test_support_for_deprecated_chinese_language_codes(self): """ Some browsers (Firefox, IE, etc.) use deprecated language codes. Since support for these codes was removed in Django 1.9, they would otherwise be matched incorrectly: for example, zh-tw (traditional) would fall back to zh-hans (simplified), which is wrong. So these deprecated language codes should still be accepted and mapped to their modern equivalents. refs #18419 -- this is explicitly for browser compatibility """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-cn,en'} self.assertEqual(g(r), 'zh-hans') r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-tw,en'} self.assertEqual(g(r), 'zh-hant') def test_special_fallback_language(self): """ Some languages may have special fallbacks that don't follow the simple 'fr-ca' -> 'fr' logic (notably Chinese codes). """ r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-my,en'} self.assertEqual(get_language_from_request(r), 'zh-hans') def test_parse_language_cookie(self): """ Now test that we parse language preferences stored in a cookie correctly. """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'pt-br'} r.META = {} self.assertEqual('pt-br', g(r)) r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'pt'} r.META = {} self.assertEqual('pt', g(r)) r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'es'} r.META = {'HTTP_ACCEPT_LANGUAGE': 'de'} self.assertEqual('es', g(r)) # This test assumes there won't be a Django translation to a US # variation of the Spanish language, a safe assumption. When the # user sets it as the preferred language, the main 'es' # translation should be selected instead. r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'es-us'} r.META = {} self.assertEqual(g(r), 'es') # This tests the following scenario: there isn't a main language (zh) # translation of Django but there is a translation to variation (zh-hans) # the user sets zh-hans as the preferred language, it should be selected # by Django without falling back nor ignoring it.
r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'zh-hans'} r.META = {'HTTP_ACCEPT_LANGUAGE': 'de'} self.assertEqual(g(r), 'zh-hans') @override_settings( USE_I18N=True, LANGUAGES=[ ('en', 'English'), ('de', 'German'), ('de-at', 'Austrian German'), ('pt-br', 'Portuguese (Brazil)'), ], ) def test_get_supported_language_variant_real(self): g = trans_real.get_supported_language_variant self.assertEqual(g('en'), 'en') self.assertEqual(g('en-gb'), 'en') self.assertEqual(g('de'), 'de') self.assertEqual(g('de-at'), 'de-at') self.assertEqual(g('de-ch'), 'de') self.assertEqual(g('pt-br'), 'pt-br') self.assertEqual(g('pt'), 'pt-br') self.assertEqual(g('pt-pt'), 'pt-br') with self.assertRaises(LookupError): g('pt', strict=True) with self.assertRaises(LookupError): g('pt-pt', strict=True) with self.assertRaises(LookupError): g('xyz') with self.assertRaises(LookupError): g('xy-zz') def test_get_supported_language_variant_null(self): g = trans_null.get_supported_language_variant self.assertEqual(g(settings.LANGUAGE_CODE), settings.LANGUAGE_CODE) with self.assertRaises(LookupError): g('pt') with self.assertRaises(LookupError): g('de') with self.assertRaises(LookupError): g('de-at') with self.assertRaises(LookupError): g('de', strict=True) with self.assertRaises(LookupError): g('de-at', strict=True) with self.assertRaises(LookupError): g('xyz') @override_settings( LANGUAGES=[ ('en', 'English'), ('de', 'German'), ('de-at', 'Austrian German'), ('pl', 'Polish'), ], ) def test_get_language_from_path_real(self): g = trans_real.get_language_from_path self.assertEqual(g('/pl/'), 'pl') self.assertEqual(g('/pl'), 'pl') self.assertIsNone(g('/xyz/')) self.assertEqual(g('/en/'), 'en') self.assertEqual(g('/en-gb/'), 'en') self.assertEqual(g('/de/'), 'de') self.assertEqual(g('/de-at/'), 'de-at') self.assertEqual(g('/de-ch/'), 'de') self.assertIsNone(g('/de-simple-page/')) def test_get_language_from_path_null(self): g = trans_null.get_language_from_path self.assertIsNone(g('/pl/')) self.assertIsNone(g('/pl')) self.assertIsNone(g('/xyz/')) def test_cache_resetting(self): """ After setting LANGUAGES, the cache should be cleared and languages previously valid should not be used (#14170). """ g = get_language_from_request r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-br'} self.assertEqual('pt-br', g(r)) with self.settings(LANGUAGES=[('en', 'English')]): self.assertNotEqual('pt-br', g(r)) def test_i18n_patterns_returns_list(self): with override_settings(USE_I18N=False): self.assertIsInstance(i18n_patterns([]), list) with override_settings(USE_I18N=True): self.assertIsInstance(i18n_patterns([]), list) class ResolutionOrderI18NTests(SimpleTestCase): def setUp(self): super().setUp() activate('de') def tearDown(self): deactivate() super().tearDown() def assertGettext(self, msgid, msgstr): result = gettext(msgid) self.assertIn( msgstr, result, "The string '%s' isn't in the translation of '%s'; the actual result is '%s'." % (msgstr, msgid, result) ) class AppResolutionOrderI18NTests(ResolutionOrderI18NTests): @override_settings(LANGUAGE_CODE='de') def test_app_translation(self): # Original translation. self.assertGettext('Date/time', 'Datum/Zeit') # Different translation. with self.modify_settings(INSTALLED_APPS={'append': 'i18n.resolution'}): # Force refreshing translations. activate('de') # Doesn't work because it's added later in the list.
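# (App translations merge with earlier INSTALLED_APPS entries taking
# precedence, so an appended app cannot shadow a msgid that an earlier app,
# here the admin, already translates; the assertion below confirms that.)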
self.assertGettext('Date/time', 'Datum/Zeit') with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.admin.apps.SimpleAdminConfig'}): # Force refreshing translations. activate('de') # Unless the original is removed from the list. self.assertGettext('Date/time', 'Datum/Zeit (APP)') @override_settings(LOCALE_PATHS=extended_locale_paths) class LocalePathsResolutionOrderI18NTests(ResolutionOrderI18NTests): def test_locale_paths_translation(self): self.assertGettext('Time', 'LOCALE_PATHS') def test_locale_paths_override_app_translation(self): with self.settings(INSTALLED_APPS=['i18n.resolution']): self.assertGettext('Time', 'LOCALE_PATHS') class DjangoFallbackResolutionOrderI18NTests(ResolutionOrderI18NTests): def test_django_fallback(self): self.assertEqual(gettext('Date/time'), 'Datum/Zeit') @override_settings(INSTALLED_APPS=['i18n.territorial_fallback']) class TranslationFallbackI18NTests(ResolutionOrderI18NTests): def test_sparse_territory_catalog(self): """ Untranslated strings for territorial language variants use the translations of the generic language. In this case, the de-de translation falls back to de. """ with translation.override('de-de'): self.assertGettext('Test 1 (en)', '(de-de)') self.assertGettext('Test 2 (en)', '(de)') class TestModels(TestCase): def test_lazy(self): tm = TestModel() tm.save() def test_safestr(self): c = Company(cents_paid=12, products_delivered=1) c.name = SafeString('Iñtërnâtiônàlizætiøn1') c.save() class TestLanguageInfo(SimpleTestCase): def test_localized_language_info(self): li = get_language_info('de') self.assertEqual(li['code'], 'de') self.assertEqual(li['name_local'], 'Deutsch') self.assertEqual(li['name'], 'German') self.assertIs(li['bidi'], False) def test_unknown_language_code(self): with self.assertRaisesMessage(KeyError, "Unknown language code xx"): get_language_info('xx') with translation.override('xx'): # A language with no translation catalogs should fall back to the # untranslated string. self.assertEqual(gettext("Title"), "Title") def test_unknown_only_country_code(self): li = get_language_info('de-xx') self.assertEqual(li['code'], 'de') self.assertEqual(li['name_local'], 'Deutsch') self.assertEqual(li['name'], 'German') self.assertIs(li['bidi'], False) def test_unknown_language_code_and_country_code(self): with self.assertRaisesMessage(KeyError, "Unknown language code xx-xx and xx"): get_language_info('xx-xx') def test_fallback_language_code(self): """ get_language_info returns the first fallback language info if the lang_info struct does not contain the 'name' key. """ li = get_language_info('zh-my') self.assertEqual(li['code'], 'zh-hans') li = get_language_info('zh-hans') self.assertEqual(li['code'], 'zh-hans') @override_settings( USE_I18N=True, LANGUAGES=[ ('en', 'English'), ('fr', 'French'), ], MIDDLEWARE=[ 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ROOT_URLCONF='i18n.urls', ) class LocaleMiddlewareTests(TestCase): def test_streaming_response(self): # Regression test for #5241 response = self.client.get('/fr/streaming/') self.assertContains(response, "Oui/Non") response = self.client.get('/en/streaming/') self.assertContains(response, "Yes/No") @override_settings( MIDDLEWARE=[ 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ) def test_language_not_saved_to_session(self): """ The current language isn't automatically saved to the session on every request (#21473).
""" self.client.get('/fr/simple/') self.assertNotIn(LANGUAGE_SESSION_KEY, self.client.session) @override_settings( USE_I18N=True, LANGUAGES=[ ('en', 'English'), ('de', 'German'), ('fr', 'French'), ], MIDDLEWARE=[ 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ROOT_URLCONF='i18n.urls_default_unprefixed', LANGUAGE_CODE='en', ) class UnprefixedDefaultLanguageTests(SimpleTestCase): def test_default_lang_without_prefix(self): """ With i18n_patterns(..., prefix_default_language=False), the default language (settings.LANGUAGE_CODE) should be accessible without a prefix. """ response = self.client.get('/simple/') self.assertEqual(response.content, b'Yes') def test_other_lang_with_prefix(self): response = self.client.get('/fr/simple/') self.assertEqual(response.content, b'Oui') def test_unprefixed_language_other_than_accept_language(self): response = self.client.get('/simple/', HTTP_ACCEPT_LANGUAGE='fr') self.assertEqual(response.content, b'Yes') def test_page_with_dash(self): # A page starting with /de* shouldn't match the 'de' language code. response = self.client.get('/de-simple-page/') self.assertEqual(response.content, b'Yes') def test_no_redirect_on_404(self): """ A request for a nonexistent URL shouldn't cause a redirect to /<default_language>/<request_url> when prefix_default_language=False and /<default_language>/<request_url> has a URL match (#27402). """ # A match for /group1/group2/ must exist for this to act as a # regression test. response = self.client.get('/group1/group2/') self.assertEqual(response.status_code, 200) response = self.client.get('/nonexistent/') self.assertEqual(response.status_code, 404) @override_settings( USE_I18N=True, LANGUAGES=[ ('bg', 'Bulgarian'), ('en-us', 'English'), ('pt-br', 'Portuguese (Brazil)'), ], MIDDLEWARE=[ 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ROOT_URLCONF='i18n.urls' ) class CountrySpecificLanguageTests(SimpleTestCase): rf = RequestFactory() def test_check_for_language(self): self.assertTrue(check_for_language('en')) self.assertTrue(check_for_language('en-us')) self.assertTrue(check_for_language('en-US')) self.assertFalse(check_for_language('en_US')) self.assertTrue(check_for_language('be')) self.assertTrue(check_for_language('be@latin')) self.assertTrue(check_for_language('sr-RS@latin')) self.assertTrue(check_for_language('sr-RS@12345')) self.assertFalse(check_for_language('en-ü')) self.assertFalse(check_for_language('en\x00')) self.assertFalse(check_for_language(None)) self.assertFalse(check_for_language('be@ ')) # Specifying encoding is not supported (Django enforces UTF-8) self.assertFalse(check_for_language('tr-TR.UTF-8')) self.assertFalse(check_for_language('tr-TR.UTF8')) self.assertFalse(check_for_language('de-DE.utf-8')) def test_check_for_language_null(self): self.assertIs(trans_null.check_for_language('en'), True) def test_get_language_from_request(self): # issue 19919 r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'en-US,en;q=0.8,bg;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('en-us', lang) r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'bg-bg,en-US;q=0.8,en;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('bg', lang) def test_get_language_from_request_null(self): lang = trans_null.get_language_from_request(None) self.assertEqual(lang, 'en') with override_settings(LANGUAGE_CODE='de'): lang = trans_null.get_language_from_request(None) 
self.assertEqual(lang, 'de') def test_specific_language_codes(self): # issue 11915 r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt,en-US;q=0.8,en;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('pt-br', lang) r = self.rf.get('/') r.COOKIES = {} r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-pt,en-US;q=0.8,en;q=0.6,ru;q=0.4'} lang = get_language_from_request(r) self.assertEqual('pt-br', lang) class TranslationFilesMissing(SimpleTestCase): def setUp(self): super().setUp() self.gettext_find_builtin = gettext_module.find def tearDown(self): gettext_module.find = self.gettext_find_builtin super().tearDown() def patchGettextFind(self): gettext_module.find = lambda *args, **kw: None def test_failure_finding_default_mo_files(self): """OSError is raised if the default language is unparseable.""" self.patchGettextFind() trans_real._translations = {} with self.assertRaises(OSError): activate('en') class NonDjangoLanguageTests(SimpleTestCase): """ A language not present in Django's default languages can still be installed/used by a Django project. """ @override_settings( USE_I18N=True, LANGUAGES=[ ('en-us', 'English'), ('xxx', 'Somelanguage'), ], LANGUAGE_CODE='xxx', LOCALE_PATHS=[os.path.join(here, 'commands', 'locale')], ) def test_non_django_language(self): self.assertEqual(get_language(), 'xxx') self.assertEqual(gettext("year"), "reay") @override_settings(USE_I18N=True) def test_check_for_language(self): with tempfile.TemporaryDirectory() as app_dir: os.makedirs(os.path.join(app_dir, 'locale', 'dummy_Lang', 'LC_MESSAGES')) open(os.path.join(app_dir, 'locale', 'dummy_Lang', 'LC_MESSAGES', 'django.mo'), 'w').close() app_config = AppConfig('dummy_app', AppModuleStub(__path__=[app_dir])) with mock.patch('django.apps.apps.get_app_configs', return_value=[app_config]): self.assertIs(check_for_language('dummy-lang'), True) @override_settings( USE_I18N=True, LANGUAGES=[ ('en-us', 'English'), # xyz language has no locale files ('xyz', 'XYZ'), ], ) @translation.override('xyz') def test_plural_non_django_language(self): self.assertEqual(get_language(), 'xyz') self.assertEqual(ngettext('year', 'years', 2), 'years') @override_settings(USE_I18N=True) class WatchForTranslationChangesTests(SimpleTestCase): @override_settings(USE_I18N=False) def test_i18n_disabled(self): mocked_sender = mock.MagicMock() watch_for_translation_changes(mocked_sender) mocked_sender.watch_dir.assert_not_called() def test_i18n_enabled(self): mocked_sender = mock.MagicMock() watch_for_translation_changes(mocked_sender) self.assertGreater(mocked_sender.watch_dir.call_count, 1) def test_i18n_locale_paths(self): mocked_sender = mock.MagicMock() with tempfile.TemporaryDirectory() as app_dir: with self.settings(LOCALE_PATHS=[app_dir]): watch_for_translation_changes(mocked_sender) mocked_sender.watch_dir.assert_any_call(Path(app_dir), '**/*.mo') def test_i18n_app_dirs(self): mocked_sender = mock.MagicMock() with self.settings(INSTALLED_APPS=['tests.i18n.sampleproject']): watch_for_translation_changes(mocked_sender) project_dir = Path(__file__).parent / 'sampleproject' / 'locale' mocked_sender.watch_dir.assert_any_call(project_dir, '**/*.mo') def test_i18n_app_dirs_ignore_django_apps(self): mocked_sender = mock.MagicMock() with self.settings(INSTALLED_APPS=['django.contrib.admin']): watch_for_translation_changes(mocked_sender) mocked_sender.watch_dir.assert_called_once_with(Path('locale'), '**/*.mo') def test_i18n_local_locale(self): mocked_sender = mock.MagicMock()
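# A rough sketch, inferred from the tests above rather than quoted from
# django.utils.translation.reloader, of what watch_for_translation_changes()
# is expected to do when USE_I18N is enabled:
#
#   directories = [Path('locale')]
#   directories += [app locale dirs for non-Django apps]
#   directories += [Path(p) for p in settings.LOCALE_PATHS]
#   for path in directories:
#       sender.watch_dir(path.absolute(), '**/*.mo')
#
# The call below should therefore report this test app's own locale directory.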
watch_for_translation_changes(mocked_sender) locale_dir = Path(__file__).parent / 'locale' mocked_sender.watch_dir.assert_any_call(locale_dir, '**/*.mo') class TranslationFileChangedTests(SimpleTestCase): def setUp(self): self.gettext_translations = gettext_module._translations.copy() self.trans_real_translations = trans_real._translations.copy() def tearDown(self): # Restore the caches saved in setUp(); the stdlib cache lives on the # gettext module, not on the django.utils.translation.gettext function. gettext_module._translations = self.gettext_translations trans_real._translations = self.trans_real_translations def test_ignores_non_mo_files(self): gettext_module._translations = {'foo': 'bar'} path = Path('test.py') self.assertIsNone(translation_file_changed(None, path)) self.assertEqual(gettext_module._translations, {'foo': 'bar'}) def test_resets_cache_with_mo_files(self): gettext_module._translations = {'foo': 'bar'} trans_real._translations = {'foo': 'bar'} trans_real._default = 1 trans_real._active = False path = Path('test.mo') self.assertIs(translation_file_changed(None, path), True) self.assertEqual(gettext_module._translations, {}) self.assertEqual(trans_real._translations, {}) self.assertIsNone(trans_real._default) self.assertIsInstance(trans_real._active, Local) class UtilsTests(SimpleTestCase): def test_round_away_from_one(self): tests = [ (0, 0), (0., 0), (0.25, 0), (0.5, 0), (0.75, 0), (1, 1), (1., 1), (1.25, 2), (1.5, 2), (1.75, 2), (-0., 0), (-0.25, -1), (-0.5, -1), (-0.75, -1), (-1, -1), (-1., -1), (-1.25, -2), (-1.5, -2), (-1.75, -2), ] for value, expected in tests: with self.subTest(value=value): self.assertEqual(round_away_from_one(value), expected)
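# A minimal reference sketch of the rounding behavior the table above pins
# down (an assumed implementation for illustration, not Django's source):
# values below one round toward negative infinity, and values of one or more
# round toward positive infinity, i.e. away from one.
#
#   import math
#
#   def round_away_from_one(value):
#       return int(math.floor(value)) if value < 1 else int(math.ceil(value))
#
# e.g. round_away_from_one(0.75) == 0, round_away_from_one(1.25) == 2,
# round_away_from_one(-0.25) == -1.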
import gettext import os import re from datetime import datetime, timedelta from importlib import import_module import pytz from django import forms from django.conf import settings from django.contrib import admin from django.contrib.admin import widgets from django.contrib.admin.tests import AdminSeleniumTestCase from django.contrib.auth.models import User from django.core.files.storage import default_storage from django.core.files.uploadedfile import SimpleUploadedFile from django.db.models import ( CharField, DateField, DateTimeField, ManyToManyField, UUIDField, ) from django.test import SimpleTestCase, TestCase, override_settings from django.urls import reverse from django.utils import translation from .models import ( Advisor, Album, Band, Bee, Car, Company, Event, Honeycomb, Individual, Inventory, Member, MyFileField, Profile, School, Student, UnsafeLimitChoicesTo, ) from .widgetadmin import site as widget_admin_site class TestDataMixin: @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email=None) cls.u2 = User.objects.create_user(username='testser', password='secret') Car.objects.create(owner=cls.superuser, make='Volkswagen', model='Passat') Car.objects.create(owner=cls.u2, make='BMW', model='M3') class AdminFormfieldForDBFieldTests(SimpleTestCase): """ Tests for correct behavior of ModelAdmin.formfield_for_dbfield """ def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides): """ Helper to call formfield_for_dbfield for a given model and field name and verify that the returned formfield is appropriate. """ # Override any settings on the model admin class MyModelAdmin(admin.ModelAdmin): pass for k in admin_overrides: setattr(MyModelAdmin, k, admin_overrides[k]) # Construct the admin, and ask it for a formfield ma = MyModelAdmin(model, admin.site) ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None) # "unwrap" the widget wrapper, if needed if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper): widget = ff.widget.widget else: widget = ff.widget self.assertIsInstance(widget, widgetclass) # Return the formfield so that other tests can continue return ff def test_DateField(self): self.assertFormfield(Event, 'start_date', widgets.AdminDateWidget) def test_DateTimeField(self): self.assertFormfield(Member, 'birthdate', widgets.AdminSplitDateTime) def test_TimeField(self): self.assertFormfield(Event, 'start_time', widgets.AdminTimeWidget) def test_TextField(self): self.assertFormfield(Event, 'description', widgets.AdminTextareaWidget) def test_URLField(self): self.assertFormfield(Event, 'link', widgets.AdminURLFieldWidget) def test_IntegerField(self): self.assertFormfield(Event, 'min_age', widgets.AdminIntegerFieldWidget) def test_CharField(self): self.assertFormfield(Member, 'name', widgets.AdminTextInputWidget) def test_EmailField(self): self.assertFormfield(Member, 'email', widgets.AdminEmailInputWidget) def test_FileField(self): self.assertFormfield(Album, 'cover_art', widgets.AdminFileWidget) def test_ForeignKey(self): self.assertFormfield(Event, 'main_band', forms.Select) def test_raw_id_ForeignKey(self): self.assertFormfield(Event, 'main_band', widgets.ForeignKeyRawIdWidget, raw_id_fields=['main_band']) def test_radio_fields_ForeignKey(self): ff = self.assertFormfield(Event, 'main_band', widgets.AdminRadioSelect, radio_fields={'main_band': admin.VERTICAL}) self.assertIsNone(ff.empty_label) def test_many_to_many(self): self.assertFormfield(Band, 'members', 
forms.SelectMultiple) def test_raw_id_many_to_many(self): self.assertFormfield(Band, 'members', widgets.ManyToManyRawIdWidget, raw_id_fields=['members']) def test_filtered_many_to_many(self): self.assertFormfield(Band, 'members', widgets.FilteredSelectMultiple, filter_vertical=['members']) def test_formfield_overrides(self): self.assertFormfield(Event, 'start_date', forms.TextInput, formfield_overrides={DateField: {'widget': forms.TextInput}}) def test_formfield_overrides_widget_instances(self): """ Widget instances in formfield_overrides are not shared between different fields. (#19423) """ class BandAdmin(admin.ModelAdmin): formfield_overrides = { CharField: {'widget': forms.TextInput(attrs={'size': '10'})} } ma = BandAdmin(Band, admin.site) f1 = ma.formfield_for_dbfield(Band._meta.get_field('name'), request=None) f2 = ma.formfield_for_dbfield(Band._meta.get_field('style'), request=None) self.assertNotEqual(f1.widget, f2.widget) self.assertEqual(f1.widget.attrs['maxlength'], '100') self.assertEqual(f2.widget.attrs['maxlength'], '20') self.assertEqual(f2.widget.attrs['size'], '10') def test_formfield_overrides_m2m_filter_widget(self): """ The autocomplete_fields, raw_id_fields, filter_vertical, and filter_horizontal widgets for ManyToManyFields may be overridden by specifying a widget in formfield_overrides. """ class BandAdmin(admin.ModelAdmin): filter_vertical = ['members'] formfield_overrides = { ManyToManyField: {'widget': forms.CheckboxSelectMultiple}, } ma = BandAdmin(Band, admin.site) field = ma.formfield_for_dbfield(Band._meta.get_field('members'), request=None) self.assertIsInstance(field.widget.widget, forms.CheckboxSelectMultiple) def test_formfield_overrides_for_datetime_field(self): """ Overriding the widget for DateTimeField doesn't override the default form_class for that field (#26449). """ class MemberAdmin(admin.ModelAdmin): formfield_overrides = {DateTimeField: {'widget': widgets.AdminSplitDateTime}} ma = MemberAdmin(Member, admin.site) f1 = ma.formfield_for_dbfield(Member._meta.get_field('birthdate'), request=None) self.assertIsInstance(f1.widget, widgets.AdminSplitDateTime) self.assertIsInstance(f1, forms.SplitDateTimeField) def test_formfield_overrides_for_custom_field(self): """ formfield_overrides works for a custom field class. """ class AlbumAdmin(admin.ModelAdmin): formfield_overrides = {MyFileField: {'widget': forms.TextInput()}} ma = AlbumAdmin(Member, admin.site) f1 = ma.formfield_for_dbfield(Album._meta.get_field('backside_art'), request=None) self.assertIsInstance(f1.widget, forms.TextInput) def test_field_with_choices(self): self.assertFormfield(Member, 'gender', forms.Select) def test_choices_with_radio_fields(self): self.assertFormfield(Member, 'gender', widgets.AdminRadioSelect, radio_fields={'gender': admin.VERTICAL}) def test_inheritance(self): self.assertFormfield(Album, 'backside_art', widgets.AdminFileWidget) def test_m2m_widgets(self): """m2m fields help text as it applies to admin app (#9321).""" class AdvisorAdmin(admin.ModelAdmin): filter_vertical = ['companies'] self.assertFormfield(Advisor, 'companies', widgets.FilteredSelectMultiple, filter_vertical=['companies']) ma = AdvisorAdmin(Advisor, admin.site) f = ma.formfield_for_dbfield(Advisor._meta.get_field('companies'), request=None) self.assertEqual( f.help_text, 'Hold down “Control”, or “Command” on a Mac, to select more than one.'
) @override_settings(ROOT_URLCONF='admin_widgets.urls') class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, TestCase): def test_filter_choices_by_request_user(self): """ Ensure the user can only see their own cars in the foreign key dropdown. """ self.client.force_login(self.superuser) response = self.client.get(reverse('admin:admin_widgets_cartire_add')) self.assertNotContains(response, "BMW M3") self.assertContains(response, "Volkswagen Passat") @override_settings(ROOT_URLCONF='admin_widgets.urls') class AdminForeignKeyWidgetChangeList(TestDataMixin, TestCase): def setUp(self): self.client.force_login(self.superuser) def test_changelist_ForeignKey(self): response = self.client.get(reverse('admin:admin_widgets_car_changelist')) self.assertContains(response, '/auth/user/add/') @override_settings(ROOT_URLCONF='admin_widgets.urls') class AdminForeignKeyRawIdWidget(TestDataMixin, TestCase): def setUp(self): self.client.force_login(self.superuser) def test_nonexistent_target_id(self): band = Band.objects.create(name='Bogey Blues') pk = band.pk band.delete() post_data = { "main_band": str(pk), } # Try posting with a nonexistent pk in a raw id field: this # should result in an error message, not a server exception. response = self.client.post(reverse('admin:admin_widgets_event_add'), post_data) self.assertContains(response, 'Select a valid choice. That choice is not one of the available choices.') def test_invalid_target_id(self): for test_str in ('Iñtërnâtiônàlizætiøn', "1234'", -1234): # This should result in an error message, not a server exception. response = self.client.post(reverse('admin:admin_widgets_event_add'), {"main_band": test_str}) self.assertContains(response, 'Select a valid choice. That choice is not one of the available choices.') def test_url_params_from_lookup_dict_any_iterable(self): lookup1 = widgets.url_params_from_lookup_dict({'color__in': ('red', 'blue')}) lookup2 = widgets.url_params_from_lookup_dict({'color__in': ['red', 'blue']}) self.assertEqual(lookup1, {'color__in': 'red,blue'}) self.assertEqual(lookup1, lookup2) def test_url_params_from_lookup_dict_callable(self): def my_callable(): return 'works' lookup1 = widgets.url_params_from_lookup_dict({'myfield': my_callable}) lookup2 = widgets.url_params_from_lookup_dict({'myfield': my_callable()}) self.assertEqual(lookup1, lookup2) def test_label_and_url_for_value_invalid_uuid(self): field = Bee._meta.get_field('honeycomb') self.assertIsInstance(field.target_field, UUIDField) widget = widgets.ForeignKeyRawIdWidget(field.remote_field, admin.site) self.assertEqual(widget.label_and_url_for_value('invalid-uuid'), ('', '')) class FilteredSelectMultipleWidgetTest(SimpleTestCase): def test_render(self): # Backslash in verbose_name to ensure it is JavaScript escaped. w = widgets.FilteredSelectMultiple('test\\', False) self.assertHTMLEqual( w.render('test', 'test'), '<select multiple name="test" class="selectfilter" ' 'data-field-name="test\\" data-is-stacked="0">\n</select>' ) def test_stacked_render(self): # Backslash in verbose_name to ensure it is JavaScript escaped. 
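# The selectfilter/selectfilterstacked classes and the data-field-name and
# data-is-stacked attributes asserted in these renders are the hooks the
# admin's SelectFilter2.js looks for when turning the plain <select> into the
# two-pane filter widget, which is why the JavaScript escaping above matters.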
w = widgets.FilteredSelectMultiple('test\\', True) self.assertHTMLEqual( w.render('test', 'test'), '<select multiple name="test" class="selectfilterstacked" ' 'data-field-name="test\\" data-is-stacked="1">\n</select>' ) class AdminDateWidgetTest(SimpleTestCase): def test_attrs(self): w = widgets.AdminDateWidget() self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="2007-12-01" type="text" class="vDateField" name="test" size="10">', ) # pass attrs to widget w = widgets.AdminDateWidget(attrs={'size': 20, 'class': 'myDateField'}) self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="2007-12-01" type="text" class="myDateField" name="test" size="20">', ) class AdminTimeWidgetTest(SimpleTestCase): def test_attrs(self): w = widgets.AdminTimeWidget() self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="09:30:00" type="text" class="vTimeField" name="test" size="8">', ) # pass attrs to widget w = widgets.AdminTimeWidget(attrs={'size': 20, 'class': 'myTimeField'}) self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="09:30:00" type="text" class="myTimeField" name="test" size="20">', ) class AdminSplitDateTimeWidgetTest(SimpleTestCase): def test_render(self): w = widgets.AdminSplitDateTime() self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<p class="datetime">' 'Date: <input value="2007-12-01" type="text" class="vDateField" ' 'name="test_0" size="10"><br>' 'Time: <input value="09:30:00" type="text" class="vTimeField" ' 'name="test_1" size="8"></p>' ) def test_localization(self): w = widgets.AdminSplitDateTime() with self.settings(USE_L10N=True), translation.override('de-at'): w.is_localized = True self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<p class="datetime">' 'Datum: <input value="01.12.2007" type="text" ' 'class="vDateField" name="test_0" size="10"><br>' 'Zeit: <input value="09:30:00" type="text" class="vTimeField" ' 'name="test_1" size="8"></p>' ) class AdminURLWidgetTest(SimpleTestCase): def test_get_context_validates_url(self): w = widgets.AdminURLFieldWidget() for invalid in ['', '/not/a/full/url/', 'javascript:alert("Danger XSS!")']: with self.subTest(url=invalid): self.assertFalse(w.get_context('name', invalid, {})['url_valid']) self.assertTrue(w.get_context('name', 'http://example.com', {})['url_valid']) def test_render(self): w = widgets.AdminURLFieldWidget() self.assertHTMLEqual( w.render('test', ''), '<input class="vURLField" name="test" type="url">' ) self.assertHTMLEqual( w.render('test', 'http://example.com'), '<p class="url">Currently:<a href="http://example.com">' 'http://example.com</a><br>' 'Change:<input class="vURLField" name="test" type="url" ' 'value="http://example.com"></p>' ) def test_render_idn(self): w = widgets.AdminURLFieldWidget() self.assertHTMLEqual( w.render('test', 'http://example-äüö.com'), '<p class="url">Currently: <a href="http://xn--example--7za4pnc.com">' 'http://example-äüö.com</a><br>' 'Change:<input class="vURLField" name="test" type="url" ' 'value="http://example-äüö.com"></p>' ) def test_render_quoting(self): """ WARNING: This test doesn't use assertHTMLEqual since it will get rid of some escapes which are tested here!
""" HREF_RE = re.compile('href="([^"]+)"') VALUE_RE = re.compile('value="([^"]+)"') TEXT_RE = re.compile('<a[^>]+>([^>]+)</a>') w = widgets.AdminURLFieldWidget() output = w.render('test', 'http://example.com/<sometag>some-text</sometag>') self.assertEqual( HREF_RE.search(output)[1], 'http://example.com/%3Csometag%3Esome-text%3C/sometag%3E', ) self.assertEqual( TEXT_RE.search(output)[1], 'http://example.com/&lt;sometag&gt;some-text&lt;/sometag&gt;', ) self.assertEqual( VALUE_RE.search(output)[1], 'http://example.com/&lt;sometag&gt;some-text&lt;/sometag&gt;', ) output = w.render('test', 'http://example-äüö.com/<sometag>some-text</sometag>') self.assertEqual( HREF_RE.search(output)[1], 'http://xn--example--7za4pnc.com/%3Csometag%3Esome-text%3C/sometag%3E', ) self.assertEqual( TEXT_RE.search(output)[1], 'http://example-äüö.com/&lt;sometag&gt;some-text&lt;/sometag&gt;', ) self.assertEqual( VALUE_RE.search(output)[1], 'http://example-äüö.com/&lt;sometag&gt;some-text&lt;/sometag&gt;', ) output = w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"') self.assertEqual( HREF_RE.search(output)[1], 'http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22', ) self.assertEqual( TEXT_RE.search(output)[1], 'http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;' 'alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;' ) self.assertEqual( VALUE_RE.search(output)[1], 'http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;', ) class AdminUUIDWidgetTests(SimpleTestCase): def test_attrs(self): w = widgets.AdminUUIDInputWidget() self.assertHTMLEqual( w.render('test', '550e8400-e29b-41d4-a716-446655440000'), '<input value="550e8400-e29b-41d4-a716-446655440000" type="text" class="vUUIDField" name="test">', ) w = widgets.AdminUUIDInputWidget(attrs={'class': 'myUUIDInput'}) self.assertHTMLEqual( w.render('test', '550e8400-e29b-41d4-a716-446655440000'), '<input value="550e8400-e29b-41d4-a716-446655440000" type="text" class="myUUIDInput" name="test">', ) @override_settings(ROOT_URLCONF='admin_widgets.urls') class AdminFileWidgetTests(TestDataMixin, TestCase): @classmethod def setUpTestData(cls): super().setUpTestData() band = Band.objects.create(name='Linkin Park') cls.album = band.album_set.create( name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg' ) def test_render(self): w = widgets.AdminFileWidget() self.assertHTMLEqual( w.render('test', self.album.cover_art), '<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/' r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> ' '<span class="clearable-file-input">' '<input type="checkbox" name="test-clear" id="test-clear_id"> ' '<label for="test-clear_id">Clear</label></span><br>' 'Change: <input type="file" name="test"></p>' % { 'STORAGE_URL': default_storage.url(''), }, ) self.assertHTMLEqual( w.render('test', SimpleUploadedFile('test', b'content')), '<input type="file" name="test">', ) def test_render_required(self): widget = widgets.AdminFileWidget() widget.is_required = True self.assertHTMLEqual( widget.render('test', self.album.cover_art), '<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/' r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a><br>' 'Change: <input type="file" name="test"></p>' % { 'STORAGE_URL': default_storage.url(''), }, ) def test_render_disabled(self): widget = widgets.AdminFileWidget(attrs={'disabled': True}) self.assertHTMLEqual( widget.render('test', self.album.cover_art), '<p class="file-upload">Currently: <a 
href="%(STORAGE_URL)salbums/' r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> ' '<span class="clearable-file-input">' '<input type="checkbox" name="test-clear" id="test-clear_id" disabled>' '<label for="test-clear_id">Clear</label></span><br>' 'Change: <input type="file" name="test" disabled></p>' % { 'STORAGE_URL': default_storage.url(''), }, ) def test_readonly_fields(self): """ File widgets should render as a link when they're marked "read only." """ self.client.force_login(self.superuser) response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,))) self.assertContains( response, '<div class="readonly"><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">' r'albums\hybrid_theory.jpg</a></div>' % {'STORAGE_URL': default_storage.url('')}, html=True, ) self.assertNotContains( response, '<input type="file" name="cover_art" id="id_cover_art">', html=True, ) response = self.client.get(reverse('admin:admin_widgets_album_add')) self.assertContains( response, '<div class="readonly"></div>', html=True, ) @override_settings(ROOT_URLCONF='admin_widgets.urls') class ForeignKeyRawIdWidgetTest(TestCase): def test_render(self): band = Band.objects.create(name='Linkin Park') band.album_set.create( name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg' ) rel = Album._meta.get_field('band').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('test', band.pk, attrs={}), '<input type="text" name="test" value="%(bandpk)s" ' 'class="vForeignKeyRawIdAdminField">' '<a href="/admin_widgets/band/?_to_field=id" class="related-lookup" ' 'id="lookup_id_test" title="Lookup"></a>&nbsp;<strong>' '<a href="/admin_widgets/band/%(bandpk)s/change/">Linkin Park</a>' '</strong>' % {'bandpk': band.pk} ) def test_relations_to_non_primary_key(self): # ForeignKeyRawIdWidget works with fields which aren't related to # the model's primary key. apple = Inventory.objects.create(barcode=86, name='Apple') Inventory.objects.create(barcode=22, name='Pear') core = Inventory.objects.create( barcode=87, name='Core', parent=apple ) rel = Inventory._meta.get_field('parent').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('test', core.parent_id, attrs={}), '<input type="text" name="test" value="86" ' 'class="vForeignKeyRawIdAdminField">' '<a href="/admin_widgets/inventory/?_to_field=barcode" ' 'class="related-lookup" id="lookup_id_test" title="Lookup"></a>' '&nbsp;<strong><a href="/admin_widgets/inventory/%(pk)s/change/">' 'Apple</a></strong>' % {'pk': apple.pk} ) def test_fk_related_model_not_in_admin(self): # FK to a model not registered with admin site. Raw ID widget should # have no magnifying glass link. See #16542 big_honeycomb = Honeycomb.objects.create(location='Old tree') big_honeycomb.bee_set.create() rel = Bee._meta.get_field('honeycomb').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('honeycomb_widget', big_honeycomb.pk, attrs={}), '<input type="text" name="honeycomb_widget" value="%(hcombpk)s">' '&nbsp;<strong>%(hcomb)s</strong>' % {'hcombpk': big_honeycomb.pk, 'hcomb': big_honeycomb} ) def test_fk_to_self_model_not_in_admin(self): # FK to self, not registered with admin site. Raw ID widget should have # no magnifying glass link. 
See #16542 subject1 = Individual.objects.create(name='Subject #1') Individual.objects.create(name='Child', parent=subject1) rel = Individual._meta.get_field('parent').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('individual_widget', subject1.pk, attrs={}), '<input type="text" name="individual_widget" value="%(subj1pk)s">' '&nbsp;<strong>%(subj1)s</strong>' % {'subj1pk': subject1.pk, 'subj1': subject1} ) def test_proper_manager_for_label_lookup(self): # see #9258 rel = Inventory._meta.get_field('parent').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) hidden = Inventory.objects.create( barcode=93, name='Hidden', hidden=True ) child_of_hidden = Inventory.objects.create( barcode=94, name='Child of hidden', parent=hidden ) self.assertHTMLEqual( w.render('test', child_of_hidden.parent_id, attrs={}), '<input type="text" name="test" value="93" class="vForeignKeyRawIdAdminField">' '<a href="/admin_widgets/inventory/?_to_field=barcode" ' 'class="related-lookup" id="lookup_id_test" title="Lookup"></a>' '&nbsp;<strong><a href="/admin_widgets/inventory/%(pk)s/change/">' 'Hidden</a></strong>' % {'pk': hidden.pk} ) def test_render_unsafe_limit_choices_to(self): rel = UnsafeLimitChoicesTo._meta.get_field('band').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('test', None), '<input type="text" name="test" class="vForeignKeyRawIdAdminField">\n' '<a href="/admin_widgets/band/?name=%22%26%3E%3Cescapeme&amp;_to_field=id" ' 'class="related-lookup" id="lookup_id_test" title="Lookup"></a>' ) @override_settings(ROOT_URLCONF='admin_widgets.urls') class ManyToManyRawIdWidgetTest(TestCase): def test_render(self): band = Band.objects.create(name='Linkin Park') m1 = Member.objects.create(name='Chester') m2 = Member.objects.create(name='Mike') band.members.add(m1, m2) rel = Band._meta.get_field('members').remote_field w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('test', [m1.pk, m2.pk], attrs={}), ( '<input type="text" name="test" value="%(m1pk)s,%(m2pk)s" class="vManyToManyRawIdAdminField">' '<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>' ) % {'m1pk': m1.pk, 'm2pk': m2.pk} ) self.assertHTMLEqual( w.render('test', [m1.pk]), ( '<input type="text" name="test" value="%(m1pk)s" class="vManyToManyRawIdAdminField">' '<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>' ) % {'m1pk': m1.pk} ) def test_m2m_related_model_not_in_admin(self): # M2M relationship with model not registered with admin site. Raw ID # widget should have no magnifying glass link. 
See #16542 consultor1 = Advisor.objects.create(name='Rockstar Techie') c1 = Company.objects.create(name='Doodle') c2 = Company.objects.create(name='Pear') consultor1.companies.add(c1, c2) rel = Advisor._meta.get_field('companies').remote_field w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('company_widget1', [c1.pk, c2.pk], attrs={}), '<input type="text" name="company_widget1" value="%(c1pk)s,%(c2pk)s">' % {'c1pk': c1.pk, 'c2pk': c2.pk} ) self.assertHTMLEqual( w.render('company_widget2', [c1.pk]), '<input type="text" name="company_widget2" value="%(c1pk)s">' % {'c1pk': c1.pk} ) @override_settings(ROOT_URLCONF='admin_widgets.urls') class RelatedFieldWidgetWrapperTests(SimpleTestCase): def test_no_can_add_related(self): rel = Individual._meta.get_field('parent').remote_field w = widgets.AdminRadioSelect() # Used to fail with a name error. w = widgets.RelatedFieldWidgetWrapper(w, rel, widget_admin_site) self.assertFalse(w.can_add_related) def test_select_multiple_widget_cant_change_delete_related(self): rel = Individual._meta.get_field('parent').remote_field widget = forms.SelectMultiple() wrapper = widgets.RelatedFieldWidgetWrapper( widget, rel, widget_admin_site, can_add_related=True, can_change_related=True, can_delete_related=True, ) self.assertTrue(wrapper.can_add_related) self.assertFalse(wrapper.can_change_related) self.assertFalse(wrapper.can_delete_related) def test_on_delete_cascade_rel_cant_delete_related(self): rel = Individual._meta.get_field('soulmate').remote_field widget = forms.Select() wrapper = widgets.RelatedFieldWidgetWrapper( widget, rel, widget_admin_site, can_add_related=True, can_change_related=True, can_delete_related=True, ) self.assertTrue(wrapper.can_add_related) self.assertTrue(wrapper.can_change_related) self.assertFalse(wrapper.can_delete_related) def test_custom_widget_render(self): class CustomWidget(forms.Select): def render(self, *args, **kwargs): return 'custom render output' rel = Album._meta.get_field('band').remote_field widget = CustomWidget() wrapper = widgets.RelatedFieldWidgetWrapper( widget, rel, widget_admin_site, can_add_related=True, can_change_related=True, can_delete_related=True, ) output = wrapper.render('name', 'value') self.assertIn('custom render output', output) def test_widget_delegates_value_omitted_from_data(self): class CustomWidget(forms.Select): def value_omitted_from_data(self, data, files, name): return False rel = Album._meta.get_field('band').remote_field widget = CustomWidget() wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site) self.assertIs(wrapper.value_omitted_from_data({}, {}, 'band'), False) def test_widget_is_hidden(self): rel = Album._meta.get_field('band').remote_field widget = forms.HiddenInput() widget.choices = () wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site) self.assertIs(wrapper.is_hidden, True) context = wrapper.get_context('band', None, {}) self.assertIs(context['is_hidden'], True) output = wrapper.render('name', 'value') # Related item links are hidden. self.assertNotIn('<a ', output) def test_widget_is_not_hidden(self): rel = Album._meta.get_field('band').remote_field widget = forms.Select() wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site) self.assertIs(wrapper.is_hidden, False) context = wrapper.get_context('band', None, {}) self.assertIs(context['is_hidden'], False) output = wrapper.render('name', 'value') # Related item links are present. 
        self.assertIn('<a ', output)


@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminWidgetSeleniumTestCase(AdminSeleniumTestCase):

    available_apps = ['admin_widgets'] + AdminSeleniumTestCase.available_apps

    def setUp(self):
        self.u1 = User.objects.create_superuser(username='super', password='secret', email='super@example.com')


class DateTimePickerSeleniumTests(AdminWidgetSeleniumTestCase):

    def test_show_hide_date_time_picker_widgets(self):
        """
        Pressing the ESC key or clicking on a widget value closes the date
        and time picker widgets.
        """
        from selenium.webdriver.common.keys import Keys
        self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has date and time picker widgets
        self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))

        # First, with the date picker widget ---------------------------------
        cal_icon = self.selenium.find_element_by_id('calendarlink0')
        # The date picker is hidden
        self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed())
        # Click the calendar icon
        cal_icon.click()
        # The date picker is visible
        self.assertTrue(self.selenium.find_element_by_id('calendarbox0').is_displayed())
        # Press the ESC key
        self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
        # The date picker is hidden again
        self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed())
        # Click the calendar icon, then on the 15th of current month
        cal_icon.click()
        self.selenium.find_element_by_xpath("//a[contains(text(), '15')]").click()
        self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed())
        self.assertEqual(
            self.selenium.find_element_by_id('id_birthdate_0').get_attribute('value'),
            datetime.today().strftime('%Y-%m-') + '15',
        )

        # Then, with the time picker widget ----------------------------------
        time_icon = self.selenium.find_element_by_id('clocklink0')
        # The time picker is hidden
        self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed())
        # Click the time icon
        time_icon.click()
        # The time picker is visible
        self.assertTrue(self.selenium.find_element_by_id('clockbox0').is_displayed())
        self.assertEqual(
            [
                x.text for x in
                self.selenium.find_elements_by_xpath("//ul[@class='timelist']/li/a")
            ],
            ['Now', 'Midnight', '6 a.m.', 'Noon', '6 p.m.']
        )
        # Press the ESC key
        self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
        # The time picker is hidden again
        self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed())
        # Click the time icon, then select the 'Noon' value
        time_icon.click()
        self.selenium.find_element_by_xpath("//a[contains(text(), 'Noon')]").click()
        self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed())
        self.assertEqual(
            self.selenium.find_element_by_id('id_birthdate_1').get_attribute('value'),
            '12:00:00',
        )

    def test_calendar_nonday_class(self):
        """
        Ensure cells that are not days of the month have the `nonday` CSS
        class.
        Refs #4574.
        """
        self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has date and time picker widgets
        self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))

        # fill in the birth date.
        self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')

        # Click the calendar icon
        self.selenium.find_element_by_id('calendarlink0').click()

        # get all the tds within the calendar
        calendar0 = self.selenium.find_element_by_id('calendarin0')
        tds = calendar0.find_elements_by_tag_name('td')

        # make sure the first and last 6 cells have class nonday
        for td in tds[:6] + tds[-6:]:
            self.assertEqual(td.get_attribute('class'), 'nonday')

    def test_calendar_selected_class(self):
        """
        Ensure the cell for the day in the input has the `selected` CSS class.
        Refs #4574.
        """
        self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has date and time picker widgets
        self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))

        # fill in the birth date.
        self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')

        # Click the calendar icon
        self.selenium.find_element_by_id('calendarlink0').click()

        # get all the tds within the calendar
        calendar0 = self.selenium.find_element_by_id('calendarin0')
        tds = calendar0.find_elements_by_tag_name('td')

        # verify the selected cell
        selected = tds[6]
        self.assertEqual(selected.get_attribute('class'), 'selected')
        self.assertEqual(selected.text, '1')

    def test_calendar_no_selected_class(self):
        """
        Ensure no cells are given the selected class when the field is empty.
        Refs #4574.
        """
        self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has date and time picker widgets
        self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))

        # Click the calendar icon
        self.selenium.find_element_by_id('calendarlink0').click()

        # get all the tds within the calendar
        calendar0 = self.selenium.find_element_by_id('calendarin0')
        tds = calendar0.find_elements_by_tag_name('td')

        # verify there are no cells with the selected class
        selected = [td for td in tds if td.get_attribute('class') == 'selected']
        self.assertEqual(len(selected), 0)

    def test_calendar_show_date_from_input(self):
        """
        The calendar shows the date from the input field for every locale
        supported by Django.
""" self.selenium.set_window_size(1024, 768) self.admin_login(username='super', password='secret', login_url='/') # Enter test data member = Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M') # Get month name translations for every locale month_string = 'May' path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale') for language_code, language_name in settings.LANGUAGES: try: catalog = gettext.translation('djangojs', path, [language_code]) except OSError: continue if month_string in catalog._catalog: month_name = catalog._catalog[month_string] else: month_name = month_string # Get the expected caption may_translation = month_name expected_caption = '{:s} {:d}'.format(may_translation.upper(), 1984) # Test with every locale with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True): # Open a page that has a date picker widget url = reverse('admin:admin_widgets_member_change', args=(member.pk,)) self.selenium.get(self.live_server_url + url) # Click on the calendar icon self.selenium.find_element_by_id('calendarlink0').click() # Make sure that the right month and year are displayed self.wait_for_text('#calendarin0 caption', expected_caption) @override_settings(TIME_ZONE='Asia/Singapore') class DateTimePickerShortcutsSeleniumTests(AdminWidgetSeleniumTestCase): def test_date_time_picker_shortcuts(self): """ date/time/datetime picker shortcuts work in the current time zone. Refs #20663. This test case is fairly tricky, it relies on selenium still running the browser in the default time zone "America/Chicago" despite `override_settings` changing the time zone to "Asia/Singapore". """ self.admin_login(username='super', password='secret', login_url='/') error_margin = timedelta(seconds=10) # If we are neighbouring a DST, we add an hour of error margin. tz = pytz.timezone('America/Chicago') utc_now = datetime.now(pytz.utc) tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname() tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname() if tz_yesterday != tz_tomorrow: error_margin += timedelta(hours=1) self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add')) self.selenium.find_element_by_id('id_name').send_keys('test') # Click on the "today" and "now" shortcuts. shortcuts = self.selenium.find_elements_by_css_selector('.field-birthdate .datetimeshortcuts') now = datetime.now() for shortcut in shortcuts: shortcut.find_element_by_tag_name('a').click() # There is a time zone mismatch warning. # Warning: This would effectively fail if the TIME_ZONE defined in the # settings has the same UTC offset as "Asia/Singapore" because the # mismatch warning would be rightfully missing from the page. self.selenium.find_elements_by_css_selector('.field-birthdate .timezonewarning') # Submit the form. with self.wait_page_loaded(): self.selenium.find_element_by_name('_save').click() # Make sure that "now" in javascript is within 10 seconds # from "now" on the server side. member = Member.objects.get(name='test') self.assertGreater(member.birthdate, now - error_margin) self.assertLess(member.birthdate, now + error_margin) # The above tests run with Asia/Singapore which are on the positive side of # UTC. Here we test with a timezone on the negative side. 
@override_settings(TIME_ZONE='US/Eastern')
class DateTimePickerAltTimezoneSeleniumTests(DateTimePickerShortcutsSeleniumTests):
    pass


class HorizontalVerticalFilterSeleniumTests(AdminWidgetSeleniumTestCase):

    def setUp(self):
        super().setUp()
        self.lisa = Student.objects.create(name='Lisa')
        self.john = Student.objects.create(name='John')
        self.bob = Student.objects.create(name='Bob')
        self.peter = Student.objects.create(name='Peter')
        self.jenny = Student.objects.create(name='Jenny')
        self.jason = Student.objects.create(name='Jason')
        self.cliff = Student.objects.create(name='Cliff')
        self.arthur = Student.objects.create(name='Arthur')
        self.school = School.objects.create(name='School of Awesome')

    def assertActiveButtons(self, mode, field_name, choose, remove, choose_all=None, remove_all=None):
        choose_link = '#id_%s_add_link' % field_name
        choose_all_link = '#id_%s_add_all_link' % field_name
        remove_link = '#id_%s_remove_link' % field_name
        remove_all_link = '#id_%s_remove_all_link' % field_name
        self.assertEqual(self.has_css_class(choose_link, 'active'), choose)
        self.assertEqual(self.has_css_class(remove_link, 'active'), remove)
        if mode == 'horizontal':
            self.assertEqual(self.has_css_class(choose_all_link, 'active'), choose_all)
            self.assertEqual(self.has_css_class(remove_all_link, 'active'), remove_all)

    def execute_basic_operations(self, mode, field_name):
        original_url = self.selenium.current_url

        from_box = '#id_%s_from' % field_name
        to_box = '#id_%s_to' % field_name
        choose_link = 'id_%s_add_link' % field_name
        choose_all_link = 'id_%s_add_all_link' % field_name
        remove_link = 'id_%s_remove_link' % field_name
        remove_all_link = 'id_%s_remove_all_link' % field_name

        # Initial positions ---------------------------------------------------
        self.assertSelectOptions(from_box, [
            str(self.arthur.id), str(self.bob.id),
            str(self.cliff.id), str(self.jason.id),
            str(self.jenny.id), str(self.john.id),
        ])
        self.assertSelectOptions(to_box, [str(self.lisa.id), str(self.peter.id)])
        self.assertActiveButtons(mode, field_name, False, False, True, True)

        # Click 'Choose all' --------------------------------------------------
        if mode == 'horizontal':
            self.selenium.find_element_by_id(choose_all_link).click()
        elif mode == 'vertical':
            # There's no 'Choose all' button in vertical mode, so individually
            # select all options and click 'Choose'.
            for option in self.selenium.find_elements_by_css_selector(from_box + ' > option'):
                option.click()
            self.selenium.find_element_by_id(choose_link).click()
        self.assertSelectOptions(from_box, [])
        self.assertSelectOptions(to_box, [
            str(self.lisa.id), str(self.peter.id),
            str(self.arthur.id), str(self.bob.id),
            str(self.cliff.id), str(self.jason.id),
            str(self.jenny.id), str(self.john.id),
        ])
        self.assertActiveButtons(mode, field_name, False, False, False, True)

        # Click 'Remove all' --------------------------------------------------
        if mode == 'horizontal':
            self.selenium.find_element_by_id(remove_all_link).click()
        elif mode == 'vertical':
            # There's no 'Remove all' button in vertical mode, so individually
            # select all options and click 'Remove'.
for option in self.selenium.find_elements_by_css_selector(to_box + ' > option'): option.click() self.selenium.find_element_by_id(remove_link).click() self.assertSelectOptions(from_box, [ str(self.lisa.id), str(self.peter.id), str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ]) self.assertSelectOptions(to_box, []) self.assertActiveButtons(mode, field_name, False, False, True, False) # Choose some options ------------------------------------------------ from_lisa_select_option = self.selenium.find_element_by_css_selector( '{} > option[value="{}"]'.format(from_box, self.lisa.id) ) # Check the title attribute is there for tool tips: ticket #20821 self.assertEqual(from_lisa_select_option.get_attribute('title'), from_lisa_select_option.get_attribute('text')) self.select_option(from_box, str(self.lisa.id)) self.select_option(from_box, str(self.jason.id)) self.select_option(from_box, str(self.bob.id)) self.select_option(from_box, str(self.john.id)) self.assertActiveButtons(mode, field_name, True, False, True, False) self.selenium.find_element_by_id(choose_link).click() self.assertActiveButtons(mode, field_name, False, False, True, True) self.assertSelectOptions(from_box, [ str(self.peter.id), str(self.arthur.id), str(self.cliff.id), str(self.jenny.id), ]) self.assertSelectOptions(to_box, [ str(self.lisa.id), str(self.bob.id), str(self.jason.id), str(self.john.id), ]) # Check the tooltip is still there after moving: ticket #20821 to_lisa_select_option = self.selenium.find_element_by_css_selector( '{} > option[value="{}"]'.format(to_box, self.lisa.id) ) self.assertEqual(to_lisa_select_option.get_attribute('title'), to_lisa_select_option.get_attribute('text')) # Remove some options ------------------------------------------------- self.select_option(to_box, str(self.lisa.id)) self.select_option(to_box, str(self.bob.id)) self.assertActiveButtons(mode, field_name, False, True, True, True) self.selenium.find_element_by_id(remove_link).click() self.assertActiveButtons(mode, field_name, False, False, True, True) self.assertSelectOptions(from_box, [ str(self.peter.id), str(self.arthur.id), str(self.cliff.id), str(self.jenny.id), str(self.lisa.id), str(self.bob.id) ]) self.assertSelectOptions(to_box, [str(self.jason.id), str(self.john.id)]) # Choose some more options -------------------------------------------- self.select_option(from_box, str(self.arthur.id)) self.select_option(from_box, str(self.cliff.id)) self.selenium.find_element_by_id(choose_link).click() self.assertSelectOptions(from_box, [ str(self.peter.id), str(self.jenny.id), str(self.lisa.id), str(self.bob.id), ]) self.assertSelectOptions(to_box, [ str(self.jason.id), str(self.john.id), str(self.arthur.id), str(self.cliff.id), ]) # Choose some more options -------------------------------------------- self.select_option(from_box, str(self.peter.id)) self.select_option(from_box, str(self.lisa.id)) # Confirm they're selected after clicking inactive buttons: ticket #26575 self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)]) self.selenium.find_element_by_id(remove_link).click() self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)]) # Unselect the options ------------------------------------------------ self.deselect_option(from_box, str(self.peter.id)) self.deselect_option(from_box, str(self.lisa.id)) # Choose some more options -------------------------------------------- self.select_option(to_box, str(self.jason.id)) 
self.select_option(to_box, str(self.john.id)) # Confirm they're selected after clicking inactive buttons: ticket #26575 self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)]) self.selenium.find_element_by_id(choose_link).click() self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)]) # Unselect the options ------------------------------------------------ self.deselect_option(to_box, str(self.jason.id)) self.deselect_option(to_box, str(self.john.id)) # Pressing buttons shouldn't change the URL. self.assertEqual(self.selenium.current_url, original_url) def test_basic(self): self.selenium.set_window_size(1024, 768) self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username='super', password='secret', login_url='/') self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,))) self.wait_page_ready() self.execute_basic_operations('vertical', 'students') self.execute_basic_operations('horizontal', 'alumni') # Save and check that everything is properly stored in the database --- self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.wait_page_ready() self.school = School.objects.get(id=self.school.id) # Reload from database self.assertEqual(list(self.school.students.all()), [self.arthur, self.cliff, self.jason, self.john]) self.assertEqual(list(self.school.alumni.all()), [self.arthur, self.cliff, self.jason, self.john]) def test_filter(self): """ Typing in the search box filters out options displayed in the 'from' box. """ from selenium.webdriver.common.keys import Keys self.selenium.set_window_size(1024, 768) self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username='super', password='secret', login_url='/') self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,))) for field_name in ['students', 'alumni']: from_box = '#id_%s_from' % field_name to_box = '#id_%s_to' % field_name choose_link = 'id_%s_add_link' % field_name remove_link = 'id_%s_remove_link' % field_name input = self.selenium.find_element_by_id('id_%s_input' % field_name) # Initial values self.assertSelectOptions(from_box, [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ]) # Typing in some characters filters out non-matching options input.send_keys('a') self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) input.send_keys('R') self.assertSelectOptions(from_box, [str(self.arthur.id)]) # Clearing the text box makes the other options reappear input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ]) # ----------------------------------------------------------------- # Choosing a filtered option sends it properly to the 'to' box. 
input.send_keys('a') self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) self.select_option(from_box, str(self.jason.id)) self.selenium.find_element_by_id(choose_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id)]) self.assertSelectOptions(to_box, [ str(self.lisa.id), str(self.peter.id), str(self.jason.id), ]) self.select_option(to_box, str(self.lisa.id)) self.selenium.find_element_by_id(remove_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.lisa.id)]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE]) # Clear text box self.assertSelectOptions(from_box, [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jenny.id), str(self.john.id), str(self.lisa.id), ]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) # ----------------------------------------------------------------- # Pressing enter on a filtered option sends it properly to # the 'to' box. self.select_option(to_box, str(self.jason.id)) self.selenium.find_element_by_id(remove_link).click() input.send_keys('ja') self.assertSelectOptions(from_box, [str(self.jason.id)]) input.send_keys([Keys.ENTER]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE]) # Save and check that everything is properly stored in the database --- with self.wait_page_loaded(): self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.school = School.objects.get(id=self.school.id) # Reload from database self.assertEqual(list(self.school.students.all()), [self.jason, self.peter]) self.assertEqual(list(self.school.alumni.all()), [self.jason, self.peter]) def test_back_button_bug(self): """ Some browsers had a bug where navigating away from the change page and then clicking the browser's back button would clear the filter_horizontal/filter_vertical widgets (#13614). """ self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username='super', password='secret', login_url='/') change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,)) self.selenium.get(self.live_server_url + change_url) # Navigate away and go back to the change form page. self.selenium.find_element_by_link_text('Home').click() self.selenium.back() expected_unselected_values = [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ] expected_selected_values = [str(self.lisa.id), str(self.peter.id)] # Everything is still in place self.assertSelectOptions('#id_students_from', expected_unselected_values) self.assertSelectOptions('#id_students_to', expected_selected_values) self.assertSelectOptions('#id_alumni_from', expected_unselected_values) self.assertSelectOptions('#id_alumni_to', expected_selected_values) def test_refresh_page(self): """ Horizontal and vertical filter widgets keep selected options on page reload (#22955). 
""" self.school.students.add(self.arthur, self.jason) self.school.alumni.add(self.arthur, self.jason) self.admin_login(username='super', password='secret', login_url='/') change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,)) self.selenium.get(self.live_server_url + change_url) options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option')) self.assertEqual(options_len, 2) # self.selenium.refresh() or send_keys(Keys.F5) does hard reload and # doesn't replicate what happens when a user clicks the browser's # 'Refresh' button. with self.wait_page_loaded(): self.selenium.execute_script("location.reload()") options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option')) self.assertEqual(options_len, 2) class AdminRawIdWidgetSeleniumTests(AdminWidgetSeleniumTestCase): def setUp(self): super().setUp() Band.objects.create(id=42, name='Bogey Blues') Band.objects.create(id=98, name='Green Potatoes') def test_ForeignKey(self): self.admin_login(username='super', password='secret', login_url='/') self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_event_add')) main_window = self.selenium.current_window_handle # No value has been selected yet self.assertEqual(self.selenium.find_element_by_id('id_main_band').get_attribute('value'), '') # Open the popup window and click on a band self.selenium.find_element_by_id('lookup_id_main_band').click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element_by_link_text('Bogey Blues') self.assertIn('/band/42/', link.get_attribute('href')) link.click() # The field now contains the selected band's id self.selenium.switch_to.window(main_window) self.wait_for_value('#id_main_band', '42') # Reopen the popup window and click on another band self.selenium.find_element_by_id('lookup_id_main_band').click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element_by_link_text('Green Potatoes') self.assertIn('/band/98/', link.get_attribute('href')) link.click() # The field now contains the other selected band's id self.selenium.switch_to.window(main_window) self.wait_for_value('#id_main_band', '98') def test_many_to_many(self): self.admin_login(username='super', password='secret', login_url='/') self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_event_add')) main_window = self.selenium.current_window_handle # No value has been selected yet self.assertEqual(self.selenium.find_element_by_id('id_supporting_bands').get_attribute('value'), '') # Help text for the field is displayed self.assertEqual( self.selenium.find_element_by_css_selector('.field-supporting_bands div.help').text, 'Supporting Bands.' 
) # Open the popup window and click on a band self.selenium.find_element_by_id('lookup_id_supporting_bands').click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element_by_link_text('Bogey Blues') self.assertIn('/band/42/', link.get_attribute('href')) link.click() # The field now contains the selected band's id self.selenium.switch_to.window(main_window) self.wait_for_value('#id_supporting_bands', '42') # Reopen the popup window and click on another band self.selenium.find_element_by_id('lookup_id_supporting_bands').click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element_by_link_text('Green Potatoes') self.assertIn('/band/98/', link.get_attribute('href')) link.click() # The field now contains the two selected bands' ids self.selenium.switch_to.window(main_window) self.wait_for_value('#id_supporting_bands', '42,98') class RelatedFieldWidgetSeleniumTests(AdminWidgetSeleniumTestCase): def test_ForeignKey_using_to_field(self): self.admin_login(username='super', password='secret', login_url='/') self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_profile_add')) main_window = self.selenium.current_window_handle # Click the Add User button to add new self.selenium.find_element_by_id('add_id_user').click() self.wait_for_and_switch_to_popup() password_field = self.selenium.find_element_by_id('id_password') password_field.send_keys('password') username_field = self.selenium.find_element_by_id('id_username') username_value = 'newuser' username_field.send_keys(username_value) save_button_css_selector = '.submit-row > input[type=submit]' self.selenium.find_element_by_css_selector(save_button_css_selector).click() self.selenium.switch_to.window(main_window) # The field now contains the new user self.selenium.find_element_by_css_selector('#id_user option[value=newuser]') # Click the Change User button to change it self.selenium.find_element_by_id('change_id_user').click() self.wait_for_and_switch_to_popup() username_field = self.selenium.find_element_by_id('id_username') username_value = 'changednewuser' username_field.clear() username_field.send_keys(username_value) save_button_css_selector = '.submit-row > input[type=submit]' self.selenium.find_element_by_css_selector(save_button_css_selector).click() self.selenium.switch_to.window(main_window) self.selenium.find_element_by_css_selector('#id_user option[value=changednewuser]') # Go ahead and submit the form to make sure it works self.selenium.find_element_by_css_selector(save_button_css_selector).click() self.wait_for_text('li.success', 'The profile “changednewuser” was added successfully.') profiles = Profile.objects.all() self.assertEqual(len(profiles), 1) self.assertEqual(profiles[0].user.username, username_value)
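

# The widget classes exercised above are usually not instantiated directly;
# in real projects they are switched on declaratively through ModelAdmin
# options. A minimal sketch follows (illustrative only: the admin classes are
# hypothetical and intentionally left unregistered so they don't affect the
# test suite).
from django.contrib import admin


class ExampleAlbumAdmin(admin.ModelAdmin):
    # raw_id_fields renders 'band' with ForeignKeyRawIdWidget: a text input
    # plus the magnifying-glass lookup link asserted in
    # ForeignKeyRawIdWidgetTest above.
    raw_id_fields = ['band']


class ExampleSchoolAdmin(admin.ModelAdmin):
    # filter_horizontal/filter_vertical swap the plain multi-select for the
    # FilteredSelectMultiple JavaScript widget driven by
    # HorizontalVerticalFilterSeleniumTests above.
    filter_horizontal = ['alumni']
    filter_vertical = ['students']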
01a7529393004f3fdd5d06b88f845890d5f4fbf6e9a78f4e8d7d40a6664136b1
import uuid

from django.contrib.auth.models import User
from django.db import models


class MyFileField(models.FileField):
    pass


class Member(models.Model):
    name = models.CharField(max_length=100)
    birthdate = models.DateTimeField(blank=True, null=True)
    gender = models.CharField(max_length=1, blank=True, choices=[('M', 'Male'), ('F', 'Female')])
    email = models.EmailField(blank=True)

    def __str__(self):
        return self.name


class Band(models.Model):
    name = models.CharField(max_length=100)
    style = models.CharField(max_length=20)
    members = models.ManyToManyField(Member)

    def __str__(self):
        return self.name


class UnsafeLimitChoicesTo(models.Model):
    band = models.ForeignKey(
        Band,
        models.CASCADE,
        limit_choices_to={'name': '"&><escapeme'},
    )


class Album(models.Model):
    band = models.ForeignKey(Band, models.CASCADE)
    featuring = models.ManyToManyField(Band, related_name='featured')
    name = models.CharField(max_length=100)
    cover_art = models.FileField(upload_to='albums')
    backside_art = MyFileField(upload_to='albums_back', null=True)

    def __str__(self):
        return self.name


class HiddenInventoryManager(models.Manager):
    def get_queryset(self):
        return super().get_queryset().filter(hidden=False)


class Inventory(models.Model):
    barcode = models.PositiveIntegerField(unique=True)
    parent = models.ForeignKey('self', models.SET_NULL, to_field='barcode', blank=True, null=True)
    name = models.CharField(blank=False, max_length=20)
    hidden = models.BooleanField(default=False)

    # see #9258
    default_manager = models.Manager()
    objects = HiddenInventoryManager()

    def __str__(self):
        return self.name


class Event(models.Model):
    main_band = models.ForeignKey(
        Band,
        models.CASCADE,
        limit_choices_to=models.Q(pk__gt=0),
        related_name='events_main_band_at',
    )
    supporting_bands = models.ManyToManyField(
        Band,
        blank=True,
        related_name='events_supporting_band_at',
        help_text='Supporting Bands.',
    )
    start_date = models.DateField(blank=True, null=True)
    start_time = models.TimeField(blank=True, null=True)
    description = models.TextField(blank=True)
    link = models.URLField(blank=True)
    min_age = models.IntegerField(blank=True, null=True)


class Car(models.Model):
    owner = models.ForeignKey(User, models.CASCADE)
    make = models.CharField(max_length=30)
    model = models.CharField(max_length=30)

    def __str__(self):
        return "%s %s" % (self.make, self.model)


class CarTire(models.Model):
    """
    A single car tire. This is to test that a user can only select their own
    cars.
    """
    car = models.ForeignKey(Car, models.CASCADE)


class Honeycomb(models.Model):
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    location = models.CharField(max_length=20)


class Bee(models.Model):
    """
    A model with a FK to a model that won't be registered with the admin
    (Honeycomb) so the corresponding raw ID widget won't have a magnifying
    glass link to select related honeycomb instances.
    """
    honeycomb = models.ForeignKey(Honeycomb, models.CASCADE)


class Individual(models.Model):
    """
    A model with a FK to itself. It won't be registered with the admin, so the
    corresponding raw ID widget won't have a magnifying glass link to select
    related instances (rendering will be called programmatically in this
    case).
""" name = models.CharField(max_length=20) parent = models.ForeignKey('self', models.SET_NULL, null=True) soulmate = models.ForeignKey('self', models.CASCADE, null=True, related_name='soulmates') class Company(models.Model): name = models.CharField(max_length=20) class Advisor(models.Model): """ A model with a m2m to a model that won't be registered with the admin (Company) so the corresponding raw ID widget won't have a magnifying glass link to select related company instances. """ name = models.CharField(max_length=20) companies = models.ManyToManyField(Company) class Student(models.Model): name = models.CharField(max_length=255) class Meta: ordering = ('name',) def __str__(self): return self.name class School(models.Model): name = models.CharField(max_length=255) students = models.ManyToManyField(Student, related_name='current_schools') alumni = models.ManyToManyField(Student, related_name='previous_schools') def __str__(self): return self.name class Profile(models.Model): user = models.ForeignKey('auth.User', models.CASCADE, to_field='username') def __str__(self): return self.user.username
b7f5bdb9d3cfd6332871d33a7eda5f0eb3acad1d6af032b479fc831445d1e5df
""" Sphinx plugins for Django documentation. """ import json import os import re from docutils import nodes from docutils.parsers.rst import Directive from docutils.statemachine import ViewList from sphinx import addnodes from sphinx.builders.html import StandaloneHTMLBuilder from sphinx.directives.code import CodeBlock from sphinx.domains.std import Cmdoption from sphinx.errors import ExtensionError from sphinx.util import logging from sphinx.util.console import bold from sphinx.writers.html import HTMLTranslator logger = logging.getLogger(__name__) # RE for option descriptions without a '--' prefix simple_option_desc_re = re.compile( r'([-_a-zA-Z0-9]+)(\s*.*?)(?=,\s+(?:/|-|--)|$)') def setup(app): app.add_crossref_type( directivename="setting", rolename="setting", indextemplate="pair: %s; setting", ) app.add_crossref_type( directivename="templatetag", rolename="ttag", indextemplate="pair: %s; template tag" ) app.add_crossref_type( directivename="templatefilter", rolename="tfilter", indextemplate="pair: %s; template filter" ) app.add_crossref_type( directivename="fieldlookup", rolename="lookup", indextemplate="pair: %s; field lookup type", ) app.add_object_type( directivename="django-admin", rolename="djadmin", indextemplate="pair: %s; django-admin command", parse_node=parse_django_admin_node, ) app.add_directive('django-admin-option', Cmdoption) app.add_config_value('django_next_version', '0.0', True) app.add_directive('versionadded', VersionDirective) app.add_directive('versionchanged', VersionDirective) app.add_builder(DjangoStandaloneHTMLBuilder) app.set_translator('djangohtml', DjangoHTMLTranslator) app.set_translator('json', DjangoHTMLTranslator) app.add_node( ConsoleNode, html=(visit_console_html, None), latex=(visit_console_dummy, depart_console_dummy), man=(visit_console_dummy, depart_console_dummy), text=(visit_console_dummy, depart_console_dummy), texinfo=(visit_console_dummy, depart_console_dummy), ) app.add_directive('console', ConsoleDirective) app.connect('html-page-context', html_page_context_hook) app.add_role('default-role-error', default_role_error) return {'parallel_read_safe': True} class VersionDirective(Directive): has_content = True required_arguments = 1 optional_arguments = 1 final_argument_whitespace = True option_spec = {} def run(self): if len(self.arguments) > 1: msg = """Only one argument accepted for directive '{directive_name}::'. Comments should be provided as content, not as an extra argument.""".format(directive_name=self.name) raise self.error(msg) env = self.state.document.settings.env ret = [] node = addnodes.versionmodified() ret.append(node) if self.arguments[0] == env.config.django_next_version: node['version'] = "Development version" else: node['version'] = self.arguments[0] node['type'] = self.name if self.content: self.state.nested_parse(self.content, self.content_offset, node) try: env.get_domain('changeset').note_changeset(node) except ExtensionError: # Sphinx < 1.8: Domain 'changeset' is not registered env.note_versionchange(node['type'], node['version'], node, self.lineno) return ret class DjangoHTMLTranslator(HTMLTranslator): """ Django-specific reST to HTML tweaks. """ # Don't use border=1, which docutils does by default. 
    def visit_table(self, node):
        self.context.append(self.compact_p)
        self.compact_p = True
        self._table_row_index = 0  # Needed by Sphinx
        self.body.append(self.starttag(node, 'table', CLASS='docutils'))

    def depart_table(self, node):
        self.compact_p = self.context.pop()
        self.body.append('</table>\n')

    def visit_desc_parameterlist(self, node):
        self.body.append('(')  # by default sphinx puts <big> around the "("
        self.first_param = 1
        self.optional_param_level = 0
        self.param_separator = node.child_text_separator
        self.required_params_left = sum(isinstance(c, addnodes.desc_parameter) for c in node.children)

    def depart_desc_parameterlist(self, node):
        self.body.append(')')

    #
    # Turn the "new in version" stuff (versionadded/versionchanged) into a
    # better callout -- the Sphinx default is just a little span,
    # which is a bit less obvious than I'd like.
    #
    # FIXME: these messages are all hardcoded in English. We need to change
    # that to accommodate other language docs, but I can't work out how to make
    # that work.
    #
    version_text = {
        'versionchanged': 'Changed in Django %s',
        'versionadded': 'New in Django %s',
    }

    def visit_versionmodified(self, node):
        self.body.append(
            self.starttag(node, 'div', CLASS=node['type'])
        )
        version_text = self.version_text.get(node['type'])
        if version_text:
            title = "%s%s" % (
                version_text % node['version'],
                ":" if len(node) else "."
            )
            self.body.append('<span class="title">%s</span> ' % title)

    def depart_versionmodified(self, node):
        self.body.append("</div>\n")

    # Give each section a unique ID -- nice for custom CSS hooks
    def visit_section(self, node):
        old_ids = node.get('ids', [])
        node['ids'] = ['s-' + i for i in old_ids]
        node['ids'].extend(old_ids)
        super().visit_section(node)
        node['ids'] = old_ids


def parse_django_admin_node(env, sig, signode):
    command = sig.split(' ')[0]
    env.ref_context['std:program'] = command
    title = "django-admin %s" % sig
    signode += addnodes.desc_name(title, title)
    return command


class DjangoStandaloneHTMLBuilder(StandaloneHTMLBuilder):
    """
    Subclass to add some extra things we need.
    """
    name = 'djangohtml'

    def finish(self):
        super().finish()
        logger.info(bold("writing templatebuiltins.js..."))
        xrefs = self.env.domaindata["std"]["objects"]
        templatebuiltins = {
            "ttags": [
                n for ((t, n), (k, a)) in xrefs.items()
                if t == "templatetag" and k == "ref/templates/builtins"
            ],
            "tfilters": [
                n for ((t, n), (k, a)) in xrefs.items()
                if t == "templatefilter" and k == "ref/templates/builtins"
            ],
        }
        outfilename = os.path.join(self.outdir, "templatebuiltins.js")
        with open(outfilename, 'w') as fp:
            fp.write('var django_template_builtins = ')
            json.dump(templatebuiltins, fp)
            fp.write(';\n')


class ConsoleNode(nodes.literal_block):
    """
    Custom node to override the visit/depart event handlers at registration
    time. Wrap a literal_block object and defer to it.
    """
    tagname = 'ConsoleNode'

    def __init__(self, litblk_obj):
        self.wrapped = litblk_obj

    def __getattr__(self, attr):
        if attr == 'wrapped':
            return self.__dict__['wrapped']
        return getattr(self.wrapped, attr)


def visit_console_dummy(self, node):
    """Defer to the corresponding parent's handler."""
    self.visit_literal_block(node)


def depart_console_dummy(self, node):
    """Defer to the corresponding parent's handler."""
    self.depart_literal_block(node)


def visit_console_html(self, node):
    """Generate HTML for the console directive."""
    if self.builder.name in ('djangohtml', 'json') and node['win_console_text']:
        # Put a mark on the document object signaling the fact the directive
        # has been used on it.
self.document._console_directive_used_flag = True uid = node['uid'] self.body.append('''\ <div class="console-block" id="console-block-%(id)s"> <input class="c-tab-unix" id="c-tab-%(id)s-unix" type="radio" name="console-%(id)s" checked> <label for="c-tab-%(id)s-unix" title="Linux/macOS">&#xf17c/&#xf179</label> <input class="c-tab-win" id="c-tab-%(id)s-win" type="radio" name="console-%(id)s"> <label for="c-tab-%(id)s-win" title="Windows">&#xf17a</label> <section class="c-content-unix" id="c-content-%(id)s-unix">\n''' % {'id': uid}) try: self.visit_literal_block(node) except nodes.SkipNode: pass self.body.append('</section>\n') self.body.append('<section class="c-content-win" id="c-content-%(id)s-win">\n' % {'id': uid}) win_text = node['win_console_text'] highlight_args = {'force': True} linenos = node.get('linenos', False) def warner(msg): self.builder.warn(msg, (self.builder.current_docname, node.line)) highlighted = self.highlighter.highlight_block( win_text, 'doscon', warn=warner, linenos=linenos, **highlight_args ) self.body.append(highlighted) self.body.append('</section>\n') self.body.append('</div>\n') raise nodes.SkipNode else: self.visit_literal_block(node) class ConsoleDirective(CodeBlock): """ A reStructuredText directive which renders a two-tab code block in which the second tab shows a Windows command line equivalent of the usual Unix-oriented examples. """ required_arguments = 0 # The 'doscon' Pygments formatter needs a prompt like this. '>' alone # won't do it because then it simply paints the whole command line as a # grey comment with no highlighting at all. WIN_PROMPT = r'...\> ' def run(self): def args_to_win(cmdline): changed = False out = [] for token in cmdline.split(): if token[:2] == './': token = token[2:] changed = True elif token[:2] == '~/': token = '%HOMEPATH%\\' + token[2:] changed = True elif token == 'make': token = 'make.bat' changed = True if '://' not in token and 'git' not in cmdline: out.append(token.replace('/', '\\')) changed = True else: out.append(token) if changed: return ' '.join(out) return cmdline def cmdline_to_win(line): if line.startswith('# '): return 'REM ' + args_to_win(line[2:]) if line.startswith('$ # '): return 'REM ' + args_to_win(line[4:]) if line.startswith('$ ./manage.py'): return 'manage.py ' + args_to_win(line[13:]) if line.startswith('$ manage.py'): return 'manage.py ' + args_to_win(line[11:]) if line.startswith('$ ./runtests.py'): return 'runtests.py ' + args_to_win(line[15:]) if line.startswith('$ ./'): return args_to_win(line[4:]) if line.startswith('$ python3'): return 'py ' + args_to_win(line[9:]) if line.startswith('$ python'): return 'py ' + args_to_win(line[8:]) if line.startswith('$ '): return args_to_win(line[2:]) return None def code_block_to_win(content): bchanged = False lines = [] for line in content: modline = cmdline_to_win(line) if modline is None: lines.append(line) else: lines.append(self.WIN_PROMPT + modline) bchanged = True if bchanged: return ViewList(lines) return None env = self.state.document.settings.env self.arguments = ['console'] lit_blk_obj = super().run()[0] # Only do work when the djangohtml HTML Sphinx builder is being used, # invoke the default behavior for the rest. if env.app.builder.name not in ('djangohtml', 'json'): return [lit_blk_obj] lit_blk_obj['uid'] = str(env.new_serialno('console')) # Only add the tabbed UI if there is actually a Windows-specific # version of the CLI example. 
        win_content = code_block_to_win(self.content)
        if win_content is None:
            lit_blk_obj['win_console_text'] = None
        else:
            self.content = win_content
            lit_blk_obj['win_console_text'] = super().run()[0].rawsource

        # Replace the literal_node object returned by Sphinx's CodeBlock with
        # the ConsoleNode wrapper.
        return [ConsoleNode(lit_blk_obj)]


def html_page_context_hook(app, pagename, templatename, context, doctree):
    # Put a bool on the context used to render the template. It's used to
    # control inclusion of console-tabs.css and activation of the JavaScript.
    # This way it's included only from HTML files rendered from reST files
    # where the ConsoleDirective is used.
    context['include_console_assets'] = getattr(doctree, '_console_directive_used_flag', False)


def default_role_error(
    name, rawtext, text, lineno, inliner, options=None, content=None
):
    msg = (
        "Default role used (`single backticks`): %s. Did you mean to use two "
        "backticks for ``code``, or miss an underscore for a `link`_ ?" % rawtext
    )
    logger.warning(msg, location=(inliner.document.current_source, lineno))
    return [nodes.Text(text)], []
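

# An illustrative sketch of what ConsoleDirective produces (the reST below is
# hypothetical documentation input, not part of this module). Given a source
# block such as:
#
#   .. console::
#
#       $ python -m pip install Django
#
# cmdline_to_win() recognizes the '$ python' prefix, so the djangohtml/json
# builders emit a second, Windows-flavored tab roughly equivalent to:
#
#   ...\> py -m pip install Django
#
# Lines matching none of the recognized prefixes (e.g. command output) make
# cmdline_to_win() return None and are copied through unchanged, so mixed
# command/output transcripts survive the translation.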
99bcbf4e236808da253072e8057cc900e57ccc1da66bee78c765eebc7fb7bece
""" Helper functions for creating Form classes from Django models and database field objects. """ import warnings from itertools import chain from django.core.exceptions import ( NON_FIELD_ERRORS, FieldError, ImproperlyConfigured, ValidationError, ) from django.forms.fields import ChoiceField, Field from django.forms.forms import BaseForm, DeclarativeFieldsMetaclass from django.forms.formsets import BaseFormSet, formset_factory from django.forms.utils import ErrorList from django.forms.widgets import ( HiddenInput, MultipleHiddenInput, RadioSelect, SelectMultiple, ) from django.utils.deprecation import RemovedInDjango40Warning from django.utils.text import capfirst, get_text_list from django.utils.translation import gettext, gettext_lazy as _ __all__ = ( 'ModelForm', 'BaseModelForm', 'model_to_dict', 'fields_for_model', 'ModelChoiceField', 'ModelMultipleChoiceField', 'ALL_FIELDS', 'BaseModelFormSet', 'modelformset_factory', 'BaseInlineFormSet', 'inlineformset_factory', 'modelform_factory', ) ALL_FIELDS = '__all__' def construct_instance(form, instance, fields=None, exclude=None): """ Construct and return a model instance from the bound ``form``'s ``cleaned_data``, but do not save the returned instance to the database. """ from django.db import models opts = instance._meta cleaned_data = form.cleaned_data file_field_list = [] for f in opts.fields: if not f.editable or isinstance(f, models.AutoField) \ or f.name not in cleaned_data: continue if fields is not None and f.name not in fields: continue if exclude and f.name in exclude: continue # Leave defaults for fields that aren't in POST data, except for # checkbox inputs because they don't appear in POST data if not checked. if ( f.has_default() and form[f.name].field.widget.value_omitted_from_data(form.data, form.files, form.add_prefix(f.name)) and cleaned_data.get(f.name) in form[f.name].field.empty_values ): continue # Defer saving file-type fields until after the other fields, so a # callable upload_to can use the values from other fields. if isinstance(f, models.FileField): file_field_list.append(f) else: f.save_form_data(instance, cleaned_data[f.name]) for f in file_field_list: f.save_form_data(instance, cleaned_data[f.name]) return instance # ModelForms ################################################################# def model_to_dict(instance, fields=None, exclude=None): """ Return a dict containing the data in ``instance`` suitable for passing as a Form's ``initial`` keyword argument. ``fields`` is an optional list of field names. If provided, return only the named. ``exclude`` is an optional list of field names. If provided, exclude the named from the returned dict, even if they are listed in the ``fields`` argument. 
""" opts = instance._meta data = {} for f in chain(opts.concrete_fields, opts.private_fields, opts.many_to_many): if not getattr(f, 'editable', False): continue if fields is not None and f.name not in fields: continue if exclude and f.name in exclude: continue data[f.name] = f.value_from_object(instance) return data def apply_limit_choices_to_to_formfield(formfield): """Apply limit_choices_to to the formfield's queryset if needed.""" if hasattr(formfield, 'queryset') and hasattr(formfield, 'get_limit_choices_to'): limit_choices_to = formfield.get_limit_choices_to() if limit_choices_to is not None: formfield.queryset = formfield.queryset.complex_filter(limit_choices_to) def fields_for_model(model, fields=None, exclude=None, widgets=None, formfield_callback=None, localized_fields=None, labels=None, help_texts=None, error_messages=None, field_classes=None, *, apply_limit_choices_to=True): """ Return a dictionary containing form fields for the given model. ``fields`` is an optional list of field names. If provided, return only the named fields. ``exclude`` is an optional list of field names. If provided, exclude the named fields from the returned fields, even if they are listed in the ``fields`` argument. ``widgets`` is a dictionary of model field names mapped to a widget. ``formfield_callback`` is a callable that takes a model field and returns a form field. ``localized_fields`` is a list of names of fields which should be localized. ``labels`` is a dictionary of model field names mapped to a label. ``help_texts`` is a dictionary of model field names mapped to a help text. ``error_messages`` is a dictionary of model field names mapped to a dictionary of error messages. ``field_classes`` is a dictionary of model field names mapped to a form field class. ``apply_limit_choices_to`` is a boolean indicating if limit_choices_to should be applied to a field's queryset. 
""" field_dict = {} ignored = [] opts = model._meta # Avoid circular import from django.db.models import Field as ModelField sortable_private_fields = [f for f in opts.private_fields if isinstance(f, ModelField)] for f in sorted(chain(opts.concrete_fields, sortable_private_fields, opts.many_to_many)): if not getattr(f, 'editable', False): if (fields is not None and f.name in fields and (exclude is None or f.name not in exclude)): raise FieldError( "'%s' cannot be specified for %s model form as it is a non-editable field" % ( f.name, model.__name__) ) continue if fields is not None and f.name not in fields: continue if exclude and f.name in exclude: continue kwargs = {} if widgets and f.name in widgets: kwargs['widget'] = widgets[f.name] if localized_fields == ALL_FIELDS or (localized_fields and f.name in localized_fields): kwargs['localize'] = True if labels and f.name in labels: kwargs['label'] = labels[f.name] if help_texts and f.name in help_texts: kwargs['help_text'] = help_texts[f.name] if error_messages and f.name in error_messages: kwargs['error_messages'] = error_messages[f.name] if field_classes and f.name in field_classes: kwargs['form_class'] = field_classes[f.name] if formfield_callback is None: formfield = f.formfield(**kwargs) elif not callable(formfield_callback): raise TypeError('formfield_callback must be a function or callable') else: formfield = formfield_callback(f, **kwargs) if formfield: if apply_limit_choices_to: apply_limit_choices_to_to_formfield(formfield) field_dict[f.name] = formfield else: ignored.append(f.name) if fields: field_dict = { f: field_dict.get(f) for f in fields if (not exclude or f not in exclude) and f not in ignored } return field_dict class ModelFormOptions: def __init__(self, options=None): self.model = getattr(options, 'model', None) self.fields = getattr(options, 'fields', None) self.exclude = getattr(options, 'exclude', None) self.widgets = getattr(options, 'widgets', None) self.localized_fields = getattr(options, 'localized_fields', None) self.labels = getattr(options, 'labels', None) self.help_texts = getattr(options, 'help_texts', None) self.error_messages = getattr(options, 'error_messages', None) self.field_classes = getattr(options, 'field_classes', None) class ModelFormMetaclass(DeclarativeFieldsMetaclass): def __new__(mcs, name, bases, attrs): base_formfield_callback = None for b in bases: if hasattr(b, 'Meta') and hasattr(b.Meta, 'formfield_callback'): base_formfield_callback = b.Meta.formfield_callback break formfield_callback = attrs.pop('formfield_callback', base_formfield_callback) new_class = super().__new__(mcs, name, bases, attrs) if bases == (BaseModelForm,): return new_class opts = new_class._meta = ModelFormOptions(getattr(new_class, 'Meta', None)) # We check if a string was passed to `fields` or `exclude`, # which is likely to be a mistake where the user typed ('foo') instead # of ('foo',) for opt in ['fields', 'exclude', 'localized_fields']: value = getattr(opts, opt) if isinstance(value, str) and value != ALL_FIELDS: msg = ("%(model)s.Meta.%(opt)s cannot be a string. " "Did you mean to type: ('%(value)s',)?" % { 'model': new_class.__name__, 'opt': opt, 'value': value, }) raise TypeError(msg) if opts.model: # If a model is defined, extract form fields from it. if opts.fields is None and opts.exclude is None: raise ImproperlyConfigured( "Creating a ModelForm without either the 'fields' attribute " "or the 'exclude' attribute is prohibited; form %s " "needs updating." 
% name ) if opts.fields == ALL_FIELDS: # Sentinel for fields_for_model to indicate "get the list of # fields from the model" opts.fields = None fields = fields_for_model( opts.model, opts.fields, opts.exclude, opts.widgets, formfield_callback, opts.localized_fields, opts.labels, opts.help_texts, opts.error_messages, opts.field_classes, # limit_choices_to will be applied during ModelForm.__init__(). apply_limit_choices_to=False, ) # make sure opts.fields doesn't specify an invalid field none_model_fields = {k for k, v in fields.items() if not v} missing_fields = none_model_fields.difference(new_class.declared_fields) if missing_fields: message = 'Unknown field(s) (%s) specified for %s' message = message % (', '.join(missing_fields), opts.model.__name__) raise FieldError(message) # Override default model fields with any custom declared ones # (plus, include all the other declared fields). fields.update(new_class.declared_fields) else: fields = new_class.declared_fields new_class.base_fields = fields return new_class class BaseModelForm(BaseForm): def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None, initial=None, error_class=ErrorList, label_suffix=None, empty_permitted=False, instance=None, use_required_attribute=None, renderer=None): opts = self._meta if opts.model is None: raise ValueError('ModelForm has no model class specified.') if instance is None: # if we didn't get an instance, instantiate a new one self.instance = opts.model() object_data = {} else: self.instance = instance object_data = model_to_dict(instance, opts.fields, opts.exclude) # if initial was provided, it should override the values from instance if initial is not None: object_data.update(initial) # self._validate_unique will be set to True by BaseModelForm.clean(). # It is False by default so overriding self.clean() and failing to call # super will stop validate_unique from being called. self._validate_unique = False super().__init__( data, files, auto_id, prefix, object_data, error_class, label_suffix, empty_permitted, use_required_attribute=use_required_attribute, renderer=renderer, ) for formfield in self.fields.values(): apply_limit_choices_to_to_formfield(formfield) def _get_validation_exclusions(self): """ For backwards-compatibility, exclude several types of fields from model validation. See tickets #12507, #12521, #12553. """ exclude = [] # Build up a list of fields that should be excluded from model field # validation and unique checks. for f in self.instance._meta.fields: field = f.name # Exclude fields that aren't on the form. The developer may be # adding these values to the model after form validation. if field not in self.fields: exclude.append(f.name) # Don't perform model validation on fields that were defined # manually on the form and excluded via the ModelForm's Meta # class. See #12901. elif self._meta.fields and field not in self._meta.fields: exclude.append(f.name) elif self._meta.exclude and field in self._meta.exclude: exclude.append(f.name) # Exclude fields that failed form validation. There's no need for # the model fields to validate them as well. elif field in self._errors: exclude.append(f.name) # Exclude empty fields that are not required by the form, if the # underlying model field is required. This keeps the model field # from raising a required error. Note: don't exclude the field from # validation if the model field allows blanks. If it does, the blank # value may be included in a unique check, so cannot be excluded # from validation. 
            else:
                form_field = self.fields[field]
                field_value = self.cleaned_data.get(field)
                if not f.blank and not form_field.required and field_value in form_field.empty_values:
                    exclude.append(f.name)
        return exclude

    def clean(self):
        self._validate_unique = True
        return self.cleaned_data

    def _update_errors(self, errors):
        # Override any validation error messages defined at the model level
        # with those defined at the form level.
        opts = self._meta

        # Allow the model generated by construct_instance() to raise
        # ValidationError and have it handled in the same way as others.
        if hasattr(errors, 'error_dict'):
            error_dict = errors.error_dict
        else:
            error_dict = {NON_FIELD_ERRORS: errors}

        for field, messages in error_dict.items():
            if (field == NON_FIELD_ERRORS and opts.error_messages and
                    NON_FIELD_ERRORS in opts.error_messages):
                error_messages = opts.error_messages[NON_FIELD_ERRORS]
            elif field in self.fields:
                error_messages = self.fields[field].error_messages
            else:
                continue

            for message in messages:
                if (isinstance(message, ValidationError) and
                        message.code in error_messages):
                    message.message = error_messages[message.code]

        self.add_error(None, errors)

    def _post_clean(self):
        opts = self._meta

        exclude = self._get_validation_exclusions()

        # Foreign Keys being used to represent inline relationships
        # are excluded from basic field value validation. This is for two
        # reasons: firstly, the value may not be supplied (#12507; the
        # case of providing new values to the admin); secondly the
        # object being referred to may not yet fully exist (#12749).
        # However, these fields *must* be included in uniqueness checks,
        # so this can't be part of _get_validation_exclusions().
        for name, field in self.fields.items():
            if isinstance(field, InlineForeignKeyField):
                exclude.append(name)

        try:
            self.instance = construct_instance(self, self.instance, opts.fields, opts.exclude)
        except ValidationError as e:
            self._update_errors(e)

        try:
            self.instance.full_clean(exclude=exclude, validate_unique=False)
        except ValidationError as e:
            self._update_errors(e)

        # Validate uniqueness if needed.
        if self._validate_unique:
            self.validate_unique()

    def validate_unique(self):
        """
        Call the instance's validate_unique() method and update the form's
        validation errors if any were raised.
        """
        exclude = self._get_validation_exclusions()
        try:
            self.instance.validate_unique(exclude=exclude)
        except ValidationError as e:
            self._update_errors(e)

    def _save_m2m(self):
        """
        Save the many-to-many fields and generic relations for this form.
        """
        cleaned_data = self.cleaned_data
        exclude = self._meta.exclude
        fields = self._meta.fields
        opts = self.instance._meta
        # Note that for historical reasons we also want to include
        # private_fields here. (GenericRelation was previously a fake
        # m2m field).
        for f in chain(opts.many_to_many, opts.private_fields):
            if not hasattr(f, 'save_form_data'):
                continue
            if fields and f.name not in fields:
                continue
            if exclude and f.name in exclude:
                continue
            if f.name in cleaned_data:
                f.save_form_data(self.instance, cleaned_data[f.name])

    def save(self, commit=True):
        """
        Save this form's self.instance object if commit=True. Otherwise, add
        a save_m2m() method to the form which can be called after the instance
        is saved manually at a later time. Return the model instance.
        """
        if self.errors:
            raise ValueError(
                "The %s could not be %s because the data didn't validate." % (
                    self.instance._meta.object_name,
                    'created' if self.instance._state.adding else 'changed',
                )
            )
        if commit:
            # If committing, save the instance and the m2m data immediately.
self.instance.save() self._save_m2m() else: # If not committing, add a method to the form to allow deferred # saving of m2m data. self.save_m2m = self._save_m2m return self.instance save.alters_data = True class ModelForm(BaseModelForm, metaclass=ModelFormMetaclass): pass def modelform_factory(model, form=ModelForm, fields=None, exclude=None, formfield_callback=None, widgets=None, localized_fields=None, labels=None, help_texts=None, error_messages=None, field_classes=None): """ Return a ModelForm containing form fields for the given model. You can optionally pass a `form` argument to use as a starting point for constructing the ModelForm. ``fields`` is an optional list of field names. If provided, include only the named fields in the returned fields. If omitted or '__all__', use all fields. ``exclude`` is an optional list of field names. If provided, exclude the named fields from the returned fields, even if they are listed in the ``fields`` argument. ``widgets`` is a dictionary of model field names mapped to a widget. ``localized_fields`` is a list of names of fields which should be localized. ``formfield_callback`` is a callable that takes a model field and returns a form field. ``labels`` is a dictionary of model field names mapped to a label. ``help_texts`` is a dictionary of model field names mapped to a help text. ``error_messages`` is a dictionary of model field names mapped to a dictionary of error messages. ``field_classes`` is a dictionary of model field names mapped to a form field class. """ # Create the inner Meta class. FIXME: ideally, we should be able to # construct a ModelForm without creating and passing in a temporary # inner class. # Build up a list of attributes that the Meta object will have. attrs = {'model': model} if fields is not None: attrs['fields'] = fields if exclude is not None: attrs['exclude'] = exclude if widgets is not None: attrs['widgets'] = widgets if localized_fields is not None: attrs['localized_fields'] = localized_fields if labels is not None: attrs['labels'] = labels if help_texts is not None: attrs['help_texts'] = help_texts if error_messages is not None: attrs['error_messages'] = error_messages if field_classes is not None: attrs['field_classes'] = field_classes # If parent form class already has an inner Meta, the Meta we're # creating needs to inherit from the parent's inner meta. bases = (form.Meta,) if hasattr(form, 'Meta') else () Meta = type('Meta', bases, attrs) if formfield_callback: Meta.formfield_callback = staticmethod(formfield_callback) # Give this new form class a reasonable name. class_name = model.__name__ + 'Form' # Class attributes for the new form class. form_class_attrs = { 'Meta': Meta, 'formfield_callback': formfield_callback } if (getattr(Meta, 'fields', None) is None and getattr(Meta, 'exclude', None) is None): raise ImproperlyConfigured( "Calling modelform_factory without defining 'fields' or " "'exclude' explicitly is prohibited." ) # Instantiate type(form) in order to use the same metaclass as form. return type(form)(class_name, (form,), form_class_attrs) # ModelFormSets ############################################################## class BaseModelFormSet(BaseFormSet): """ A ``FormSet`` for editing a queryset and/or adding new objects to it. """ model = None # Set of fields that must be unique among forms of this set. 
unique_fields = set() def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None, queryset=None, *, initial=None, **kwargs): self.queryset = queryset self.initial_extra = initial super().__init__(**{'data': data, 'files': files, 'auto_id': auto_id, 'prefix': prefix, **kwargs}) def initial_form_count(self): """Return the number of forms that are required in this FormSet.""" if not self.is_bound: return len(self.get_queryset()) return super().initial_form_count() def _existing_object(self, pk): if not hasattr(self, '_object_dict'): self._object_dict = {o.pk: o for o in self.get_queryset()} return self._object_dict.get(pk) def _get_to_python(self, field): """ If the field is a related field, fetch the concrete field's (that is, the ultimate pointed-to field's) to_python. """ while field.remote_field is not None: field = field.remote_field.get_related_field() return field.to_python def _construct_form(self, i, **kwargs): pk_required = i < self.initial_form_count() if pk_required: if self.is_bound: pk_key = '%s-%s' % (self.add_prefix(i), self.model._meta.pk.name) try: pk = self.data[pk_key] except KeyError: # The primary key is missing. The user may have tampered # with POST data. pass else: to_python = self._get_to_python(self.model._meta.pk) try: pk = to_python(pk) except ValidationError: # The primary key exists but is an invalid value. The # user may have tampered with POST data. pass else: kwargs['instance'] = self._existing_object(pk) else: kwargs['instance'] = self.get_queryset()[i] elif self.initial_extra: # Set initial values for extra forms try: kwargs['initial'] = self.initial_extra[i - self.initial_form_count()] except IndexError: pass form = super()._construct_form(i, **kwargs) if pk_required: form.fields[self.model._meta.pk.name].required = True return form def get_queryset(self): if not hasattr(self, '_queryset'): if self.queryset is not None: qs = self.queryset else: qs = self.model._default_manager.get_queryset() # If the queryset isn't already ordered we need to add an # artificial ordering here to make sure that all formsets # constructed from this queryset have the same form order. if not qs.ordered: qs = qs.order_by(self.model._meta.pk.name) # Removed queryset limiting here. As per discussion re: #13023 # on django-dev, max_num should not prevent existing # related objects/inlines from being displayed. self._queryset = qs return self._queryset def save_new(self, form, commit=True): """Save and return a new model instance for the given form.""" return form.save(commit=commit) def save_existing(self, form, instance, commit=True): """Save and return an existing model instance for the given form.""" return form.save(commit=commit) def delete_existing(self, obj, commit=True): """Deletes an existing model instance.""" if commit: obj.delete() def save(self, commit=True): """ Save model instances for every form, adding and changing instances as necessary, and return the list of instances. """ if not commit: self.saved_forms = [] def save_m2m(): for form in self.saved_forms: form.save_m2m() self.save_m2m = save_m2m return self.save_existing_objects(commit) + self.save_new_objects(commit) save.alters_data = True def clean(self): self.validate_unique() def validate_unique(self): # Collect unique_checks and date_checks to run from all the forms. 
all_unique_checks = set() all_date_checks = set() forms_to_delete = self.deleted_forms valid_forms = [form for form in self.forms if form.is_valid() and form not in forms_to_delete] for form in valid_forms: exclude = form._get_validation_exclusions() unique_checks, date_checks = form.instance._get_unique_checks(exclude=exclude) all_unique_checks.update(unique_checks) all_date_checks.update(date_checks) errors = [] # Do each of the unique checks (unique and unique_together) for uclass, unique_check in all_unique_checks: seen_data = set() for form in valid_forms: # Get the data for the set of fields that must be unique among the forms. row_data = ( field if field in self.unique_fields else form.cleaned_data[field] for field in unique_check if field in form.cleaned_data ) # Reduce Model instances to their primary key values row_data = tuple( d._get_pk_val() if hasattr(d, '_get_pk_val') # Prevent "unhashable type: list" errors later on. else tuple(d) if isinstance(d, list) else d for d in row_data ) if row_data and None not in row_data: # if we've already seen it then we have a uniqueness failure if row_data in seen_data: # poke error messages into the right places and mark # the form as invalid errors.append(self.get_unique_error_message(unique_check)) form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()]) # remove the data from the cleaned_data dict since it was invalid for field in unique_check: if field in form.cleaned_data: del form.cleaned_data[field] # mark the data as seen seen_data.add(row_data) # iterate over each of the date checks now for date_check in all_date_checks: seen_data = set() uclass, lookup, field, unique_for = date_check for form in valid_forms: # see if we have data for both fields if (form.cleaned_data and form.cleaned_data[field] is not None and form.cleaned_data[unique_for] is not None): # if it's a date lookup we need to get the data for all the fields if lookup == 'date': date = form.cleaned_data[unique_for] date_data = (date.year, date.month, date.day) # otherwise it's just the attribute on the date/datetime # object else: date_data = (getattr(form.cleaned_data[unique_for], lookup),) data = (form.cleaned_data[field],) + date_data # if we've already seen it then we have a uniqueness failure if data in seen_data: # poke error messages into the right places and mark # the form as invalid errors.append(self.get_date_error_message(date_check)) form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()]) # remove the data from the cleaned_data dict since it was invalid del form.cleaned_data[field] # mark the data as seen seen_data.add(data) if errors: raise ValidationError(errors) def get_unique_error_message(self, unique_check): if len(unique_check) == 1: return gettext("Please correct the duplicate data for %(field)s.") % { "field": unique_check[0], } else: return gettext("Please correct the duplicate data for %(field)s, which must be unique.") % { "field": get_text_list(unique_check, _("and")), } def get_date_error_message(self, date_check): return gettext( "Please correct the duplicate data for %(field_name)s " "which must be unique for the %(lookup)s in %(date_field)s." 
        ) % {
            'field_name': date_check[2],
            'date_field': date_check[3],
            'lookup': str(date_check[1]),
        }

    def get_form_error(self):
        return gettext("Please correct the duplicate values below.")

    def save_existing_objects(self, commit=True):
        self.changed_objects = []
        self.deleted_objects = []
        if not self.initial_forms:
            return []

        saved_instances = []
        forms_to_delete = self.deleted_forms
        for form in self.initial_forms:
            obj = form.instance
            # If the pk is None, it means either:
            # 1. The object is an unexpected empty model, created by invalid
            #    POST data such as an object outside the formset's queryset.
            # 2. The object was already deleted from the database.
            if obj.pk is None:
                continue
            if form in forms_to_delete:
                self.deleted_objects.append(obj)
                self.delete_existing(obj, commit=commit)
            elif form.has_changed():
                self.changed_objects.append((obj, form.changed_data))
                saved_instances.append(self.save_existing(form, obj, commit=commit))
                if not commit:
                    self.saved_forms.append(form)
        return saved_instances

    def save_new_objects(self, commit=True):
        self.new_objects = []
        for form in self.extra_forms:
            if not form.has_changed():
                continue
            # If someone has marked an add form for deletion, don't save the
            # object.
            if self.can_delete and self._should_delete_form(form):
                continue
            self.new_objects.append(self.save_new(form, commit=commit))
            if not commit:
                self.saved_forms.append(form)
        return self.new_objects

    def add_fields(self, form, index):
        """Add a hidden field for the object's primary key."""
        from django.db.models import AutoField, OneToOneField, ForeignKey
        self._pk_field = pk = self.model._meta.pk
        # If a pk isn't editable, then it won't be on the form, so we need to
        # add it here so we can tell which object is which when we get the
        # data back. Generally, pk.editable should be false, but for some
        # reason the editable attribute of auto-created pk fields and
        # AutoFields is True, so check for that as well.
        def pk_is_not_editable(pk):
            return (
                (not pk.editable) or (pk.auto_created or isinstance(pk, AutoField)) or (
                    pk.remote_field and pk.remote_field.parent_link and
                    pk_is_not_editable(pk.remote_field.model._meta.pk)
                )
            )
        if pk_is_not_editable(pk) or pk.name not in form.fields:
            if form.is_bound:
                # If we're adding the related instance, ignore its primary key
                # as it could be an auto-generated default which isn't actually
                # in the database.
pk_value = None if form.instance._state.adding else form.instance.pk else: try: if index is not None: pk_value = self.get_queryset()[index].pk else: pk_value = None except IndexError: pk_value = None if isinstance(pk, (ForeignKey, OneToOneField)): qs = pk.remote_field.model._default_manager.get_queryset() else: qs = self.model._default_manager.get_queryset() qs = qs.using(form.instance._state.db) if form._meta.widgets: widget = form._meta.widgets.get(self._pk_field.name, HiddenInput) else: widget = HiddenInput form.fields[self._pk_field.name] = ModelChoiceField(qs, initial=pk_value, required=False, widget=widget) super().add_fields(form, index) def modelformset_factory(model, form=ModelForm, formfield_callback=None, formset=BaseModelFormSet, extra=1, can_delete=False, can_order=False, max_num=None, fields=None, exclude=None, widgets=None, validate_max=False, localized_fields=None, labels=None, help_texts=None, error_messages=None, min_num=None, validate_min=False, field_classes=None, absolute_max=None): """Return a FormSet class for the given Django model class.""" meta = getattr(form, 'Meta', None) if (getattr(meta, 'fields', fields) is None and getattr(meta, 'exclude', exclude) is None): raise ImproperlyConfigured( "Calling modelformset_factory without defining 'fields' or " "'exclude' explicitly is prohibited." ) form = modelform_factory(model, form=form, fields=fields, exclude=exclude, formfield_callback=formfield_callback, widgets=widgets, localized_fields=localized_fields, labels=labels, help_texts=help_texts, error_messages=error_messages, field_classes=field_classes) FormSet = formset_factory(form, formset, extra=extra, min_num=min_num, max_num=max_num, can_order=can_order, can_delete=can_delete, validate_min=validate_min, validate_max=validate_max, absolute_max=absolute_max) FormSet.model = model return FormSet # InlineFormSets ############################################################# class BaseInlineFormSet(BaseModelFormSet): """A formset for child objects related to a parent.""" def __init__(self, data=None, files=None, instance=None, save_as_new=False, prefix=None, queryset=None, **kwargs): if instance is None: self.instance = self.fk.remote_field.model() else: self.instance = instance self.save_as_new = save_as_new if queryset is None: queryset = self.model._default_manager if self.instance.pk is not None: qs = queryset.filter(**{self.fk.name: self.instance}) else: qs = queryset.none() self.unique_fields = {self.fk.name} super().__init__(data, files, prefix=prefix, queryset=qs, **kwargs) # Add the generated field to form._meta.fields if it's defined to make # sure validation isn't skipped on that field. if self.form._meta.fields and self.fk.name not in self.form._meta.fields: if isinstance(self.form._meta.fields, tuple): self.form._meta.fields = list(self.form._meta.fields) self.form._meta.fields.append(self.fk.name) def initial_form_count(self): if self.save_as_new: return 0 return super().initial_form_count() def _construct_form(self, i, **kwargs): form = super()._construct_form(i, **kwargs) if self.save_as_new: mutable = getattr(form.data, '_mutable', None) # Allow modifying an immutable QueryDict. 
if mutable is not None: form.data._mutable = True # Remove the primary key from the form's data, we are only # creating new instances form.data[form.add_prefix(self._pk_field.name)] = None # Remove the foreign key from the form's data form.data[form.add_prefix(self.fk.name)] = None if mutable is not None: form.data._mutable = mutable # Set the fk value here so that the form can do its validation. fk_value = self.instance.pk if self.fk.remote_field.field_name != self.fk.remote_field.model._meta.pk.name: fk_value = getattr(self.instance, self.fk.remote_field.field_name) fk_value = getattr(fk_value, 'pk', fk_value) setattr(form.instance, self.fk.get_attname(), fk_value) return form @classmethod def get_default_prefix(cls): return cls.fk.remote_field.get_accessor_name(model=cls.model).replace('+', '') def save_new(self, form, commit=True): # Ensure the latest copy of the related instance is present on each # form (it may have been saved after the formset was originally # instantiated). setattr(form.instance, self.fk.name, self.instance) return super().save_new(form, commit=commit) def add_fields(self, form, index): super().add_fields(form, index) if self._pk_field == self.fk: name = self._pk_field.name kwargs = {'pk_field': True} else: # The foreign key field might not be on the form, so we poke at the # Model field to get the label, since we need that for error messages. name = self.fk.name kwargs = { 'label': getattr(form.fields.get(name), 'label', capfirst(self.fk.verbose_name)) } # The InlineForeignKeyField assumes that the foreign key relation is # based on the parent model's pk. If this isn't the case, set to_field # to correctly resolve the initial form value. if self.fk.remote_field.field_name != self.fk.remote_field.model._meta.pk.name: kwargs['to_field'] = self.fk.remote_field.field_name # If we're adding a new object, ignore a parent's auto-generated key # as it will be regenerated on the save request. if self.instance._state.adding: if kwargs.get('to_field') is not None: to_field = self.instance._meta.get_field(kwargs['to_field']) else: to_field = self.instance._meta.pk if to_field.has_default(): setattr(self.instance, to_field.attname, None) form.fields[name] = InlineForeignKeyField(self.instance, **kwargs) def get_unique_error_message(self, unique_check): unique_check = [field for field in unique_check if field != self.fk.name] return super().get_unique_error_message(unique_check) def _get_foreign_key(parent_model, model, fk_name=None, can_fail=False): """ Find and return the ForeignKey from model to parent if there is one (return None if can_fail is True and no such field exists). If fk_name is provided, assume it is the name of the ForeignKey field. Unless can_fail is True, raise an exception if there isn't a ForeignKey from model to parent_model. """ # avoid circular import from django.db.models import ForeignKey opts = model._meta if fk_name: fks_to_parent = [f for f in opts.fields if f.name == fk_name] if len(fks_to_parent) == 1: fk = fks_to_parent[0] if not isinstance(fk, ForeignKey) or \ (fk.remote_field.model != parent_model and fk.remote_field.model not in parent_model._meta.get_parent_list()): raise ValueError( "fk_name '%s' is not a ForeignKey to '%s'." % (fk_name, parent_model._meta.label) ) elif not fks_to_parent: raise ValueError( "'%s' has no field named '%s'." 
                % (model._meta.label, fk_name)
            )
    else:
        # Try to discover what the ForeignKey from model to parent_model is
        fks_to_parent = [
            f for f in opts.fields
            if isinstance(f, ForeignKey) and (
                f.remote_field.model == parent_model or
                f.remote_field.model in parent_model._meta.get_parent_list()
            )
        ]
        if len(fks_to_parent) == 1:
            fk = fks_to_parent[0]
        elif not fks_to_parent:
            if can_fail:
                return
            raise ValueError(
                "'%s' has no ForeignKey to '%s'." % (
                    model._meta.label,
                    parent_model._meta.label,
                )
            )
        else:
            raise ValueError(
                "'%s' has more than one ForeignKey to '%s'. You must specify "
                "a 'fk_name' attribute." % (
                    model._meta.label,
                    parent_model._meta.label,
                )
            )
    return fk


def inlineformset_factory(parent_model, model, form=ModelForm,
                          formset=BaseInlineFormSet, fk_name=None, fields=None,
                          exclude=None, extra=3, can_order=False,
                          can_delete=True, max_num=None, formfield_callback=None,
                          widgets=None, validate_max=False, localized_fields=None,
                          labels=None, help_texts=None, error_messages=None,
                          min_num=None, validate_min=False, field_classes=None,
                          absolute_max=None):
    """
    Return an ``InlineFormSet`` for the given kwargs.

    ``fk_name`` must be provided if ``model`` has more than one ``ForeignKey``
    to ``parent_model``.
    """
    fk = _get_foreign_key(parent_model, model, fk_name=fk_name)
    # enforce a max_num=1 when the foreign key to the parent model is unique.
    if fk.unique:
        max_num = 1
    kwargs = {
        'form': form,
        'formfield_callback': formfield_callback,
        'formset': formset,
        'extra': extra,
        'can_delete': can_delete,
        'can_order': can_order,
        'fields': fields,
        'exclude': exclude,
        'min_num': min_num,
        'max_num': max_num,
        'widgets': widgets,
        'validate_min': validate_min,
        'validate_max': validate_max,
        'localized_fields': localized_fields,
        'labels': labels,
        'help_texts': help_texts,
        'error_messages': error_messages,
        'field_classes': field_classes,
        'absolute_max': absolute_max,
    }
    FormSet = modelformset_factory(model, **kwargs)
    FormSet.fk = fk
    return FormSet


# Fields #####################################################################

class InlineForeignKeyField(Field):
    """
    A basic integer field that deals with validating the given value to a
    given parent instance in an inline.
    """
    widget = HiddenInput
    default_error_messages = {
        'invalid_choice': _('The inline value did not match the parent instance.'),
    }

    def __init__(self, parent_instance, *args, pk_field=False, to_field=None, **kwargs):
        self.parent_instance = parent_instance
        self.pk_field = pk_field
        self.to_field = to_field
        if self.parent_instance is not None:
            if self.to_field:
                kwargs["initial"] = getattr(self.parent_instance, self.to_field)
            else:
                kwargs["initial"] = self.parent_instance.pk
        kwargs["required"] = False
        super().__init__(*args, **kwargs)

    def clean(self, value):
        if value in self.empty_values:
            if self.pk_field:
                return None
            # if there is no value, act as we did before.
            return self.parent_instance
        # ensure that we compare the values as equal types.
if self.to_field: orig = getattr(self.parent_instance, self.to_field) else: orig = self.parent_instance.pk if str(value) != str(orig): raise ValidationError(self.error_messages['invalid_choice'], code='invalid_choice') return self.parent_instance def has_changed(self, initial, data): return False class ModelChoiceIteratorValue: def __init__(self, value, instance): self.value = value self.instance = instance def __str__(self): return str(self.value) def __eq__(self, other): if isinstance(other, ModelChoiceIteratorValue): other = other.value return self.value == other class ModelChoiceIterator: def __init__(self, field): self.field = field self.queryset = field.queryset def __iter__(self): if self.field.empty_label is not None: yield ("", self.field.empty_label) queryset = self.queryset # Can't use iterator() when queryset uses prefetch_related() if not queryset._prefetch_related_lookups: queryset = queryset.iterator() for obj in queryset: yield self.choice(obj) def __len__(self): # count() adds a query but uses less memory since the QuerySet results # won't be cached. In most cases, the choices will only be iterated on, # and __len__() won't be called. return self.queryset.count() + (1 if self.field.empty_label is not None else 0) def __bool__(self): return self.field.empty_label is not None or self.queryset.exists() def choice(self, obj): return ( ModelChoiceIteratorValue(self.field.prepare_value(obj), obj), self.field.label_from_instance(obj), ) class ModelChoiceField(ChoiceField): """A ChoiceField whose choices are a model QuerySet.""" # This class is a subclass of ChoiceField for purity, but it doesn't # actually use any of ChoiceField's implementation. default_error_messages = { 'invalid_choice': _('Select a valid choice. That choice is not one of' ' the available choices.'), } iterator = ModelChoiceIterator def __init__(self, queryset, *, empty_label="---------", required=True, widget=None, label=None, initial=None, help_text='', to_field_name=None, limit_choices_to=None, blank=False, **kwargs): # Call Field instead of ChoiceField __init__() because we don't need # ChoiceField.__init__(). Field.__init__( self, required=required, widget=widget, label=label, initial=initial, help_text=help_text, **kwargs ) if ( (required and initial is not None) or (isinstance(self.widget, RadioSelect) and not blank) ): self.empty_label = None else: self.empty_label = empty_label self.queryset = queryset self.limit_choices_to = limit_choices_to # limit the queryset later. self.to_field_name = to_field_name def get_limit_choices_to(self): """ Return ``limit_choices_to`` for this form field. If it is a callable, invoke it and return the result. """ if callable(self.limit_choices_to): return self.limit_choices_to() return self.limit_choices_to def __deepcopy__(self, memo): result = super(ChoiceField, self).__deepcopy__(memo) # Need to force a new ModelChoiceIterator to be created, bug #11183 if self.queryset is not None: result.queryset = self.queryset.all() return result def _get_queryset(self): return self._queryset def _set_queryset(self, queryset): self._queryset = None if queryset is None else queryset.all() self.widget.choices = self.choices queryset = property(_get_queryset, _set_queryset) # this method will be used to create object labels by the QuerySetIterator. # Override it to customize the label. def label_from_instance(self, obj): """ Convert objects into strings and generate the labels for the choices presented by this object. 
        Subclasses can override this method to customize the display of
        the choices.
        """
        return str(obj)

    def _get_choices(self):
        # If self._choices is set, then somebody must have manually set
        # the property self.choices. In this case, just return self._choices.
        if hasattr(self, '_choices'):
            return self._choices

        # Otherwise, execute the QuerySet in self.queryset to determine the
        # choices dynamically. Return a fresh ModelChoiceIterator that has not been
        # consumed. Note that we're instantiating a new ModelChoiceIterator *each*
        # time _get_choices() is called (and, thus, each time self.choices is
        # accessed) so that we can ensure the QuerySet has not been consumed. This
        # construct might look complicated but it allows for lazy evaluation of
        # the queryset.
        return self.iterator(self)

    choices = property(_get_choices, ChoiceField._set_choices)

    def prepare_value(self, value):
        if hasattr(value, '_meta'):
            if self.to_field_name:
                return value.serializable_value(self.to_field_name)
            else:
                return value.pk
        return super().prepare_value(value)

    def to_python(self, value):
        if value in self.empty_values:
            return None
        try:
            key = self.to_field_name or 'pk'
            if isinstance(value, self.queryset.model):
                value = getattr(value, key)
            value = self.queryset.get(**{key: value})
        except (ValueError, TypeError, self.queryset.model.DoesNotExist):
            raise ValidationError(self.error_messages['invalid_choice'], code='invalid_choice')
        return value

    def validate(self, value):
        return Field.validate(self, value)

    def has_changed(self, initial, data):
        if self.disabled:
            return False
        initial_value = initial if initial is not None else ''
        data_value = data if data is not None else ''
        return str(self.prepare_value(initial_value)) != str(data_value)


class ModelMultipleChoiceField(ModelChoiceField):
    """A MultipleChoiceField whose choices are a model QuerySet."""
    widget = SelectMultiple
    hidden_widget = MultipleHiddenInput
    default_error_messages = {
        'invalid_list': _('Enter a list of values.'),
        'invalid_choice': _('Select a valid choice. %(value)s is not one of the'
                            ' available choices.'),
        'invalid_pk_value': _('“%(pk)s” is not a valid value.')
    }

    def __init__(self, queryset, **kwargs):
        super().__init__(queryset, empty_label=None, **kwargs)
        if self.error_messages.get('list') is not None:
            warnings.warn(
                "The 'list' error message key is deprecated in favor of "
                "'invalid_list'.",
                RemovedInDjango40Warning, stacklevel=2,
            )
            self.error_messages['invalid_list'] = self.error_messages['list']

    def to_python(self, value):
        if not value:
            return []
        return list(self._check_values(value))

    def clean(self, value):
        value = self.prepare_value(value)
        if self.required and not value:
            raise ValidationError(self.error_messages['required'], code='required')
        elif not self.required and not value:
            return self.queryset.none()
        if not isinstance(value, (list, tuple)):
            raise ValidationError(
                self.error_messages['invalid_list'],
                code='invalid_list',
            )
        qs = self._check_values(value)
        # Since this overrides the inherited ModelChoiceField.clean,
        # we run custom validators here.
        self.run_validators(value)
        return qs

    def _check_values(self, value):
        """
        Given a list of possible PK values, return a QuerySet of the
        corresponding objects. Raise a ValidationError if a given value is
        invalid (not a valid PK, not in the queryset, etc.)
        """
        key = self.to_field_name or 'pk'
        # deduplicate given values to avoid creating many querysets or
        # requiring the database backend to deduplicate efficiently.
try: value = frozenset(value) except TypeError: # list of lists isn't hashable, for example raise ValidationError( self.error_messages['invalid_list'], code='invalid_list', ) for pk in value: try: self.queryset.filter(**{key: pk}) except (ValueError, TypeError): raise ValidationError( self.error_messages['invalid_pk_value'], code='invalid_pk_value', params={'pk': pk}, ) qs = self.queryset.filter(**{'%s__in' % key: value}) pks = {str(getattr(o, key)) for o in qs} for val in value: if str(val) not in pks: raise ValidationError( self.error_messages['invalid_choice'], code='invalid_choice', params={'value': val}, ) return qs def prepare_value(self, value): if (hasattr(value, '__iter__') and not isinstance(value, str) and not hasattr(value, '_meta')): prepare_value = super().prepare_value return [prepare_value(v) for v in value] return super().prepare_value(value) def has_changed(self, initial, data): if self.disabled: return False if initial is None: initial = [] if data is None: data = [] if len(initial) != len(data): return True initial_set = {str(value) for value in self.prepare_value(initial)} data_set = {str(value) for value in data} return data_set != initial_set def modelform_defines_fields(form_class): return hasattr(form_class, '_meta') and ( form_class._meta.fields is not None or form_class._meta.exclude is not None )
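

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only; ``Author`` and ``Book`` are hypothetical
# models with ``name``/``birth_date`` and ``title``/``author`` fields -- they
# are not part of this module). It shows the typical round trip through
# modelform_factory() and inlineformset_factory() defined above.

def _modelform_usage_sketch():
    from myapp.models import Author, Book  # hypothetical models

    # ModelFormMetaclass turns the Meta options into form fields via
    # fields_for_model(); fields='__all__' is the sentinel handled there.
    AuthorForm = modelform_factory(Author, fields=['name', 'birth_date'])

    form = AuthorForm(data={'name': 'Ada Lovelace', 'birth_date': '1815-12-10'})
    if form.is_valid():
        # save(commit=False) defers the INSERT and attaches save_m2m() so
        # many-to-many data can be stored once the instance has a pk.
        author = form.save(commit=False)
        author.save()
        form.save_m2m()

        # _get_foreign_key() discovers the Book -> Author ForeignKey
        # automatically when exactly one exists; a unique fk forces max_num=1.
        BookFormSet = inlineformset_factory(Author, Book, fields=['title'], extra=2)
        return BookFormSet(instance=author)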
21ab8cf9d503a75cd4d027fc21cd578e04576f86b660b45a10057c02b23835e4
from django.core.exceptions import ValidationError from django.forms import Form from django.forms.fields import BooleanField, IntegerField from django.forms.utils import ErrorList from django.forms.widgets import HiddenInput, NumberInput from django.utils.functional import cached_property from django.utils.html import html_safe from django.utils.safestring import mark_safe from django.utils.translation import gettext as _, ngettext __all__ = ('BaseFormSet', 'formset_factory', 'all_valid') # special field names TOTAL_FORM_COUNT = 'TOTAL_FORMS' INITIAL_FORM_COUNT = 'INITIAL_FORMS' MIN_NUM_FORM_COUNT = 'MIN_NUM_FORMS' MAX_NUM_FORM_COUNT = 'MAX_NUM_FORMS' ORDERING_FIELD_NAME = 'ORDER' DELETION_FIELD_NAME = 'DELETE' # default minimum number of forms in a formset DEFAULT_MIN_NUM = 0 # default maximum number of forms in a formset, to prevent memory exhaustion DEFAULT_MAX_NUM = 1000 class ManagementForm(Form): """ Keep track of how many form instances are displayed on the page. If adding new forms via JavaScript, you should increment the count field of this form as well. """ def __init__(self, *args, **kwargs): self.base_fields[TOTAL_FORM_COUNT] = IntegerField(widget=HiddenInput) self.base_fields[INITIAL_FORM_COUNT] = IntegerField(widget=HiddenInput) # MIN_NUM_FORM_COUNT and MAX_NUM_FORM_COUNT are output with the rest of # the management form, but only for the convenience of client-side # code. The POST value of them returned from the client is not checked. self.base_fields[MIN_NUM_FORM_COUNT] = IntegerField(required=False, widget=HiddenInput) self.base_fields[MAX_NUM_FORM_COUNT] = IntegerField(required=False, widget=HiddenInput) super().__init__(*args, **kwargs) @html_safe class BaseFormSet: """ A collection of instances of the same Form class. """ ordering_widget = NumberInput def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None, initial=None, error_class=ErrorList, form_kwargs=None): self.is_bound = data is not None or files is not None self.prefix = prefix or self.get_default_prefix() self.auto_id = auto_id self.data = data or {} self.files = files or {} self.initial = initial self.form_kwargs = form_kwargs or {} self.error_class = error_class self._errors = None self._non_form_errors = None def __str__(self): return self.as_table() def __iter__(self): """Yield the forms in the order they should be rendered.""" return iter(self.forms) def __getitem__(self, index): """Return the form at the given index, based on the rendering order.""" return self.forms[index] def __len__(self): return len(self.forms) def __bool__(self): """ Return True since all formsets have a management form which is not included in the length. 
""" return True @cached_property def management_form(self): """Return the ManagementForm instance for this FormSet.""" if self.is_bound: form = ManagementForm(self.data, auto_id=self.auto_id, prefix=self.prefix) if not form.is_valid(): raise ValidationError( _('ManagementForm data is missing or has been tampered with'), code='missing_management_form', ) else: form = ManagementForm(auto_id=self.auto_id, prefix=self.prefix, initial={ TOTAL_FORM_COUNT: self.total_form_count(), INITIAL_FORM_COUNT: self.initial_form_count(), MIN_NUM_FORM_COUNT: self.min_num, MAX_NUM_FORM_COUNT: self.max_num }) return form def total_form_count(self): """Return the total number of forms in this FormSet.""" if self.is_bound: # return absolute_max if it is lower than the actual total form # count in the data; this is DoS protection to prevent clients # from forcing the server to instantiate arbitrary numbers of # forms return min(self.management_form.cleaned_data[TOTAL_FORM_COUNT], self.absolute_max) else: initial_forms = self.initial_form_count() total_forms = max(initial_forms, self.min_num) + self.extra # Allow all existing related objects/inlines to be displayed, # but don't allow extra beyond max_num. if initial_forms > self.max_num >= 0: total_forms = initial_forms elif total_forms > self.max_num >= 0: total_forms = self.max_num return total_forms def initial_form_count(self): """Return the number of forms that are required in this FormSet.""" if self.is_bound: return self.management_form.cleaned_data[INITIAL_FORM_COUNT] else: # Use the length of the initial data if it's there, 0 otherwise. initial_forms = len(self.initial) if self.initial else 0 return initial_forms @cached_property def forms(self): """Instantiate forms at first property access.""" # DoS protection is included in total_form_count() return [ self._construct_form(i, **self.get_form_kwargs(i)) for i in range(self.total_form_count()) ] def get_form_kwargs(self, index): """ Return additional keyword arguments for each individual formset form. index will be None if the form being constructed is a new empty form. """ return self.form_kwargs.copy() def _construct_form(self, i, **kwargs): """Instantiate and return the i-th form instance in a formset.""" defaults = { 'auto_id': self.auto_id, 'prefix': self.add_prefix(i), 'error_class': self.error_class, # Don't render the HTML 'required' attribute as it may cause # incorrect validation for extra, optional, and deleted # forms in the formset. 'use_required_attribute': False, } if self.is_bound: defaults['data'] = self.data defaults['files'] = self.files if self.initial and 'initial' not in kwargs: try: defaults['initial'] = self.initial[i] except IndexError: pass # Allow extra forms to be empty, unless they're part of # the minimum forms. 
        if i >= self.initial_form_count() and i >= self.min_num:
            defaults['empty_permitted'] = True
        defaults.update(kwargs)
        form = self.form(**defaults)
        self.add_fields(form, i)
        return form

    @property
    def initial_forms(self):
        """Return a list of all the initial forms in this formset."""
        return self.forms[:self.initial_form_count()]

    @property
    def extra_forms(self):
        """Return a list of all the extra forms in this formset."""
        return self.forms[self.initial_form_count():]

    @property
    def empty_form(self):
        form = self.form(
            auto_id=self.auto_id,
            prefix=self.add_prefix('__prefix__'),
            empty_permitted=True,
            use_required_attribute=False,
            **self.get_form_kwargs(None)
        )
        self.add_fields(form, None)
        return form

    @property
    def cleaned_data(self):
        """
        Return a list of form.cleaned_data dicts for every form in self.forms.
        """
        if not self.is_valid():
            raise AttributeError("'%s' object has no attribute 'cleaned_data'" % self.__class__.__name__)
        return [form.cleaned_data for form in self.forms]

    @property
    def deleted_forms(self):
        """Return a list of forms that have been marked for deletion."""
        if not self.is_valid() or not self.can_delete:
            return []
        # construct _deleted_form_indexes which is just a list of form indexes
        # that have had their deletion widget set to True
        if not hasattr(self, '_deleted_form_indexes'):
            self._deleted_form_indexes = []
            for i in range(0, self.total_form_count()):
                form = self.forms[i]
                # if this is an extra form and hasn't changed, don't consider it
                if i >= self.initial_form_count() and not form.has_changed():
                    continue
                if self._should_delete_form(form):
                    self._deleted_form_indexes.append(i)
        return [self.forms[i] for i in self._deleted_form_indexes]

    @property
    def ordered_forms(self):
        """
        Return a list of forms in the order specified by the incoming data.
        Raise an AttributeError if ordering is not allowed.
        """
        if not self.is_valid() or not self.can_order:
            raise AttributeError("'%s' object has no attribute 'ordered_forms'" % self.__class__.__name__)
        # Construct _ordering, which is a list of (form_index, order_field_value)
        # tuples. After constructing this list, we'll sort it by order_field_value
        # so we have a way to get to the form indexes in the order specified
        # by the form data.
        if not hasattr(self, '_ordering'):
            self._ordering = []
            for i in range(0, self.total_form_count()):
                form = self.forms[i]
                # if this is an extra form and hasn't changed, don't consider it
                if i >= self.initial_form_count() and not form.has_changed():
                    continue
                # don't add data marked for deletion to self.ordered_data
                if self.can_delete and self._should_delete_form(form):
                    continue
                self._ordering.append((i, form.cleaned_data[ORDERING_FIELD_NAME]))
            # After we're done populating self._ordering, sort it.
            # A sort function to order things numerically ascending, but
            # None should be sorted below anything else. Allowing None as
            # a comparison value makes it so we can leave ordering fields
            # blank.

            def compare_ordering_key(k):
                if k[1] is None:
                    return (1, 0)  # +infinity, larger than any number
                return (0, k[1])
            self._ordering.sort(key=compare_ordering_key)
        # Return a list of form.cleaned_data dicts in the order specified by
        # the form data.
        return [self.forms[i[0]] for i in self._ordering]

    @classmethod
    def get_default_prefix(cls):
        return 'form'

    @classmethod
    def get_ordering_widget(cls):
        return cls.ordering_widget

    def non_form_errors(self):
        """
        Return an ErrorList of errors that aren't associated with a particular
        form -- i.e., from formset.clean(). Return an empty ErrorList if there
        are none.
""" if self._non_form_errors is None: self.full_clean() return self._non_form_errors @property def errors(self): """Return a list of form.errors for every form in self.forms.""" if self._errors is None: self.full_clean() return self._errors def total_error_count(self): """Return the number of errors across all forms in the formset.""" return len(self.non_form_errors()) +\ sum(len(form_errors) for form_errors in self.errors) def _should_delete_form(self, form): """Return whether or not the form was marked for deletion.""" return form.cleaned_data.get(DELETION_FIELD_NAME, False) def is_valid(self): """Return True if every form in self.forms is valid.""" if not self.is_bound: return False # We loop over every form.errors here rather than short circuiting on the # first failure to make sure validation gets triggered for every form. forms_valid = True # This triggers a full clean. self.errors for i in range(0, self.total_form_count()): form = self.forms[i] if self.can_delete and self._should_delete_form(form): # This form is going to be deleted so any of its errors # shouldn't cause the entire formset to be invalid. continue forms_valid &= form.is_valid() return forms_valid and not self.non_form_errors() def full_clean(self): """ Clean all of self.data and populate self._errors and self._non_form_errors. """ self._errors = [] self._non_form_errors = self.error_class() empty_forms_count = 0 if not self.is_bound: # Stop further processing. return for i in range(0, self.total_form_count()): form = self.forms[i] # Empty forms are unchanged forms beyond those with initial data. if not form.has_changed() and i >= self.initial_form_count(): empty_forms_count += 1 # Accessing errors calls full_clean() if necessary. # _should_delete_form() requires cleaned_data. form_errors = form.errors if self.can_delete and self._should_delete_form(form): continue self._errors.append(form_errors) try: if (self.validate_max and self.total_form_count() - len(self.deleted_forms) > self.max_num) or \ self.management_form.cleaned_data[TOTAL_FORM_COUNT] > self.absolute_max: raise ValidationError(ngettext( "Please submit %d or fewer forms.", "Please submit %d or fewer forms.", self.max_num) % self.max_num, code='too_many_forms', ) if (self.validate_min and self.total_form_count() - len(self.deleted_forms) - empty_forms_count < self.min_num): raise ValidationError(ngettext( "Please submit %d or more forms.", "Please submit %d or more forms.", self.min_num) % self.min_num, code='too_few_forms') # Give self.clean() a chance to do cross-form validation. self.clean() except ValidationError as e: self._non_form_errors = self.error_class(e.error_list) def clean(self): """ Hook for doing any extra formset-wide cleaning after Form.clean() has been called on every form. Any ValidationError raised by this method will not be associated with a particular form; it will be accessible via formset.non_form_errors() """ pass def has_changed(self): """Return True if data in any form differs from initial.""" return any(form.has_changed() for form in self) def add_fields(self, form, index): """A hook for adding extra fields on to each form instance.""" if self.can_order: # Only pre-fill the ordering field for initial forms. 
if index is not None and index < self.initial_form_count(): form.fields[ORDERING_FIELD_NAME] = IntegerField( label=_('Order'), initial=index + 1, required=False, widget=self.get_ordering_widget(), ) else: form.fields[ORDERING_FIELD_NAME] = IntegerField( label=_('Order'), required=False, widget=self.get_ordering_widget(), ) if self.can_delete: form.fields[DELETION_FIELD_NAME] = BooleanField(label=_('Delete'), required=False) def add_prefix(self, index): return '%s-%s' % (self.prefix, index) def is_multipart(self): """ Return True if the formset needs to be multipart, i.e. it has FileInput, or False otherwise. """ if self.forms: return self.forms[0].is_multipart() else: return self.empty_form.is_multipart() @property def media(self): # All the forms on a FormSet are the same, so you only need to # interrogate the first form for media. if self.forms: return self.forms[0].media else: return self.empty_form.media def as_table(self): "Return this formset rendered as HTML <tr>s -- excluding the <table></table>." # XXX: there is no semantic division between forms here, there # probably should be. It might make sense to render each form as a # table row with each field as a td. forms = ' '.join(form.as_table() for form in self) return mark_safe(str(self.management_form) + '\n' + forms) def as_p(self): "Return this formset rendered as HTML <p>s." forms = ' '.join(form.as_p() for form in self) return mark_safe(str(self.management_form) + '\n' + forms) def as_ul(self): "Return this formset rendered as HTML <li>s." forms = ' '.join(form.as_ul() for form in self) return mark_safe(str(self.management_form) + '\n' + forms) def formset_factory(form, formset=BaseFormSet, extra=1, can_order=False, can_delete=False, max_num=None, validate_max=False, min_num=None, validate_min=False, absolute_max=None): """Return a FormSet for the given form class.""" if min_num is None: min_num = DEFAULT_MIN_NUM if max_num is None: max_num = DEFAULT_MAX_NUM # absolute_max is a hard limit on forms instantiated, to prevent # memory-exhaustion attacks. Default to max_num + DEFAULT_MAX_NUM # (which is 2 * DEFAULT_MAX_NUM if max_num is None in the first place). if absolute_max is None: absolute_max = max_num + DEFAULT_MAX_NUM if max_num > absolute_max: raise ValueError( "'absolute_max' must be greater or equal to 'max_num'." ) attrs = { 'form': form, 'extra': extra, 'can_order': can_order, 'can_delete': can_delete, 'min_num': min_num, 'max_num': max_num, 'absolute_max': absolute_max, 'validate_min': validate_min, 'validate_max': validate_max, } return type(form.__name__ + 'FormSet', (formset,), attrs) def all_valid(formsets): """Validate every formset and return True if all are valid.""" valid = True for formset in formsets: valid &= formset.is_valid() return valid
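

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only; ``ArticleForm`` below is a hypothetical
# form, not part of this module). Bound data must carry the ManagementForm
# keys under the default "form" prefix from get_default_prefix(); without
# them, management_form raises a ValidationError.

def _formset_usage_sketch():
    from django import forms

    class ArticleForm(forms.Form):  # hypothetical form for the example
        title = forms.CharField()

    # formset_factory() builds the class attributes checked above
    # (extra, max_num, absolute_max, ...).
    ArticleFormSet = formset_factory(ArticleForm, extra=1, max_num=10)

    data = {
        'form-TOTAL_FORMS': '2',
        'form-INITIAL_FORMS': '0',
        'form-MIN_NUM_FORMS': '0',   # client-side convenience, not validated
        'form-MAX_NUM_FORMS': '10',
        'form-0-title': 'First',
        'form-1-title': 'Second',
    }
    formset = ArticleFormSet(data)
    assert formset.is_valid()
    # Iterating the formset yields the forms in rendering order.
    return [form.cleaned_data for form in formset]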
cdaf5cc7e8c32ade7a70f039ce5d3a8890447d1ee1fd699014783be9bd3f778e
import copy import datetime import inspect from decimal import Decimal from django.core.exceptions import EmptyResultSet, FieldError from django.db import NotSupportedError, connection from django.db.models import fields from django.db.models.constants import LOOKUP_SEP from django.db.models.query_utils import Q from django.utils.deconstruct import deconstructible from django.utils.functional import cached_property from django.utils.hashable import make_hashable class SQLiteNumericMixin: """ Some expressions with output_field=DecimalField() must be cast to numeric to be properly filtered. """ def as_sqlite(self, compiler, connection, **extra_context): sql, params = self.as_sql(compiler, connection, **extra_context) try: if self.output_field.get_internal_type() == 'DecimalField': sql = 'CAST(%s AS NUMERIC)' % sql except FieldError: pass return sql, params class Combinable: """ Provide the ability to combine one or two objects with some connector. For example F('foo') + F('bar'). """ # Arithmetic connectors ADD = '+' SUB = '-' MUL = '*' DIV = '/' POW = '^' # The following is a quoted % operator - it is quoted because it can be # used in strings that also have parameter substitution. MOD = '%%' # Bitwise operators - note that these are generated by .bitand() # and .bitor(), the '&' and '|' are reserved for boolean operator # usage. BITAND = '&' BITOR = '|' BITLEFTSHIFT = '<<' BITRIGHTSHIFT = '>>' BITXOR = '#' def _combine(self, other, connector, reversed): if not hasattr(other, 'resolve_expression'): # everything must be resolvable to an expression if isinstance(other, datetime.timedelta): other = DurationValue(other, output_field=fields.DurationField()) else: other = Value(other) if reversed: return CombinedExpression(other, connector, self) return CombinedExpression(self, connector, other) ############# # OPERATORS # ############# def __neg__(self): return self._combine(-1, self.MUL, False) def __add__(self, other): return self._combine(other, self.ADD, False) def __sub__(self, other): return self._combine(other, self.SUB, False) def __mul__(self, other): return self._combine(other, self.MUL, False) def __truediv__(self, other): return self._combine(other, self.DIV, False) def __mod__(self, other): return self._combine(other, self.MOD, False) def __pow__(self, other): return self._combine(other, self.POW, False) def __and__(self, other): if getattr(self, 'conditional', False) and getattr(other, 'conditional', False): return Q(self) & Q(other) raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) def bitand(self, other): return self._combine(other, self.BITAND, False) def bitleftshift(self, other): return self._combine(other, self.BITLEFTSHIFT, False) def bitrightshift(self, other): return self._combine(other, self.BITRIGHTSHIFT, False) def bitxor(self, other): return self._combine(other, self.BITXOR, False) def __or__(self, other): if getattr(self, 'conditional', False) and getattr(other, 'conditional', False): return Q(self) | Q(other) raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." 
        )

    def bitor(self, other):
        return self._combine(other, self.BITOR, False)

    def __radd__(self, other):
        return self._combine(other, self.ADD, True)

    def __rsub__(self, other):
        return self._combine(other, self.SUB, True)

    def __rmul__(self, other):
        return self._combine(other, self.MUL, True)

    def __rtruediv__(self, other):
        return self._combine(other, self.DIV, True)

    def __rmod__(self, other):
        return self._combine(other, self.MOD, True)

    def __rpow__(self, other):
        return self._combine(other, self.POW, True)

    def __rand__(self, other):
        raise NotImplementedError(
            "Use .bitand() and .bitor() for bitwise logical operations."
        )

    def __ror__(self, other):
        raise NotImplementedError(
            "Use .bitand() and .bitor() for bitwise logical operations."
        )


@deconstructible
class BaseExpression:
    """Base class for all query expressions."""

    # aggregate specific fields
    is_summary = False
    _output_field_resolved_to_none = False
    # Can the expression be used in a WHERE clause?
    filterable = True
    # Can the expression be used as a source expression in Window?
    window_compatible = False

    def __init__(self, output_field=None):
        if output_field is not None:
            self.output_field = output_field

    def __getstate__(self):
        state = self.__dict__.copy()
        state.pop('convert_value', None)
        return state

    def get_db_converters(self, connection):
        return (
            []
            if self.convert_value is self._convert_value_noop else
            [self.convert_value]
        ) + self.output_field.get_db_converters(connection)

    def get_source_expressions(self):
        return []

    def set_source_expressions(self, exprs):
        assert not exprs

    def _parse_expressions(self, *expressions):
        return [
            arg if hasattr(arg, 'resolve_expression') else (
                F(arg) if isinstance(arg, str) else Value(arg)
            ) for arg in expressions
        ]

    def as_sql(self, compiler, connection):
        """
        Responsible for returning a (sql, [params]) tuple to be included
        in the current query.

        Different backends can provide their own implementation, by
        providing an `as_{vendor}` method and patching the Expression:

        ```
        def override_as_sql(self, compiler, connection):
            # custom logic
            return super().as_sql(compiler, connection)
        setattr(Expression, 'as_' + connection.vendor, override_as_sql)
        ```

        Arguments:
         * compiler: the query compiler responsible for generating the query.
           Must have a compile method, returning a (sql, [params]) tuple.
           Calling compiler(value) will return a quoted `value`.

         * connection: the database connection used for the current query.

        Return: (sql, params)
          Where `sql` is a string containing ordered sql parameters to be
          replaced with the elements of the list `params`.
        """
        raise NotImplementedError("Subclasses must implement as_sql()")

    @cached_property
    def contains_aggregate(self):
        return any(expr and expr.contains_aggregate for expr in self.get_source_expressions())

    @cached_property
    def contains_over_clause(self):
        return any(expr and expr.contains_over_clause for expr in self.get_source_expressions())

    @cached_property
    def contains_column_references(self):
        return any(expr and expr.contains_column_references for expr in self.get_source_expressions())

    def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
        """
        Provide the chance to do any preprocessing or validation before being
        added to the query.
Arguments: * query: the backend query implementation * allow_joins: boolean allowing or denying use of joins in this query * reuse: a set of reusable joins for multijoins * summarize: a terminal aggregate clause * for_save: whether this expression is about to be used in a save or update Return: an Expression to be added to the query. """ c = self.copy() c.is_summary = summarize c.set_source_expressions([ expr.resolve_expression(query, allow_joins, reuse, summarize) if expr else None for expr in c.get_source_expressions() ]) return c @property def conditional(self): return isinstance(self.output_field, fields.BooleanField) @property def field(self): return self.output_field @cached_property def output_field(self): """Return the output type of this expression.""" output_field = self._resolve_output_field() if output_field is None: self._output_field_resolved_to_none = True raise FieldError('Cannot resolve expression type, unknown output_field') return output_field @cached_property def _output_field_or_none(self): """ Return the output field of this expression, or None if _resolve_output_field() didn't return an output type. """ try: return self.output_field except FieldError: if not self._output_field_resolved_to_none: raise def _resolve_output_field(self): """ Attempt to infer the output type of the expression. If the output fields of all source fields match, then simply infer the same type here. This isn't always correct, but it makes sense most of the time. Consider the difference between `2 + 2` and `2 / 3`. Inferring the type here is a convenience for the common case. The user should supply their own output_field for more complex computations. If a source's output field resolves to None, exclude it from this check. If all sources are None, then an error is raised higher up the stack in the output_field property. """ sources_iter = (source for source in self.get_source_fields() if source is not None) for output_field in sources_iter: for source in sources_iter: if not isinstance(output_field, source.__class__): raise FieldError( 'Expression contains mixed types: %s, %s. You must ' 'set output_field.' % ( output_field.__class__.__name__, source.__class__.__name__, ) ) return output_field @staticmethod def _convert_value_noop(value, expression, connection): return value @cached_property def convert_value(self): """ Expressions provide their own converters because users have the option of manually specifying the output_field which may be a different type from the one the database returns.
""" field = self.output_field internal_type = field.get_internal_type() if internal_type == 'FloatField': return lambda value, expression, connection: None if value is None else float(value) elif internal_type.endswith('IntegerField'): return lambda value, expression, connection: None if value is None else int(value) elif internal_type == 'DecimalField': return lambda value, expression, connection: None if value is None else Decimal(value) return self._convert_value_noop def get_lookup(self, lookup): return self.output_field.get_lookup(lookup) def get_transform(self, name): return self.output_field.get_transform(name) def relabeled_clone(self, change_map): clone = self.copy() clone.set_source_expressions([ e.relabeled_clone(change_map) if e is not None else None for e in self.get_source_expressions() ]) return clone def copy(self): return copy.copy(self) def get_group_by_cols(self, alias=None): if not self.contains_aggregate: return [self] cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols def get_source_fields(self): """Return the underlying field types used by this aggregate.""" return [e._output_field_or_none for e in self.get_source_expressions()] def asc(self, **kwargs): return OrderBy(self, **kwargs) def desc(self, **kwargs): return OrderBy(self, descending=True, **kwargs) def reverse_ordering(self): return self def flatten(self): """ Recursively yield this expression and all subexpressions, in depth-first order. """ yield self for expr in self.get_source_expressions(): if expr: yield from expr.flatten() def select_format(self, compiler, sql, params): """ Custom format for select clauses. For example, EXISTS expressions need to be wrapped in CASE WHEN on Oracle. """ if hasattr(self.output_field, 'select_format'): return self.output_field.select_format(compiler, sql, params) return sql, params @cached_property def identity(self): constructor_signature = inspect.signature(self.__init__) args, kwargs = self._constructor_args signature = constructor_signature.bind_partial(*args, **kwargs) signature.apply_defaults() arguments = signature.arguments.items() identity = [self.__class__] for arg, value in arguments: if isinstance(value, fields.Field): if value.name and value.model: value = (value.model._meta.label, value.name) else: value = type(value) else: value = make_hashable(value) identity.append((arg, value)) return tuple(identity) def __eq__(self, other): if not isinstance(other, BaseExpression): return NotImplemented return other.identity == self.identity def __hash__(self): return hash(self.identity) class Expression(BaseExpression, Combinable): """An expression that can be combined with other expressions.""" pass class CombinedExpression(SQLiteNumericMixin, Expression): def __init__(self, lhs, connector, rhs, output_field=None): super().__init__(output_field=output_field) self.connector = connector self.lhs = lhs self.rhs = rhs def __repr__(self): return "<{}: {}>".format(self.__class__.__name__, self) def __str__(self): return "{} {} {}".format(self.lhs, self.connector, self.rhs) def get_source_expressions(self): return [self.lhs, self.rhs] def set_source_expressions(self, exprs): self.lhs, self.rhs = exprs def as_sql(self, compiler, connection): try: lhs_output = self.lhs.output_field except FieldError: lhs_output = None try: rhs_output = self.rhs.output_field except FieldError: rhs_output = None if (not connection.features.has_native_duration_field and ((lhs_output and lhs_output.get_internal_type() == 'DurationField') or 
(rhs_output and rhs_output.get_internal_type() == 'DurationField'))): return DurationExpression(self.lhs, self.connector, self.rhs).as_sql(compiler, connection) if (lhs_output and rhs_output and self.connector == self.SUB and lhs_output.get_internal_type() in {'DateField', 'DateTimeField', 'TimeField'} and lhs_output.get_internal_type() == rhs_output.get_internal_type()): return TemporalSubtraction(self.lhs, self.rhs).as_sql(compiler, connection) expressions = [] expression_params = [] sql, params = compiler.compile(self.lhs) expressions.append(sql) expression_params.extend(params) sql, params = compiler.compile(self.rhs) expressions.append(sql) expression_params.extend(params) # order of precedence expression_wrapper = '(%s)' sql = connection.ops.combine_expression(self.connector, expressions) return expression_wrapper % sql, expression_params def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize c.lhs = c.lhs.resolve_expression(query, allow_joins, reuse, summarize, for_save) c.rhs = c.rhs.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c class DurationExpression(CombinedExpression): def compile(self, side, compiler, connection): if not isinstance(side, DurationValue): try: output = side.output_field except FieldError: pass else: if output.get_internal_type() == 'DurationField': sql, params = compiler.compile(side) return connection.ops.format_for_duration_arithmetic(sql), params return compiler.compile(side) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) expressions = [] expression_params = [] sql, params = self.compile(self.lhs, compiler, connection) expressions.append(sql) expression_params.extend(params) sql, params = self.compile(self.rhs, compiler, connection) expressions.append(sql) expression_params.extend(params) # order of precedence expression_wrapper = '(%s)' sql = connection.ops.combine_duration_expression(self.connector, expressions) return expression_wrapper % sql, expression_params class TemporalSubtraction(CombinedExpression): output_field = fields.DurationField() def __init__(self, lhs, rhs): super().__init__(lhs, self.SUB, rhs) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) lhs = compiler.compile(self.lhs) rhs = compiler.compile(self.rhs) return connection.ops.subtract_temporals(self.lhs.output_field.get_internal_type(), lhs, rhs) @deconstructible class F(Combinable): """An object capable of resolving references to existing query objects.""" def __init__(self, name): """ Arguments: * name: the name of the field this expression references """ self.name = name def __repr__(self): return "{}({})".format(self.__class__.__name__, self.name) def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): return query.resolve_ref(self.name, allow_joins, reuse, summarize) def asc(self, **kwargs): return OrderBy(self, **kwargs) def desc(self, **kwargs): return OrderBy(self, descending=True, **kwargs) def __eq__(self, other): return self.__class__ == other.__class__ and self.name == other.name def __hash__(self): return hash(self.name) class ResolvedOuterRef(F): """ An object that contains a reference to an outer query. In this case, the reference to the outer query has been resolved because the inner query has been used as a subquery. 
""" contains_aggregate = False def as_sql(self, *args, **kwargs): raise ValueError( 'This queryset contains a reference to an outer query and may ' 'only be used in a subquery.' ) def resolve_expression(self, *args, **kwargs): col = super().resolve_expression(*args, **kwargs) # FIXME: Rename possibly_multivalued to multivalued and fix detection # for non-multivalued JOINs (e.g. foreign key fields). This should take # into account only many-to-many and one-to-many relationships. col.possibly_multivalued = LOOKUP_SEP in self.name return col def relabeled_clone(self, relabels): return self def get_group_by_cols(self, alias=None): return [] class OuterRef(F): def resolve_expression(self, *args, **kwargs): if isinstance(self.name, self.__class__): return self.name return ResolvedOuterRef(self.name) def relabeled_clone(self, relabels): return self class Func(SQLiteNumericMixin, Expression): """An SQL function call.""" function = None template = '%(function)s(%(expressions)s)' arg_joiner = ', ' arity = None # The number of arguments the function accepts. def __init__(self, *expressions, output_field=None, **extra): if self.arity is not None and len(expressions) != self.arity: raise TypeError( "'%s' takes exactly %s %s (%s given)" % ( self.__class__.__name__, self.arity, "argument" if self.arity == 1 else "arguments", len(expressions), ) ) super().__init__(output_field=output_field) self.source_expressions = self._parse_expressions(*expressions) self.extra = extra def __repr__(self): args = self.arg_joiner.join(str(arg) for arg in self.source_expressions) extra = {**self.extra, **self._get_repr_options()} if extra: extra = ', '.join(str(key) + '=' + str(val) for key, val in sorted(extra.items())) return "{}({}, {})".format(self.__class__.__name__, args, extra) return "{}({})".format(self.__class__.__name__, args) def _get_repr_options(self): """Return a dict of extra __init__() options to include in the repr.""" return {} def get_source_expressions(self): return self.source_expressions def set_source_expressions(self, exprs): self.source_expressions = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize for pos, arg in enumerate(c.source_expressions): c.source_expressions[pos] = arg.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def as_sql(self, compiler, connection, function=None, template=None, arg_joiner=None, **extra_context): connection.ops.check_expression_support(self) sql_parts = [] params = [] for arg in self.source_expressions: arg_sql, arg_params = compiler.compile(arg) sql_parts.append(arg_sql) params.extend(arg_params) data = {**self.extra, **extra_context} # Use the first supplied value in this order: the parameter to this # method, a value supplied in __init__()'s **extra (the value in # `data`), or the value defined on the class. 
if function is not None: data['function'] = function else: data.setdefault('function', self.function) template = template or data.get('template', self.template) arg_joiner = arg_joiner or data.get('arg_joiner', self.arg_joiner) data['expressions'] = data['field'] = arg_joiner.join(sql_parts) return template % data, params def copy(self): copy = super().copy() copy.source_expressions = self.source_expressions[:] copy.extra = self.extra.copy() return copy class Value(Expression): """Represent a wrapped value as a node within an expression.""" def __init__(self, value, output_field=None): """ Arguments: * value: the value this expression represents. The value will be added into the sql parameter list and properly quoted. * output_field: an instance of the model field type that this expression will return, such as IntegerField() or CharField(). """ super().__init__(output_field=output_field) self.value = value def __repr__(self): return "{}({})".format(self.__class__.__name__, self.value) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) val = self.value output_field = self._output_field_or_none if output_field is not None: if self.for_save: val = output_field.get_db_prep_save(val, connection=connection) else: val = output_field.get_db_prep_value(val, connection=connection) if hasattr(output_field, 'get_placeholder'): return output_field.get_placeholder(val, compiler, connection), [val] if val is None: # cx_Oracle does not always convert None to the appropriate # NULL type (like in case expressions using numbers), so we # use a literal SQL NULL. return 'NULL', [] return '%s', [val] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = super().resolve_expression(query, allow_joins, reuse, summarize, for_save) c.for_save = for_save return c def get_group_by_cols(self, alias=None): return [] class DurationValue(Value): def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) if connection.features.has_native_duration_field: return super().as_sql(compiler, connection) return connection.ops.date_interval_sql(self.value), [] class RawSQL(Expression): def __init__(self, sql, params, output_field=None): if output_field is None: output_field = fields.Field() self.sql, self.params = sql, params super().__init__(output_field=output_field) def __repr__(self): return "{}({}, {})".format(self.__class__.__name__, self.sql, self.params) def as_sql(self, compiler, connection): return '(%s)' % self.sql, self.params def get_group_by_cols(self, alias=None): return [self] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): # Resolve parent fields used in raw SQL.
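# Columns inherited from a multi-table parent live in the parent's # table, so a parent column mentioned in the raw SQL needs a JOIN to # that table; resolve_ref() below sets that join up. Detection is a # heuristic, case-insensitive substring match on the column name.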
for parent in query.model._meta.get_parent_list(): for parent_field in parent._meta.local_fields: _, column_name = parent_field.get_attname_column() if column_name.lower() in self.sql.lower(): query.resolve_ref(parent_field.name, allow_joins, reuse, summarize) break return super().resolve_expression(query, allow_joins, reuse, summarize, for_save) class Star(Expression): def __repr__(self): return "'*'" def as_sql(self, compiler, connection): return '*', [] class Random(Expression): output_field = fields.FloatField() def __repr__(self): return "Random()" def as_sql(self, compiler, connection): return connection.ops.random_function_sql(), [] class Col(Expression): contains_column_references = True possibly_multivalued = False def __init__(self, alias, target, output_field=None): if output_field is None: output_field = target super().__init__(output_field=output_field) self.alias, self.target = alias, target def __repr__(self): alias, target = self.alias, self.target identifiers = (alias, str(target)) if alias else (str(target),) return '{}({})'.format(self.__class__.__name__, ', '.join(identifiers)) def as_sql(self, compiler, connection): alias, column = self.alias, self.target.column identifiers = (alias, column) if alias else (column,) sql = '.'.join(map(compiler.quote_name_unless_alias, identifiers)) return sql, [] def relabeled_clone(self, relabels): if self.alias is None: return self return self.__class__(relabels.get(self.alias, self.alias), self.target, self.output_field) def get_group_by_cols(self, alias=None): return [self] def get_db_converters(self, connection): if self.target == self.output_field: return self.output_field.get_db_converters(connection) return (self.output_field.get_db_converters(connection) + self.target.get_db_converters(connection)) class Ref(Expression): """ Reference to column alias of the query. For example, Ref('sum_cost') in qs.annotate(sum_cost=Sum('cost')) query. """ def __init__(self, refs, source): super().__init__() self.refs, self.source = refs, source def __repr__(self): return "{}({}, {})".format(self.__class__.__name__, self.refs, self.source) def get_source_expressions(self): return [self.source] def set_source_expressions(self, exprs): self.source, = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): # The sub-expression `source` has already been resolved, as this is # just a reference to the name of `source`. return self def relabeled_clone(self, relabels): return self def as_sql(self, compiler, connection): return connection.ops.quote_name(self.refs), [] def get_group_by_cols(self, alias=None): return [self] class ExpressionList(Func): """ An expression containing multiple expressions. Can be used to provide a list of expressions as an argument to another expression, like an ordering clause. """ template = '%(expressions)s' def __init__(self, *expressions, **extra): if not expressions: raise ValueError('%s requires at least one expression.' % self.__class__.__name__) super().__init__(*expressions, **extra) def __str__(self): return self.arg_joiner.join(str(arg) for arg in self.source_expressions) class ExpressionWrapper(Expression): """ An expression that can wrap another expression so that it can provide extra context to the inner expression, such as the output_field. 
""" def __init__(self, expression, output_field): super().__init__(output_field=output_field) self.expression = expression def set_source_expressions(self, exprs): self.expression = exprs[0] def get_source_expressions(self): return [self.expression] def get_group_by_cols(self, alias=None): return self.expression.get_group_by_cols(alias=alias) def as_sql(self, compiler, connection): return self.expression.as_sql(compiler, connection) def __repr__(self): return "{}({})".format(self.__class__.__name__, self.expression) class When(Expression): template = 'WHEN %(condition)s THEN %(result)s' # This isn't a complete conditional expression, must be used in Case(). conditional = False def __init__(self, condition=None, then=None, **lookups): if lookups: if condition is None: condition, lookups = Q(**lookups), None elif getattr(condition, 'conditional', False): condition, lookups = Q(condition, **lookups), None if condition is None or not getattr(condition, 'conditional', False) or lookups: raise TypeError( 'When() supports a Q object, a boolean expression, or lookups ' 'as a condition.' ) if isinstance(condition, Q) and not condition: raise ValueError("An empty Q() can't be used as a When() condition.") super().__init__(output_field=None) self.condition = condition self.result = self._parse_expressions(then)[0] def __str__(self): return "WHEN %r THEN %r" % (self.condition, self.result) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) def get_source_expressions(self): return [self.condition, self.result] def set_source_expressions(self, exprs): self.condition, self.result = exprs def get_source_fields(self): # We're only interested in the fields of the result expressions. return [self.result._output_field_or_none] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize if hasattr(c.condition, 'resolve_expression'): c.condition = c.condition.resolve_expression(query, allow_joins, reuse, summarize, False) c.result = c.result.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def as_sql(self, compiler, connection, template=None, **extra_context): connection.ops.check_expression_support(self) template_params = extra_context sql_params = [] condition_sql, condition_params = compiler.compile(self.condition) template_params['condition'] = condition_sql sql_params.extend(condition_params) result_sql, result_params = compiler.compile(self.result) template_params['result'] = result_sql sql_params.extend(result_params) template = template or self.template return template % template_params, sql_params def get_group_by_cols(self, alias=None): # This is not a complete expression and cannot be used in GROUP BY. 
cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols class Case(Expression): """ An SQL searched CASE expression: CASE WHEN n > 0 THEN 'positive' WHEN n < 0 THEN 'negative' ELSE 'zero' END """ template = 'CASE %(cases)s ELSE %(default)s END' case_joiner = ' ' def __init__(self, *cases, default=None, output_field=None, **extra): if not all(isinstance(case, When) for case in cases): raise TypeError("Positional arguments must all be When objects.") super().__init__(output_field) self.cases = list(cases) self.default = self._parse_expressions(default)[0] self.extra = extra def __str__(self): return "CASE %s, ELSE %r" % (', '.join(str(c) for c in self.cases), self.default) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) def get_source_expressions(self): return self.cases + [self.default] def set_source_expressions(self, exprs): *self.cases, self.default = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize for pos, case in enumerate(c.cases): c.cases[pos] = case.resolve_expression(query, allow_joins, reuse, summarize, for_save) c.default = c.default.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def copy(self): c = super().copy() c.cases = c.cases[:] return c def as_sql(self, compiler, connection, template=None, case_joiner=None, **extra_context): connection.ops.check_expression_support(self) if not self.cases: return compiler.compile(self.default) template_params = {**self.extra, **extra_context} case_parts = [] sql_params = [] for case in self.cases: try: case_sql, case_params = compiler.compile(case) except EmptyResultSet: continue case_parts.append(case_sql) sql_params.extend(case_params) default_sql, default_params = compiler.compile(self.default) if not case_parts: return default_sql, default_params case_joiner = case_joiner or self.case_joiner template_params['cases'] = case_joiner.join(case_parts) template_params['default'] = default_sql sql_params.extend(default_params) template = template or template_params.get('template', self.template) sql = template % template_params if self._output_field_or_none is not None: sql = connection.ops.unification_cast_sql(self.output_field) % sql return sql, sql_params class Subquery(Expression): """ An explicit subquery. It may contain OuterRef() references to the outer query which will be resolved when it is applied to that query. """ template = '(%(subquery)s)' contains_aggregate = False def __init__(self, queryset, output_field=None, **extra): self.query = queryset.query self.extra = extra # Prevent the QuerySet from being evaluated. 
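# _chain() clones the queryset; seeding _result_cache with an empty # list and setting prefetch_done makes the clone look already # evaluated, so keeping this reference (for pickling and # deconstruction) can't hit the database.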
self.queryset = queryset._chain(_result_cache=[], prefetch_done=True) super().__init__(output_field) def __getstate__(self): state = super().__getstate__() args, kwargs = state['_constructor_args'] if args: args = (self.queryset, *args[1:]) else: kwargs['queryset'] = self.queryset state['_constructor_args'] = args, kwargs return state def get_source_expressions(self): return [self.query] def set_source_expressions(self, exprs): self.query = exprs[0] def _resolve_output_field(self): return self.query.output_field def copy(self): clone = super().copy() clone.query = clone.query.clone() return clone @property def external_aliases(self): return self.query.external_aliases def as_sql(self, compiler, connection, template=None, **extra_context): connection.ops.check_expression_support(self) template_params = {**self.extra, **extra_context} subquery_sql, sql_params = self.query.as_sql(compiler, connection) template_params['subquery'] = subquery_sql[1:-1] template = template or template_params.get('template', self.template) sql = template % template_params return sql, sql_params def get_group_by_cols(self, alias=None): if alias: return [Ref(alias, self)] external_cols = self.query.get_external_cols() if any(col.possibly_multivalued for col in external_cols): return [self] return external_cols class Exists(Subquery): template = 'EXISTS(%(subquery)s)' output_field = fields.BooleanField() def __init__(self, queryset, negated=False, **kwargs): # As a performance optimization, remove ordering since EXISTS doesn't # care about it, just whether or not a row matches. queryset = queryset.order_by() self.negated = negated super().__init__(queryset, **kwargs) def __invert__(self): clone = self.copy() clone.negated = not self.negated return clone def as_sql(self, compiler, connection, template=None, **extra_context): sql, params = super().as_sql(compiler, connection, template, **extra_context) if self.negated: sql = 'NOT {}'.format(sql) return sql, params def select_format(self, compiler, sql, params): # Wrap EXISTS() with a CASE WHEN expression if a database backend # (e.g. Oracle) doesn't support boolean expression in SELECT or GROUP # BY list. 
if not compiler.connection.features.supports_boolean_expr_in_select_clause: sql = 'CASE WHEN {} THEN 1 ELSE 0 END'.format(sql) return sql, params class OrderBy(BaseExpression): template = '%(expression)s %(ordering)s' conditional = False def __init__(self, expression, descending=False, nulls_first=False, nulls_last=False): if nulls_first and nulls_last: raise ValueError('nulls_first and nulls_last are mutually exclusive') self.nulls_first = nulls_first self.nulls_last = nulls_last self.descending = descending if not hasattr(expression, 'resolve_expression'): raise ValueError('expression must be an expression type') self.expression = expression def __repr__(self): return "{}({}, descending={})".format( self.__class__.__name__, self.expression, self.descending) def set_source_expressions(self, exprs): self.expression = exprs[0] def get_source_expressions(self): return [self.expression] def as_sql(self, compiler, connection, template=None, **extra_context): template = template or self.template if connection.features.supports_order_by_nulls_modifier: if self.nulls_last: template = '%s NULLS LAST' % template elif self.nulls_first: template = '%s NULLS FIRST' % template else: if self.nulls_last and not ( self.descending and connection.features.order_by_nulls_first ): template = '%%(expression)s IS NULL, %s' % template elif self.nulls_first and not ( not self.descending and connection.features.order_by_nulls_first ): template = '%%(expression)s IS NOT NULL, %s' % template connection.ops.check_expression_support(self) expression_sql, params = compiler.compile(self.expression) placeholders = { 'expression': expression_sql, 'ordering': 'DESC' if self.descending else 'ASC', **extra_context, } params *= template.count('%(expression)s') return (template % placeholders).rstrip(), params def as_oracle(self, compiler, connection): # Oracle doesn't allow ORDER BY EXISTS() unless it's wrapped in # a CASE WHEN. if isinstance(self.expression, Exists): copy = self.copy() copy.expression = Case( When(self.expression, then=True), default=False, output_field=fields.BooleanField(), ) return copy.as_sql(compiler, connection) return self.as_sql(compiler, connection) def get_group_by_cols(self, alias=None): cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols def reverse_ordering(self): self.descending = not self.descending if self.nulls_first or self.nulls_last: self.nulls_first = not self.nulls_first self.nulls_last = not self.nulls_last return self def asc(self): self.descending = False def desc(self): self.descending = True class Window(Expression): template = '%(expression)s OVER (%(window)s)' # Although the main expression may either be an aggregate or an # expression with an aggregate function, the GROUP BY that will # be introduced in the query as a result is not desired. contains_aggregate = False contains_over_clause = True filterable = False def __init__(self, expression, partition_by=None, order_by=None, frame=None, output_field=None): self.partition_by = partition_by self.order_by = order_by self.frame = frame if not getattr(expression, 'window_compatible', False): raise ValueError( "Expression '%s' isn't compatible with OVER clauses."
% expression.__class__.__name__ ) if self.partition_by is not None: if not isinstance(self.partition_by, (tuple, list)): self.partition_by = (self.partition_by,) self.partition_by = ExpressionList(*self.partition_by) if self.order_by is not None: if isinstance(self.order_by, (list, tuple)): self.order_by = ExpressionList(*self.order_by) elif not isinstance(self.order_by, BaseExpression): raise ValueError( 'order_by must be either an Expression or a sequence of ' 'expressions.' ) super().__init__(output_field=output_field) self.source_expression = self._parse_expressions(expression)[0] def _resolve_output_field(self): return self.source_expression.output_field def get_source_expressions(self): return [self.source_expression, self.partition_by, self.order_by, self.frame] def set_source_expressions(self, exprs): self.source_expression, self.partition_by, self.order_by, self.frame = exprs def as_sql(self, compiler, connection, template=None): connection.ops.check_expression_support(self) if not connection.features.supports_over_clause: raise NotSupportedError('This backend does not support window expressions.') expr_sql, params = compiler.compile(self.source_expression) window_sql, window_params = [], [] if self.partition_by is not None: sql_expr, sql_params = self.partition_by.as_sql( compiler=compiler, connection=connection, template='PARTITION BY %(expressions)s', ) window_sql.extend(sql_expr) window_params.extend(sql_params) if self.order_by is not None: window_sql.append(' ORDER BY ') order_sql, order_params = compiler.compile(self.order_by) window_sql.extend(order_sql) window_params.extend(order_params) if self.frame: frame_sql, frame_params = compiler.compile(self.frame) window_sql.append(' ' + frame_sql) window_params.extend(frame_params) params.extend(window_params) template = template or self.template return template % { 'expression': expr_sql, 'window': ''.join(window_sql).strip() }, params def __str__(self): return '{} OVER ({}{}{})'.format( str(self.source_expression), 'PARTITION BY ' + str(self.partition_by) if self.partition_by else '', 'ORDER BY ' + str(self.order_by) if self.order_by else '', str(self.frame or ''), ) def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def get_group_by_cols(self, alias=None): return [] class WindowFrame(Expression): """ Model the frame clause in window expressions. There are two types of frame clauses, implemented as subclasses; however, all processing and validation (by no means intended to be complete) is done here. Thus, providing an end for a frame is optional (the default is UNBOUNDED FOLLOWING, which is the last row in the frame).
""" template = '%(frame_type)s BETWEEN %(start)s AND %(end)s' def __init__(self, start=None, end=None): self.start = Value(start) self.end = Value(end) def set_source_expressions(self, exprs): self.start, self.end = exprs def get_source_expressions(self): return [self.start, self.end] def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) start, end = self.window_frame_start_end(connection, self.start.value, self.end.value) return self.template % { 'frame_type': self.frame_type, 'start': start, 'end': end, }, [] def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def get_group_by_cols(self, alias=None): return [] def __str__(self): if self.start.value is not None and self.start.value < 0: start = '%d %s' % (abs(self.start.value), connection.ops.PRECEDING) elif self.start.value is not None and self.start.value == 0: start = connection.ops.CURRENT_ROW else: start = connection.ops.UNBOUNDED_PRECEDING if self.end.value is not None and self.end.value > 0: end = '%d %s' % (self.end.value, connection.ops.FOLLOWING) elif self.end.value is not None and self.end.value == 0: end = connection.ops.CURRENT_ROW else: end = connection.ops.UNBOUNDED_FOLLOWING return self.template % { 'frame_type': self.frame_type, 'start': start, 'end': end, } def window_frame_start_end(self, connection, start, end): raise NotImplementedError('Subclasses must implement window_frame_start_end().') class RowRange(WindowFrame): frame_type = 'ROWS' def window_frame_start_end(self, connection, start, end): return connection.ops.window_frame_rows_start_end(start, end) class ValueRange(WindowFrame): frame_type = 'RANGE' def window_frame_start_end(self, connection, start, end): return connection.ops.window_frame_range_start_end(start, end)
import collections.abc import copy import datetime import decimal import operator import uuid import warnings from base64 import b64decode, b64encode from functools import partialmethod, total_ordering from django import forms from django.apps import apps from django.conf import settings from django.core import checks, exceptions, validators from django.db import connection, connections, router from django.db.models.constants import LOOKUP_SEP from django.db.models.query_utils import DeferredAttribute, RegisterLookupMixin from django.utils import timezone from django.utils.datastructures import DictWrapper from django.utils.dateparse import ( parse_date, parse_datetime, parse_duration, parse_time, ) from django.utils.duration import duration_microseconds, duration_string from django.utils.functional import Promise, cached_property from django.utils.ipv6 import clean_ipv6_address from django.utils.itercompat import is_iterable from django.utils.text import capfirst from django.utils.translation import gettext_lazy as _ __all__ = [ 'AutoField', 'BLANK_CHOICE_DASH', 'BigAutoField', 'BigIntegerField', 'BinaryField', 'BooleanField', 'CharField', 'CommaSeparatedIntegerField', 'DateField', 'DateTimeField', 'DecimalField', 'DurationField', 'EmailField', 'Empty', 'Field', 'FilePathField', 'FloatField', 'GenericIPAddressField', 'IPAddressField', 'IntegerField', 'NOT_PROVIDED', 'NullBooleanField', 'PositiveBigIntegerField', 'PositiveIntegerField', 'PositiveSmallIntegerField', 'SlugField', 'SmallAutoField', 'SmallIntegerField', 'TextField', 'TimeField', 'URLField', 'UUIDField', ] class Empty: pass class NOT_PROVIDED: pass # The values to use for "blank" in SelectFields. Will be appended to the start # of most "choices" lists. BLANK_CHOICE_DASH = [("", "---------")] def _load_field(app_label, model_name, field_name): return apps.get_model(app_label, model_name)._meta.get_field(field_name) # A guide to Field parameters: # # * name: The name of the field specified in the model. # * attname: The attribute to use on the model object. This is the same as # "name", except in the case of ForeignKeys, where "_id" is # appended. # * db_column: The db_column specified in the model (or None). # * column: The database column for this field. This is the same as # "attname", except if db_column is specified. # # Code that introspects values, or does other dynamic things, should use # attname. For example, this gets the primary key value of object "obj": # # getattr(obj, opts.pk.attname) def _empty(of_cls): new = Empty() new.__class__ = of_cls return new def return_None(): return None @total_ordering class Field(RegisterLookupMixin): """Base class for all field types""" # Designates whether empty strings fundamentally are allowed at the # database level. empty_strings_allowed = True empty_values = list(validators.EMPTY_VALUES) # These track each time a Field instance is created. Used to retain order. # The auto_creation_counter is used for fields that Django implicitly # creates, creation_counter is used for all user-specified fields. creation_counter = 0 auto_creation_counter = -1 default_validators = [] # Default set of validators default_error_messages = { 'invalid_choice': _('Value %(value)r is not a valid choice.'), 'null': _('This field cannot be null.'), 'blank': _('This field cannot be blank.'), 'unique': _('%(model_name)s with this %(field_label)s ' 'already exists.'), # Translators: The 'lookup_type' is one of 'date', 'year' or 'month'. 
# Eg: "Title must be unique for pub_date year" 'unique_for_date': _("%(field_label)s must be unique for " "%(date_field_label)s %(lookup_type)s."), } system_check_deprecated_details = None system_check_removed_details = None # Field flags hidden = False many_to_many = None many_to_one = None one_to_many = None one_to_one = None related_model = None descriptor_class = DeferredAttribute # Generic field type description, usually overridden by subclasses def _description(self): return _('Field of type: %(field_type)s') % { 'field_type': self.__class__.__name__ } description = property(_description) def __init__(self, verbose_name=None, name=None, primary_key=False, max_length=None, unique=False, blank=False, null=False, db_index=False, rel=None, default=NOT_PROVIDED, editable=True, serialize=True, unique_for_date=None, unique_for_month=None, unique_for_year=None, choices=None, help_text='', db_column=None, db_tablespace=None, auto_created=False, validators=(), error_messages=None): self.name = name self.verbose_name = verbose_name # May be set by set_attributes_from_name self._verbose_name = verbose_name # Store original for deconstruction self.primary_key = primary_key self.max_length, self._unique = max_length, unique self.blank, self.null = blank, null self.remote_field = rel self.is_relation = self.remote_field is not None self.default = default self.editable = editable self.serialize = serialize self.unique_for_date = unique_for_date self.unique_for_month = unique_for_month self.unique_for_year = unique_for_year if isinstance(choices, collections.abc.Iterator): choices = list(choices) self.choices = choices self.help_text = help_text self.db_index = db_index self.db_column = db_column self._db_tablespace = db_tablespace self.auto_created = auto_created # Adjust the appropriate creation counter, and save our local copy. if auto_created: self.creation_counter = Field.auto_creation_counter Field.auto_creation_counter -= 1 else: self.creation_counter = Field.creation_counter Field.creation_counter += 1 self._validators = list(validators) # Store for deconstruction later messages = {} for c in reversed(self.__class__.__mro__): messages.update(getattr(c, 'default_error_messages', {})) messages.update(error_messages or {}) self._error_messages = error_messages # Store for deconstruction later self.error_messages = messages def __str__(self): """ Return "app_label.model_label.field_name" for fields attached to models. """ if not hasattr(self, 'model'): return super().__str__() model = self.model app = model._meta.app_label return '%s.%s.%s' % (app, model._meta.object_name, self.name) def __repr__(self): """Display the module, class, and name of the field.""" path = '%s.%s' % (self.__class__.__module__, self.__class__.__qualname__) name = getattr(self, 'name', None) if name is not None: return '<%s: %s>' % (path, name) return '<%s>' % path def check(self, **kwargs): return [ *self._check_field_name(), *self._check_choices(), *self._check_db_index(), *self._check_null_allowed_for_primary_keys(), *self._check_backend_specific_checks(**kwargs), *self._check_validators(), *self._check_deprecation_details(), ] def _check_field_name(self): """ Check if field name is valid, i.e. 1) does not end with an underscore, 2) does not contain "__" and 3) is not "pk". """ if self.name.endswith('_'): return [ checks.Error( 'Field names must not end with an underscore.', obj=self, id='fields.E001', ) ] elif LOOKUP_SEP in self.name: return [ checks.Error( 'Field names must not contain "%s".' 
% LOOKUP_SEP, obj=self, id='fields.E002', ) ] elif self.name == 'pk': return [ checks.Error( "'pk' is a reserved word that cannot be used as a field name.", obj=self, id='fields.E003', ) ] else: return [] @classmethod def _choices_is_value(cls, value): return isinstance(value, (str, Promise)) or not is_iterable(value) def _check_choices(self): if not self.choices: return [] if not is_iterable(self.choices) or isinstance(self.choices, str): return [ checks.Error( "'choices' must be an iterable (e.g., a list or tuple).", obj=self, id='fields.E004', ) ] choice_max_length = 0 # Expect [group_name, [value, display]] for choices_group in self.choices: try: group_name, group_choices = choices_group except (TypeError, ValueError): # Containing non-pairs break try: if not all( self._choices_is_value(value) and self._choices_is_value(human_name) for value, human_name in group_choices ): break if self.max_length is not None and group_choices: choice_max_length = max([ choice_max_length, *(len(value) for value, _ in group_choices if isinstance(value, str)), ]) except (TypeError, ValueError): # No groups, choices in the form [value, display] value, human_name = group_name, group_choices if not self._choices_is_value(value) or not self._choices_is_value(human_name): break if self.max_length is not None and isinstance(value, str): choice_max_length = max(choice_max_length, len(value)) # Special case: choices=['ab'] if isinstance(choices_group, str): break else: if self.max_length is not None and choice_max_length > self.max_length: return [ checks.Error( "'max_length' is too small to fit the longest value " "in 'choices' (%d characters)." % choice_max_length, obj=self, id='fields.E009', ), ] return [] return [ checks.Error( "'choices' must be an iterable containing " "(actual value, human readable name) tuples.", obj=self, id='fields.E005', ) ] def _check_db_index(self): if self.db_index not in (None, True, False): return [ checks.Error( "'db_index' must be None, True or False.", obj=self, id='fields.E006', ) ] else: return [] def _check_null_allowed_for_primary_keys(self): if (self.primary_key and self.null and not connection.features.interprets_empty_strings_as_nulls): # We cannot reliably check this for backends like Oracle which # consider NULL and '' to be equal (and thus set up # character-based fields a little differently). return [ checks.Error( 'Primary keys must not have null=True.', hint=('Set null=False on the field, or ' 'remove primary_key=True argument.'), obj=self, id='fields.E007', ) ] else: return [] def _check_backend_specific_checks(self, databases=None, **kwargs): if databases is None: return [] app_label = self.model._meta.app_label errors = [] for alias in databases: if router.allow_migrate(alias, app_label, model_name=self.model._meta.model_name): errors.extend(connections[alias].validation.check_field(self, **kwargs)) return errors def _check_validators(self): errors = [] for i, validator in enumerate(self.validators): if not callable(validator): errors.append( checks.Error( "All 'validators' must be callable.", hint=( "validators[{i}] ({repr}) isn't a function or " "instance of a validator class.".format( i=i, repr=repr(validator), ) ), obj=self, id='fields.E008', ) ) return errors def _check_deprecation_details(self): if self.system_check_removed_details is not None: return [ checks.Error( self.system_check_removed_details.get( 'msg', '%s has been removed except for support in historical ' 'migrations.' 
% self.__class__.__name__ ), hint=self.system_check_removed_details.get('hint'), obj=self, id=self.system_check_removed_details.get('id', 'fields.EXXX'), ) ] elif self.system_check_deprecated_details is not None: return [ checks.Warning( self.system_check_deprecated_details.get( 'msg', '%s has been deprecated.' % self.__class__.__name__ ), hint=self.system_check_deprecated_details.get('hint'), obj=self, id=self.system_check_deprecated_details.get('id', 'fields.WXXX'), ) ] return [] def get_col(self, alias, output_field=None): if output_field is None: output_field = self if alias != self.model._meta.db_table or output_field != self: from django.db.models.expressions import Col return Col(alias, self, output_field) else: return self.cached_col @cached_property def cached_col(self): from django.db.models.expressions import Col return Col(self.model._meta.db_table, self) def select_format(self, compiler, sql, params): """ Custom format for select clauses. For example, GIS columns need to be selected as AsText(table.col) on MySQL as the table.col data can't be used by Django. """ return sql, params def deconstruct(self): """ Return enough information to recreate the field as a 4-tuple: * The name of the field on the model, if contribute_to_class() has been run. * The import path of the field, including the class, e.g. django.db.models.IntegerField. This should be the most portable version, so a less specific path may be better. * A list of positional arguments. * A dict of keyword arguments. Note that the positional or keyword arguments must contain values of the following types (including inner values of collection types): * None, bool, str, int, float, complex, set, frozenset, list, tuple, dict * UUID * datetime.datetime (naive), datetime.date * top-level classes, top-level functions - will be referenced by their full import path * Storage instances - these have their own deconstruct() method This is because the values here must be serialized into a text format (possibly new Python code, possibly JSON) and these are the only types with encoding handlers defined. There's no need to return the exact way the field was instantiated this time, just ensure that the resulting field is the same - prefer keyword arguments over positional ones, and omit parameters with their default values.
""" # Short-form way of fetching all the default parameters keywords = {} possibles = { "verbose_name": None, "primary_key": False, "max_length": None, "unique": False, "blank": False, "null": False, "db_index": False, "default": NOT_PROVIDED, "editable": True, "serialize": True, "unique_for_date": None, "unique_for_month": None, "unique_for_year": None, "choices": None, "help_text": '', "db_column": None, "db_tablespace": None, "auto_created": False, "validators": [], "error_messages": None, } attr_overrides = { "unique": "_unique", "error_messages": "_error_messages", "validators": "_validators", "verbose_name": "_verbose_name", "db_tablespace": "_db_tablespace", } equals_comparison = {"choices", "validators"} for name, default in possibles.items(): value = getattr(self, attr_overrides.get(name, name)) # Unroll anything iterable for choices into a concrete list if name == "choices" and isinstance(value, collections.abc.Iterable): value = list(value) # Do correct kind of comparison if name in equals_comparison: if value != default: keywords[name] = value else: if value is not default: keywords[name] = value # Work out path - we shorten it for known Django core fields path = "%s.%s" % (self.__class__.__module__, self.__class__.__qualname__) if path.startswith("django.db.models.fields.related"): path = path.replace("django.db.models.fields.related", "django.db.models") elif path.startswith("django.db.models.fields.files"): path = path.replace("django.db.models.fields.files", "django.db.models") elif path.startswith('django.db.models.fields.json'): path = path.replace('django.db.models.fields.json', 'django.db.models') elif path.startswith("django.db.models.fields.proxy"): path = path.replace("django.db.models.fields.proxy", "django.db.models") elif path.startswith("django.db.models.fields"): path = path.replace("django.db.models.fields", "django.db.models") # Return basic info - other fields should override this. return (self.name, path, [], keywords) def clone(self): """ Uses deconstruct() to clone a new copy of this Field. Will not preserve any class attachments/attribute names. """ name, path, args, kwargs = self.deconstruct() return self.__class__(*args, **kwargs) def __eq__(self, other): # Needed for @total_ordering if isinstance(other, Field): return self.creation_counter == other.creation_counter return NotImplemented def __lt__(self, other): # This is needed because bisect does not take a comparison function. if isinstance(other, Field): return self.creation_counter < other.creation_counter return NotImplemented def __hash__(self): return hash(self.creation_counter) def __deepcopy__(self, memodict): # We don't have to deepcopy very much here, since most things are not # intended to be altered after initial creation. obj = copy.copy(self) if self.remote_field: obj.remote_field = copy.copy(self.remote_field) if hasattr(self.remote_field, 'field') and self.remote_field.field is self: obj.remote_field.field = obj memodict[id(self)] = obj return obj def __copy__(self): # We need to avoid hitting __reduce__, so define this # slightly weird copy construct. obj = Empty() obj.__class__ = self.__class__ obj.__dict__ = self.__dict__.copy() return obj def __reduce__(self): """ Pickling should return the model._meta.fields instance of the field, not a new copy of that field. So, use the app registry to load the model and then the field back. """ if not hasattr(self, 'model'): # Fields are sometimes used without attaching them to models (for # example in aggregation). 
In this case give back a plain field # instance. The code below will create a new empty instance of # class self.__class__, then update its dict with self.__dict__ # values - so, this is very close to normal pickle. state = self.__dict__.copy() # The _get_default cached_property can't be pickled due to lambda # usage. state.pop('_get_default', None) return _empty, (self.__class__,), state return _load_field, (self.model._meta.app_label, self.model._meta.object_name, self.name) def get_pk_value_on_save(self, instance): """ Hook to generate new PK values on save. This method is called when saving instances with no primary key value set. If this method returns something other than None, then the returned value is used when saving the new instance. """ if self.default: return self.get_default() return None def to_python(self, value): """ Convert the input value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Return the converted value. Subclasses should override this. """ return value @cached_property def validators(self): """ Some validators can't be created at field initialization time. This method provides a way to delay their creation until required. """ return [*self.default_validators, *self._validators] def run_validators(self, value): if value in self.empty_values: return errors = [] for v in self.validators: try: v(value) except exceptions.ValidationError as e: if hasattr(e, 'code') and e.code in self.error_messages: e.message = self.error_messages[e.code] errors.extend(e.error_list) if errors: raise exceptions.ValidationError(errors) def validate(self, value, model_instance): """ Validate value and raise ValidationError if necessary. Subclasses should override this to provide validation logic. """ if not self.editable: # Skip validation for non-editable fields. return if self.choices is not None and value not in self.empty_values: for option_key, option_value in self.choices: if isinstance(option_value, (list, tuple)): # This is an optgroup, so look inside the group for # options. for optgroup_key, optgroup_value in option_value: if value == optgroup_key: return elif value == option_key: return raise exceptions.ValidationError( self.error_messages['invalid_choice'], code='invalid_choice', params={'value': value}, ) if value is None and not self.null: raise exceptions.ValidationError(self.error_messages['null'], code='null') if not self.blank and value in self.empty_values: raise exceptions.ValidationError(self.error_messages['blank'], code='blank') def clean(self, value, model_instance): """ Convert the value's type and run validation. Validation errors from to_python() and validate() are propagated. Return the correct value if no error is raised. """ value = self.to_python(value) self.validate(value, model_instance) self.run_validators(value) return value def db_type_parameters(self, connection): return DictWrapper(self.__dict__, connection.ops.quote_name, 'qn_') def db_check(self, connection): """ Return the database column check constraint for this field, for the provided connection. Works the same way as db_type() for the case that get_internal_type() does not map to a preexisting model field. """ data = self.db_type_parameters(connection) try: return connection.data_type_check_constraints[self.get_internal_type()] % data except KeyError: return None def db_type(self, connection): """ Return the database column data type for this field, for the provided connection.
""" # The default implementation of this method looks at the # backend-specific data_types dictionary, looking up the field by its # "internal type". # # A Field class can implement the get_internal_type() method to specify # which *preexisting* Django Field class it's most similar to -- i.e., # a custom field might be represented by a TEXT column type, which is # the same as the TextField Django field type, which means the custom # field's get_internal_type() returns 'TextField'. # # But the limitation of the get_internal_type() / data_types approach # is that it cannot handle database column types that aren't already # mapped to one of the built-in Django field types. In this case, you # can implement db_type() instead of get_internal_type() to specify # exactly which wacky database column type you want to use. data = self.db_type_parameters(connection) try: return connection.data_types[self.get_internal_type()] % data except KeyError: return None def rel_db_type(self, connection): """ Return the data type that a related field pointing to this field should use. For example, this method is called by ForeignKey and OneToOneField to determine its data type. """ return self.db_type(connection) def cast_db_type(self, connection): """Return the data type to use in the Cast() function.""" db_type = connection.ops.cast_data_types.get(self.get_internal_type()) if db_type: return db_type % self.db_type_parameters(connection) return self.db_type(connection) def db_parameters(self, connection): """ Extension of db_type(), providing a range of different return values (type, checks). This will look at db_type(), allowing custom model fields to override it. """ type_string = self.db_type(connection) check_string = self.db_check(connection) return { "type": type_string, "check": check_string, } def db_type_suffix(self, connection): return connection.data_types_suffix.get(self.get_internal_type()) def get_db_converters(self, connection): if hasattr(self, 'from_db_value'): return [self.from_db_value] return [] @property def unique(self): return self._unique or self.primary_key @property def db_tablespace(self): return self._db_tablespace or settings.DEFAULT_INDEX_TABLESPACE @property def db_returning(self): """ Private API intended only to be used by Django itself. Currently only the PostgreSQL backend supports returning multiple fields on a model. """ return False def set_attributes_from_name(self, name): self.name = self.name or name self.attname, self.column = self.get_attname_column() self.concrete = self.column is not None if self.verbose_name is None and self.name: self.verbose_name = self.name.replace('_', ' ') def contribute_to_class(self, cls, name, private_only=False): """ Register the field with the model class it belongs to. If private_only is True, create a separate instance of this field for every subclass of cls, even if cls is not an abstract model. """ self.set_attributes_from_name(name) self.model = cls cls._meta.add_field(self, private=private_only) if self.column: # Don't override classmethods with the descriptor. This means that # if you have a classmethod and a field with the same name, then # such fields can't be deferred (we don't have a check for this). if not getattr(cls, self.attname, None): setattr(cls, self.attname, self.descriptor_class(self)) if self.choices is not None: # Don't override a get_FOO_display() method defined explicitly on # this class, but don't check methods derived from inheritance, to # allow overriding inherited choices. 
For more complex inheritance # structures users should override contribute_to_class(). if 'get_%s_display' % self.name not in cls.__dict__: setattr( cls, 'get_%s_display' % self.name, partialmethod(cls._get_FIELD_display, field=self), ) def get_filter_kwargs_for_object(self, obj): """ Return a dict that when passed as kwargs to self.model.filter(), would yield all instances having the same value for this field as obj has. """ return {self.name: getattr(obj, self.attname)} def get_attname(self): return self.name def get_attname_column(self): attname = self.get_attname() column = self.db_column or attname return attname, column def get_internal_type(self): return self.__class__.__name__ def pre_save(self, model_instance, add): """Return field's value just before saving.""" return getattr(model_instance, self.attname) def get_prep_value(self, value): """Perform preliminary non-db specific value checks and conversions.""" if isinstance(value, Promise): value = value._proxy____cast() return value def get_db_prep_value(self, value, connection, prepared=False): """ Return field's value prepared for interacting with the database backend. Used by the default implementations of get_db_prep_save(). """ if not prepared: value = self.get_prep_value(value) return value def get_db_prep_save(self, value, connection): """Return field's value prepared for saving into a database.""" return self.get_db_prep_value(value, connection=connection, prepared=False) def has_default(self): """Return a boolean of whether this field has a default value.""" return self.default is not NOT_PROVIDED def get_default(self): """Return the default value for this field.""" return self._get_default() @cached_property def _get_default(self): if self.has_default(): if callable(self.default): return self.default return lambda: self.default if not self.empty_strings_allowed or self.null and not connection.features.interprets_empty_strings_as_nulls: return return_None return str # return empty string def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH, limit_choices_to=None, ordering=()): """ Return choices with a default blank choices included, for use as <select> choices for this field. """ if self.choices is not None: choices = list(self.choices) if include_blank: blank_defined = any(choice in ('', None) for choice, _ in self.flatchoices) if not blank_defined: choices = blank_choice + choices return choices rel_model = self.remote_field.model limit_choices_to = limit_choices_to or self.get_limit_choices_to() choice_func = operator.attrgetter( self.remote_field.get_related_field().attname if hasattr(self.remote_field, 'get_related_field') else 'pk' ) qs = rel_model._default_manager.complex_filter(limit_choices_to) if ordering: qs = qs.order_by(*ordering) return (blank_choice if include_blank else []) + [ (choice_func(x), str(x)) for x in qs ] def value_to_string(self, obj): """ Return a string value of this field from the passed obj. This is used by the serialization framework. 
""" return str(self.value_from_object(obj)) def _get_flatchoices(self): """Flattened version of choices tuple.""" if self.choices is None: return [] flat = [] for choice, value in self.choices: if isinstance(value, (list, tuple)): flat.extend(value) else: flat.append((choice, value)) return flat flatchoices = property(_get_flatchoices) def save_form_data(self, instance, data): setattr(instance, self.name, data) def formfield(self, form_class=None, choices_form_class=None, **kwargs): """Return a django.forms.Field instance for this field.""" defaults = { 'required': not self.blank, 'label': capfirst(self.verbose_name), 'help_text': self.help_text, } if self.has_default(): if callable(self.default): defaults['initial'] = self.default defaults['show_hidden_initial'] = True else: defaults['initial'] = self.get_default() if self.choices is not None: # Fields with choices get special treatment. include_blank = (self.blank or not (self.has_default() or 'initial' in kwargs)) defaults['choices'] = self.get_choices(include_blank=include_blank) defaults['coerce'] = self.to_python if self.null: defaults['empty_value'] = None if choices_form_class is not None: form_class = choices_form_class else: form_class = forms.TypedChoiceField # Many of the subclass-specific formfield arguments (min_value, # max_value) don't apply for choice fields, so be sure to only pass # the values that TypedChoiceField will understand. for k in list(kwargs): if k not in ('coerce', 'empty_value', 'choices', 'required', 'widget', 'label', 'initial', 'help_text', 'error_messages', 'show_hidden_initial', 'disabled'): del kwargs[k] defaults.update(kwargs) if form_class is None: form_class = forms.CharField return form_class(**defaults) def value_from_object(self, obj): """Return the value of this field in the given model instance.""" return getattr(obj, self.attname) class BooleanField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be either True or False.'), 'invalid_nullable': _('“%(value)s” value must be either True, False, or None.'), } description = _("Boolean (Either True or False)") def get_internal_type(self): return "BooleanField" def to_python(self, value): if self.null and value in self.empty_values: return None if value in (True, False): # 1/0 are equal to True/False. bool() converts former to latter. return bool(value) if value in ('t', 'True', '1'): return True if value in ('f', 'False', '0'): return False raise exceptions.ValidationError( self.error_messages['invalid_nullable' if self.null else 'invalid'], code='invalid', params={'value': value}, ) def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None return self.to_python(value) def formfield(self, **kwargs): if self.choices is not None: include_blank = not (self.has_default() or 'initial' in kwargs) defaults = {'choices': self.get_choices(include_blank=include_blank)} else: form_class = forms.NullBooleanField if self.null else forms.BooleanField # In HTML checkboxes, 'required' means "must be checked" which is # different from the choices case ("must select some value"). # required=False allows unchecked checkboxes. 
defaults = {'form_class': form_class, 'required': False} return super().formfield(**{**defaults, **kwargs}) class CharField(Field): description = _("String (up to %(max_length)s)") def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.validators.append(validators.MaxLengthValidator(self.max_length)) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_max_length_attribute(**kwargs), ] def _check_max_length_attribute(self, **kwargs): if self.max_length is None: return [ checks.Error( "CharFields must define a 'max_length' attribute.", obj=self, id='fields.E120', ) ] elif (not isinstance(self.max_length, int) or isinstance(self.max_length, bool) or self.max_length <= 0): return [ checks.Error( "'max_length' must be a positive integer.", obj=self, id='fields.E121', ) ] else: return [] def cast_db_type(self, connection): if self.max_length is None: return connection.ops.cast_char_field_without_max_length return super().cast_db_type(connection) def get_internal_type(self): return "CharField" def to_python(self, value): if isinstance(value, str) or value is None: return value return str(value) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def formfield(self, **kwargs): # Passing max_length to forms.CharField means that the value's length # will be validated twice. This is considered acceptable since we want # the value in the form field (to pass into widget for example). defaults = {'max_length': self.max_length} # TODO: Handle multiple backends with different feature flags. if self.null and not connection.features.interprets_empty_strings_as_nulls: defaults['empty_value'] = None defaults.update(kwargs) return super().formfield(**defaults) class CommaSeparatedIntegerField(CharField): default_validators = [validators.validate_comma_separated_integer_list] description = _("Comma-separated integers") system_check_removed_details = { 'msg': ( 'CommaSeparatedIntegerField is removed except for support in ' 'historical migrations.' ), 'hint': ( 'Use CharField(validators=[validate_comma_separated_integer_list]) ' 'instead.' ), 'id': 'fields.E901', } class DateTimeCheckMixin: def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_mutually_exclusive_options(), *self._check_fix_default_value(), ] def _check_mutually_exclusive_options(self): # auto_now, auto_now_add, and default are mutually exclusive # options. The use of more than one of these options together # will trigger an Error mutually_exclusive_options = [self.auto_now_add, self.auto_now, self.has_default()] enabled_options = [option not in (None, False) for option in mutually_exclusive_options].count(True) if enabled_options > 1: return [ checks.Error( "The options auto_now, auto_now_add, and default " "are mutually exclusive. Only one of these options " "may be present.", obj=self, id='fields.E160', ) ] else: return [] def _check_fix_default_value(self): return [] class DateField(DateTimeCheckMixin, Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid date format. 
It must be ' 'in YYYY-MM-DD format.'), 'invalid_date': _('“%(value)s” value has the correct format (YYYY-MM-DD) ' 'but it is an invalid date.'), } description = _("Date (without time)") def __init__(self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs): self.auto_now, self.auto_now_add = auto_now, auto_now_add if auto_now or auto_now_add: kwargs['editable'] = False kwargs['blank'] = True super().__init__(verbose_name, name, **kwargs) def _check_fix_default_value(self): """ Warn that using an actual date or datetime value is probably wrong; it's only evaluated on server startup. """ if not self.has_default(): return [] now = timezone.now() if not timezone.is_naive(now): now = timezone.make_naive(now, timezone.utc) value = self.default if isinstance(value, datetime.datetime): if not timezone.is_naive(value): value = timezone.make_naive(value, timezone.utc) value = value.date() elif isinstance(value, datetime.date): # Nothing to do, as dates don't have tz information pass else: # No explicit date / datetime value -- no checks necessary return [] offset = datetime.timedelta(days=1) lower = (now - offset).date() upper = (now + offset).date() if lower <= value <= upper: return [ checks.Warning( 'Fixed default value provided.', hint='It seems you set a fixed date / time / datetime ' 'value as default for this field. This may not be ' 'what you want. If you want to have the current date ' 'as default, use `django.utils.timezone.now`', obj=self, id='fields.W161', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.auto_now: kwargs['auto_now'] = True if self.auto_now_add: kwargs['auto_now_add'] = True if self.auto_now or self.auto_now_add: del kwargs['editable'] del kwargs['blank'] return name, path, args, kwargs def get_internal_type(self): return "DateField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.datetime): if settings.USE_TZ and timezone.is_aware(value): # Convert aware datetimes to the default time zone # before casting them to dates (#17742). 
default_timezone = timezone.get_default_timezone() value = timezone.make_naive(value, default_timezone) return value.date() if isinstance(value, datetime.date): return value try: parsed = parse_date(value) if parsed is not None: return parsed except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_date'], code='invalid_date', params={'value': value}, ) raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = datetime.date.today() setattr(model_instance, self.attname, value) return value else: return super().pre_save(model_instance, add) def contribute_to_class(self, cls, name, **kwargs): super().contribute_to_class(cls, name, **kwargs) if not self.null: setattr( cls, 'get_next_by_%s' % self.name, partialmethod(cls._get_next_or_previous_by_FIELD, field=self, is_next=True) ) setattr( cls, 'get_previous_by_%s' % self.name, partialmethod(cls._get_next_or_previous_by_FIELD, field=self, is_next=False) ) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): # Casts dates into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_datefield_value(value) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.DateField, **kwargs, }) class DateTimeField(DateField): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid format. It must be in ' 'YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format.'), 'invalid_date': _("“%(value)s” value has the correct format " "(YYYY-MM-DD) but it is an invalid date."), 'invalid_datetime': _('“%(value)s” value has the correct format ' '(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) ' 'but it is an invalid date/time.'), } description = _("Date (with time)") # __init__ is inherited from DateField def _check_fix_default_value(self): """ Warn that using an actual date or datetime value is probably wrong; it's only evaluated on server startup. """ if not self.has_default(): return [] now = timezone.now() if not timezone.is_naive(now): now = timezone.make_naive(now, timezone.utc) value = self.default if isinstance(value, datetime.datetime): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset upper = now + second_offset if timezone.is_aware(value): value = timezone.make_naive(value, timezone.utc) elif isinstance(value, datetime.date): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset lower = datetime.datetime(lower.year, lower.month, lower.day) upper = now + second_offset upper = datetime.datetime(upper.year, upper.month, upper.day) value = datetime.datetime(value.year, value.month, value.day) else: # No explicit date / datetime value -- no checks necessary return [] if lower <= value <= upper: return [ checks.Warning( 'Fixed default value provided.', hint='It seems you set a fixed date / time / datetime ' 'value as default for this field. This may not be ' 'what you want. 
If you want to have the current date ' 'as default, use `django.utils.timezone.now`', obj=self, id='fields.W161', ) ] return [] def get_internal_type(self): return "DateTimeField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.datetime): return value if isinstance(value, datetime.date): value = datetime.datetime(value.year, value.month, value.day) if settings.USE_TZ: # For backwards compatibility, interpret naive datetimes in # local time. This won't work during DST change, but we can't # do much about it, so we let the exceptions percolate up the # call stack. warnings.warn("DateTimeField %s.%s received a naive datetime " "(%s) while time zone support is active." % (self.model.__name__, self.name, value), RuntimeWarning) default_timezone = timezone.get_default_timezone() value = timezone.make_aware(value, default_timezone) return value try: parsed = parse_datetime(value) if parsed is not None: return parsed except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_datetime'], code='invalid_datetime', params={'value': value}, ) try: parsed = parse_date(value) if parsed is not None: return datetime.datetime(parsed.year, parsed.month, parsed.day) except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_date'], code='invalid_date', params={'value': value}, ) raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = timezone.now() setattr(model_instance, self.attname, value) return value else: return super().pre_save(model_instance, add) # contribute_to_class is inherited from DateField, it registers # get_next_by_FOO and get_prev_by_FOO def get_prep_value(self, value): value = super().get_prep_value(value) value = self.to_python(value) if value is not None and settings.USE_TZ and timezone.is_naive(value): # For backwards compatibility, interpret naive datetimes in local # time. This won't work during DST change, but we can't do much # about it, so we let the exceptions percolate up the call stack. try: name = '%s.%s' % (self.model.__name__, self.name) except AttributeError: name = '(unbound)' warnings.warn("DateTimeField %s received a naive datetime (%s)" " while time zone support is active." 
% (name, value), RuntimeWarning) default_timezone = timezone.get_default_timezone() value = timezone.make_aware(value, default_timezone) return value def get_db_prep_value(self, value, connection, prepared=False): # Casts datetimes into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_datetimefield_value(value) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.DateTimeField, **kwargs, }) class DecimalField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be a decimal number.'), } description = _("Decimal number") def __init__(self, verbose_name=None, name=None, max_digits=None, decimal_places=None, **kwargs): self.max_digits, self.decimal_places = max_digits, decimal_places super().__init__(verbose_name, name, **kwargs) def check(self, **kwargs): errors = super().check(**kwargs) digits_errors = [ *self._check_decimal_places(), *self._check_max_digits(), ] if not digits_errors: errors.extend(self._check_decimal_places_and_max_digits(**kwargs)) else: errors.extend(digits_errors) return errors def _check_decimal_places(self): try: decimal_places = int(self.decimal_places) if decimal_places < 0: raise ValueError() except TypeError: return [ checks.Error( "DecimalFields must define a 'decimal_places' attribute.", obj=self, id='fields.E130', ) ] except ValueError: return [ checks.Error( "'decimal_places' must be a non-negative integer.", obj=self, id='fields.E131', ) ] else: return [] def _check_max_digits(self): try: max_digits = int(self.max_digits) if max_digits <= 0: raise ValueError() except TypeError: return [ checks.Error( "DecimalFields must define a 'max_digits' attribute.", obj=self, id='fields.E132', ) ] except ValueError: return [ checks.Error( "'max_digits' must be a positive integer.", obj=self, id='fields.E133', ) ] else: return [] def _check_decimal_places_and_max_digits(self, **kwargs): if int(self.decimal_places) > int(self.max_digits): return [ checks.Error( "'max_digits' must be greater or equal to 'decimal_places'.", obj=self, id='fields.E134', ) ] return [] @cached_property def validators(self): return super().validators + [ validators.DecimalValidator(self.max_digits, self.decimal_places) ] @cached_property def context(self): return decimal.Context(prec=self.max_digits) def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.max_digits is not None: kwargs['max_digits'] = self.max_digits if self.decimal_places is not None: kwargs['decimal_places'] = self.decimal_places return name, path, args, kwargs def get_internal_type(self): return "DecimalField" def to_python(self, value): if value is None: return value if isinstance(value, float): return self.context.create_decimal_from_float(value) try: return decimal.Decimal(value) except (decimal.InvalidOperation, TypeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def get_db_prep_save(self, value, connection): return connection.ops.adapt_decimalfield_value(self.to_python(value), self.max_digits, self.decimal_places) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def formfield(self, **kwargs): return super().formfield(**{ 'max_digits': self.max_digits, 'decimal_places': self.decimal_places, 'form_class': 
forms.DecimalField, **kwargs, }) class DurationField(Field): """ Store timedelta objects. Use interval on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and bigint of microseconds on other databases. """ empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid format. It must be in ' '[DD] [[HH:]MM:]ss[.uuuuuu] format.') } description = _("Duration") def get_internal_type(self): return "DurationField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.timedelta): return value try: parsed = parse_duration(value) except ValueError: pass else: if parsed is not None: return parsed raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def get_db_prep_value(self, value, connection, prepared=False): if connection.features.has_native_duration_field: return value if value is None: return None return duration_microseconds(value) def get_db_converters(self, connection): converters = [] if not connection.features.has_native_duration_field: converters.append(connection.ops.convert_durationfield_value) return converters + super().get_db_converters(connection) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else duration_string(val) def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.DurationField, **kwargs, }) class EmailField(CharField): default_validators = [validators.validate_email] description = _("Email address") def __init__(self, *args, **kwargs): # max_length=254 to be compliant with RFCs 3696 and 5321 kwargs.setdefault('max_length', 254) super().__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() # We do not exclude max_length if it matches default as we want to change # the default in future. return name, path, args, kwargs def formfield(self, **kwargs): # As with CharField, this will cause email validation to be performed # twice. 
return super().formfield(**{ 'form_class': forms.EmailField, **kwargs, }) class FilePathField(Field): description = _("File path") def __init__(self, verbose_name=None, name=None, path='', match=None, recursive=False, allow_files=True, allow_folders=False, **kwargs): self.path, self.match, self.recursive = path, match, recursive self.allow_files, self.allow_folders = allow_files, allow_folders kwargs.setdefault('max_length', 100) super().__init__(verbose_name, name, **kwargs) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_allowing_files_or_folders(**kwargs), ] def _check_allowing_files_or_folders(self, **kwargs): if not self.allow_files and not self.allow_folders: return [ checks.Error( "FilePathFields must have either 'allow_files' or 'allow_folders' set to True.", obj=self, id='fields.E140', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.path != '': kwargs['path'] = self.path if self.match is not None: kwargs['match'] = self.match if self.recursive is not False: kwargs['recursive'] = self.recursive if self.allow_files is not True: kwargs['allow_files'] = self.allow_files if self.allow_folders is not False: kwargs['allow_folders'] = self.allow_folders if kwargs.get("max_length") == 100: del kwargs["max_length"] return name, path, args, kwargs def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None return str(value) def formfield(self, **kwargs): return super().formfield(**{ 'path': self.path() if callable(self.path) else self.path, 'match': self.match, 'recursive': self.recursive, 'form_class': forms.FilePathField, 'allow_files': self.allow_files, 'allow_folders': self.allow_folders, **kwargs, }) def get_internal_type(self): return "FilePathField" class FloatField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be a float.'), } description = _("Floating point number") def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None try: return float(value) except (TypeError, ValueError) as e: raise e.__class__( "Field '%s' expected a number but got %r." % (self.name, value), ) from e def get_internal_type(self): return "FloatField" def to_python(self, value): if value is None: return value try: return float(value) except (TypeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.FloatField, **kwargs, }) class IntegerField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be an integer.'), } description = _("Integer") def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_max_length_warning(), ] def _check_max_length_warning(self): if self.max_length is not None: return [ checks.Warning( "'max_length' is ignored when used with %s." % self.__class__.__name__, hint="Remove 'max_length' from field", obj=self, id='fields.W122', ) ] return [] @cached_property def validators(self): # These validators can't be added at field initialization time since # they're based on values retrieved from `connection`. 
validators_ = super().validators internal_type = self.get_internal_type() min_value, max_value = connection.ops.integer_field_range(internal_type) if min_value is not None and not any( ( isinstance(validator, validators.MinValueValidator) and ( validator.limit_value() if callable(validator.limit_value) else validator.limit_value ) >= min_value ) for validator in validators_ ): validators_.append(validators.MinValueValidator(min_value)) if max_value is not None and not any( ( isinstance(validator, validators.MaxValueValidator) and ( validator.limit_value() if callable(validator.limit_value) else validator.limit_value ) <= max_value ) for validator in validators_ ): validators_.append(validators.MaxValueValidator(max_value)) return validators_ def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None try: return int(value) except (TypeError, ValueError) as e: raise e.__class__( "Field '%s' expected a number but got %r." % (self.name, value), ) from e def get_internal_type(self): return "IntegerField" def to_python(self, value): if value is None: return value try: return int(value) except (TypeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.IntegerField, **kwargs, }) class BigIntegerField(IntegerField): description = _("Big (8 byte) integer") MAX_BIGINT = 9223372036854775807 def get_internal_type(self): return "BigIntegerField" def formfield(self, **kwargs): return super().formfield(**{ 'min_value': -BigIntegerField.MAX_BIGINT - 1, 'max_value': BigIntegerField.MAX_BIGINT, **kwargs, }) class IPAddressField(Field): empty_strings_allowed = False description = _("IPv4 address") system_check_removed_details = { 'msg': ( 'IPAddressField has been removed except for support in ' 'historical migrations.' 
), 'hint': 'Use GenericIPAddressField instead.', 'id': 'fields.E900', } def __init__(self, *args, **kwargs): kwargs['max_length'] = 15 super().__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['max_length'] return name, path, args, kwargs def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None return str(value) def get_internal_type(self): return "IPAddressField" class GenericIPAddressField(Field): empty_strings_allowed = False description = _("IP address") default_error_messages = {} def __init__(self, verbose_name=None, name=None, protocol='both', unpack_ipv4=False, *args, **kwargs): self.unpack_ipv4 = unpack_ipv4 self.protocol = protocol self.default_validators, invalid_error_message = \ validators.ip_address_validators(protocol, unpack_ipv4) self.default_error_messages['invalid'] = invalid_error_message kwargs['max_length'] = 39 super().__init__(verbose_name, name, *args, **kwargs) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_blank_and_null_values(**kwargs), ] def _check_blank_and_null_values(self, **kwargs): if not getattr(self, 'null', False) and getattr(self, 'blank', False): return [ checks.Error( 'GenericIPAddressFields cannot have blank=True if null=False, ' 'as blank values are stored as nulls.', obj=self, id='fields.E150', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.unpack_ipv4 is not False: kwargs['unpack_ipv4'] = self.unpack_ipv4 if self.protocol != "both": kwargs['protocol'] = self.protocol if kwargs.get("max_length") == 39: del kwargs['max_length'] return name, path, args, kwargs def get_internal_type(self): return "GenericIPAddressField" def to_python(self, value): if value is None: return None if not isinstance(value, str): value = str(value) value = value.strip() if ':' in value: return clean_ipv6_address(value, self.unpack_ipv4, self.error_messages['invalid']) return value def get_db_prep_value(self, value, connection, prepared=False): if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_ipaddressfield_value(value) def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None if value and ':' in value: try: return clean_ipv6_address(value, self.unpack_ipv4) except exceptions.ValidationError: pass return str(value) def formfield(self, **kwargs): return super().formfield(**{ 'protocol': self.protocol, 'form_class': forms.GenericIPAddressField, **kwargs, }) class NullBooleanField(BooleanField): default_error_messages = { 'invalid': _('“%(value)s” value must be either None, True or False.'), 'invalid_nullable': _('“%(value)s” value must be either None, True or False.'), } description = _("Boolean (Either True, False or None)") system_check_deprecated_details = { 'msg': ( 'NullBooleanField is deprecated. Support for it (except in ' 'historical migrations) will be removed in Django 4.0.' ), 'hint': 'Use BooleanField(null=True) instead.', 'id': 'fields.W903', } def __init__(self, *args, **kwargs): kwargs['null'] = True kwargs['blank'] = True super().__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['null'] del kwargs['blank'] return name, path, args, kwargs def get_internal_type(self): return "NullBooleanField" class PositiveIntegerRelDbTypeMixin: def rel_db_type(self, connection): """ Return the data type that a related field pointing to this field should use. 
In most cases, a foreign key pointing to a positive integer primary key will have an integer column data type but some databases (e.g. MySQL) have an unsigned integer type. In that case (related_fields_match_type=True), the primary key should return its db_type. """ if connection.features.related_fields_match_type: return self.db_type(connection) else: return IntegerField().db_type(connection=connection) class PositiveBigIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField): description = _('Positive big integer') def get_internal_type(self): return 'PositiveBigIntegerField' def formfield(self, **kwargs): return super().formfield(**{ 'min_value': 0, **kwargs, }) class PositiveIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField): description = _("Positive integer") def get_internal_type(self): return "PositiveIntegerField" def formfield(self, **kwargs): return super().formfield(**{ 'min_value': 0, **kwargs, }) class PositiveSmallIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField): description = _("Positive small integer") def get_internal_type(self): return "PositiveSmallIntegerField" def formfield(self, **kwargs): return super().formfield(**{ 'min_value': 0, **kwargs, }) class SlugField(CharField): default_validators = [validators.validate_slug] description = _("Slug (up to %(max_length)s)") def __init__(self, *args, max_length=50, db_index=True, allow_unicode=False, **kwargs): self.allow_unicode = allow_unicode if self.allow_unicode: self.default_validators = [validators.validate_unicode_slug] super().__init__(*args, max_length=max_length, db_index=db_index, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() if kwargs.get("max_length") == 50: del kwargs['max_length'] if self.db_index is False: kwargs['db_index'] = False else: del kwargs['db_index'] if self.allow_unicode is not False: kwargs['allow_unicode'] = self.allow_unicode return name, path, args, kwargs def get_internal_type(self): return "SlugField" def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.SlugField, 'allow_unicode': self.allow_unicode, **kwargs, }) class SmallIntegerField(IntegerField): description = _("Small integer") def get_internal_type(self): return "SmallIntegerField" class TextField(Field): description = _("Text") def get_internal_type(self): return "TextField" def to_python(self, value): if isinstance(value, str) or value is None: return value return str(value) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def formfield(self, **kwargs): # Passing max_length to forms.CharField means that the value's length # will be validated twice. This is considered acceptable since we want # the value in the form field (to pass into widget for example). return super().formfield(**{ 'max_length': self.max_length, **({} if self.choices is not None else {'widget': forms.Textarea}), **kwargs, }) class TimeField(DateTimeCheckMixin, Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid format. 
It must be in ' 'HH:MM[:ss[.uuuuuu]] format.'), 'invalid_time': _('“%(value)s” value has the correct format ' '(HH:MM[:ss[.uuuuuu]]) but it is an invalid time.'), } description = _("Time") def __init__(self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs): self.auto_now, self.auto_now_add = auto_now, auto_now_add if auto_now or auto_now_add: kwargs['editable'] = False kwargs['blank'] = True super().__init__(verbose_name, name, **kwargs) def _check_fix_default_value(self): """ Warn that using an actual date or datetime value is probably wrong; it's only evaluated on server startup. """ if not self.has_default(): return [] now = timezone.now() if not timezone.is_naive(now): now = timezone.make_naive(now, timezone.utc) value = self.default if isinstance(value, datetime.datetime): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset upper = now + second_offset if timezone.is_aware(value): value = timezone.make_naive(value, timezone.utc) elif isinstance(value, datetime.time): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset upper = now + second_offset value = datetime.datetime.combine(now.date(), value) if timezone.is_aware(value): value = timezone.make_naive(value, timezone.utc).time() else: # No explicit time / datetime value -- no checks necessary return [] if lower <= value <= upper: return [ checks.Warning( 'Fixed default value provided.', hint='It seems you set a fixed date / time / datetime ' 'value as default for this field. This may not be ' 'what you want. If you want to have the current date ' 'as default, use `django.utils.timezone.now`', obj=self, id='fields.W161', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.auto_now is not False: kwargs["auto_now"] = self.auto_now if self.auto_now_add is not False: kwargs["auto_now_add"] = self.auto_now_add if self.auto_now or self.auto_now_add: del kwargs['blank'] del kwargs['editable'] return name, path, args, kwargs def get_internal_type(self): return "TimeField" def to_python(self, value): if value is None: return None if isinstance(value, datetime.time): return value if isinstance(value, datetime.datetime): # Not usually a good idea to pass in a datetime here (it loses # information), but this can be a side-effect of interacting with a # database backend (e.g. Oracle), so we'll be accommodating. 
return value.time() try: parsed = parse_time(value) if parsed is not None: return parsed except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_time'], code='invalid_time', params={'value': value}, ) raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = datetime.datetime.now().time() setattr(model_instance, self.attname, value) return value else: return super().pre_save(model_instance, add) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): # Casts times into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_timefield_value(value) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.TimeField, **kwargs, }) class URLField(CharField): default_validators = [validators.URLValidator()] description = _("URL") def __init__(self, verbose_name=None, name=None, **kwargs): kwargs.setdefault('max_length', 200) super().__init__(verbose_name, name, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() if kwargs.get("max_length") == 200: del kwargs['max_length'] return name, path, args, kwargs def formfield(self, **kwargs): # As with CharField, this will cause URL validation to be performed # twice. return super().formfield(**{ 'form_class': forms.URLField, **kwargs, }) class BinaryField(Field): description = _("Raw binary data") empty_values = [None, b''] def __init__(self, *args, **kwargs): kwargs.setdefault('editable', False) super().__init__(*args, **kwargs) if self.max_length is not None: self.validators.append(validators.MaxLengthValidator(self.max_length)) def check(self, **kwargs): return [*super().check(**kwargs), *self._check_str_default_value()] def _check_str_default_value(self): if self.has_default() and isinstance(self.default, str): return [ checks.Error( "BinaryField's default cannot be a string. 
Use bytes " "content instead.", obj=self, id='fields.E170', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.editable: kwargs['editable'] = True else: del kwargs['editable'] return name, path, args, kwargs def get_internal_type(self): return "BinaryField" def get_placeholder(self, value, compiler, connection): return connection.ops.binary_placeholder_sql(value) def get_default(self): if self.has_default() and not callable(self.default): return self.default default = super().get_default() if default == '': return b'' return default def get_db_prep_value(self, value, connection, prepared=False): value = super().get_db_prep_value(value, connection, prepared) if value is not None: return connection.Database.Binary(value) return value def value_to_string(self, obj): """Binary data is serialized as base64""" return b64encode(self.value_from_object(obj)).decode('ascii') def to_python(self, value): # If it's a string, it should be base64-encoded data if isinstance(value, str): return memoryview(b64decode(value.encode('ascii'))) return value class UUIDField(Field): default_error_messages = { 'invalid': _('“%(value)s” is not a valid UUID.'), } description = _('Universally unique identifier') empty_strings_allowed = False def __init__(self, verbose_name=None, **kwargs): kwargs['max_length'] = 32 super().__init__(verbose_name, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['max_length'] return name, path, args, kwargs def get_internal_type(self): return "UUIDField" def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): if value is None: return None if not isinstance(value, uuid.UUID): value = self.to_python(value) if connection.features.has_native_uuid_field: return value return value.hex def to_python(self, value): if value is not None and not isinstance(value, uuid.UUID): input_form = 'int' if isinstance(value, int) else 'hex' try: return uuid.UUID(**{input_form: value}) except (AttributeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) return value def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.UUIDField, **kwargs, }) class AutoFieldMixin: db_returning = True def __init__(self, *args, **kwargs): kwargs['blank'] = True super().__init__(*args, **kwargs) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_primary_key(), ] def _check_primary_key(self): if not self.primary_key: return [ checks.Error( 'AutoFields must set primary_key=True.', obj=self, id='fields.E100', ), ] else: return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['blank'] kwargs['primary_key'] = True return name, path, args, kwargs def validate(self, value, model_instance): pass def get_db_prep_value(self, value, connection, prepared=False): if not prepared: value = self.get_prep_value(value) value = connection.ops.validate_autopk_value(value) return value def contribute_to_class(self, cls, name, **kwargs): assert not cls._meta.auto_field, ( "Model %s can't have more than one auto-generated field." % cls._meta.label ) super().contribute_to_class(cls, name, **kwargs) cls._meta.auto_field = self def formfield(self, **kwargs): return None class AutoFieldMeta(type): """ Metaclass to maintain backward inheritance compatibility for AutoField. 
    It is intended that AutoFieldMixin become public API when it is possible
    to create a non-integer automatically-generated field using column
    defaults stored in the database.

    In many areas Django also relies on using isinstance() to check for an
    automatically-generated field as a subclass of AutoField. A new flag needs
    to be implemented on Field to be used instead.

    When these issues have been addressed, this metaclass could be used to
    deprecate inheritance from AutoField and use of isinstance() with
    AutoField for detecting automatically-generated fields.
    """

    @property
    def _subclasses(self):
        return (BigAutoField, SmallAutoField)

    def __instancecheck__(self, instance):
        return isinstance(instance, self._subclasses) or super().__instancecheck__(instance)

    def __subclasscheck__(self, subclass):
        return subclass in self._subclasses or super().__subclasscheck__(subclass)


class AutoField(AutoFieldMixin, IntegerField, metaclass=AutoFieldMeta):

    def get_internal_type(self):
        return 'AutoField'

    def rel_db_type(self, connection):
        return IntegerField().db_type(connection=connection)


class BigAutoField(AutoFieldMixin, BigIntegerField):

    def get_internal_type(self):
        return 'BigAutoField'

    def rel_db_type(self, connection):
        return BigIntegerField().db_type(connection=connection)


class SmallAutoField(AutoFieldMixin, SmallIntegerField):

    def get_internal_type(self):
        return 'SmallAutoField'

    def rel_db_type(self, connection):
        return SmallIntegerField().db_type(connection=connection)
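
# A minimal sketch (not part of Django) of the two extension points that
# the db_type() docstring above describes. Both class names and the
# 'hstore' column type are hypothetical, chosen only for illustration.

class SketchTextField(Field):
    # Reuses a preexisting internal type: the backend's data_types
    # dictionary supplies the column type (e.g. 'text' on PostgreSQL).
    def get_internal_type(self):
        return 'TextField'


class SketchHStoreField(Field):
    # For a column type with no built-in Django equivalent, override
    # db_type() directly and bypass the data_types lookup entirely.
    def db_type(self, connection):
        return 'hstore'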
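
# A quick check of the AutoFieldMeta behavior documented above, assuming
# it runs at the end of this module: BigAutoField and SmallAutoField do
# not inherit from AutoField, yet isinstance()/issubclass() treat them as
# AutoField variants thanks to the metaclass hooks.
assert issubclass(BigAutoField, AutoField)
assert issubclass(SmallAutoField, AutoField)
assert not issubclass(IntegerField, AutoField)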
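
# Usage sketch for get_choices() defined earlier in this module: when no
# blank value appears among the choices, BLANK_CHOICE_DASH is prepended.
# The field below is a hypothetical, unattached instance.
_size = CharField(max_length=1, choices=[('S', 'Small'), ('M', 'Medium')])
assert _size.get_choices() == [('', '---------'), ('S', 'Small'), ('M', 'Medium')]
assert _size.get_choices(include_blank=False) == [('S', 'Small'), ('M', 'Medium')]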
5c4091a74d36a3484ac45c63170b87cb4dd85064c9309ae7bf02275cf1e9cbfc
import os import sys import warnings from itertools import takewhile from django.apps import apps from django.conf import settings from django.core.management.base import ( BaseCommand, CommandError, no_translations, ) from django.db import DEFAULT_DB_ALIAS, OperationalError, connections, router from django.db.migrations import Migration from django.db.migrations.autodetector import MigrationAutodetector from django.db.migrations.loader import MigrationLoader from django.db.migrations.questioner import ( InteractiveMigrationQuestioner, MigrationQuestioner, NonInteractiveMigrationQuestioner, ) from django.db.migrations.state import ProjectState from django.db.migrations.utils import get_migration_name_timestamp from django.db.migrations.writer import MigrationWriter class Command(BaseCommand): help = "Creates new migration(s) for apps." def add_arguments(self, parser): parser.add_argument( 'args', metavar='app_label', nargs='*', help='Specify the app label(s) to create migrations for.', ) parser.add_argument( '--dry-run', action='store_true', help="Just show what migrations would be made; don't actually write them.", ) parser.add_argument( '--merge', action='store_true', help="Enable fixing of migration conflicts.", ) parser.add_argument( '--empty', action='store_true', help="Create an empty migration.", ) parser.add_argument( '--noinput', '--no-input', action='store_false', dest='interactive', help='Tells Django to NOT prompt the user for input of any kind.', ) parser.add_argument( '-n', '--name', help="Use this name for migration file(s).", ) parser.add_argument( '--no-header', action='store_false', dest='include_header', help='Do not add header comments to new migration file(s).', ) parser.add_argument( '--check', action='store_true', dest='check_changes', help='Exit with a non-zero status if model changes are missing migrations.', ) @no_translations def handle(self, *app_labels, **options): self.verbosity = options['verbosity'] self.interactive = options['interactive'] self.dry_run = options['dry_run'] self.merge = options['merge'] self.empty = options['empty'] self.migration_name = options['name'] if self.migration_name and not self.migration_name.isidentifier(): raise CommandError('The migration name must be a valid Python identifier.') self.include_header = options['include_header'] check_changes = options['check_changes'] # Make sure the app they asked for exists app_labels = set(app_labels) has_bad_labels = False for app_label in app_labels: try: apps.get_app_config(app_label) except LookupError as err: self.stderr.write(str(err)) has_bad_labels = True if has_bad_labels: sys.exit(2) # Load the current graph state. Pass in None for the connection so # the loader doesn't try to resolve replaced migrations from DB. loader = MigrationLoader(None, ignore_no_migrations=True) # Raise an error if any migrations are applied before their dependencies. consistency_check_labels = {config.label for config in apps.get_app_configs()} # Non-default databases are only checked if database routers used. aliases_to_check = connections if settings.DATABASE_ROUTERS else [DEFAULT_DB_ALIAS] for alias in sorted(aliases_to_check): connection = connections[alias] if (connection.settings_dict['ENGINE'] != 'django.db.backends.dummy' and any( # At least one model must be migrated to the database. 
router.allow_migrate(connection.alias, app_label, model_name=model._meta.object_name) for app_label in consistency_check_labels for model in apps.get_app_config(app_label).get_models() )): try: loader.check_consistent_history(connection) except OperationalError as error: warnings.warn( "Got an error checking a consistent migration history " "performed for database connection '%s': %s" % (alias, error), RuntimeWarning, ) # Before anything else, see if there's conflicting apps and drop out # hard if there are any and they don't want to merge conflicts = loader.detect_conflicts() # If app_labels is specified, filter out conflicting migrations for unspecified apps if app_labels: conflicts = { app_label: conflict for app_label, conflict in conflicts.items() if app_label in app_labels } if conflicts and not self.merge: name_str = "; ".join( "%s in %s" % (", ".join(names), app) for app, names in conflicts.items() ) raise CommandError( "Conflicting migrations detected; multiple leaf nodes in the " "migration graph: (%s).\nTo fix them run " "'python manage.py makemigrations --merge'" % name_str ) # If they want to merge and there's nothing to merge, then politely exit if self.merge and not conflicts: self.stdout.write("No conflicts detected to merge.") return # If they want to merge and there is something to merge, then # divert into the merge code if self.merge and conflicts: return self.handle_merge(loader, conflicts) if self.interactive: questioner = InteractiveMigrationQuestioner(specified_apps=app_labels, dry_run=self.dry_run) else: questioner = NonInteractiveMigrationQuestioner(specified_apps=app_labels, dry_run=self.dry_run) # Set up autodetector autodetector = MigrationAutodetector( loader.project_state(), ProjectState.from_apps(apps), questioner, ) # If they want to make an empty migration, make one for each app if self.empty: if not app_labels: raise CommandError("You must supply at least one app label when using --empty.") # Make a fake changes() result we can pass to arrange_for_graph changes = { app: [Migration("custom", app)] for app in app_labels } changes = autodetector.arrange_for_graph( changes=changes, graph=loader.graph, migration_name=self.migration_name, ) self.write_migration_files(changes) return # Detect changes changes = autodetector.changes( graph=loader.graph, trim_to_apps=app_labels or None, convert_apps=app_labels or None, migration_name=self.migration_name, ) if not changes: # No changes? Tell them. if self.verbosity >= 1: if app_labels: if len(app_labels) == 1: self.stdout.write("No changes detected in app '%s'" % app_labels.pop()) else: self.stdout.write("No changes detected in apps '%s'" % ("', '".join(app_labels))) else: self.stdout.write("No changes detected") else: self.write_migration_files(changes) if check_changes: sys.exit(1) def write_migration_files(self, changes): """ Take a changes dict and write them out as migration files. """ directory_created = {} for app_label, app_migrations in changes.items(): if self.verbosity >= 1: self.stdout.write(self.style.MIGRATE_HEADING("Migrations for '%s':" % app_label)) for migration in app_migrations: # Describe the migration writer = MigrationWriter(migration, self.include_header) if self.verbosity >= 1: # Display a relative path if it's below the current working # directory, or an absolute path otherwise. 
                    try:
                        migration_string = os.path.relpath(writer.path)
                    except ValueError:
                        migration_string = writer.path
                    if migration_string.startswith('..'):
                        migration_string = writer.path
                    self.stdout.write('  %s\n' % self.style.MIGRATE_LABEL(migration_string))
                    for operation in migration.operations:
                        self.stdout.write('    - %s' % operation.describe())
                if not self.dry_run:
                    # Write the migrations file to the disk.
                    migrations_directory = os.path.dirname(writer.path)
                    if not directory_created.get(app_label):
                        os.makedirs(migrations_directory, exist_ok=True)
                        init_path = os.path.join(migrations_directory, "__init__.py")
                        if not os.path.isfile(init_path):
                            open(init_path, "w").close()
                        # We just do this once per app
                        directory_created[app_label] = True
                    migration_string = writer.as_string()
                    with open(writer.path, "w", encoding='utf-8') as fh:
                        fh.write(migration_string)
                elif self.verbosity == 3:
                    # Alternatively, makemigrations --dry-run --verbosity 3
                    # will output the migrations to stdout rather than saving
                    # the file to the disk.
                    self.stdout.write(self.style.MIGRATE_HEADING(
                        "Full migrations file '%s':" % writer.filename
                    ))
                    self.stdout.write(writer.as_string())

    def handle_merge(self, loader, conflicts):
        """
        Handles merging together conflicted migrations interactively, if it's
        safe; otherwise, advises on how to fix it.
        """
        if self.interactive:
            questioner = InteractiveMigrationQuestioner()
        else:
            questioner = MigrationQuestioner(defaults={'ask_merge': True})

        for app_label, migration_names in conflicts.items():
            # Grab out the migrations in question, and work out their
            # common ancestor.
            merge_migrations = []
            for migration_name in migration_names:
                migration = loader.get_migration(app_label, migration_name)
                migration.ancestry = [
                    mig for mig in loader.graph.forwards_plan((app_label, migration_name))
                    if mig[0] == migration.app_label
                ]
                merge_migrations.append(migration)

            def all_items_equal(seq):
                return all(item == seq[0] for item in seq[1:])

            merge_migrations_generations = zip(*(m.ancestry for m in merge_migrations))
            common_ancestor_count = sum(1 for common_ancestor_generation
                                        in takewhile(all_items_equal, merge_migrations_generations))
            if not common_ancestor_count:
                raise ValueError("Could not find common ancestor of %s" % migration_names)
            # Now work out the operations along each divergent branch
            for migration in merge_migrations:
                migration.branch = migration.ancestry[common_ancestor_count:]
                migrations_ops = (loader.get_migration(node_app, node_name).operations
                                  for node_app, node_name in migration.branch)
                migration.merged_operations = sum(migrations_ops, [])
            # In future, this could use some of the Optimizer code
            # (can_optimize_through) to automatically see if they're
            # mergeable. For now, we always just prompt the user.
            if self.verbosity > 0:
                self.stdout.write(self.style.MIGRATE_HEADING("Merging %s" % app_label))
                for migration in merge_migrations:
                    self.stdout.write(self.style.MIGRATE_LABEL("  Branch %s" % migration.name))
                    for operation in migration.merged_operations:
                        self.stdout.write('    - %s' % operation.describe())
            if questioner.ask_merge(app_label):
                # If they still want to merge it, then write out an empty
                # file depending on the migrations needing merging.
                numbers = [
                    MigrationAutodetector.parse_number(migration.name)
                    for migration in merge_migrations
                ]
                try:
                    biggest_number = max(x for x in numbers if x is not None)
                except ValueError:
                    biggest_number = 1
                subclass = type("Migration", (Migration,), {
                    "dependencies": [(app_label, migration.name) for migration in merge_migrations],
                })
                migration_name = "%04i_%s" % (
                    biggest_number + 1,
                    self.migration_name or ("merge_%s" % get_migration_name_timestamp())
                )
                new_migration = subclass(migration_name, app_label)
                writer = MigrationWriter(new_migration, self.include_header)

                if not self.dry_run:
                    # Write the merge migrations file to the disk
                    with open(writer.path, "w", encoding='utf-8') as fh:
                        fh.write(writer.as_string())
                    if self.verbosity > 0:
                        self.stdout.write("\nCreated new merge migration %s" % writer.path)
                elif self.verbosity == 3:
                    # Alternatively, makemigrations --merge --dry-run --verbosity 3
                    # will output the merge migrations to stdout rather than saving
                    # the file to the disk.
                    self.stdout.write(self.style.MIGRATE_HEADING(
                        "Full merge migrations file '%s':" % writer.filename
                    ))
                    self.stdout.write(writer.as_string())
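
# Usage sketch (not part of the command): the merge flow above can be
# exercised programmatically from a configured project; 'polls' is a
# hypothetical app label.
#
#     from django.core.management import call_command
#     call_command('makemigrations', 'polls', '--merge', '--dry-run', '--verbosity', '3')
#
# This mirrors `python manage.py makemigrations polls --merge --dry-run -v 3`
# and prints the would-be merge migration instead of writing it to disk.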
d377955cb063d22c3be0c264ade35fd44e387d48b26376252e590fceacb7d2df
"Base Cache class."
import time
import warnings

from django.core.exceptions import ImproperlyConfigured
from django.utils.module_loading import import_string


class InvalidCacheBackendError(ImproperlyConfigured):
    pass


class CacheKeyWarning(RuntimeWarning):
    pass


class InvalidCacheKey(ValueError):
    pass


# Stub class to ensure not passing in a `timeout` argument results in
# the default timeout
DEFAULT_TIMEOUT = object()

# Memcached does not accept keys longer than this.
MEMCACHE_MAX_KEY_LENGTH = 250


def default_key_func(key, key_prefix, version):
    """
    Default function to generate keys.

    Construct the key used by all other methods. By default, prepend
    the `key_prefix'. KEY_FUNCTION can be used to specify an alternate
    function with custom key making behavior.
    """
    return '%s:%s:%s' % (key_prefix, version, key)


def get_key_func(key_func):
    """
    Function to decide which key function to use.

    Default to ``default_key_func``.
    """
    if key_func is not None:
        if callable(key_func):
            return key_func
        else:
            return import_string(key_func)
    return default_key_func


class BaseCache:
    def __init__(self, params):
        timeout = params.get('timeout', params.get('TIMEOUT', 300))
        if timeout is not None:
            try:
                timeout = int(timeout)
            except (ValueError, TypeError):
                timeout = 300
        self.default_timeout = timeout

        options = params.get('OPTIONS', {})
        max_entries = params.get('max_entries', options.get('MAX_ENTRIES', 300))
        try:
            self._max_entries = int(max_entries)
        except (ValueError, TypeError):
            self._max_entries = 300

        cull_frequency = params.get('cull_frequency', options.get('CULL_FREQUENCY', 3))
        try:
            self._cull_frequency = int(cull_frequency)
        except (ValueError, TypeError):
            self._cull_frequency = 3

        self.key_prefix = params.get('KEY_PREFIX', '')
        self.version = params.get('VERSION', 1)
        self.key_func = get_key_func(params.get('KEY_FUNCTION'))

    def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
        """
        Return the timeout value usable by this backend based upon the
        provided timeout.
        """
        if timeout == DEFAULT_TIMEOUT:
            timeout = self.default_timeout
        elif timeout == 0:
            # ticket 21147 - avoid time.time() related precision issues
            timeout = -1
        return None if timeout is None else time.time() + timeout

    def make_key(self, key, version=None):
        """
        Construct the key used by all other methods. By default, use the
        key_func to generate a key (which, by default, prepends the
        `key_prefix' and 'version'). A different key function can be provided
        at the time of cache construction; alternatively, you can subclass the
        cache backend to provide custom key making behavior.
        """
        if version is None:
            version = self.version

        return self.key_func(key, self.key_prefix, version)

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Set a value in the cache if the key does not already exist. If
        timeout is given, use that timeout for the key; otherwise use the
        default cache timeout.

        Return True if the value was stored, False otherwise.
        """
        raise NotImplementedError('subclasses of BaseCache must provide an add() method')

    def get(self, key, default=None, version=None):
        """
        Fetch a given key from the cache. If the key does not exist, return
        default, which itself defaults to None.
        """
        raise NotImplementedError('subclasses of BaseCache must provide a get() method')

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Set a value in the cache. If timeout is given, use that timeout for
        the key; otherwise use the default cache timeout.
        """
        raise NotImplementedError('subclasses of BaseCache must provide a set() method')

    def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Update the key's expiry time using timeout. Return True if successful
        or False if the key does not exist.
        """
        raise NotImplementedError('subclasses of BaseCache must provide a touch() method')

    def delete(self, key, version=None):
        """
        Delete a key from the cache and return whether it succeeded, failing
        silently.
        """
        raise NotImplementedError('subclasses of BaseCache must provide a delete() method')

    def get_many(self, keys, version=None):
        """
        Fetch a bunch of keys from the cache. For certain backends (memcached,
        pgsql) this can be *much* faster when fetching multiple values.

        Return a dict mapping each key in keys to its value. If the given
        key is missing, it will be missing from the response dict.
        """
        d = {}
        for k in keys:
            val = self.get(k, version=version)
            if val is not None:
                d[k] = val
        return d

    def get_or_set(self, key, default, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Fetch a given key from the cache. If the key does not exist,
        add the key and set it to the default value. The default value can
        also be any callable. If timeout is given, use that timeout for the
        key; otherwise use the default cache timeout.

        Return the value of the key stored or retrieved.
        """
        val = self.get(key, version=version)
        if val is None:
            if callable(default):
                default = default()
            if default is not None:
                self.add(key, default, timeout=timeout, version=version)
                # Fetch the value again to avoid a race condition if another
                # caller added a value between the first get() and the add()
                # above.
                return self.get(key, default, version=version)
        return val

    def has_key(self, key, version=None):
        """
        Return True if the key is in the cache and has not expired.
        """
        return self.get(key, version=version) is not None

    def incr(self, key, delta=1, version=None):
        """
        Add delta to value in the cache. If the key does not exist, raise a
        ValueError exception.
        """
        value = self.get(key, version=version)
        if value is None:
            raise ValueError("Key '%s' not found" % key)
        new_value = value + delta
        self.set(key, new_value, version=version)
        return new_value

    def decr(self, key, delta=1, version=None):
        """
        Subtract delta from value in the cache. If the key does not exist,
        raise a ValueError exception.
        """
        return self.incr(key, -delta, version=version)

    def __contains__(self, key):
        """
        Return True if the key is in the cache and has not expired.
        """
        # This is a separate method, rather than just a copy of has_key(),
        # so that it always has the same functionality as has_key(), even
        # if a subclass overrides it.
        return self.has_key(key)

    def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Set a bunch of values in the cache at once from a dict of key/value
        pairs. For certain backends (memcached), this is much more efficient
        than calling set() multiple times.

        If timeout is given, use that timeout for the key; otherwise use the
        default cache timeout.

        On backends that support it, return a list of keys that failed
        insertion, or an empty list if all keys were inserted successfully.
        """
        for key, value in data.items():
            self.set(key, value, timeout=timeout, version=version)
        return []

    def delete_many(self, keys, version=None):
        """
        Delete a bunch of values in the cache at once. For certain backends
        (memcached), this is much more efficient than calling delete()
        multiple times.
        """
        for key in keys:
            self.delete(key, version=version)

    def clear(self):
        """Remove *all* values from the cache at once."""
        raise NotImplementedError('subclasses of BaseCache must provide a clear() method')

    def validate_key(self, key):
        """
        Warn about keys that would not be portable to the memcached
        backend. This encourages (but does not force) writing backend-portable
        cache code.
        """
        for warning in memcache_key_warnings(key):
            warnings.warn(warning, CacheKeyWarning)

    def incr_version(self, key, delta=1, version=None):
        """
        Add delta to the cache version for the supplied key. Return the new
        version.
        """
        if version is None:
            version = self.version

        value = self.get(key, version=version)
        if value is None:
            raise ValueError("Key '%s' not found" % key)

        self.set(key, value, version=version + delta)
        self.delete(key, version=version)
        return version + delta

    def decr_version(self, key, delta=1, version=None):
        """
        Subtract delta from the cache version for the supplied key. Return the
        new version.
        """
        return self.incr_version(key, -delta, version)

    def close(self, **kwargs):
        """Close the cache connection"""
        pass


def memcache_key_warnings(key):
    if len(key) > MEMCACHE_MAX_KEY_LENGTH:
        yield (
            'Cache key will cause errors if used with memcached: %r '
            '(longer than %s)' % (key, MEMCACHE_MAX_KEY_LENGTH)
        )
    for char in key:
        if ord(char) < 33 or ord(char) == 127:
            yield (
                'Cache key contains characters that will cause errors if '
                'used with memcached: %r' % key
            )
            break
import datetime import importlib import io import os import sys from unittest import mock from django.apps import apps from django.core.management import CommandError, call_command from django.db import ( ConnectionHandler, DatabaseError, OperationalError, connection, connections, models, ) from django.db.backends.base.schema import BaseDatabaseSchemaEditor from django.db.backends.utils import truncate_name from django.db.migrations.exceptions import InconsistentMigrationHistory from django.db.migrations.recorder import MigrationRecorder from django.test import TestCase, override_settings, skipUnlessDBFeature from .models import UnicodeModel, UnserializableModel from .routers import TestRouter from .test_base import MigrationTestBase class MigrateTests(MigrationTestBase): """ Tests running the migrate command. """ databases = {'default', 'other'} @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_migrate(self): """ Tests basic usage of the migrate command. """ # No tables are created self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableNotExists("migrations_book") # Run the migrations to 0001 only stdout = io.StringIO() call_command('migrate', 'migrations', '0001', verbosity=1, stdout=stdout, no_color=True) stdout = stdout.getvalue() self.assertIn('Target specific migration: 0001_initial, from migrations', stdout) self.assertIn('Applying migrations.0001_initial... OK', stdout) # The correct tables exist self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble") self.assertTableNotExists("migrations_book") # Run migrations all the way call_command("migrate", verbosity=0) # The correct tables exist self.assertTableExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableExists("migrations_book") # Unmigrate everything stdout = io.StringIO() call_command('migrate', 'migrations', 'zero', verbosity=1, stdout=stdout, no_color=True) stdout = stdout.getvalue() self.assertIn('Unapply all migrations: migrations', stdout) self.assertIn('Unapplying migrations.0002_second... OK', stdout) # Tables are gone self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableNotExists("migrations_book") @override_settings(INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'migrations.migrations_test_apps.migrated_app', ]) def test_migrate_with_system_checks(self): out = io.StringIO() call_command('migrate', skip_checks=False, no_color=True, stdout=out) self.assertIn('Apply all migrations: migrated_app', out.getvalue()) @override_settings(INSTALLED_APPS=['migrations', 'migrations.migrations_test_apps.unmigrated_app_syncdb']) def test_app_without_migrations(self): msg = "App 'unmigrated_app_syncdb' does not have migrations." with self.assertRaisesMessage(CommandError, msg): call_command('migrate', app_label='unmigrated_app_syncdb') @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_clashing_prefix'}) def test_ambiguous_prefix(self): msg = ( "More than one migration matches 'a' in app 'migrations'. Please " "be more specific." ) with self.assertRaisesMessage(CommandError, msg): call_command('migrate', app_label='migrations', migration_name='a') @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_unknown_prefix(self): msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'." 
with self.assertRaisesMessage(CommandError, msg): call_command('migrate', app_label='migrations', migration_name='nonexistent') @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_initial_false"}) def test_migrate_initial_false(self): """ `Migration.initial = False` skips fake-initial detection. """ # Make sure no tables are created self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") # Run the migrations to 0001 only call_command("migrate", "migrations", "0001", verbosity=0) # Fake rollback call_command("migrate", "migrations", "zero", fake=True, verbosity=0) # Make sure fake-initial detection does not run with self.assertRaises(DatabaseError): call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0) call_command("migrate", "migrations", "0001", fake=True, verbosity=0) # Real rollback call_command("migrate", "migrations", "zero", verbosity=0) # Make sure it's all gone self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableNotExists("migrations_book") @override_settings( MIGRATION_MODULES={"migrations": "migrations.test_migrations"}, DATABASE_ROUTERS=['migrations.routers.TestRouter'], ) def test_migrate_fake_initial(self): """ --fake-initial only works if all tables created in the initial migration of an app exist. Database routers must be obeyed when doing that check. """ # Make sure no tables are created for db in self.databases: self.assertTableNotExists("migrations_author", using=db) self.assertTableNotExists("migrations_tribble", using=db) # Run the migrations to 0001 only call_command("migrate", "migrations", "0001", verbosity=0) call_command("migrate", "migrations", "0001", verbosity=0, database="other") # Make sure the right tables exist self.assertTableExists("migrations_author") self.assertTableNotExists("migrations_tribble") # Also check the "other" database self.assertTableNotExists("migrations_author", using="other") self.assertTableExists("migrations_tribble", using="other") # Fake a roll-back call_command("migrate", "migrations", "zero", fake=True, verbosity=0) call_command("migrate", "migrations", "zero", fake=True, verbosity=0, database="other") # Make sure the tables still exist self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble", using="other") # Try to run initial migration with self.assertRaises(DatabaseError): call_command("migrate", "migrations", "0001", verbosity=0) # Run initial migration with an explicit --fake-initial out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command("migrate", "migrations", "0001", fake_initial=True, stdout=out, verbosity=1) call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0, database="other") self.assertIn( "migrations.0001_initial... faked", out.getvalue().lower() ) try: # Run migrations all the way. call_command('migrate', verbosity=0) call_command('migrate', verbosity=0, database="other") self.assertTableExists('migrations_author') self.assertTableNotExists('migrations_tribble') self.assertTableExists('migrations_book') self.assertTableNotExists('migrations_author', using='other') self.assertTableNotExists('migrations_tribble', using='other') self.assertTableNotExists('migrations_book', using='other') # Fake a roll-back.
call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0) call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0, database='other') self.assertTableExists('migrations_author') self.assertTableNotExists('migrations_tribble') self.assertTableExists('migrations_book') # Run initial migration. with self.assertRaises(DatabaseError): call_command('migrate', 'migrations', verbosity=0) # Run initial migration with an explicit --fake-initial. with self.assertRaises(DatabaseError): # Fails because "migrations_tribble" does not exist but needs # to in order to make --fake-initial work. call_command('migrate', 'migrations', fake_initial=True, verbosity=0) # Fake an apply. call_command('migrate', 'migrations', fake=True, verbosity=0) call_command('migrate', 'migrations', fake=True, verbosity=0, database='other') finally: # Unmigrate everything. call_command('migrate', 'migrations', 'zero', verbosity=0) call_command('migrate', 'migrations', 'zero', verbosity=0, database='other') # Make sure it's all gone for db in self.databases: self.assertTableNotExists("migrations_author", using=db) self.assertTableNotExists("migrations_tribble", using=db) self.assertTableNotExists("migrations_book", using=db) @skipUnlessDBFeature('ignores_table_name_case') def test_migrate_fake_initial_case_insensitive(self): with override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_fake_initial_case_insensitive.initial', }): call_command('migrate', 'migrations', '0001', verbosity=0) call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0) with override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_fake_initial_case_insensitive.fake_initial', }): out = io.StringIO() call_command( 'migrate', 'migrations', '0001', fake_initial=True, stdout=out, verbosity=1, no_color=True, ) self.assertIn( 'migrations.0001_initial... faked', out.getvalue().lower(), ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_fake_split_initial"}) def test_migrate_fake_split_initial(self): """ Split initial migrations can be faked with --fake-initial. """ call_command("migrate", "migrations", "0002", verbosity=0) call_command("migrate", "migrations", "zero", fake=True, verbosity=0) out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command("migrate", "migrations", "0002", fake_initial=True, stdout=out, verbosity=1) value = out.getvalue().lower() self.assertIn("migrations.0001_initial... faked", value) self.assertIn("migrations.0002_second... faked", value) # Fake an apply call_command("migrate", "migrations", fake=True, verbosity=0) # Unmigrate everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"}) def test_migrate_conflict_exit(self): """ migrate exits if it detects a conflict. 
""" with self.assertRaisesMessage(CommandError, "Conflicting migrations detected"): call_command("migrate", "migrations") @override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_migrations', }) def test_migrate_check(self): with self.assertRaises(SystemExit): call_command('migrate', 'migrations', '0001', check_unapplied=True) self.assertTableNotExists('migrations_author') self.assertTableNotExists('migrations_tribble') self.assertTableNotExists('migrations_book') @override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_migrations_plan', }) def test_migrate_check_plan(self): out = io.StringIO() with self.assertRaises(SystemExit): call_command( 'migrate', 'migrations', '0001', check_unapplied=True, plan=True, stdout=out, no_color=True, ) self.assertEqual( 'Planned operations:\n' 'migrations.0001_initial\n' ' Create model Salamander\n' ' Raw Python operation -> Grow salamander tail.\n', out.getvalue(), ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_showmigrations_list(self): """ showmigrations --list displays migrations and whether or not they're applied. """ out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: True): call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False) self.assertEqual( '\x1b[1mmigrations\n\x1b[0m' ' [ ] 0001_initial\n' ' [ ] 0002_second\n', out.getvalue().lower() ) call_command("migrate", "migrations", "0001", verbosity=0) out = io.StringIO() # Giving the explicit app_label tests for selective `show_list` in the command call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_initial\n' ' [ ] 0002_second\n', out.getvalue().lower() ) out = io.StringIO() # Applied datetimes are displayed at verbosity 2+. call_command('showmigrations', 'migrations', stdout=out, verbosity=2, no_color=True) migration1 = MigrationRecorder(connection).migration_qs.get(app='migrations', name='0001_initial') self.assertEqual( 'migrations\n' ' [x] 0001_initial (applied at %s)\n' ' [ ] 0002_second\n' % migration1.applied.strftime('%Y-%m-%d %H:%M:%S'), out.getvalue().lower() ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}) def test_showmigrations_plan(self): """ Tests --plan output of showmigrations command """ out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[ ] migrations.0001_initial\n" "[ ] migrations.0003_third\n" "[ ] migrations.0002_second\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[ ] migrations.0001_initial\n" "[ ] migrations.0003_third ... (migrations.0001_initial)\n" "[ ] migrations.0002_second ... (migrations.0001_initial, migrations.0003_third)\n", out.getvalue().lower() ) call_command("migrate", "migrations", "0003", verbosity=0) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[x] migrations.0001_initial\n" "[x] migrations.0003_third\n" "[ ] migrations.0002_second\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[x] migrations.0001_initial\n" "[x] migrations.0003_third ... (migrations.0001_initial)\n" "[ ] migrations.0002_second ... 
(migrations.0001_initial, migrations.0003_third)\n", out.getvalue().lower() ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_plan'}) def test_migrate_plan(self): """Tests migrate --plan output.""" out = io.StringIO() # Show the plan up to the third migration. call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0001_initial\n' ' Create model Salamander\n' ' Raw Python operation -> Grow salamander tail.\n' 'migrations.0002_second\n' ' Create model Book\n' " Raw SQL operation -> ['SELECT * FROM migrations_book']\n" 'migrations.0003_third\n' ' Create model Author\n' " Raw SQL operation -> ['SELECT * FROM migrations_author']\n", out.getvalue() ) try: # Migrate to the third migration. call_command('migrate', 'migrations', '0003', verbosity=0) out = io.StringIO() # Show the plan for when there is nothing to apply. call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' ' No planned migration operations.\n', out.getvalue() ) out = io.StringIO() # Show the plan for reverse migration back to 0001. call_command('migrate', 'migrations', '0001', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0003_third\n' ' Undo Create model Author\n' " Raw SQL operation -> ['SELECT * FROM migrations_book']\n" 'migrations.0002_second\n' ' Undo Create model Book\n' " Raw SQL operation -> ['SELECT * FROM migrations_salamand…\n", out.getvalue() ) out = io.StringIO() # Show the migration plan to fourth, with truncated details. call_command('migrate', 'migrations', '0004', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0004_fourth\n' ' Raw SQL operation -> SELECT * FROM migrations_author WHE…\n', out.getvalue() ) # Show the plan when an operation is irreversible. # Migrate to the fourth migration. call_command('migrate', 'migrations', '0004', verbosity=0) out = io.StringIO() call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0004_fourth\n' ' Raw SQL operation -> IRREVERSIBLE\n', out.getvalue() ) out = io.StringIO() call_command('migrate', 'migrations', '0005', plan=True, stdout=out, no_color=True) # Operation is marked as irreversible only in the revert plan. self.assertEqual( 'Planned operations:\n' 'migrations.0005_fifth\n' ' Raw Python operation\n' ' Raw Python operation\n' ' Raw Python operation -> Feed salamander.\n', out.getvalue() ) call_command('migrate', 'migrations', '0005', verbosity=0) out = io.StringIO() call_command('migrate', 'migrations', '0004', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0005_fifth\n' ' Raw Python operation -> IRREVERSIBLE\n' ' Raw Python operation -> IRREVERSIBLE\n' ' Raw Python operation\n', out.getvalue() ) finally: # Cleanup by unmigrating everything: fake the irreversible, then # migrate all to zero. 
call_command('migrate', 'migrations', '0003', fake=True, verbosity=0) call_command('migrate', 'migrations', 'zero', verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_empty'}) def test_showmigrations_no_migrations(self): out = io.StringIO() call_command('showmigrations', stdout=out, no_color=True) self.assertEqual('migrations\n (no migrations)\n', out.getvalue().lower()) @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app']) def test_showmigrations_unmigrated_app(self): out = io.StringIO() call_command('showmigrations', 'unmigrated_app', stdout=out, no_color=True) self.assertEqual('unmigrated_app\n (no migrations)\n', out.getvalue().lower()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"}) def test_showmigrations_plan_no_migrations(self): """ Tests --plan output of showmigrations command without migrations """ out = io.StringIO() call_command('showmigrations', format='plan', stdout=out, no_color=True) self.assertEqual('(no migrations)\n', out.getvalue().lower()) out = io.StringIO() call_command('showmigrations', format='plan', stdout=out, verbosity=2, no_color=True) self.assertEqual('(no migrations)\n', out.getvalue().lower()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}) def test_showmigrations_plan_squashed(self): """ Tests --plan output of showmigrations command with squashed migrations. """ out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[ ] migrations.1_auto\n" "[ ] migrations.2_auto\n" "[ ] migrations.3_squashed_5\n" "[ ] migrations.6_auto\n" "[ ] migrations.7_auto\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[ ] migrations.1_auto\n" "[ ] migrations.2_auto ... (migrations.1_auto)\n" "[ ] migrations.3_squashed_5 ... (migrations.2_auto)\n" "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n" "[ ] migrations.7_auto ... (migrations.6_auto)\n", out.getvalue().lower() ) call_command("migrate", "migrations", "3_squashed_5", verbosity=0) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[x] migrations.1_auto\n" "[x] migrations.2_auto\n" "[x] migrations.3_squashed_5\n" "[ ] migrations.6_auto\n" "[ ] migrations.7_auto\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[x] migrations.1_auto\n" "[x] migrations.2_auto ... (migrations.1_auto)\n" "[x] migrations.3_squashed_5 ... (migrations.2_auto)\n" "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n" "[ ] migrations.7_auto ... (migrations.6_auto)\n", out.getvalue().lower() ) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.mutate_state_b', 'migrations.migrations_test_apps.alter_fk.author_app', 'migrations.migrations_test_apps.alter_fk.book_app', ]) def test_showmigrations_plan_single_app_label(self): """ `showmigrations --plan app_label` output with a single app_label. """ # Single app with no dependencies on other apps. out = io.StringIO() call_command('showmigrations', 'mutate_state_b', format='plan', stdout=out) self.assertEqual( '[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0002_add_field\n', out.getvalue() ) # Single app with dependencies. 
out = io.StringIO() call_command('showmigrations', 'author_app', format='plan', stdout=out) self.assertEqual( '[ ] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n', out.getvalue() ) # Some migrations already applied. call_command('migrate', 'author_app', '0001', verbosity=0) out = io.StringIO() call_command('showmigrations', 'author_app', format='plan', stdout=out) self.assertEqual( '[X] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n', out.getvalue() ) # Cleanup by unmigrating author_app. call_command('migrate', 'author_app', 'zero', verbosity=0) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.mutate_state_b', 'migrations.migrations_test_apps.alter_fk.author_app', 'migrations.migrations_test_apps.alter_fk.book_app', ]) def test_showmigrations_plan_multiple_app_labels(self): """ `showmigrations --plan app_label` output with multiple app_labels. """ # Multiple apps: author_app depends on book_app; mutate_state_b doesn't # depend on other apps. out = io.StringIO() call_command('showmigrations', 'mutate_state_b', 'author_app', format='plan', stdout=out) self.assertEqual( '[ ] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n' '[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0002_add_field\n', out.getvalue() ) # Multiple apps: args order shouldn't matter (the same result is # expected as above). out = io.StringIO() call_command('showmigrations', 'author_app', 'mutate_state_b', format='plan', stdout=out) self.assertEqual( '[ ] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n' '[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0002_add_field\n', out.getvalue() ) @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app']) def test_showmigrations_plan_app_label_no_migrations(self): out = io.StringIO() call_command('showmigrations', 'unmigrated_app', format='plan', stdout=out, no_color=True) self.assertEqual('(no migrations)\n', out.getvalue()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_sqlmigrate_forwards(self): """ sqlmigrate outputs forward looking SQL. 
""" out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out) output = out.getvalue().lower() index_tx_start = output.find(connection.ops.start_transaction_sql().lower()) index_op_desc_author = output.find('-- create model author') index_create_table = output.find('create table') index_op_desc_tribble = output.find('-- create model tribble') index_op_desc_unique_together = output.find('-- alter unique_together') index_tx_end = output.find(connection.ops.end_transaction_sql().lower()) if connection.features.can_rollback_ddl: self.assertGreater(index_tx_start, -1, "Transaction start not found") self.assertGreater( index_tx_end, index_op_desc_unique_together, "Transaction end not found or found before operation description (unique_together)" ) self.assertGreater( index_op_desc_author, index_tx_start, "Operation description (author) not found or found before transaction start" ) self.assertGreater( index_create_table, index_op_desc_author, "CREATE TABLE not found or found before operation description (author)" ) self.assertGreater( index_op_desc_tribble, index_create_table, "Operation description (tribble) not found or found before CREATE TABLE (author)" ) self.assertGreater( index_op_desc_unique_together, index_op_desc_tribble, "Operation description (unique_together) not found or found before operation description (tribble)" ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_sqlmigrate_backwards(self): """ sqlmigrate outputs reverse looking SQL. """ # Cannot generate the reverse SQL unless we've applied the migration. call_command("migrate", "migrations", verbosity=0) out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out, backwards=True) output = out.getvalue().lower() index_tx_start = output.find(connection.ops.start_transaction_sql().lower()) index_op_desc_unique_together = output.find('-- alter unique_together') index_op_desc_tribble = output.find('-- create model tribble') index_op_desc_author = output.find('-- create model author') index_drop_table = output.rfind('drop table') index_tx_end = output.find(connection.ops.end_transaction_sql().lower()) if connection.features.can_rollback_ddl: self.assertGreater(index_tx_start, -1, "Transaction start not found") self.assertGreater( index_tx_end, index_op_desc_unique_together, "Transaction end not found or found before DROP TABLE" ) self.assertGreater( index_op_desc_unique_together, index_tx_start, "Operation description (unique_together) not found or found before transaction start" ) self.assertGreater( index_op_desc_tribble, index_op_desc_unique_together, "Operation description (tribble) not found or found before operation description (unique_together)" ) self.assertGreater( index_op_desc_author, index_op_desc_tribble, "Operation description (author) not found or found before operation description (tribble)" ) self.assertGreater( index_drop_table, index_op_desc_author, "DROP TABLE not found or found before operation description (author)" ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}) def test_sqlmigrate_for_non_atomic_migration(self): """ Transaction wrappers aren't shown for non-atomic migrations. 
""" out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out) output = out.getvalue().lower() queries = [q.strip() for q in output.splitlines()] if connection.ops.start_transaction_sql(): self.assertNotIn(connection.ops.start_transaction_sql().lower(), queries) self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_sqlmigrate_for_non_transactional_databases(self): """ Transaction wrappers aren't shown for databases that don't support transactional DDL. """ out = io.StringIO() with mock.patch.object(connection.features, 'can_rollback_ddl', False): call_command('sqlmigrate', 'migrations', '0001', stdout=out) output = out.getvalue().lower() queries = [q.strip() for q in output.splitlines()] start_transaction_sql = connection.ops.start_transaction_sql() if start_transaction_sql: self.assertNotIn(start_transaction_sql.lower(), queries) self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_sqlmigrate_ambiguous_prefix_squashed_migrations(self): msg = ( "More than one migration matches '0001' in app 'migrations'. " "Please be more specific." ) with self.assertRaisesMessage(CommandError, msg): call_command('sqlmigrate', 'migrations', '0001') @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_sqlmigrate_squashed_migration(self): out = io.StringIO() call_command('sqlmigrate', 'migrations', '0001_squashed_0002', stdout=out) output = out.getvalue().lower() self.assertIn('-- create model author', output) self.assertIn('-- create model book', output) self.assertNotIn('-- create model tribble', output) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_sqlmigrate_replaced_migration(self): out = io.StringIO() call_command('sqlmigrate', 'migrations', '0001_initial', stdout=out) output = out.getvalue().lower() self.assertIn('-- create model author', output) self.assertIn('-- create model tribble', output) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_no_operations'}) def test_migrations_no_operations(self): err = io.StringIO() call_command('sqlmigrate', 'migrations', '0001_initial', stderr=err) self.assertEqual(err.getvalue(), 'No operations found.\n') @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.migrated_unapplied_app", "migrations.migrations_test_apps.unmigrated_app", ], ) def test_regression_22823_unmigrated_fk_to_migrated_model(self): """ Assuming you have 3 apps, `A`, `B`, and `C`, such that: * `A` has migrations * `B` has a migration we want to apply * `C` has no migrations, but has an FK to `A` When we try to migrate "B", an exception occurs because the "B" was not included in the ProjectState that is used to detect soft-applied migrations (#22823). """ call_command('migrate', 'migrated_unapplied_app', verbosity=0) # unmigrated_app.SillyModel has a foreign key to 'migrations.Tribble', # but that model is only defined in a migration, so the global app # registry never sees it and the reference is left dangling. Remove it # to avoid problems in subsequent tests. 
del apps._pending_operations[('migrations', 'tribble')] @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app_syncdb']) def test_migrate_syncdb_deferred_sql_executed_with_schemaeditor(self): """ For an app without migrations, editor.execute() is used for executing the syncdb deferred SQL. """ stdout = io.StringIO() with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute: call_command('migrate', run_syncdb=True, verbosity=1, stdout=stdout, no_color=True) create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)]) self.assertEqual(create_table_count, 2) # There's at least one deferred SQL for creating the foreign key # index. self.assertGreater(len(execute.mock_calls), 2) stdout = stdout.getvalue() self.assertIn('Synchronize unmigrated apps: unmigrated_app_syncdb', stdout) self.assertIn('Creating tables...', stdout) table_name = truncate_name('unmigrated_app_syncdb_classroom', connection.ops.max_name_length()) self.assertIn('Creating table %s' % table_name, stdout) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_migrate_syncdb_app_with_migrations(self): msg = "Can't use run_syncdb with app 'migrations' as it has migrations." with self.assertRaisesMessage(CommandError, msg): call_command('migrate', 'migrations', run_syncdb=True, verbosity=0) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.unmigrated_app_syncdb', 'migrations.migrations_test_apps.unmigrated_app_simple', ]) def test_migrate_syncdb_app_label(self): """ Running migrate --run-syncdb with an app_label only creates tables for the specified app. """ stdout = io.StringIO() with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute: call_command('migrate', 'unmigrated_app_syncdb', run_syncdb=True, stdout=stdout) create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)]) self.assertEqual(create_table_count, 2) self.assertGreater(len(execute.mock_calls), 2) self.assertIn('Synchronize unmigrated app: unmigrated_app_syncdb', stdout.getvalue()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_migrate_record_replaced(self): """ Running a single squashed migration should record all of the original replaced migrations as run. """ recorder = MigrationRecorder(connection) out = io.StringIO() call_command("migrate", "migrations", verbosity=0) call_command("showmigrations", "migrations", stdout=out, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower() ) applied_migrations = recorder.applied_migrations() self.assertIn(("migrations", "0001_initial"), applied_migrations) self.assertIn(("migrations", "0002_second"), applied_migrations) self.assertIn(("migrations", "0001_squashed_0002"), applied_migrations) # Rollback changes call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_migrate_record_squashed(self): """ Running migrate for a squashed migration should record as run if all of the replaced migrations have been run (#25231). 
""" recorder = MigrationRecorder(connection) recorder.record_applied("migrations", "0001_initial") recorder.record_applied("migrations", "0002_second") out = io.StringIO() call_command("migrate", "migrations", verbosity=0) call_command("showmigrations", "migrations", stdout=out, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower() ) self.assertIn( ("migrations", "0001_squashed_0002"), recorder.applied_migrations() ) # No changes were actually applied so there is nothing to rollback @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_migrate_inconsistent_history(self): """ Running migrate with some migrations applied before their dependencies should not be allowed. """ recorder = MigrationRecorder(connection) recorder.record_applied("migrations", "0002_second") msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial" with self.assertRaisesMessage(InconsistentMigrationHistory, msg): call_command("migrate") applied_migrations = recorder.applied_migrations() self.assertNotIn(("migrations", "0001_initial"), applied_migrations) class MakeMigrationsTests(MigrationTestBase): """ Tests running the makemigrations command. """ def setUp(self): super().setUp() self._old_models = apps.app_configs['migrations'].models.copy() def tearDown(self): apps.app_configs['migrations'].models = self._old_models apps.all_models['migrations'] = self._old_models apps.clear_cache() super().tearDown() def test_files_content(self): self.assertTableNotExists("migrations_unicodemodel") apps.register_model('migrations', UnicodeModel) with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", verbosity=0) # Check for empty __init__.py file in migrations folder init_file = os.path.join(migration_dir, "__init__.py") self.assertTrue(os.path.exists(init_file)) with open(init_file) as fp: content = fp.read() self.assertEqual(content, '') # Check for existing 0001_initial.py file in migration folder initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) with open(initial_file, encoding='utf-8') as fp: content = fp.read() self.assertIn('migrations.CreateModel', content) self.assertIn('initial = True', content) self.assertIn('úñí©óðé µóðéø', content) # Meta.verbose_name self.assertIn('úñí©óðé µóðéøß', content) # Meta.verbose_name_plural self.assertIn('ÚÑÍ¢ÓÐÉ', content) # title.verbose_name self.assertIn('“Ðjáñgó”', content) # title.default def test_makemigrations_order(self): """ makemigrations should recognize number-only migrations (0001.py). """ module = 'migrations.test_migrations_order' with self.temporary_migration_module(module=module) as migration_dir: if hasattr(importlib, 'invalidate_caches'): # importlib caches os.listdir() on some platforms like macOS # (#23850). 
importlib.invalidate_caches() call_command('makemigrations', 'migrations', '--empty', '-n', 'a', '-v', '0') self.assertTrue(os.path.exists(os.path.join(migration_dir, '0002_a.py'))) def test_makemigrations_empty_connections(self): empty_connections = ConnectionHandler({'default': {}}) with mock.patch('django.core.management.commands.makemigrations.connections', new=empty_connections): # with no apps out = io.StringIO() call_command('makemigrations', stdout=out) self.assertIn('No changes detected', out.getvalue()) # with an app with self.temporary_migration_module() as migration_dir: call_command('makemigrations', 'migrations', verbosity=0) init_file = os.path.join(migration_dir, '__init__.py') self.assertTrue(os.path.exists(init_file)) @override_settings(INSTALLED_APPS=['migrations', 'migrations2']) def test_makemigrations_consistency_checks_respect_routers(self): """ The history consistency checks in makemigrations respect settings.DATABASE_ROUTERS. """ def patched_has_table(migration_recorder): if migration_recorder.connection is connections['other']: raise Exception('Other connection') else: return mock.DEFAULT self.assertTableNotExists('migrations_unicodemodel') apps.register_model('migrations', UnicodeModel) with mock.patch.object( MigrationRecorder, 'has_table', autospec=True, side_effect=patched_has_table) as has_table: with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", verbosity=0) initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) self.assertEqual(has_table.call_count, 1) # 'default' is checked # Router says not to migrate 'other' so consistency shouldn't # be checked. with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']): call_command('makemigrations', 'migrations', verbosity=0) self.assertEqual(has_table.call_count, 2) # 'default' again # With a router that doesn't prohibit migrating 'other', # consistency is checked. with self.settings(DATABASE_ROUTERS=['migrations.routers.DefaultOtherRouter']): with self.assertRaisesMessage(Exception, 'Other connection'): call_command('makemigrations', 'migrations', verbosity=0) self.assertEqual(has_table.call_count, 4) # 'default' and 'other' # With a router that doesn't allow migrating on any database, # no consistency checks are made. with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']): with mock.patch.object(TestRouter, 'allow_migrate', return_value=False) as allow_migrate: call_command('makemigrations', 'migrations', verbosity=0) allow_migrate.assert_any_call('other', 'migrations', model_name='UnicodeModel') # allow_migrate() is called with the correct arguments. self.assertGreater(len(allow_migrate.mock_calls), 0) called_aliases = set() for mock_call in allow_migrate.mock_calls: _, call_args, call_kwargs = mock_call connection_alias, app_name = call_args called_aliases.add(connection_alias) # Raises an error if invalid app_name/model_name occurs. apps.get_app_config(app_name).get_model(call_kwargs['model_name']) self.assertEqual(called_aliases, set(connections)) self.assertEqual(has_table.call_count, 4) def test_failing_migration(self): # If a migration fails to serialize, it shouldn't generate an empty file. 
#21280 apps.register_model('migrations', UnserializableModel) with self.temporary_migration_module() as migration_dir: with self.assertRaisesMessage(ValueError, 'Cannot serialize'): call_command("makemigrations", "migrations", verbosity=0) initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertFalse(os.path.exists(initial_file)) def test_makemigrations_conflict_exit(self): """ makemigrations exits if it detects a conflict. """ with self.temporary_migration_module(module="migrations.test_migrations_conflict"): with self.assertRaises(CommandError) as context: call_command("makemigrations") exception_message = str(context.exception) self.assertIn( 'Conflicting migrations detected; multiple leaf nodes ' 'in the migration graph:', exception_message ) self.assertIn('0002_second', exception_message) self.assertIn('0002_conflicting_second', exception_message) self.assertIn('in migrations', exception_message) self.assertIn("To fix them run 'python manage.py makemigrations --merge'", exception_message) def test_makemigrations_merge_no_conflict(self): """ makemigrations exits if in merge mode with no conflicts. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("makemigrations", merge=True, stdout=out) self.assertIn("No conflicts detected to merge.", out.getvalue()) def test_makemigrations_empty_no_app_specified(self): """ makemigrations exits if no app is specified with 'empty' mode. """ msg = 'You must supply at least one app label when using --empty.' with self.assertRaisesMessage(CommandError, msg): call_command("makemigrations", empty=True) def test_makemigrations_empty_migration(self): """ makemigrations properly constructs an empty migration. """ with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", empty=True, verbosity=0) # Check for existing 0001_initial.py file in migration folder initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) with open(initial_file, encoding='utf-8') as fp: content = fp.read() # Remove all whitespace to check for empty dependencies and operations content = content.replace(' ', '') self.assertIn('dependencies=[\n]', content) self.assertIn('operations=[\n]', content) @override_settings(MIGRATION_MODULES={"migrations": None}) def test_makemigrations_disabled_migrations_for_app(self): """ makemigrations raises a nice error when migrations are disabled for an app. """ msg = ( "Django can't create migrations for app 'migrations' because migrations " "have been disabled via the MIGRATION_MODULES setting." ) with self.assertRaisesMessage(ValueError, msg): call_command("makemigrations", "migrations", empty=True, verbosity=0) def test_makemigrations_no_changes_no_apps(self): """ makemigrations exits when there are no changes and no apps are specified. """ out = io.StringIO() call_command("makemigrations", stdout=out) self.assertIn("No changes detected", out.getvalue()) def test_makemigrations_no_changes(self): """ makemigrations exits when there are no changes to an app. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "migrations", stdout=out) self.assertIn("No changes detected in app 'migrations'", out.getvalue()) def test_makemigrations_no_apps_initial(self): """ makemigrations should detect initial is needed on empty migration modules if no app provided. 
""" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_empty"): call_command("makemigrations", stdout=out) self.assertIn("0001_initial.py", out.getvalue()) def test_makemigrations_no_init(self): """Migration directories without an __init__.py file are allowed.""" out = io.StringIO() with self.temporary_migration_module(module='migrations.test_migrations_no_init'): call_command('makemigrations', stdout=out) self.assertIn('0001_initial.py', out.getvalue()) def test_makemigrations_migrations_announce(self): """ makemigrations announces the migration at the default verbosity level. """ out = io.StringIO() with self.temporary_migration_module(): call_command("makemigrations", "migrations", stdout=out) self.assertIn("Migrations for 'migrations'", out.getvalue()) def test_makemigrations_no_common_ancestor(self): """ makemigrations fails to merge migrations with no common ancestor. """ with self.assertRaises(ValueError) as context: with self.temporary_migration_module(module="migrations.test_migrations_no_ancestor"): call_command("makemigrations", "migrations", merge=True) exception_message = str(context.exception) self.assertIn("Could not find common ancestor of", exception_message) self.assertIn("0002_second", exception_message) self.assertIn("0002_conflicting_second", exception_message) def test_makemigrations_interactive_reject(self): """ makemigrations enters and exits interactive mode properly. """ # Monkeypatch interactive questioner to auto reject with mock.patch('builtins.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, verbosity=0) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) def test_makemigrations_interactive_accept(self): """ makemigrations enters interactive mode and merges properly. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) @mock.patch('django.db.migrations.utils.datetime') def test_makemigrations_default_merge_name(self, mock_datetime): mock_datetime.datetime.now.return_value = datetime.datetime(2016, 1, 2, 3, 4) with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge_20160102_0304.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) def test_makemigrations_non_interactive_not_null_addition(self): """ Non-interactive makemigrations fails when a default is missing on a new not-null field. 
""" class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_int = models.IntegerField() class Meta: app_label = "migrations" with self.assertRaises(SystemExit): with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False) def test_makemigrations_non_interactive_not_null_alteration(self): """ Non-interactive makemigrations fails when a default is missing on a field changed to not-null. """ class Author(models.Model): name = models.CharField(max_length=255) slug = models.SlugField() age = models.IntegerField(default=0) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Alter field slug on author", out.getvalue()) def test_makemigrations_non_interactive_no_model_rename(self): """ makemigrations adds and removes a possible model rename in non-interactive mode. """ class RenamedModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Delete model SillyModel", out.getvalue()) self.assertIn("Create model RenamedModel", out.getvalue()) def test_makemigrations_non_interactive_no_field_rename(self): """ makemigrations adds and removes a possible field rename in non-interactive mode. """ class SillyModel(models.Model): silly_rename = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Remove field silly_field from sillymodel", out.getvalue()) self.assertIn("Add field silly_rename to sillymodel", out.getvalue()) def test_makemigrations_handle_merge(self): """ makemigrations properly merges the conflicting migrations with --noinput. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=False, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertIn("Created new merge migration", output) def test_makemigration_merge_dry_run(self): """ makemigrations respects --dry-run option when fixing migration conflicts (#24427). 
""" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) def test_makemigration_merge_dry_run_verbosity_3(self): """ `makemigrations --merge --dry-run` writes the merge migration file to stdout with `verbosity == 3` (#24427). """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, verbosity=3, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) # Additional output caused by verbosity 3 # The complete merge migration file that would be written self.assertIn("class Migration(migrations.Migration):", output) self.assertIn("dependencies = [", output) self.assertIn("('migrations', '0002_second')", output) self.assertIn("('migrations', '0002_conflicting_second')", output) self.assertIn("operations = [", output) self.assertIn("]", output) def test_makemigrations_dry_run(self): """ `makemigrations --dry-run` should not ask for defaults. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_date = models.DateField() # Added field without a default class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out) # Output the expected changes directly, without asking for defaults self.assertIn("Add field silly_date to sillymodel", out.getvalue()) def test_makemigrations_dry_run_verbosity_3(self): """ Allow `makemigrations --dry-run` to output the migrations file to stdout (with verbosity == 3). 
""" class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_char = models.CharField(default="") class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out, verbosity=3) # Normal --dry-run output self.assertIn("- Add field silly_char to sillymodel", out.getvalue()) # Additional output caused by verbosity 3 # The complete migrations file that would be written self.assertIn("class Migration(migrations.Migration):", out.getvalue()) self.assertIn("dependencies = [", out.getvalue()) self.assertIn("('migrations', '0001_initial'),", out.getvalue()) self.assertIn("migrations.AddField(", out.getvalue()) self.assertIn("model_name='sillymodel',", out.getvalue()) self.assertIn("name='silly_char',", out.getvalue()) def test_makemigrations_migrations_modules_path_not_exist(self): """ makemigrations creates migrations when specifying a custom location for migration files using MIGRATION_MODULES if the custom path doesn't already exist. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() migration_module = "migrations.test_migrations_path_doesnt_exist.foo.bar" with self.temporary_migration_module(module=migration_module) as migration_dir: call_command("makemigrations", "migrations", stdout=out) # Migrations file is actually created in the expected path. initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) # Command output indicates the migration is created. self.assertIn(" - Create model SillyModel", out.getvalue()) @override_settings(MIGRATION_MODULES={'migrations': 'some.nonexistent.path'}) def test_makemigrations_migrations_modules_nonexistent_toplevel_package(self): msg = ( 'Could not locate an appropriate location to create migrations ' 'package some.nonexistent.path. Make sure the toplevel package ' 'exists and can be imported.' ) with self.assertRaisesMessage(ValueError, msg): call_command('makemigrations', 'migrations', empty=True, verbosity=0) def test_makemigrations_interactive_by_default(self): """ The user is prompted to merge by default if there are conflicts and merge is True. Answer negative to differentiate it from behavior when --noinput is specified. """ # Monkeypatch interactive questioner to auto reject out = io.StringIO() with mock.patch('builtins.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') # This will fail if interactive is False by default self.assertFalse(os.path.exists(merge_file)) self.assertNotIn("Created new merge migration", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_no_merge(self): """ makemigrations does not raise a CommandError when an unspecified app has conflicting migrations. 
""" with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "migrations", merge=False, verbosity=0) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_merge(self): """ makemigrations does not create a merge for an unspecified app even if it has conflicting migrations. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(app_label="migrated_app") as migration_dir: call_command("makemigrations", "migrated_app", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) self.assertIn("No conflicts detected to merge.", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.conflicting_app_with_dependencies"]) def test_makemigrations_merge_dont_output_dependency_operations(self): """ makemigrations --merge does not output any operations from apps that don't belong to a given app. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='N')): out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command( "makemigrations", "conflicting_app_with_dependencies", merge=True, interactive=True, stdout=out ) val = out.getvalue().lower() self.assertIn('merging conflicting_app_with_dependencies\n', val) self.assertIn( ' branch 0002_conflicting_second\n' ' - create model something\n', val ) self.assertIn( ' branch 0002_second\n' ' - delete model tribble\n' ' - remove field silly_field from author\n' ' - add field rating to author\n' ' - create model book\n', val ) def test_makemigrations_with_custom_name(self): """ makemigrations --name generate a custom migration name. """ with self.temporary_migration_module() as migration_dir: def cmd(migration_count, migration_name, *args): call_command("makemigrations", "migrations", "--verbosity", "0", "--name", migration_name, *args) migration_file = os.path.join(migration_dir, "%s_%s.py" % (migration_count, migration_name)) # Check for existing migration file in migration folder self.assertTrue(os.path.exists(migration_file)) with open(migration_file, encoding='utf-8') as fp: content = fp.read() content = content.replace(" ", "") return content # generate an initial migration migration_name_0001 = "my_initial_migration" content = cmd("0001", migration_name_0001) self.assertIn("dependencies=[\n]", content) # importlib caches os.listdir() on some platforms like macOS # (#23850). if hasattr(importlib, 'invalidate_caches'): importlib.invalidate_caches() # generate an empty migration migration_name_0002 = "my_custom_migration" content = cmd("0002", migration_name_0002, "--empty") self.assertIn("dependencies=[\n('migrations','0001_%s'),\n]" % migration_name_0001, content) self.assertIn("operations=[\n]", content) def test_makemigrations_with_invalid_custom_name(self): msg = 'The migration name must be a valid Python identifier.' 
with self.assertRaisesMessage(CommandError, msg): call_command('makemigrations', 'migrations', '--name', 'invalid name', '--empty') def test_makemigrations_check(self): """ makemigrations --check should exit with a non-zero status when there are changes to an app requiring migrations. """ with self.temporary_migration_module(): with self.assertRaises(SystemExit): call_command("makemigrations", "--check", "migrations", verbosity=0) with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "--check", "migrations", verbosity=0) def test_makemigrations_migration_path_output(self): """ makemigrations should print the relative paths to the migrations unless they are outside of the current tree, in which case the absolute path should be shown. """ out = io.StringIO() apps.register_model('migrations', UnicodeModel) with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_migration_path_output_valueerror(self): """ makemigrations prints the absolute path if os.path.relpath() raises a ValueError when it's impossible to obtain a relative path, e.g. on Windows if Django is installed on a different drive than where the migration files are created. """ out = io.StringIO() with self.temporary_migration_module() as migration_dir: with mock.patch('os.path.relpath', side_effect=ValueError): call_command('makemigrations', 'migrations', stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_inconsistent_history(self): """ makemigrations should raise InconsistentMigrationHistory exception if there are some migrations applied before their dependencies. """ recorder = MigrationRecorder(connection) recorder.record_applied('migrations', '0002_second') msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial" with self.temporary_migration_module(module="migrations.test_migrations"): with self.assertRaisesMessage(InconsistentMigrationHistory, msg): call_command("makemigrations") def test_makemigrations_inconsistent_history_db_failure(self): msg = ( "Got an error checking a consistent migration history performed " "for database connection 'default': could not connect to server" ) with mock.patch( 'django.db.migrations.loader.MigrationLoader.check_consistent_history', side_effect=OperationalError('could not connect to server'), ): with self.temporary_migration_module(): with self.assertWarns(RuntimeWarning) as cm: call_command('makemigrations', verbosity=0) self.assertEqual(str(cm.warning), msg) @mock.patch('builtins.input', return_value='1') @mock.patch('django.db.migrations.questioner.sys.stdin', mock.MagicMock(encoding=sys.getdefaultencoding())) def test_makemigrations_auto_now_add_interactive(self, *args): """ makemigrations prompts the user when adding auto_now_add to an existing model. 
""" class Entry(models.Model): title = models.CharField(max_length=255) creation_date = models.DateTimeField(auto_now_add=True) class Meta: app_label = 'migrations' # Monkeypatch interactive questioner to auto accept with mock.patch('django.db.migrations.questioner.sys.stdout', new_callable=io.StringIO) as prompt_stdout: out = io.StringIO() with self.temporary_migration_module(module='migrations.test_auto_now_add'): call_command('makemigrations', 'migrations', interactive=True, stdout=out) output = out.getvalue() prompt_output = prompt_stdout.getvalue() self.assertIn("You can accept the default 'timezone.now' by pressing 'Enter'", prompt_output) self.assertIn("Add field creation_date to entry", output) class SquashMigrationsTests(MigrationTestBase): """ Tests running the squashmigrations command. """ def test_squashmigrations_squashes(self): """ squashmigrations squashes migrations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command('squashmigrations', 'migrations', '0002', interactive=False, stdout=out, no_color=True) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") self.assertTrue(os.path.exists(squashed_migration_file)) self.assertEqual( out.getvalue(), 'Will squash the following migrations:\n' ' - 0001_initial\n' ' - 0002_second\n' 'Optimizing...\n' ' Optimized from 8 operations to 2 operations.\n' 'Created new squashed migration %s\n' ' You should commit this migration but leave the old ones in place;\n' ' the new migration will be used for new installs. Once you are sure\n' ' all instances of the codebase have applied the migrations you squashed,\n' ' you can delete them.\n' % squashed_migration_file ) def test_squashmigrations_initial_attribute(self): with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=0) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") with open(squashed_migration_file, encoding='utf-8') as fp: content = fp.read() self.assertIn("initial = True", content) def test_squashmigrations_optimizes(self): """ squashmigrations optimizes operations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, stdout=out) self.assertIn("Optimized from 8 operations to 2 operations.", out.getvalue()) def test_ticket_23799_squashmigrations_no_optimize(self): """ squashmigrations --no-optimize doesn't optimize operations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, no_optimize=True, stdout=out) self.assertIn("Skipping optimization", out.getvalue()) def test_squashmigrations_valid_start(self): """ squashmigrations accepts a starting migration. 
""" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_changes") as migration_dir: call_command("squashmigrations", "migrations", "0002", "0003", interactive=False, verbosity=1, stdout=out) squashed_migration_file = os.path.join(migration_dir, "0002_second_squashed_0003_third.py") with open(squashed_migration_file, encoding='utf-8') as fp: content = fp.read() self.assertIn(" ('migrations', '0001_initial')", content) self.assertNotIn("initial = True", content) out = out.getvalue() self.assertNotIn(" - 0001_initial", out) self.assertIn(" - 0002_second", out) self.assertIn(" - 0003_third", out) def test_squashmigrations_invalid_start(self): """ squashmigrations doesn't accept a starting migration after the ending migration. """ with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): msg = ( "The migration 'migrations.0003_third' cannot be found. Maybe " "it comes after the migration 'migrations.0002_second'" ) with self.assertRaisesMessage(CommandError, msg): call_command("squashmigrations", "migrations", "0003", "0002", interactive=False, verbosity=0) def test_squashed_name_with_start_migration_name(self): """--squashed-name specifies the new migration's name.""" squashed_name = 'squashed_name' with self.temporary_migration_module(module='migrations.test_migrations') as migration_dir: call_command( 'squashmigrations', 'migrations', '0001', '0002', squashed_name=squashed_name, interactive=False, verbosity=0, ) squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name) self.assertTrue(os.path.exists(squashed_migration_file)) def test_squashed_name_without_start_migration_name(self): """--squashed-name also works if a start migration is omitted.""" squashed_name = 'squashed_name' with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command( 'squashmigrations', 'migrations', '0001', squashed_name=squashed_name, interactive=False, verbosity=0, ) squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name) self.assertTrue(os.path.exists(squashed_migration_file)) class AppLabelErrorTests(TestCase): """ This class inherits TestCase because MigrationTestBase uses `available_apps = ['migrations']` which means that it's the only installed app. 'django.contrib.auth' must be in INSTALLED_APPS for some of these tests. """ nonexistent_app_error = "No installed app with label 'nonexistent_app'." did_you_mean_auth_error = ( "No installed app with label 'django.contrib.auth'. Did you mean " "'auth'?" 
) def test_makemigrations_nonexistent_app_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('makemigrations', 'nonexistent_app', stderr=err) self.assertIn(self.nonexistent_app_error, err.getvalue()) def test_makemigrations_app_name_specified_as_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('makemigrations', 'django.contrib.auth', stderr=err) self.assertIn(self.did_you_mean_auth_error, err.getvalue()) def test_migrate_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('migrate', 'nonexistent_app') def test_migrate_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('migrate', 'django.contrib.auth') def test_showmigrations_nonexistent_app_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('showmigrations', 'nonexistent_app', stderr=err) self.assertIn(self.nonexistent_app_error, err.getvalue()) def test_showmigrations_app_name_specified_as_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('showmigrations', 'django.contrib.auth', stderr=err) self.assertIn(self.did_you_mean_auth_error, err.getvalue()) def test_sqlmigrate_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('sqlmigrate', 'nonexistent_app', '0002') def test_sqlmigrate_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('sqlmigrate', 'django.contrib.auth', '0002') def test_squashmigrations_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('squashmigrations', 'nonexistent_app', '0002') def test_squashmigrations_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('squashmigrations', 'django.contrib.auth', '0002')
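# ---------------------------------------------------------------------------
# Editor's sketch (not part of the upstream test file): every test above
# drives a management command through call_command() with stdout captured in
# an io.StringIO buffer instead of the console. A minimal standalone version
# of that pattern follows. It assumes a configured Django project (with
# DJANGO_SETTINGS_MODULE set in the environment); the app label 'myapp' is a
# placeholder. The __main__ guard keeps it from running on import.
if __name__ == '__main__':
    import io

    import django
    from django.core.management import call_command

    django.setup()  # requires DJANGO_SETTINGS_MODULE to be set
    out = io.StringIO()
    # dry_run=True plans the migration without writing any file; verbosity=3
    # additionally echoes the full would-be migration file to stdout, which
    # is exactly what test_makemigrations_dry_run_verbosity_3 asserts on.
    call_command('makemigrations', 'myapp', dry_run=True, verbosity=3, stdout=out)
    print(out.getvalue())
# ---------------------------------------------------------------------------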
18d324f8ab698e7196b5e51a4cc75bf52c463e7c9a3875f31350a77a210a29cf
import datetime from decimal import Decimal from unittest import skipIf from django.core.exceptions import FieldDoesNotExist, FieldError from django.db import connection from django.db.models import ( BooleanField, Case, CharField, Count, DateTimeField, Exists, ExpressionWrapper, F, Func, IntegerField, Max, NullBooleanField, OuterRef, Q, Subquery, Sum, Value, When, ) from django.db.models.expressions import RawSQL from django.db.models.functions import Length, Lower from django.test import TestCase, skipUnlessDBFeature from .models import ( Author, Book, Company, DepartmentStore, Employee, Publisher, Store, Ticket, ) def cxOracle_py3_bug(func): """ There's a bug in Django/cx_Oracle with respect to string handling under Python 3 (essentially, they treat Python 3 strings as Python 2 strings rather than unicode). This makes some tests here fail under Python 3, so we mark them as expected failures until someone fixes them in #23843. """ from unittest import expectedFailure from django.db import connection return expectedFailure(func) if connection.vendor == 'oracle' else func class NonAggregateAnnotationTestCase(TestCase): @classmethod def setUpTestData(cls): cls.a1 = Author.objects.create(name='Adrian Holovaty', age=34) cls.a2 = Author.objects.create(name='Jacob Kaplan-Moss', age=35) cls.a3 = Author.objects.create(name='Brad Dayley', age=45) cls.a4 = Author.objects.create(name='James Bennett', age=29) cls.a5 = Author.objects.create(name='Jeffrey Forcier', age=37) cls.a6 = Author.objects.create(name='Paul Bissex', age=29) cls.a7 = Author.objects.create(name='Wesley J. Chun', age=25) cls.a8 = Author.objects.create(name='Peter Norvig', age=57) cls.a9 = Author.objects.create(name='Stuart Russell', age=46) cls.a1.friends.add(cls.a2, cls.a4) cls.a2.friends.add(cls.a1, cls.a7) cls.a4.friends.add(cls.a1) cls.a5.friends.add(cls.a6, cls.a7) cls.a6.friends.add(cls.a5, cls.a7) cls.a7.friends.add(cls.a2, cls.a5, cls.a6) cls.a8.friends.add(cls.a9) cls.a9.friends.add(cls.a8) cls.p1 = Publisher.objects.create(name='Apress', num_awards=3) cls.p2 = Publisher.objects.create(name='Sams', num_awards=1) cls.p3 = Publisher.objects.create(name='Prentice Hall', num_awards=7) cls.p4 = Publisher.objects.create(name='Morgan Kaufmann', num_awards=9) cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0) cls.b1 = Book.objects.create( isbn='159059725', name='The Definitive Guide to Django: Web Development Done Right', pages=447, rating=4.5, price=Decimal('30.00'), contact=cls.a1, publisher=cls.p1, pubdate=datetime.date(2007, 12, 6) ) cls.b2 = Book.objects.create( isbn='067232959', name='Sams Teach Yourself Django in 24 Hours', pages=528, rating=3.0, price=Decimal('23.09'), contact=cls.a3, publisher=cls.p2, pubdate=datetime.date(2008, 3, 3) ) cls.b3 = Book.objects.create( isbn='159059996', name='Practical Django Projects', pages=300, rating=4.0, price=Decimal('29.69'), contact=cls.a4, publisher=cls.p1, pubdate=datetime.date(2008, 6, 23) ) cls.b4 = Book.objects.create( isbn='013235613', name='Python Web Development with Django', pages=350, rating=4.0, price=Decimal('29.69'), contact=cls.a5, publisher=cls.p3, pubdate=datetime.date(2008, 11, 3) ) cls.b5 = Book.objects.create( isbn='013790395', name='Artificial Intelligence: A Modern Approach', pages=1132, rating=4.0, price=Decimal('82.80'), contact=cls.a8, publisher=cls.p3, pubdate=datetime.date(1995, 1, 15) ) cls.b6 = Book.objects.create( isbn='155860191', name='Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', pages=946, 
rating=5.0, price=Decimal('75.00'), contact=cls.a8, publisher=cls.p4, pubdate=datetime.date(1991, 10, 15) ) cls.b1.authors.add(cls.a1, cls.a2) cls.b2.authors.add(cls.a3) cls.b3.authors.add(cls.a4) cls.b4.authors.add(cls.a5, cls.a6, cls.a7) cls.b5.authors.add(cls.a8, cls.a9) cls.b6.authors.add(cls.a8) s1 = Store.objects.create( name='Amazon.com', original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42), friday_night_closing=datetime.time(23, 59, 59) ) s2 = Store.objects.create( name='Books.com', original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37), friday_night_closing=datetime.time(23, 59, 59) ) s3 = Store.objects.create( name="Mamma and Pappa's Books", original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14), friday_night_closing=datetime.time(21, 30) ) s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6) s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6) s3.books.add(cls.b3, cls.b4, cls.b6) def test_basic_annotation(self): books = Book.objects.annotate( is_book=Value(1, output_field=IntegerField())) for book in books: self.assertEqual(book.is_book, 1) def test_basic_f_annotation(self): books = Book.objects.annotate(another_rating=F('rating')) for book in books: self.assertEqual(book.another_rating, book.rating) def test_joined_annotation(self): books = Book.objects.select_related('publisher').annotate( num_awards=F('publisher__num_awards')) for book in books: self.assertEqual(book.num_awards, book.publisher.num_awards) def test_mixed_type_annotation_date_interval(self): active = datetime.datetime(2015, 3, 20, 14, 0, 0) duration = datetime.timedelta(hours=1) expires = datetime.datetime(2015, 3, 20, 14, 0, 0) + duration Ticket.objects.create(active_at=active, duration=duration) t = Ticket.objects.annotate( expires=ExpressionWrapper(F('active_at') + F('duration'), output_field=DateTimeField()) ).first() self.assertEqual(t.expires, expires) def test_mixed_type_annotation_numbers(self): test = self.b1 b = Book.objects.annotate( combined=ExpressionWrapper(F('pages') + F('rating'), output_field=IntegerField()) ).get(isbn=test.isbn) combined = int(test.pages + test.rating) self.assertEqual(b.combined, combined) def test_empty_expression_annotation(self): books = Book.objects.annotate( selected=ExpressionWrapper(Q(pk__in=[]), output_field=BooleanField()) ) self.assertEqual(len(books), Book.objects.count()) self.assertTrue(all(not book.selected for book in books)) books = Book.objects.annotate( selected=ExpressionWrapper(Q(pk__in=Book.objects.none()), output_field=BooleanField()) ) self.assertEqual(len(books), Book.objects.count()) self.assertTrue(all(not book.selected for book in books)) def test_annotate_with_aggregation(self): books = Book.objects.annotate( is_book=Value(1, output_field=IntegerField()), rating_count=Count('rating')) for book in books: self.assertEqual(book.is_book, 1) self.assertEqual(book.rating_count, 1) def test_aggregate_over_annotation(self): agg = Author.objects.annotate(other_age=F('age')).aggregate(otherage_sum=Sum('other_age')) other_agg = Author.objects.aggregate(age_sum=Sum('age')) self.assertEqual(agg['otherage_sum'], other_agg['age_sum']) @skipUnlessDBFeature('can_distinct_on_fields') def test_distinct_on_with_annotation(self): store = Store.objects.create( name='test store', original_opening=datetime.datetime.now(), friday_night_closing=datetime.time(21, 00, 00), ) names = [ 'Theodore Roosevelt', 'Eleanor Roosevelt', 'Franklin Roosevelt', 'Ned Stark', 'Catelyn Stark', ] for name in names: Employee.objects.create( store=store, 
first_name=name.split()[0], last_name=name.split()[1], age=30, salary=2000, ) people = Employee.objects.annotate( name_lower=Lower('last_name'), ).distinct('name_lower') self.assertEqual({p.last_name for p in people}, {'Stark', 'Roosevelt'}) self.assertEqual(len(people), 2) people2 = Employee.objects.annotate( test_alias=F('store__name'), ).distinct('test_alias') self.assertEqual(len(people2), 1) lengths = Employee.objects.annotate( name_len=Length('first_name'), ).distinct('name_len').values_list('name_len', flat=True) self.assertCountEqual(lengths, [3, 7, 8]) def test_filter_annotation(self): books = Book.objects.annotate( is_book=Value(1, output_field=IntegerField()) ).filter(is_book=1) for book in books: self.assertEqual(book.is_book, 1) def test_filter_annotation_with_f(self): books = Book.objects.annotate( other_rating=F('rating') ).filter(other_rating=3.5) for book in books: self.assertEqual(book.other_rating, 3.5) def test_filter_annotation_with_double_f(self): books = Book.objects.annotate( other_rating=F('rating') ).filter(other_rating=F('rating')) for book in books: self.assertEqual(book.other_rating, book.rating) def test_filter_agg_with_double_f(self): books = Book.objects.annotate( sum_rating=Sum('rating') ).filter(sum_rating=F('sum_rating')) for book in books: self.assertEqual(book.sum_rating, book.rating) def test_filter_wrong_annotation(self): with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."): list(Book.objects.annotate( sum_rating=Sum('rating') ).filter(sum_rating=F('nope'))) def test_decimal_annotation(self): salary = Decimal(10) ** -Employee._meta.get_field('salary').decimal_places Employee.objects.create( first_name='Max', last_name='Paine', store=Store.objects.first(), age=23, salary=salary, ) self.assertEqual( Employee.objects.annotate(new_salary=F('salary') / 10).get().new_salary, salary / 10, ) def test_filter_decimal_annotation(self): qs = Book.objects.annotate(new_price=F('price') + 1).filter(new_price=Decimal(31)).values_list('new_price') self.assertEqual(qs.get(), (Decimal(31),)) def test_combined_annotation_commutative(self): book1 = Book.objects.annotate(adjusted_rating=F('rating') + 2).get(pk=self.b1.pk) book2 = Book.objects.annotate(adjusted_rating=2 + F('rating')).get(pk=self.b1.pk) self.assertEqual(book1.adjusted_rating, book2.adjusted_rating) book1 = Book.objects.annotate(adjusted_rating=F('rating') + None).get(pk=self.b1.pk) book2 = Book.objects.annotate(adjusted_rating=None + F('rating')).get(pk=self.b1.pk) self.assertEqual(book1.adjusted_rating, book2.adjusted_rating) def test_update_with_annotation(self): book_preupdate = Book.objects.get(pk=self.b2.pk) Book.objects.annotate(other_rating=F('rating') - 1).update(rating=F('other_rating')) book_postupdate = Book.objects.get(pk=self.b2.pk) self.assertEqual(book_preupdate.rating - 1, book_postupdate.rating) def test_annotation_with_m2m(self): books = Book.objects.annotate(author_age=F('authors__age')).filter(pk=self.b1.pk).order_by('author_age') self.assertEqual(books[0].author_age, 34) self.assertEqual(books[1].author_age, 35) def test_annotation_reverse_m2m(self): books = Book.objects.annotate( store_name=F('store__name'), ).filter( name='Practical Django Projects', ).order_by('store_name') self.assertQuerysetEqual( books, [ 'Amazon.com', 'Books.com', 'Mamma and Pappa\'s Books' ], lambda b: b.store_name ) def test_values_annotation(self): """ Annotations can reference fields in a values clause, and contribute to an existing values clause. 
""" # annotate references a field in values() qs = Book.objects.values('rating').annotate(other_rating=F('rating') - 1) book = qs.get(pk=self.b1.pk) self.assertEqual(book['rating'] - 1, book['other_rating']) # filter refs the annotated value book = qs.get(other_rating=4) self.assertEqual(book['other_rating'], 4) # can annotate an existing values with a new field book = qs.annotate(other_isbn=F('isbn')).get(other_rating=4) self.assertEqual(book['other_rating'], 4) self.assertEqual(book['other_isbn'], '155860191') def test_values_with_pk_annotation(self): # annotate references a field in values() with pk publishers = Publisher.objects.values('id', 'book__rating').annotate(total=Sum('book__rating')) for publisher in publishers.filter(pk=self.p1.pk): self.assertEqual(publisher['book__rating'], publisher['total']) @skipUnlessDBFeature('allows_group_by_pk') def test_rawsql_group_by_collapse(self): raw = RawSQL('SELECT MIN(id) FROM annotations_book', []) qs = Author.objects.values('id').annotate( min_book_id=raw, count_friends=Count('friends'), ).order_by() _, _, group_by = qs.query.get_compiler(using='default').pre_sql_setup() self.assertEqual(len(group_by), 1) self.assertNotEqual(raw, group_by[0]) def test_defer_annotation(self): """ Deferred attributes can be referenced by an annotation, but they are not themselves deferred, and cannot be deferred. """ qs = Book.objects.defer('rating').annotate(other_rating=F('rating') - 1) with self.assertNumQueries(2): book = qs.get(other_rating=4) self.assertEqual(book.rating, 5) self.assertEqual(book.other_rating, 4) with self.assertRaisesMessage(FieldDoesNotExist, "Book has no field named 'other_rating'"): book = qs.defer('other_rating').get(other_rating=4) def test_mti_annotations(self): """ Fields on an inherited model can be referenced by an annotated field. 
""" d = DepartmentStore.objects.create( name='Angus & Robinson', original_opening=datetime.date(2014, 3, 8), friday_night_closing=datetime.time(21, 00, 00), chain='Westfield' ) books = Book.objects.filter(rating__gt=4) for b in books: d.books.add(b) qs = DepartmentStore.objects.annotate( other_name=F('name'), other_chain=F('chain'), is_open=Value(True, BooleanField()), book_isbn=F('books__isbn') ).order_by('book_isbn').filter(chain='Westfield') self.assertQuerysetEqual( qs, [ ('Angus & Robinson', 'Westfield', True, '155860191'), ('Angus & Robinson', 'Westfield', True, '159059725') ], lambda d: (d.other_name, d.other_chain, d.is_open, d.book_isbn) ) def test_null_annotation(self): """ Annotating None onto a model round-trips """ book = Book.objects.annotate(no_value=Value(None, output_field=IntegerField())).first() self.assertIsNone(book.no_value) def test_order_by_annotation(self): authors = Author.objects.annotate(other_age=F('age')).order_by('other_age') self.assertQuerysetEqual( authors, [ 25, 29, 29, 34, 35, 37, 45, 46, 57, ], lambda a: a.other_age ) def test_order_by_aggregate(self): authors = Author.objects.values('age').annotate(age_count=Count('age')).order_by('age_count', 'age') self.assertQuerysetEqual( authors, [ (25, 1), (34, 1), (35, 1), (37, 1), (45, 1), (46, 1), (57, 1), (29, 2), ], lambda a: (a['age'], a['age_count']) ) def test_raw_sql_with_inherited_field(self): DepartmentStore.objects.create( name='Angus & Robinson', original_opening=datetime.date(2014, 3, 8), friday_night_closing=datetime.time(21), chain='Westfield', area=123, ) tests = ( ('name', 'Angus & Robinson'), ('surface', 123), ("case when name='Angus & Robinson' then chain else name end", 'Westfield'), ) for sql, expected_result in tests: with self.subTest(sql=sql): self.assertSequenceEqual( DepartmentStore.objects.annotate( annotation=RawSQL(sql, ()), ).values_list('annotation', flat=True), [expected_result], ) def test_annotate_exists(self): authors = Author.objects.annotate(c=Count('id')).filter(c__gt=1) self.assertFalse(authors.exists()) def test_column_field_ordering(self): """ Columns are aligned in the correct order for resolve_columns. This test will fail on MySQL if column ordering is out. Column fields should be aligned as: 1. extra_select 2. model_fields 3. annotation_fields 4. 
model_related_fields """ store = Store.objects.first() Employee.objects.create(id=1, first_name='Max', manager=True, last_name='Paine', store=store, age=23, salary=Decimal(50000.00)) Employee.objects.create(id=2, first_name='Buffy', manager=False, last_name='Summers', store=store, age=18, salary=Decimal(40000.00)) qs = Employee.objects.extra( select={'random_value': '42'} ).select_related('store').annotate( annotated_value=Value(17, output_field=IntegerField()) ) rows = [ (1, 'Max', True, 42, 'Paine', 23, Decimal(50000.00), store.name, 17), (2, 'Buffy', False, 42, 'Summers', 18, Decimal(40000.00), store.name, 17) ] self.assertQuerysetEqual( qs.order_by('id'), rows, lambda e: ( e.id, e.first_name, e.manager, e.random_value, e.last_name, e.age, e.salary, e.store.name, e.annotated_value)) def test_column_field_ordering_with_deferred(self): store = Store.objects.first() Employee.objects.create(id=1, first_name='Max', manager=True, last_name='Paine', store=store, age=23, salary=Decimal(50000.00)) Employee.objects.create(id=2, first_name='Buffy', manager=False, last_name='Summers', store=store, age=18, salary=Decimal(40000.00)) qs = Employee.objects.extra( select={'random_value': '42'} ).select_related('store').annotate( annotated_value=Value(17, output_field=IntegerField()) ) rows = [ (1, 'Max', True, 42, 'Paine', 23, Decimal(50000.00), store.name, 17), (2, 'Buffy', False, 42, 'Summers', 18, Decimal(40000.00), store.name, 17) ] # and we respect deferred columns! self.assertQuerysetEqual( qs.defer('age').order_by('id'), rows, lambda e: ( e.id, e.first_name, e.manager, e.random_value, e.last_name, e.age, e.salary, e.store.name, e.annotated_value)) @cxOracle_py3_bug def test_custom_functions(self): Company(name='Apple', motto=None, ticker_name='APPL', description='Beautiful Devices').save() Company(name='Django Software Foundation', motto=None, ticker_name=None, description=None).save() Company(name='Google', motto='Do No Evil', ticker_name='GOOG', description='Internet Company').save() Company(name='Yahoo', motto=None, ticker_name=None, description='Internet Company').save() qs = Company.objects.annotate( tagline=Func( F('motto'), F('ticker_name'), F('description'), Value('No Tag'), function='COALESCE' ) ).order_by('name') self.assertQuerysetEqual( qs, [ ('Apple', 'APPL'), ('Django Software Foundation', 'No Tag'), ('Google', 'Do No Evil'), ('Yahoo', 'Internet Company') ], lambda c: (c.name, c.tagline) ) @cxOracle_py3_bug def test_custom_functions_can_ref_other_functions(self): Company(name='Apple', motto=None, ticker_name='APPL', description='Beautiful Devices').save() Company(name='Django Software Foundation', motto=None, ticker_name=None, description=None).save() Company(name='Google', motto='Do No Evil', ticker_name='GOOG', description='Internet Company').save() Company(name='Yahoo', motto=None, ticker_name=None, description='Internet Company').save() class Lower(Func): function = 'LOWER' qs = Company.objects.annotate( tagline=Func( F('motto'), F('ticker_name'), F('description'), Value('No Tag'), function='COALESCE', ) ).annotate( tagline_lower=Lower(F('tagline'), output_field=CharField()) ).order_by('name') # LOWER function supported by: # oracle, postgres, mysql, sqlite, sqlserver self.assertQuerysetEqual( qs, [ ('Apple', 'APPL'.lower()), ('Django Software Foundation', 'No Tag'.lower()), ('Google', 'Do No Evil'.lower()), ('Yahoo', 'Internet Company'.lower()) ], lambda c: (c.name, c.tagline_lower) ) def test_boolean_value_annotation(self): books = Book.objects.annotate( is_book=Value(True, 
output_field=BooleanField()), is_pony=Value(False, output_field=BooleanField()), is_none=Value(None, output_field=BooleanField(null=True)), is_none_old=Value(None, output_field=NullBooleanField()), ) self.assertGreater(len(books), 0) for book in books: self.assertIs(book.is_book, True) self.assertIs(book.is_pony, False) self.assertIsNone(book.is_none) self.assertIsNone(book.is_none_old) def test_annotation_in_f_grouped_by_annotation(self): qs = ( Publisher.objects.annotate(multiplier=Value(3)) # group by option => sum of value * multiplier .values('name') .annotate(multiplied_value_sum=Sum(F('multiplier') * F('num_awards'))) .order_by() ) self.assertCountEqual( qs, [ {'multiplied_value_sum': 9, 'name': 'Apress'}, {'multiplied_value_sum': 0, 'name': "Jonno's House of Books"}, {'multiplied_value_sum': 27, 'name': 'Morgan Kaufmann'}, {'multiplied_value_sum': 21, 'name': 'Prentice Hall'}, {'multiplied_value_sum': 3, 'name': 'Sams'}, ] ) def test_arguments_must_be_expressions(self): msg = 'QuerySet.annotate() received non-expression(s): %s.' with self.assertRaisesMessage(TypeError, msg % BooleanField()): Book.objects.annotate(BooleanField()) with self.assertRaisesMessage(TypeError, msg % True): Book.objects.annotate(is_book=True) with self.assertRaisesMessage(TypeError, msg % ', '.join([str(BooleanField()), 'True'])): Book.objects.annotate(BooleanField(), Value(False), is_book=True) def test_chaining_annotation_filter_with_m2m(self): qs = Author.objects.filter( name='Adrian Holovaty', friends__age=35, ).annotate( jacob_name=F('friends__name'), ).filter( friends__age=29, ).annotate( james_name=F('friends__name'), ).values('jacob_name', 'james_name') self.assertCountEqual( qs, [{'jacob_name': 'Jacob Kaplan-Moss', 'james_name': 'James Bennett'}], ) def test_annotation_filter_with_subquery(self): long_books_qs = Book.objects.filter( publisher=OuterRef('pk'), pages__gt=400, ).values('publisher').annotate(count=Count('pk')).values('count') publisher_books_qs = Publisher.objects.annotate( total_books=Count('book'), ).filter( total_books=Subquery(long_books_qs, output_field=IntegerField()), ).values('name') self.assertCountEqual(publisher_books_qs, [{'name': 'Sams'}, {'name': 'Morgan Kaufmann'}]) def test_annotation_exists_aggregate_values_chaining(self): qs = Book.objects.values('publisher').annotate( has_authors=Exists(Book.authors.through.objects.filter(book=OuterRef('pk'))), max_pubdate=Max('pubdate'), ).values_list('max_pubdate', flat=True).order_by('max_pubdate') self.assertCountEqual(qs, [ datetime.date(1991, 10, 15), datetime.date(2008, 3, 3), datetime.date(2008, 6, 23), datetime.date(2008, 11, 3), ]) @skipIf( connection.vendor == 'mysql' and 'ONLY_FULL_GROUP_BY' in connection.sql_mode, 'GROUP BY optimization does not work properly when ONLY_FULL_GROUP_BY ' 'mode is enabled on MySQL, see #31331.', ) def test_annotation_aggregate_with_m2o(self): qs = Author.objects.filter(age__lt=30).annotate( max_pages=Case( When(book_contact_set__isnull=True, then=Value(0)), default=Max(F('book__pages')), output_field=IntegerField(), ), ).values('name', 'max_pages') self.assertCountEqual(qs, [ {'name': 'James Bennett', 'max_pages': 300}, {'name': 'Paul Bissex', 'max_pages': 0}, {'name': 'Wesley J. Chun', 'max_pages': 0}, ])
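# ---------------------------------------------------------------------------
# Editor's sketch (not part of the upstream test file): a condensed example
# of the core pattern these tests exercise. QuerySet.annotate() attaches
# SQL-computed columns to each returned row, and the aliases can then be used
# in filter() and order_by() like ordinary model fields. It reuses the Book
# model, F, and Count already imported at the top of this module; the alias
# names are arbitrary, and the __main__ guard (plus the test-settings
# requirement) keeps it from running on import.
if __name__ == '__main__':
    books = (
        Book.objects
        .annotate(new_price=F('price') + 1)      # per-row arithmetic, done in SQL
        .annotate(num_authors=Count('authors'))  # aggregate over the m2m join
        .filter(num_authors__gt=1)               # annotations filter like fields
        .order_by('-num_authors')
    )
    for book in books:
        print(book.name, book.new_price, book.num_authors)
# ---------------------------------------------------------------------------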
fefe73a6c55ff92c579276412092cc572e8321587d5182463252fc77b31dcd9e
# Unit tests for cache framework # Uses whatever cache backend is set in the test settings file. import copy import io import os import pickle import re import shutil import tempfile import threading import time import unittest from pathlib import Path from unittest import mock from django.conf import settings from django.core import management, signals from django.core.cache import ( DEFAULT_CACHE_ALIAS, CacheKeyWarning, InvalidCacheKey, cache, caches, ) from django.core.cache.backends.base import InvalidCacheBackendError from django.core.cache.utils import make_template_fragment_key from django.db import close_old_connections, connection, connections from django.http import ( HttpRequest, HttpResponse, HttpResponseNotModified, StreamingHttpResponse, ) from django.middleware.cache import ( CacheMiddleware, FetchFromCacheMiddleware, UpdateCacheMiddleware, ) from django.middleware.csrf import CsrfViewMiddleware from django.template import engines from django.template.context_processors import csrf from django.template.response import TemplateResponse from django.test import ( RequestFactory, SimpleTestCase, TestCase, TransactionTestCase, override_settings, ) from django.test.signals import setting_changed from django.utils import timezone, translation from django.utils.cache import ( get_cache_key, learn_cache_key, patch_cache_control, patch_vary_headers, ) from django.views.decorators.cache import cache_control, cache_page from .models import Poll, expensive_calculation # functions/classes for complex data type tests def f(): return 42 class C: def m(self): return 24 class Unpicklable: def __getstate__(self): raise pickle.PickleError() def empty_response(request): return HttpResponse() KEY_ERRORS_WITH_MEMCACHED_MSG = ( 'Cache key contains characters that will cause errors if used with ' 'memcached: %r' ) @override_settings(CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } }) class DummyCacheTests(SimpleTestCase): # The dummy cache backend doesn't really behave like a real backend, # so it has its own test case.
def test_simple(self): "Dummy cache backend ignores cache set calls" cache.set("key", "value") self.assertIsNone(cache.get("key")) def test_add(self): "Add doesn't do anything in dummy cache backend" self.assertIs(cache.add("addkey1", "value"), True) self.assertIs(cache.add("addkey1", "newvalue"), True) self.assertIsNone(cache.get("addkey1")) def test_non_existent(self): "Nonexistent keys aren't found in the dummy cache backend" self.assertIsNone(cache.get("does_not_exist")) self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") def test_get_many(self): "get_many returns nothing for the dummy cache backend" cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'}) self.assertEqual(cache.get_many(['a', 'c', 'd']), {}) self.assertEqual(cache.get_many(['a', 'b', 'e']), {}) def test_get_many_invalid_key(self): msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces' with self.assertWarnsMessage(CacheKeyWarning, msg): cache.get_many(['key with spaces']) def test_delete(self): "Cache deletion is transparently ignored on the dummy cache backend" cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertIsNone(cache.get("key1")) self.assertIs(cache.delete("key1"), False) self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_has_key(self): "The has_key method doesn't ever return True for the dummy cache backend" cache.set("hello1", "goodbye1") self.assertIs(cache.has_key("hello1"), False) self.assertIs(cache.has_key("goodbye1"), False) def test_in(self): "The in operator doesn't ever return True for the dummy cache backend" cache.set("hello2", "goodbye2") self.assertNotIn("hello2", cache) self.assertNotIn("goodbye2", cache) def test_incr(self): "Dummy cache values can't be incremented" cache.set('answer', 42) with self.assertRaises(ValueError): cache.incr('answer') with self.assertRaises(ValueError): cache.incr('does_not_exist') def test_decr(self): "Dummy cache values can't be decremented" cache.set('answer', 42) with self.assertRaises(ValueError): cache.decr('answer') with self.assertRaises(ValueError): cache.decr('does_not_exist') def test_touch(self): """Dummy cache can't do touch().""" self.assertIs(cache.touch('whatever'), False) def test_data_types(self): "All data types are ignored equally by the dummy cache" stuff = { 'string': 'this is a string', 'int': 42, 'list': [1, 2, 3, 4], 'tuple': (1, 2, 3, 4), 'dict': {'A': 1, 'B': 2}, 'function': f, 'class': C, } cache.set("stuff", stuff) self.assertIsNone(cache.get("stuff")) def test_expiration(self): "Expiration has no effect on the dummy cache" cache.set('expire1', 'very quickly', 1) cache.set('expire2', 'very quickly', 1) cache.set('expire3', 'very quickly', 1) time.sleep(2) self.assertIsNone(cache.get("expire1")) self.assertIs(cache.add("expire2", "newvalue"), True) self.assertIsNone(cache.get("expire2")) self.assertIs(cache.has_key("expire3"), False) def test_unicode(self): "Unicode values are ignored by the dummy cache" stuff = { 'ascii': 'ascii_value', 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1', 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2', 'ascii2': {'x': 1} } for (key, value) in stuff.items(): with self.subTest(key=key): cache.set(key, value) self.assertIsNone(cache.get(key)) def test_set_many(self): "set_many does nothing for the dummy cache backend" self.assertEqual(cache.set_many({'a': 1, 'b': 2}), []) self.assertEqual(cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1'), []) def test_set_many_invalid_key(self): msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces' with 
self.assertWarnsMessage(CacheKeyWarning, msg): cache.set_many({'key with spaces': 'foo'}) def test_delete_many(self): "delete_many does nothing for the dummy cache backend" cache.delete_many(['a', 'b']) def test_delete_many_invalid_key(self): msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces' with self.assertWarnsMessage(CacheKeyWarning, msg): cache.delete_many(['key with spaces']) def test_clear(self): "clear does nothing for the dummy cache backend" cache.clear() def test_incr_version(self): "Dummy cache versions can't be incremented" cache.set('answer', 42) with self.assertRaises(ValueError): cache.incr_version('answer') with self.assertRaises(ValueError): cache.incr_version('does_not_exist') def test_decr_version(self): "Dummy cache versions can't be decremented" cache.set('answer', 42) with self.assertRaises(ValueError): cache.decr_version('answer') with self.assertRaises(ValueError): cache.decr_version('does_not_exist') def test_get_or_set(self): self.assertEqual(cache.get_or_set('mykey', 'default'), 'default') self.assertIsNone(cache.get_or_set('mykey', None)) def test_get_or_set_callable(self): def my_callable(): return 'default' self.assertEqual(cache.get_or_set('mykey', my_callable), 'default') self.assertEqual(cache.get_or_set('mykey', my_callable()), 'default') def custom_key_func(key, key_prefix, version): "A customized cache key function" return 'CUSTOM-' + '-'.join([key_prefix, str(version), key]) _caches_setting_base = { 'default': {}, 'prefix': {'KEY_PREFIX': 'cacheprefix{}'.format(os.getpid())}, 'v2': {'VERSION': 2}, 'custom_key': {'KEY_FUNCTION': custom_key_func}, 'custom_key2': {'KEY_FUNCTION': 'cache.tests.custom_key_func'}, 'cull': {'OPTIONS': {'MAX_ENTRIES': 30}}, 'zero_cull': {'OPTIONS': {'CULL_FREQUENCY': 0, 'MAX_ENTRIES': 30}}, } def caches_setting_for_tests(base=None, exclude=None, **params): # `base` is used to pull in the memcached config from the original settings, # `exclude` is a set of cache names denoting which `_caches_setting_base` keys # should be omitted. # `params` are test specific overrides and `_caches_setting_base` is the # base config for the tests.
# This results in the following search order: # params -> _caches_setting_base -> base base = base or {} exclude = exclude or set() setting = {k: base.copy() for k in _caches_setting_base if k not in exclude} for key, cache_params in setting.items(): cache_params.update(_caches_setting_base[key]) cache_params.update(params) return setting class BaseCacheTests: # A common set of tests to apply to all cache backends factory = RequestFactory() def tearDown(self): cache.clear() def test_simple(self): # Simple cache set/get works cache.set("key", "value") self.assertEqual(cache.get("key"), "value") def test_default_used_when_none_is_set(self): """If None is cached, get() returns it instead of the default.""" cache.set('key_default_none', None) self.assertIsNone(cache.get('key_default_none', default='default')) def test_add(self): # A key can be added to a cache self.assertIs(cache.add("addkey1", "value"), True) self.assertIs(cache.add("addkey1", "newvalue"), False) self.assertEqual(cache.get("addkey1"), "value") def test_prefix(self): # Test for same cache key conflicts between shared backend cache.set('somekey', 'value') # should not be set in the prefixed cache self.assertIs(caches['prefix'].has_key('somekey'), False) caches['prefix'].set('somekey', 'value2') self.assertEqual(cache.get('somekey'), 'value') self.assertEqual(caches['prefix'].get('somekey'), 'value2') def test_non_existent(self): """Nonexistent cache keys return as None/default.""" self.assertIsNone(cache.get("does_not_exist")) self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") def test_get_many(self): # Multiple cache keys can be returned using get_many cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'}) self.assertEqual(cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'}) self.assertEqual(cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'}) self.assertEqual(cache.get_many(iter(['a', 'b', 'e'])), {'a': 'a', 'b': 'b'}) def test_delete(self): # Cache keys can be deleted cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertEqual(cache.get("key1"), "spam") self.assertIs(cache.delete("key1"), True) self.assertIsNone(cache.get("key1")) self.assertEqual(cache.get("key2"), "eggs") def test_delete_nonexistent(self): self.assertIs(cache.delete('nonexistent_key'), False) def test_has_key(self): # The cache can be inspected for cache keys cache.set("hello1", "goodbye1") self.assertIs(cache.has_key("hello1"), True) self.assertIs(cache.has_key("goodbye1"), False) cache.set("no_expiry", "here", None) self.assertIs(cache.has_key("no_expiry"), True) def test_in(self): # The in operator can be used to inspect cache contents cache.set("hello2", "goodbye2") self.assertIn("hello2", cache) self.assertNotIn("goodbye2", cache) def test_incr(self): # Cache values can be incremented cache.set('answer', 41) self.assertEqual(cache.incr('answer'), 42) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.incr('answer', 10), 52) self.assertEqual(cache.get('answer'), 52) self.assertEqual(cache.incr('answer', -10), 42) with self.assertRaises(ValueError): cache.incr('does_not_exist') def test_decr(self): # Cache values can be decremented cache.set('answer', 43) self.assertEqual(cache.decr('answer'), 42) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.decr('answer', 10), 32) self.assertEqual(cache.get('answer'), 32) self.assertEqual(cache.decr('answer', -10), 42) with self.assertRaises(ValueError): cache.decr('does_not_exist') def test_close(self): self.assertTrue(hasattr(cache, 'close')) 
cache.close() def test_data_types(self): # Many different data types can be cached stuff = { 'string': 'this is a string', 'int': 42, 'list': [1, 2, 3, 4], 'tuple': (1, 2, 3, 4), 'dict': {'A': 1, 'B': 2}, 'function': f, 'class': C, } cache.set("stuff", stuff) self.assertEqual(cache.get("stuff"), stuff) def test_cache_read_for_model_instance(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() my_poll = Poll.objects.create(question="Well?") self.assertEqual(Poll.objects.count(), 1) pub_date = my_poll.pub_date cache.set('question', my_poll) cached_poll = cache.get('question') self.assertEqual(cached_poll.pub_date, pub_date) # We only want the default expensive calculation run once self.assertEqual(expensive_calculation.num_runs, 1) def test_cache_write_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache write expensive_calculation.num_runs = 0 Poll.objects.all().delete() Poll.objects.create(question="What?") self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) self.assertEqual(expensive_calculation.num_runs, 1) cache.set('deferred_queryset', defer_qs) # cache set should not re-evaluate default functions self.assertEqual(expensive_calculation.num_runs, 1) def test_cache_read_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() Poll.objects.create(question="What?") self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) cache.set('deferred_queryset', defer_qs) self.assertEqual(expensive_calculation.num_runs, 1) runs_before_cache_read = expensive_calculation.num_runs cache.get('deferred_queryset') # We only want the default expensive calculation run on creation and set self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read) def test_expiration(self): # Cache values can be set to expire cache.set('expire1', 'very quickly', 1) cache.set('expire2', 'very quickly', 1) cache.set('expire3', 'very quickly', 1) time.sleep(2) self.assertIsNone(cache.get("expire1")) self.assertIs(cache.add("expire2", "newvalue"), True) self.assertEqual(cache.get("expire2"), "newvalue") self.assertIs(cache.has_key("expire3"), False) def test_touch(self): # cache.touch() updates the timeout. cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1', timeout=4), True) time.sleep(2) self.assertIs(cache.has_key('expire1'), True) time.sleep(3) self.assertIs(cache.has_key('expire1'), False) # cache.touch() works without the timeout argument. 
cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1'), True) time.sleep(2) self.assertIs(cache.has_key('expire1'), True) self.assertIs(cache.touch('nonexistent'), False) def test_unicode(self): # Unicode values can be cached stuff = { 'ascii': 'ascii_value', 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1', 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2', 'ascii2': {'x': 1} } # Test `set` for (key, value) in stuff.items(): with self.subTest(key=key): cache.set(key, value) self.assertEqual(cache.get(key), value) # Test `add` for (key, value) in stuff.items(): with self.subTest(key=key): self.assertIs(cache.delete(key), True) self.assertIs(cache.add(key, value), True) self.assertEqual(cache.get(key), value) # Test `set_many` for (key, value) in stuff.items(): self.assertIs(cache.delete(key), True) cache.set_many(stuff) for (key, value) in stuff.items(): with self.subTest(key=key): self.assertEqual(cache.get(key), value) def test_binary_string(self): # Binary strings should be cacheable from zlib import compress, decompress value = 'value_to_be_compressed' compressed_value = compress(value.encode()) # Test set cache.set('binary1', compressed_value) compressed_result = cache.get('binary1') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test add self.assertIs(cache.add('binary1-add', compressed_value), True) compressed_result = cache.get('binary1-add') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test set_many cache.set_many({'binary1-set_many': compressed_value}) compressed_result = cache.get('binary1-set_many') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) def test_set_many(self): # Multiple keys can be set using set_many cache.set_many({"key1": "spam", "key2": "eggs"}) self.assertEqual(cache.get("key1"), "spam") self.assertEqual(cache.get("key2"), "eggs") def test_set_many_returns_empty_list_on_success(self): """set_many() returns an empty list when all keys are inserted.""" failing_keys = cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertEqual(failing_keys, []) def test_set_many_expiration(self): # set_many takes a second ``timeout`` parameter cache.set_many({"key1": "spam", "key2": "eggs"}, 1) time.sleep(2) self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_delete_many(self): # Multiple keys can be deleted using delete_many cache.set_many({'key1': 'spam', 'key2': 'eggs', 'key3': 'ham'}) cache.delete_many(["key1", "key2"]) self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) self.assertEqual(cache.get("key3"), "ham") def test_clear(self): # The cache can be emptied using clear cache.set_many({'key1': 'spam', 'key2': 'eggs'}) cache.clear() self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_long_timeout(self): """ Follow memcached's convention where a timeout greater than 30 days is treated as an absolute expiration timestamp instead of a relative offset (#12399). 
""" cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second self.assertEqual(cache.get('key1'), 'eggs') self.assertIs(cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1), True) self.assertEqual(cache.get('key2'), 'ham') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1) self.assertEqual(cache.get('key3'), 'sausage') self.assertEqual(cache.get('key4'), 'lobster bisque') def test_forever_timeout(self): """ Passing in None into timeout results in a value that is cached forever """ cache.set('key1', 'eggs', None) self.assertEqual(cache.get('key1'), 'eggs') self.assertIs(cache.add('key2', 'ham', None), True) self.assertEqual(cache.get('key2'), 'ham') self.assertIs(cache.add('key1', 'new eggs', None), False) self.assertEqual(cache.get('key1'), 'eggs') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None) self.assertEqual(cache.get('key3'), 'sausage') self.assertEqual(cache.get('key4'), 'lobster bisque') cache.set('key5', 'belgian fries', timeout=1) self.assertIs(cache.touch('key5', timeout=None), True) time.sleep(2) self.assertEqual(cache.get('key5'), 'belgian fries') def test_zero_timeout(self): """ Passing in zero into timeout results in a value that is not cached """ cache.set('key1', 'eggs', 0) self.assertIsNone(cache.get('key1')) self.assertIs(cache.add('key2', 'ham', 0), True) self.assertIsNone(cache.get('key2')) cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0) self.assertIsNone(cache.get('key3')) self.assertIsNone(cache.get('key4')) cache.set('key5', 'belgian fries', timeout=5) self.assertIs(cache.touch('key5', timeout=0), True) self.assertIsNone(cache.get('key5')) def test_float_timeout(self): # Make sure a timeout given as a float doesn't crash anything. cache.set("key1", "spam", 100.2) self.assertEqual(cache.get("key1"), "spam") def _perform_cull_test(self, cull_cache_name, initial_count, final_count): try: cull_cache = caches[cull_cache_name] except InvalidCacheBackendError: self.skipTest("Culling isn't implemented.") # Create initial cache key entries. This will overflow the cache, # causing a cull. for i in range(1, initial_count): cull_cache.set('cull%d' % i, 'value', 1000) count = 0 # Count how many keys are left in the cache. for i in range(1, initial_count): if cull_cache.has_key('cull%d' % i): count += 1 self.assertEqual(count, final_count) def test_cull(self): self._perform_cull_test('cull', 50, 29) def test_zero_cull(self): self._perform_cull_test('zero_cull', 50, 19) def _perform_invalid_key_test(self, key, expected_warning): """ All the builtin backends should warn (except memcached that should error) on keys that would be refused by memcached. This encourages portable caching code without making it too difficult to use production backends with more liberal key rules. Refs #6447. """ # mimic custom ``make_key`` method being defined since the default will # never show the below warnings def func(key, *args): return key old_func = cache.key_func cache.key_func = func try: with self.assertWarns(CacheKeyWarning) as cm: cache.set(key, 'value') self.assertEqual(str(cm.warning), expected_warning) finally: cache.key_func = old_func def test_invalid_key_characters(self): # memcached doesn't allow whitespace or control characters in keys. key = 'key with spaces and 清' self._perform_invalid_key_test(key, KEY_ERRORS_WITH_MEMCACHED_MSG % key) def test_invalid_key_length(self): # memcached limits key length to 250. 
key = ('a' * 250) + '清' expected_warning = ( 'Cache key will cause errors if used with memcached: ' '%r (longer than %s)' % (key, 250) ) self._perform_invalid_key_test(key, expected_warning) def test_cache_versioning_get_set(self): # set, using default version = 1 cache.set('answer1', 42) self.assertEqual(cache.get('answer1'), 42) self.assertEqual(cache.get('answer1', version=1), 42) self.assertIsNone(cache.get('answer1', version=2)) self.assertIsNone(caches['v2'].get('answer1')) self.assertEqual(caches['v2'].get('answer1', version=1), 42) self.assertIsNone(caches['v2'].get('answer1', version=2)) # set, default version = 1, but manually override version = 2 cache.set('answer2', 42, version=2) self.assertIsNone(cache.get('answer2')) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) # v2 set, using default version = 2 caches['v2'].set('answer3', 42) self.assertIsNone(cache.get('answer3')) self.assertIsNone(cache.get('answer3', version=1)) self.assertEqual(cache.get('answer3', version=2), 42) self.assertEqual(caches['v2'].get('answer3'), 42) self.assertIsNone(caches['v2'].get('answer3', version=1)) self.assertEqual(caches['v2'].get('answer3', version=2), 42) # v2 set, default version = 2, but manually override version = 1 caches['v2'].set('answer4', 42, version=1) self.assertEqual(cache.get('answer4'), 42) self.assertEqual(cache.get('answer4', version=1), 42) self.assertIsNone(cache.get('answer4', version=2)) self.assertIsNone(caches['v2'].get('answer4')) self.assertEqual(caches['v2'].get('answer4', version=1), 42) self.assertIsNone(caches['v2'].get('answer4', version=2)) def test_cache_versioning_add(self): # add, default version = 1, but manually override version = 2 self.assertIs(cache.add('answer1', 42, version=2), True) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) self.assertIs(cache.add('answer1', 37, version=2), False) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) self.assertIs(cache.add('answer1', 37, version=1), True) self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) # v2 add, using default version = 2 self.assertIs(caches['v2'].add('answer2', 42), True) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) self.assertIs(caches['v2'].add('answer2', 37), False) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) self.assertIs(caches['v2'].add('answer2', 37, version=1), True) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) # v2 add, default version = 2, but manually override version = 1 self.assertIs(caches['v2'].add('answer3', 42, version=1), True) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) self.assertIs(caches['v2'].add('answer3', 37, version=1), False) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) self.assertIs(caches['v2'].add('answer3', 37), True) self.assertEqual(cache.get('answer3', version=1), 42) self.assertEqual(cache.get('answer3', version=2), 37) def test_cache_versioning_has_key(self): 
cache.set('answer1', 42) # has_key self.assertIs(cache.has_key('answer1'), True) self.assertIs(cache.has_key('answer1', version=1), True) self.assertIs(cache.has_key('answer1', version=2), False) self.assertIs(caches['v2'].has_key('answer1'), False) self.assertIs(caches['v2'].has_key('answer1', version=1), True) self.assertIs(caches['v2'].has_key('answer1', version=2), False) def test_cache_versioning_delete(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) self.assertIs(cache.delete('answer1'), True) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) self.assertIs(cache.delete('answer2', version=2), True) self.assertEqual(cache.get('answer2', version=1), 37) self.assertIsNone(cache.get('answer2', version=2)) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) self.assertIs(caches['v2'].delete('answer3'), True) self.assertEqual(cache.get('answer3', version=1), 37) self.assertIsNone(cache.get('answer3', version=2)) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) self.assertIs(caches['v2'].delete('answer4', version=1), True) self.assertIsNone(cache.get('answer4', version=1)) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_incr_decr(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) self.assertEqual(cache.incr('answer1'), 38) self.assertEqual(cache.get('answer1', version=1), 38) self.assertEqual(cache.get('answer1', version=2), 42) self.assertEqual(cache.decr('answer1'), 37) self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) self.assertEqual(cache.incr('answer2', version=2), 43) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 43) self.assertEqual(cache.decr('answer2', version=2), 42) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) self.assertEqual(caches['v2'].incr('answer3'), 43) self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 43) self.assertEqual(caches['v2'].decr('answer3'), 42) self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 42) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) self.assertEqual(caches['v2'].incr('answer4', version=1), 38) self.assertEqual(cache.get('answer4', version=1), 38) self.assertEqual(cache.get('answer4', version=2), 42) self.assertEqual(caches['v2'].decr('answer4', version=1), 37) self.assertEqual(cache.get('answer4', version=1), 37) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_get_set_many(self): # set, using default version = 1 cache.set_many({'ford1': 37, 'arthur1': 42}) self.assertEqual(cache.get_many(['ford1', 'arthur1']), {'ford1': 37, 'arthur1': 42}) self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42}) self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=2), {}) self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1']), {}) self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42}) self.assertEqual(caches['v2'].get_many(['ford1', 
'arthur1'], version=2), {}) # set, default version = 1, but manually override version = 2 cache.set_many({'ford2': 37, 'arthur2': 42}, version=2) self.assertEqual(cache.get_many(['ford2', 'arthur2']), {}) self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=1), {}) self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}) self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2']), {'ford2': 37, 'arthur2': 42}) self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=1), {}) self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}) # v2 set, using default version = 2 caches['v2'].set_many({'ford3': 37, 'arthur3': 42}) self.assertEqual(cache.get_many(['ford3', 'arthur3']), {}) self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=1), {}) self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}) self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3']), {'ford3': 37, 'arthur3': 42}) self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=1), {}) self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}) # v2 set, default version = 2, but manually override version = 1 caches['v2'].set_many({'ford4': 37, 'arthur4': 42}, version=1) self.assertEqual(cache.get_many(['ford4', 'arthur4']), {'ford4': 37, 'arthur4': 42}) self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}) self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=2), {}) self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4']), {}) self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}) self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=2), {}) def test_incr_version(self): cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertEqual(cache.get('answer', version=2), 42) self.assertIsNone(cache.get('answer', version=3)) self.assertEqual(cache.incr_version('answer', version=2), 3) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertIsNone(cache.get('answer', version=2)) self.assertEqual(cache.get('answer', version=3), 42) caches['v2'].set('answer2', 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) self.assertIsNone(caches['v2'].get('answer2', version=3)) self.assertEqual(caches['v2'].incr_version('answer2'), 3) self.assertIsNone(caches['v2'].get('answer2')) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertIsNone(caches['v2'].get('answer2', version=2)) self.assertEqual(caches['v2'].get('answer2', version=3), 42) with self.assertRaises(ValueError): cache.incr_version('does_not_exist') def test_decr_version(self): cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertEqual(cache.get('answer', version=2), 42) self.assertEqual(cache.decr_version('answer', version=2), 1) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.get('answer', version=1), 42) self.assertIsNone(cache.get('answer', version=2)) caches['v2'].set('answer2', 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) 
self.assertEqual(caches['v2'].get('answer2', version=2), 42) self.assertEqual(caches['v2'].decr_version('answer2'), 1) self.assertIsNone(caches['v2'].get('answer2')) self.assertEqual(caches['v2'].get('answer2', version=1), 42) self.assertIsNone(caches['v2'].get('answer2', version=2)) with self.assertRaises(ValueError): cache.decr_version('does_not_exist', version=2) def test_custom_key_func(self): # Two caches with different key functions aren't visible to each other cache.set('answer1', 42) self.assertEqual(cache.get('answer1'), 42) self.assertIsNone(caches['custom_key'].get('answer1')) self.assertIsNone(caches['custom_key2'].get('answer1')) caches['custom_key'].set('answer2', 42) self.assertIsNone(cache.get('answer2')) self.assertEqual(caches['custom_key'].get('answer2'), 42) self.assertEqual(caches['custom_key2'].get('answer2'), 42) def test_cache_write_unpicklable_object(self): fetch_middleware = FetchFromCacheMiddleware(empty_response) fetch_middleware.cache = cache request = self.factory.get('/cache/test') request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNone(get_cache_data) content = 'Testing cookie serialization.' def get_response(req): response = HttpResponse(content) response.set_cookie('foo', 'bar') return response update_middleware = UpdateCacheMiddleware(get_response) update_middleware.cache = cache response = update_middleware(request) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) self.assertEqual(get_cache_data.cookies, response.cookies) UpdateCacheMiddleware(lambda req: get_cache_data)(request) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) self.assertEqual(get_cache_data.cookies, response.cookies) def test_add_fail_on_pickleerror(self): # Shouldn't fail silently if trying to cache an unpicklable type. with self.assertRaises(pickle.PickleError): cache.add('unpicklable', Unpicklable()) def test_set_fail_on_pickleerror(self): with self.assertRaises(pickle.PickleError): cache.set('unpicklable', Unpicklable()) def test_get_or_set(self): self.assertIsNone(cache.get('projector')) self.assertEqual(cache.get_or_set('projector', 42), 42) self.assertEqual(cache.get('projector'), 42) self.assertIsNone(cache.get_or_set('null', None)) def test_get_or_set_callable(self): def my_callable(): return 'value' self.assertEqual(cache.get_or_set('mykey', my_callable), 'value') self.assertEqual(cache.get_or_set('mykey', my_callable()), 'value') def test_get_or_set_callable_returning_none(self): self.assertIsNone(cache.get_or_set('mykey', lambda: None)) # Previous get_or_set() doesn't store None in the cache. 
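        # A follow-up get() therefore falls back to the caller-supplied default.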
        self.assertEqual(cache.get('mykey', 'default'), 'default')

    def test_get_or_set_version(self):
        msg = "get_or_set() missing 1 required positional argument: 'default'"
        self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979)
        with self.assertRaisesMessage(TypeError, msg):
            cache.get_or_set('brian')
        with self.assertRaisesMessage(TypeError, msg):
            cache.get_or_set('brian', version=1)
        self.assertIsNone(cache.get('brian', version=1))
        self.assertEqual(cache.get_or_set('brian', 42, version=1), 42)
        self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979)
        self.assertIsNone(cache.get('brian', version=3))

    def test_get_or_set_racing(self):
        with mock.patch('%s.%s' % (settings.CACHES['default']['BACKEND'], 'add')) as cache_add:
            # Simulate cache.add() failing to add a value. In that case, the
            # default value should be returned.
            cache_add.return_value = False
            self.assertEqual(cache.get_or_set('key', 'default'), 'default')


@override_settings(CACHES=caches_setting_for_tests(
    BACKEND='django.core.cache.backends.db.DatabaseCache',
    # Spaces are used in the table name to ensure quoting/escaping is working
    LOCATION='test cache table'
))
class DBCacheTests(BaseCacheTests, TransactionTestCase):

    available_apps = ['cache']

    def setUp(self):
        # The super call needs to happen first for the settings override.
        super().setUp()
        self.create_table()

    def tearDown(self):
        # The super call needs to happen first because it uses the database.
        super().tearDown()
        self.drop_table()

    def create_table(self):
        management.call_command('createcachetable', verbosity=0)

    def drop_table(self):
        with connection.cursor() as cursor:
            table_name = connection.ops.quote_name('test cache table')
            cursor.execute('DROP TABLE %s' % table_name)

    def test_get_many_num_queries(self):
        cache.set_many({'a': 1, 'b': 2})
        cache.set('expired', 'expired', 0.01)
        with self.assertNumQueries(1):
            self.assertEqual(cache.get_many(['a', 'b']), {'a': 1, 'b': 2})
        time.sleep(0.02)
        with self.assertNumQueries(2):
            self.assertEqual(cache.get_many(['a', 'b', 'expired']), {'a': 1, 'b': 2})

    def test_delete_many_num_queries(self):
        cache.set_many({'a': 1, 'b': 2, 'c': 3})
        with self.assertNumQueries(1):
            cache.delete_many(['a', 'b', 'c'])

    def test_zero_cull(self):
        self._perform_cull_test('zero_cull', 50, 18)

    def test_second_call_doesnt_crash(self):
        out = io.StringIO()
        management.call_command('createcachetable', stdout=out)
        self.assertEqual(out.getvalue(), "Cache table 'test cache table' already exists.\n" * len(settings.CACHES))

    @override_settings(CACHES=caches_setting_for_tests(
        BACKEND='django.core.cache.backends.db.DatabaseCache',
        # Use another table name to avoid the 'table already exists' message.
        LOCATION='createcachetable_dry_run_mode'
    ))
    def test_createcachetable_dry_run_mode(self):
        out = io.StringIO()
        management.call_command('createcachetable', dry_run=True, stdout=out)
        output = out.getvalue()
        self.assertTrue(output.startswith("CREATE TABLE"))

    def test_createcachetable_with_table_argument(self):
        """
        Delete and recreate cache table with legacy behavior (explicitly
        specifying the table name).
""" self.drop_table() out = io.StringIO() management.call_command( 'createcachetable', 'test cache table', verbosity=2, stdout=out, ) self.assertEqual(out.getvalue(), "Cache table 'test cache table' created.\n") @override_settings(USE_TZ=True) class DBCacheWithTimeZoneTests(DBCacheTests): pass class DBCacheRouter: """A router that puts the cache table on the 'other' database.""" def db_for_read(self, model, **hints): if model._meta.app_label == 'django_cache': return 'other' return None def db_for_write(self, model, **hints): if model._meta.app_label == 'django_cache': return 'other' return None def allow_migrate(self, db, app_label, **hints): if app_label == 'django_cache': return db == 'other' return None @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'my_cache_table', }, }, ) class CreateCacheTableForDBCacheTests(TestCase): databases = {'default', 'other'} @override_settings(DATABASE_ROUTERS=[DBCacheRouter()]) def test_createcachetable_observes_database_router(self): # cache table should not be created on 'default' with self.assertNumQueries(0, using='default'): management.call_command('createcachetable', database='default', verbosity=0) # cache table should be created on 'other' # Queries: # 1: check table doesn't already exist # 2: create savepoint (if transactional DDL is supported) # 3: create the table # 4: create the index # 5: release savepoint (if transactional DDL is supported) num = 5 if connections['other'].features.can_rollback_ddl else 3 with self.assertNumQueries(num, using='other'): management.call_command('createcachetable', database='other', verbosity=0) class PicklingSideEffect: def __init__(self, cache): self.cache = cache self.locked = False def __getstate__(self): self.locked = self.cache._lock.locked() return {} limit_locmem_entries = override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.locmem.LocMemCache', OPTIONS={'MAX_ENTRIES': 9}, )) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.locmem.LocMemCache', )) class LocMemCacheTests(BaseCacheTests, TestCase): def setUp(self): super().setUp() # LocMem requires a hack to make the other caches # share a data store with the 'normal' cache. 
caches['prefix']._cache = cache._cache caches['prefix']._expire_info = cache._expire_info caches['v2']._cache = cache._cache caches['v2']._expire_info = cache._expire_info caches['custom_key']._cache = cache._cache caches['custom_key']._expire_info = cache._expire_info caches['custom_key2']._cache = cache._cache caches['custom_key2']._expire_info = cache._expire_info @override_settings(CACHES={ 'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}, 'other': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'other' }, }) def test_multiple_caches(self): "Multiple locmem caches are isolated" cache.set('value', 42) self.assertEqual(caches['default'].get('value'), 42) self.assertIsNone(caches['other'].get('value')) def test_locking_on_pickle(self): """#20613/#18541 -- Ensures pickling is done outside of the lock.""" bad_obj = PicklingSideEffect(cache) cache.set('set', bad_obj) self.assertFalse(bad_obj.locked, "Cache was locked during pickling") self.assertIs(cache.add('add', bad_obj), True) self.assertFalse(bad_obj.locked, "Cache was locked during pickling") def test_incr_decr_timeout(self): """incr/decr does not modify expiry time (matches memcached behavior)""" key = 'value' _key = cache.make_key(key) cache.set(key, 1, timeout=cache.default_timeout * 10) expire = cache._expire_info[_key] self.assertEqual(cache.incr(key), 2) self.assertEqual(expire, cache._expire_info[_key]) self.assertEqual(cache.decr(key), 1) self.assertEqual(expire, cache._expire_info[_key]) @limit_locmem_entries def test_lru_get(self): """get() moves cache keys.""" for key in range(9): cache.set(key, key, timeout=None) for key in range(6): self.assertEqual(cache.get(key), key) cache.set(9, 9, timeout=None) for key in range(6): self.assertEqual(cache.get(key), key) for key in range(6, 9): self.assertIsNone(cache.get(key)) self.assertEqual(cache.get(9), 9) @limit_locmem_entries def test_lru_set(self): """set() moves cache keys.""" for key in range(9): cache.set(key, key, timeout=None) for key in range(3, 9): cache.set(key, key, timeout=None) cache.set(9, 9, timeout=None) for key in range(3, 10): self.assertEqual(cache.get(key), key) for key in range(3): self.assertIsNone(cache.get(key)) @limit_locmem_entries def test_lru_incr(self): """incr() moves cache keys.""" for key in range(9): cache.set(key, key, timeout=None) for key in range(6): self.assertEqual(cache.incr(key), key + 1) cache.set(9, 9, timeout=None) for key in range(6): self.assertEqual(cache.get(key), key + 1) for key in range(6, 9): self.assertIsNone(cache.get(key)) self.assertEqual(cache.get(9), 9) # memcached backend isn't guaranteed to be available. # To check the memcached backend, the test settings file will # need to contain at least one cache backend setting that points at # your memcache server. configured_caches = {} for _cache_params in settings.CACHES.values(): configured_caches[_cache_params['BACKEND']] = _cache_params MemcachedCache_params = configured_caches.get('django.core.cache.backends.memcached.MemcachedCache') PyLibMCCache_params = configured_caches.get('django.core.cache.backends.memcached.PyLibMCCache') # The memcached backends don't support cull-related options like `MAX_ENTRIES`. memcached_excluded_caches = {'cull', 'zero_cull'} class BaseMemcachedTests(BaseCacheTests): # By default it's assumed that the client doesn't clean up connections # properly, in which case the backend must do so after each request. 
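    # Subclasses for client libraries that manage their own connection pools
    # (e.g. pylibmc/libmemcached below) override this to False.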
should_disconnect_on_close = True def test_location_multiple_servers(self): locations = [ ['server1.tld', 'server2:11211'], 'server1.tld;server2:11211', 'server1.tld,server2:11211', ] for location in locations: with self.subTest(location=location): params = {'BACKEND': self.base_params['BACKEND'], 'LOCATION': location} with self.settings(CACHES={'default': params}): self.assertEqual(cache._servers, ['server1.tld', 'server2:11211']) def _perform_invalid_key_test(self, key, expected_warning): """ While other backends merely warn, memcached should raise for an invalid key. """ msg = expected_warning.replace(key, cache.make_key(key)) with self.assertRaises(InvalidCacheKey) as cm: cache.set(key, 'value') self.assertEqual(str(cm.exception), msg) def test_default_never_expiring_timeout(self): # Regression test for #22845 with self.settings(CACHES=caches_setting_for_tests( base=self.base_params, exclude=memcached_excluded_caches, TIMEOUT=None)): cache.set('infinite_foo', 'bar') self.assertEqual(cache.get('infinite_foo'), 'bar') def test_default_far_future_timeout(self): # Regression test for #22845 with self.settings(CACHES=caches_setting_for_tests( base=self.base_params, exclude=memcached_excluded_caches, # 60*60*24*365, 1 year TIMEOUT=31536000)): cache.set('future_foo', 'bar') self.assertEqual(cache.get('future_foo'), 'bar') def test_memcached_deletes_key_on_failed_set(self): # By default memcached allows objects up to 1MB. For the cache_db session # backend to always use the current session, memcached needs to delete # the old key if it fails to set. # pylibmc doesn't seem to have SERVER_MAX_VALUE_LENGTH as far as I can # tell from a quick check of its source code. This is falling back to # the default value exposed by python-memcached on my system. max_value_length = getattr(cache._lib, 'SERVER_MAX_VALUE_LENGTH', 1048576) cache.set('small_value', 'a') self.assertEqual(cache.get('small_value'), 'a') large_value = 'a' * (max_value_length + 1) try: cache.set('small_value', large_value) except Exception: # Some clients (e.g. pylibmc) raise when the value is too large, # while others (e.g. python-memcached) intentionally return True # indicating success. This test is primarily checking that the key # was deleted, so the return/exception behavior for the set() # itself is not important. pass # small_value should be deleted, or set if configured to accept larger values value = cache.get('small_value') self.assertTrue(value is None or value == large_value) def test_close(self): # For clients that don't manage their connections properly, the # connection is closed when the request is complete. 
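        # Detach Django's own request_finished receiver first so that only the
        # cache backend's disconnect behavior is observed; it's reconnected in
        # the finally block below.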
signals.request_finished.disconnect(close_old_connections) try: with mock.patch.object(cache._lib.Client, 'disconnect_all', autospec=True) as mock_disconnect: signals.request_finished.send(self.__class__) self.assertIs(mock_disconnect.called, self.should_disconnect_on_close) finally: signals.request_finished.connect(close_old_connections) def test_set_many_returns_failing_keys(self): def fail_set_multi(mapping, *args, **kwargs): return mapping.keys() with mock.patch('%s.Client.set_multi' % self.client_library_name, side_effect=fail_set_multi): failing_keys = cache.set_many({'key': 'value'}) self.assertEqual(failing_keys, ['key']) @unittest.skipUnless(MemcachedCache_params, "MemcachedCache backend not configured") @override_settings(CACHES=caches_setting_for_tests( base=MemcachedCache_params, exclude=memcached_excluded_caches, )) class MemcachedCacheTests(BaseMemcachedTests, TestCase): base_params = MemcachedCache_params client_library_name = 'memcache' def test_memcached_uses_highest_pickle_version(self): # Regression test for #19810 for cache_key in settings.CACHES: with self.subTest(cache_key=cache_key): self.assertEqual(caches[cache_key]._cache.pickleProtocol, pickle.HIGHEST_PROTOCOL) @override_settings(CACHES=caches_setting_for_tests( base=MemcachedCache_params, exclude=memcached_excluded_caches, OPTIONS={'server_max_value_length': 9999}, )) def test_memcached_options(self): self.assertEqual(cache._cache.server_max_value_length, 9999) def test_default_used_when_none_is_set(self): """ python-memcached doesn't support default in get() so this test overrides the one in BaseCacheTests. """ cache.set('key_default_none', None) self.assertEqual(cache.get('key_default_none', default='default'), 'default') @unittest.skipUnless(PyLibMCCache_params, "PyLibMCCache backend not configured") @override_settings(CACHES=caches_setting_for_tests( base=PyLibMCCache_params, exclude=memcached_excluded_caches, )) class PyLibMCCacheTests(BaseMemcachedTests, TestCase): base_params = PyLibMCCache_params client_library_name = 'pylibmc' # libmemcached manages its own connections. should_disconnect_on_close = False @override_settings(CACHES=caches_setting_for_tests( base=PyLibMCCache_params, exclude=memcached_excluded_caches, OPTIONS={ 'binary': True, 'behaviors': {'tcp_nodelay': True}, }, )) def test_pylibmc_options(self): self.assertTrue(cache._cache.binary) self.assertEqual(cache._cache.behaviors['tcp_nodelay'], int(True)) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.filebased.FileBasedCache', )) class FileBasedCacheTests(BaseCacheTests, TestCase): """ Specific test cases for the file-based cache. """ def setUp(self): super().setUp() self.dirname = self.mkdtemp() # Caches location cannot be modified through override_settings / modify_settings, # hence settings are manipulated directly here and the setting_changed signal # is triggered manually. 
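        # Sending setting_changed makes the cache handler drop its cached
        # connections, so every alias is re-instantiated with the temporary
        # directory as its LOCATION.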
        for cache_params in settings.CACHES.values():
            cache_params['LOCATION'] = self.dirname
        setting_changed.send(self.__class__, setting='CACHES', enter=False)

    def tearDown(self):
        super().tearDown()
        # Call parent first, as cache.clear() may recreate cache base directory
        shutil.rmtree(self.dirname)

    def mkdtemp(self):
        return tempfile.mkdtemp()

    def test_ignores_non_cache_files(self):
        fname = os.path.join(self.dirname, 'not-a-cache-file')
        with open(fname, 'w'):
            os.utime(fname, None)
        cache.clear()
        self.assertTrue(os.path.exists(fname),
                        'Expected cache.clear to ignore non cache files')
        os.remove(fname)

    def test_clear_does_not_remove_cache_dir(self):
        cache.clear()
        self.assertTrue(os.path.exists(self.dirname),
                        'Expected cache.clear to keep the cache dir')

    def test_creates_cache_dir_if_nonexistent(self):
        os.rmdir(self.dirname)
        cache.set('foo', 'bar')
        self.assertTrue(os.path.exists(self.dirname))

    def test_get_ignores_enoent(self):
        cache.set('foo', 'bar')
        os.unlink(cache._key_to_file('foo'))
        # Returns the default instead of erroring.
        self.assertEqual(cache.get('foo', 'baz'), 'baz')

    def test_get_does_not_ignore_non_filenotfound_exceptions(self):
        with mock.patch('builtins.open', side_effect=OSError):
            with self.assertRaises(OSError):
                cache.get('foo')

    def test_empty_cache_file_considered_expired(self):
        cache_file = cache._key_to_file('foo')
        with open(cache_file, 'wb') as fh:
            fh.write(b'')
        with open(cache_file, 'rb') as fh:
            self.assertIs(cache._is_expired(fh), True)


class FileBasedCachePathLibTests(FileBasedCacheTests):
    def mkdtemp(self):
        tmp_dir = super().mkdtemp()
        return Path(tmp_dir)


@override_settings(CACHES={
    'default': {
        'BACKEND': 'cache.liberal_backend.CacheClass',
    },
})
class CustomCacheKeyValidationTests(SimpleTestCase):
    """
    Tests for the ability to mixin a custom ``validate_key`` method to
    a custom cache backend that otherwise inherits from a builtin
    backend, and override the default key validation. Refs #6447.
    """
    def test_custom_key_validation(self):
        # This key is both longer than 250 characters and has spaces.
        key = 'some key with spaces' * 15
        val = 'a value'
        cache.set(key, val)
        self.assertEqual(cache.get(key), val)


@override_settings(
    CACHES={
        'default': {
            'BACKEND': 'cache.closeable_cache.CacheClass',
        }
    }
)
class CacheClosingTests(SimpleTestCase):

    def test_close(self):
        self.assertFalse(cache.closed)
        signals.request_finished.send(self.__class__)
        self.assertTrue(cache.closed)


DEFAULT_MEMORY_CACHES_SETTINGS = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'unique-snowflake',
    }
}
NEVER_EXPIRING_CACHES_SETTINGS = copy.deepcopy(DEFAULT_MEMORY_CACHES_SETTINGS)
NEVER_EXPIRING_CACHES_SETTINGS['default']['TIMEOUT'] = None


class DefaultNonExpiringCacheKeyTests(SimpleTestCase):
    """
    Settings that configure a cache with TIMEOUT=None create caches that
    set non-expiring keys.
    """
    def setUp(self):
        # The default expiration time for keys, 5 minutes (300 seconds), is
        # defined in BaseCache.__init__().
        self.DEFAULT_TIMEOUT = caches[DEFAULT_CACHE_ALIAS].default_timeout

    def tearDown(self):
        del self.DEFAULT_TIMEOUT

    def test_default_expiration_time_for_keys_is_5_minutes(self):
        """The default expiration time of a cache key is 5 minutes.

        This value is defined in
        django.core.cache.backends.base.BaseCache.__init__().
""" self.assertEqual(300, self.DEFAULT_TIMEOUT) def test_caches_with_unset_timeout_has_correct_default_timeout(self): """Caches that have the TIMEOUT parameter undefined in the default settings will use the default 5 minute timeout. """ cache = caches[DEFAULT_CACHE_ALIAS] self.assertEqual(self.DEFAULT_TIMEOUT, cache.default_timeout) @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS) def test_caches_set_with_timeout_as_none_has_correct_default_timeout(self): """Memory caches that have the TIMEOUT parameter set to `None` in the default settings with have `None` as the default timeout. This means "no timeout". """ cache = caches[DEFAULT_CACHE_ALIAS] self.assertIsNone(cache.default_timeout) self.assertIsNone(cache.get_backend_timeout()) @override_settings(CACHES=DEFAULT_MEMORY_CACHES_SETTINGS) def test_caches_with_unset_timeout_set_expiring_key(self): """Memory caches that have the TIMEOUT parameter unset will set cache keys having the default 5 minute timeout. """ key = "my-key" value = "my-value" cache = caches[DEFAULT_CACHE_ALIAS] cache.set(key, value) cache_key = cache.make_key(key) self.assertIsNotNone(cache._expire_info[cache_key]) @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS) def test_caches_set_with_timeout_as_none_set_non_expiring_key(self): """Memory caches that have the TIMEOUT parameter set to `None` will set a non expiring key by default. """ key = "another-key" value = "another-value" cache = caches[DEFAULT_CACHE_ALIAS] cache.set(key, value) cache_key = cache.make_key(key) self.assertIsNone(cache._expire_info[cache_key]) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHE_MIDDLEWARE_SECONDS=1, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, USE_I18N=False, ALLOWED_HOSTS=['.example.com'], ) class CacheUtils(SimpleTestCase): """TestCase for django.utils.cache functions.""" host = 'www.example.com' path = '/cache/test/' factory = RequestFactory(HTTP_HOST=host) def tearDown(self): cache.clear() def _get_request_cache(self, method='GET', query_string=None, update_cache=None): request = self._get_request(self.host, self.path, method, query_string=query_string) request._cache_update_cache = True if not update_cache else update_cache return request def test_patch_vary_headers(self): headers = ( # Initial vary, new headers, resulting vary. (None, ('Accept-Encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'), ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ('*', ('Accept-Language', 'Cookie'), '*'), ('Accept-Language, Cookie', ('*',), '*'), ) for initial_vary, newheaders, resulting_vary in headers: with self.subTest(initial_vary=initial_vary, newheaders=newheaders): response = HttpResponse() if initial_vary is not None: response['Vary'] = initial_vary patch_vary_headers(response, newheaders) self.assertEqual(response['Vary'], resulting_vary) def test_get_cache_key(self): request = self.factory.get(self.path) response = HttpResponse() # Expect None if no headers have been set yet. 
self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) # A specified key_prefix is taken into account. key_prefix = 'localprefix' learn_cache_key(request, response, key_prefix=key_prefix) self.assertEqual( get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.' '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) def test_get_cache_key_with_query(self): request = self.factory.get(self.path, {'test': 1}) response = HttpResponse() # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) # The querystring is taken into account. self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' 'beaf87a9a99ee81c673ea2d67ccbec2a.d41d8cd98f00b204e9800998ecf8427e' ) def test_cache_key_varies_by_url(self): """ get_cache_key keys differ by fully-qualified URL instead of path """ request1 = self.factory.get(self.path, HTTP_HOST='sub-1.example.com') learn_cache_key(request1, HttpResponse()) request2 = self.factory.get(self.path, HTTP_HOST='sub-2.example.com') learn_cache_key(request2, HttpResponse()) self.assertNotEqual(get_cache_key(request1), get_cache_key(request2)) def test_learn_cache_key(self): request = self.factory.head(self.path) response = HttpResponse() response['Vary'] = 'Pony' # Make sure that the Vary header is added to the key hash learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) def test_patch_cache_control(self): tests = ( # Initial Cache-Control, kwargs to patch_cache_control, expected Cache-Control parts (None, {'private': True}, {'private'}), ('', {'private': True}, {'private'}), # no-cache. 
('', {'no_cache': 'Set-Cookie'}, {'no-cache=Set-Cookie'}), ('', {'no-cache': 'Set-Cookie'}, {'no-cache=Set-Cookie'}), ('no-cache=Set-Cookie', {'no_cache': True}, {'no-cache'}), ('no-cache=Set-Cookie,no-cache=Link', {'no_cache': True}, {'no-cache'}), ('no-cache=Set-Cookie', {'no_cache': 'Link'}, {'no-cache=Set-Cookie', 'no-cache=Link'}), ( 'no-cache=Set-Cookie,no-cache=Link', {'no_cache': 'Custom'}, {'no-cache=Set-Cookie', 'no-cache=Link', 'no-cache=Custom'}, ), # Test whether private/public attributes are mutually exclusive ('private', {'private': True}, {'private'}), ('private', {'public': True}, {'public'}), ('public', {'public': True}, {'public'}), ('public', {'private': True}, {'private'}), ('must-revalidate,max-age=60,private', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}), ('must-revalidate,max-age=60,public', {'private': True}, {'must-revalidate', 'max-age=60', 'private'}), ('must-revalidate,max-age=60', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}), ) cc_delim_re = re.compile(r'\s*,\s*') for initial_cc, newheaders, expected_cc in tests: with self.subTest(initial_cc=initial_cc, newheaders=newheaders): response = HttpResponse() if initial_cc is not None: response['Cache-Control'] = initial_cc patch_cache_control(response, **newheaders) parts = set(cc_delim_re.split(response['Cache-Control'])) self.assertEqual(parts, expected_cc) @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'KEY_PREFIX': 'cacheprefix', }, }, ) class PrefixedCacheUtils(CacheUtils): pass @override_settings( CACHE_MIDDLEWARE_SECONDS=60, CACHE_MIDDLEWARE_KEY_PREFIX='test', CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, ) class CacheHEADTest(SimpleTestCase): path = '/cache/test/' factory = RequestFactory() def tearDown(self): cache.clear() def _set_cache(self, request, msg): return UpdateCacheMiddleware(lambda req: HttpResponse(msg))(request) def test_head_caches_correctly(self): test_content = 'test content' request = self.factory.head(self.path) request._cache_update_cache = True self._set_cache(request, test_content) request = self.factory.head(self.path) request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(test_content.encode(), get_cache_data.content) def test_head_with_cached_get(self): test_content = 'test content' request = self.factory.get(self.path) request._cache_update_cache = True self._set_cache(request, test_content) request = self.factory.head(self.path) get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(test_content.encode(), get_cache_data.content) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, LANGUAGES=[ ('en', 'English'), ('es', 'Spanish'), ], ) class CacheI18nTest(SimpleTestCase): path = '/cache/test/' factory = RequestFactory() def tearDown(self): cache.clear() @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False) def test_cache_key_i18n_translation(self): request = self.factory.get(self.path) lang = translation.get_language() response = HttpResponse() key = learn_cache_key(request, response) self.assertIn(lang, key, "Cache keys should include the language name when translation is active") key2 = get_cache_key(request) self.assertEqual(key, key2) def 
check_accept_language_vary(self, accept_language, vary, reference_key): request = self.factory.get(self.path) request.META['HTTP_ACCEPT_LANGUAGE'] = accept_language request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0' response = HttpResponse() response['Vary'] = vary key = learn_cache_key(request, response) key2 = get_cache_key(request) self.assertEqual(key, reference_key) self.assertEqual(key2, reference_key) @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False) def test_cache_key_i18n_translation_accept_language(self): lang = translation.get_language() self.assertEqual(lang, 'en') request = self.factory.get(self.path) request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0' response = HttpResponse() response['Vary'] = 'accept-encoding' key = learn_cache_key(request, response) self.assertIn(lang, key, "Cache keys should include the language name when translation is active") self.check_accept_language_vary( 'en-us', 'cookie, accept-language, accept-encoding', key ) self.check_accept_language_vary( 'en-US', 'cookie, accept-encoding, accept-language', key ) self.check_accept_language_vary( 'en-US,en;q=0.8', 'accept-encoding, accept-language, cookie', key ) self.check_accept_language_vary( 'en-US,en;q=0.8,ko;q=0.6', 'accept-language, cookie, accept-encoding', key ) self.check_accept_language_vary( 'ko-kr,ko;q=0.8,en-us;q=0.5,en;q=0.3 ', 'accept-encoding, cookie, accept-language', key ) self.check_accept_language_vary( 'ko-KR,ko;q=0.8,en-US;q=0.6,en;q=0.4', 'accept-language, accept-encoding, cookie', key ) self.check_accept_language_vary( 'ko;q=1.0,en;q=0.5', 'cookie, accept-language, accept-encoding', key ) self.check_accept_language_vary( 'ko, en', 'cookie, accept-encoding, accept-language', key ) self.check_accept_language_vary( 'ko-KR, en-US', 'accept-encoding, accept-language, cookie', key ) @override_settings(USE_I18N=False, USE_L10N=True, USE_TZ=False) def test_cache_key_i18n_formatting(self): request = self.factory.get(self.path) lang = translation.get_language() response = HttpResponse() key = learn_cache_key(request, response) self.assertIn(lang, key, "Cache keys should include the language name when formatting is active") key2 = get_cache_key(request) self.assertEqual(key, key2) @override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True) def test_cache_key_i18n_timezone(self): request = self.factory.get(self.path) tz = timezone.get_current_timezone_name() response = HttpResponse() key = learn_cache_key(request, response) self.assertIn(tz, key, "Cache keys should include the time zone name when time zones are active") key2 = get_cache_key(request) self.assertEqual(key, key2) @override_settings(USE_I18N=False, USE_L10N=False) def test_cache_key_no_i18n(self): request = self.factory.get(self.path) lang = translation.get_language() tz = timezone.get_current_timezone_name() response = HttpResponse() key = learn_cache_key(request, response) self.assertNotIn(lang, key, "Cache keys shouldn't include the language name when i18n isn't active") self.assertNotIn(tz, key, "Cache keys shouldn't include the time zone name when i18n isn't active") @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX="test", CACHE_MIDDLEWARE_SECONDS=60, USE_I18N=True, ) def test_middleware(self): def set_cache(request, lang, msg): def get_response(req): return HttpResponse(msg) translation.activate(lang) return UpdateCacheMiddleware(get_response)(request) # cache with non empty request.GET request = self.factory.get(self.path, {'foo': 'bar', 'other': 'true'}) 
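        # _cache_update_cache is normally set by
        # FetchFromCacheMiddleware.process_request(); it's set by hand here
        # because the test drives the middleware halves directly.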
request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) # first access, cache must return None self.assertIsNone(get_cache_data) content = 'Check for cache with QUERY_STRING' def get_response(req): return HttpResponse(content) UpdateCacheMiddleware(get_response)(request) get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) # cache must return content self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) # different QUERY_STRING, cache must be empty request = self.factory.get(self.path, {'foo': 'bar', 'somethingelse': 'true'}) request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNone(get_cache_data) # i18n tests en_message = "Hello world!" es_message = "Hola mundo!" request = self.factory.get(self.path) request._cache_update_cache = True set_cache(request, 'en', en_message) get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) # The cache can be recovered self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, en_message.encode()) # change the session language and set content request = self.factory.get(self.path) request._cache_update_cache = True set_cache(request, 'es', es_message) # change again the language translation.activate('en') # retrieve the content from cache get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertEqual(get_cache_data.content, en_message.encode()) # change again the language translation.activate('es') get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertEqual(get_cache_data.content, es_message.encode()) # reset the language translation.deactivate() @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX="test", CACHE_MIDDLEWARE_SECONDS=60, ) def test_middleware_doesnt_cache_streaming_response(self): request = self.factory.get(self.path) get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNone(get_cache_data) def get_stream_response(req): return StreamingHttpResponse(['Check for cache with streaming content.']) UpdateCacheMiddleware(get_stream_response)(request) get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNone(get_cache_data) @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'KEY_PREFIX': 'cacheprefix' }, }, ) class PrefixedCacheI18nTest(CacheI18nTest): pass def hello_world_view(request, value): return HttpResponse('Hello World %s' % value) def csrf_view(request): return HttpResponse(csrf(request)['csrf_token']) @override_settings( CACHE_MIDDLEWARE_ALIAS='other', CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix', CACHE_MIDDLEWARE_SECONDS=30, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, 'other': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'other', 'TIMEOUT': '1', }, }, ) class CacheMiddlewareTest(SimpleTestCase): factory = RequestFactory() def setUp(self): self.default_cache = caches['default'] self.other_cache = caches['other'] def tearDown(self): self.default_cache.clear() self.other_cache.clear() super().tearDown() def test_constructor(self): """ Ensure the constructor is correctly distinguishing between usage of CacheMiddleware as Middleware vs. 
usage of CacheMiddleware as view decorator and setting attributes appropriately. """ # If only one argument is passed in construction, it's being used as # middleware. middleware = CacheMiddleware(empty_response) # Now test object attributes against values defined in setUp above self.assertEqual(middleware.cache_timeout, 30) self.assertEqual(middleware.key_prefix, 'middlewareprefix') self.assertEqual(middleware.cache_alias, 'other') # If more arguments are being passed in construction, it's being used # as a decorator. First, test with "defaults": as_view_decorator = CacheMiddleware(empty_response, cache_alias=None, key_prefix=None) self.assertEqual(as_view_decorator.cache_timeout, 30) # Timeout value for 'default' cache, i.e. 30 self.assertEqual(as_view_decorator.key_prefix, '') # Value of DEFAULT_CACHE_ALIAS from django.core.cache self.assertEqual(as_view_decorator.cache_alias, 'default') # Next, test with custom values: as_view_decorator_with_custom = CacheMiddleware( hello_world_view, cache_timeout=60, cache_alias='other', key_prefix='foo' ) self.assertEqual(as_view_decorator_with_custom.cache_timeout, 60) self.assertEqual(as_view_decorator_with_custom.key_prefix, 'foo') self.assertEqual(as_view_decorator_with_custom.cache_alias, 'other') def test_middleware(self): middleware = CacheMiddleware(hello_world_view) prefix_middleware = CacheMiddleware(hello_world_view, key_prefix='prefix1') timeout_middleware = CacheMiddleware(hello_world_view, cache_timeout=1) request = self.factory.get('/view/') # Put the request through the request middleware result = middleware.process_request(request) self.assertIsNone(result) response = hello_world_view(request, '1') # Now put the response through the response middleware response = middleware.process_response(request, response) # Repeating the request should result in a cache hit result = middleware.process_request(request) self.assertIsNotNone(result) self.assertEqual(result.content, b'Hello World 1') # The same request through a different middleware won't hit result = prefix_middleware.process_request(request) self.assertIsNone(result) # The same request with a timeout _will_ hit result = timeout_middleware.process_request(request) self.assertIsNotNone(result) self.assertEqual(result.content, b'Hello World 1') def test_view_decorator(self): # decorate the same view with different cache decorators default_view = cache_page(3)(hello_world_view) default_with_prefix_view = cache_page(3, key_prefix='prefix1')(hello_world_view) explicit_default_view = cache_page(3, cache='default')(hello_world_view) explicit_default_with_prefix_view = cache_page(3, cache='default', key_prefix='prefix1')(hello_world_view) other_view = cache_page(1, cache='other')(hello_world_view) other_with_prefix_view = cache_page(1, cache='other', key_prefix='prefix2')(hello_world_view) request = self.factory.get('/view/') # Request the view once response = default_view(request, '1') self.assertEqual(response.content, b'Hello World 1') # Request again -- hit the cache response = default_view(request, '2') self.assertEqual(response.content, b'Hello World 1') # Requesting the same view with the explicit cache should yield the same result response = explicit_default_view(request, '3') self.assertEqual(response.content, b'Hello World 1') # Requesting with a prefix will hit a different cache key response = explicit_default_with_prefix_view(request, '4') self.assertEqual(response.content, b'Hello World 4') # Hitting the same view again gives a cache hit response = 
        explicit_default_with_prefix_view(request, '5')
        self.assertEqual(response.content, b'Hello World 4')

        # And going back to the implicit cache will hit the same cache
        response = default_with_prefix_view(request, '6')
        self.assertEqual(response.content, b'Hello World 4')

        # Requesting from an alternate cache won't hit cache
        response = other_view(request, '7')
        self.assertEqual(response.content, b'Hello World 7')

        # But a repeated hit will hit cache
        response = other_view(request, '8')
        self.assertEqual(response.content, b'Hello World 7')

        # And prefixing the alternate cache yields yet another cache entry
        response = other_with_prefix_view(request, '9')
        self.assertEqual(response.content, b'Hello World 9')

        # But if we wait a couple of seconds...
        time.sleep(2)

        # ... the default cache will still hit
        response = default_view(request, '11')
        self.assertEqual(response.content, b'Hello World 1')

        # ... the default cache with a prefix will still hit
        response = default_with_prefix_view(request, '12')
        self.assertEqual(response.content, b'Hello World 4')

        # ... the explicit default cache will still hit
        response = explicit_default_view(request, '13')
        self.assertEqual(response.content, b'Hello World 1')

        # ... the explicit default cache with a prefix will still hit
        response = explicit_default_with_prefix_view(request, '14')
        self.assertEqual(response.content, b'Hello World 4')

        # ... but a rapidly expiring cache won't hit
        response = other_view(request, '15')
        self.assertEqual(response.content, b'Hello World 15')

        # ... even if it has a prefix
        response = other_with_prefix_view(request, '16')
        self.assertEqual(response.content, b'Hello World 16')

    def test_cache_page_timeout(self):
        # Page timeout takes precedence over the "max-age" section of the
        # "Cache-Control".
        tests = [
            (1, 3),  # max_age < page_timeout.
            (3, 1),  # max_age > page_timeout.
        ]
        for max_age, page_timeout in tests:
            with self.subTest(max_age=max_age, page_timeout=page_timeout):
                view = cache_page(timeout=page_timeout)(
                    cache_control(max_age=max_age)(hello_world_view)
                )
                request = self.factory.get('/view/')
                response = view(request, '1')
                self.assertEqual(response.content, b'Hello World 1')
                time.sleep(1)
                response = view(request, '2')
                self.assertEqual(
                    response.content,
                    b'Hello World 1' if page_timeout > max_age else b'Hello World 2',
                )
            cache.clear()

    def test_cached_control_private_not_cached(self):
        """Responses with 'Cache-Control: private' are not cached."""
        view_with_private_cache = cache_page(3)(cache_control(private=True)(hello_world_view))
        request = self.factory.get('/view/')
        response = view_with_private_cache(request, '1')
        self.assertEqual(response.content, b'Hello World 1')
        response = view_with_private_cache(request, '2')
        self.assertEqual(response.content, b'Hello World 2')

    def test_sensitive_cookie_not_cached(self):
        """
        Django must prevent caching of responses that set a user-specific
        (and maybe security sensitive) cookie in response to a cookie-less
        request.
        """
        request = self.factory.get('/view/')
        csrf_middleware = CsrfViewMiddleware(csrf_view)
        csrf_middleware.process_view(request, csrf_view, (), {})

        cache_middleware = CacheMiddleware(csrf_middleware)
        self.assertIsNone(cache_middleware.process_request(request))
        cache_middleware(request)

        # Inserting a CSRF cookie in a cookie-less request prevented caching.
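        # A second fetch is therefore a cache miss rather than a cached
        # response carrying another client's CSRF cookie.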
self.assertIsNone(cache_middleware.process_request(request)) def test_304_response_has_http_caching_headers_but_not_cached(self): original_view = mock.Mock(return_value=HttpResponseNotModified()) view = cache_page(2)(original_view) request = self.factory.get('/view/') # The view shouldn't be cached on the second call. view(request).close() response = view(request) response.close() self.assertEqual(original_view.call_count, 2) self.assertIsInstance(response, HttpResponseNotModified) self.assertIn('Cache-Control', response) self.assertIn('Expires', response) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHE_MIDDLEWARE_SECONDS=1, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, USE_I18N=False, ) class TestWithTemplateResponse(SimpleTestCase): """ Tests various headers w/ TemplateResponse. Most are probably redundant since they manipulate the same object anyway but the ETag header is 'special' because it relies on the content being complete (which is not necessarily always the case with a TemplateResponse) """ path = '/cache/test/' factory = RequestFactory() def tearDown(self): cache.clear() def test_patch_vary_headers(self): headers = ( # Initial vary, new headers, resulting vary. (None, ('Accept-Encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'), ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ) for initial_vary, newheaders, resulting_vary in headers: with self.subTest(initial_vary=initial_vary, newheaders=newheaders): template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) if initial_vary is not None: response['Vary'] = initial_vary patch_vary_headers(response, newheaders) self.assertEqual(response['Vary'], resulting_vary) def test_get_cache_key(self): request = self.factory.get(self.path) template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) key_prefix = 'localprefix' # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e' ) # A specified key_prefix is taken into account. learn_cache_key(request, response, key_prefix=key_prefix) self.assertEqual( get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.' '58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e' ) def test_get_cache_key_with_query(self): request = self.factory.get(self.path, {'test': 1}) template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) # The querystring is taken into account. 
self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '0f1c2d56633c943073c4569d9a9502fe.d41d8cd98f00b204e9800998ecf8427e' ) class TestMakeTemplateFragmentKey(SimpleTestCase): def test_without_vary_on(self): key = make_template_fragment_key('a.fragment') self.assertEqual(key, 'template.cache.a.fragment.d41d8cd98f00b204e9800998ecf8427e') def test_with_one_vary_on(self): key = make_template_fragment_key('foo', ['abc']) self.assertEqual(key, 'template.cache.foo.493e283d571a73056196f1a68efd0f66') def test_with_many_vary_on(self): key = make_template_fragment_key('bar', ['abc', 'def']) self.assertEqual(key, 'template.cache.bar.17c1a507a0cb58384f4c639067a93520') def test_proper_escaping(self): key = make_template_fragment_key('spam', ['abc:def%']) self.assertEqual(key, 'template.cache.spam.06c8ae8e8c430b69fb0a6443504153dc') def test_with_ints_vary_on(self): key = make_template_fragment_key('foo', [1, 2, 3, 4, 5]) self.assertEqual(key, 'template.cache.foo.7ae8fd2e0d25d651c683bdeebdb29461') def test_with_unicode_vary_on(self): key = make_template_fragment_key('foo', ['42º', '😀']) self.assertEqual(key, 'template.cache.foo.7ced1c94e543668590ba39b3c08b0237') def test_long_vary_on(self): key = make_template_fragment_key('foo', ['x' * 10000]) self.assertEqual(key, 'template.cache.foo.3670b349b5124aa56bdb50678b02b23a') class CacheHandlerTest(SimpleTestCase): def test_same_instance(self): """ Attempting to retrieve the same alias should yield the same instance. """ cache1 = caches['default'] cache2 = caches['default'] self.assertIs(cache1, cache2) def test_per_thread(self): """ Requesting the same alias from separate threads should yield separate instances. """ c = [] def runner(): c.append(caches['default']) for x in range(2): t = threading.Thread(target=runner) t.start() t.join() self.assertIsNot(c[0], c[1])
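

# A minimal usage sketch (not part of the test suite): the fragment keys
# asserted in TestMakeTemplateFragmentKey are exactly what the {% cache %}
# template tag computes, so a cached fragment can be invalidated manually with
# the same (fragment_name, vary_on) pair. The fragment name 'sidebar' and its
# username argument are hypothetical.
def invalidate_sidebar_fragment(username):
    # Matches a template fragment cached with
    # {% cache 500 sidebar username %}.
    key = make_template_fragment_key('sidebar', [username])
    cache.delete(key)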
import datetime
import re
from datetime import date
from decimal import Decimal

from django import forms
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.db import models
from django.forms.models import (
    BaseModelFormSet, _get_foreign_key, inlineformset_factory,
    modelformset_factory,
)
from django.http import QueryDict
from django.test import TestCase, skipUnlessDBFeature

from .models import (
    AlternateBook, Author, AuthorMeeting, BetterAuthor, Book, BookWithCustomPK,
    BookWithOptionalAltEditor, ClassyMexicanRestaurant, CustomPrimaryKey,
    Location, Membership, MexicanRestaurant, Owner, OwnerProfile, Person,
    Place, Player, Poem, Poet, Post, Price, Product, Repository, Restaurant,
    Revision, Team,
)


class DeletionTests(TestCase):
    def test_deletion(self):
        PoetFormSet = modelformset_factory(Poet, fields="__all__", can_delete=True)
        poet = Poet.objects.create(name='test')
        data = {
            'form-TOTAL_FORMS': '1',
            'form-INITIAL_FORMS': '1',
            'form-MAX_NUM_FORMS': '0',
            'form-0-id': str(poet.pk),
            'form-0-name': 'test',
            'form-0-DELETE': 'on',
        }
        formset = PoetFormSet(data, queryset=Poet.objects.all())
        formset.save(commit=False)
        self.assertEqual(Poet.objects.count(), 1)

        formset.save()
        self.assertTrue(formset.is_valid())
        self.assertEqual(Poet.objects.count(), 0)

    def test_add_form_deletion_when_invalid(self):
        """
        Make sure that an add form that is filled out, but marked for
        deletion, doesn't cause validation errors.
        """
        PoetFormSet = modelformset_factory(Poet, fields="__all__", can_delete=True)
        poet = Poet.objects.create(name='test')
        # One existing untouched form and two new invalid forms.
        data = {
            'form-TOTAL_FORMS': '3',
            'form-INITIAL_FORMS': '1',
            'form-MAX_NUM_FORMS': '0',
            'form-0-id': str(poet.id),
            'form-0-name': 'test',
            'form-1-id': '',
            'form-1-name': 'x' * 1000,  # Too long
            'form-2-id': str(poet.id),  # Violate unique constraint
            'form-2-name': 'test2',
        }
        formset = PoetFormSet(data, queryset=Poet.objects.all())
        # Make sure this form doesn't pass validation.
        self.assertIs(formset.is_valid(), False)
        self.assertEqual(Poet.objects.count(), 1)

        # Then make sure that it *does* pass validation and delete the object,
        # even though the data in the new forms isn't actually valid.
        data['form-0-DELETE'] = 'on'
        data['form-1-DELETE'] = 'on'
        data['form-2-DELETE'] = 'on'
        formset = PoetFormSet(data, queryset=Poet.objects.all())
        self.assertIs(formset.is_valid(), True)
        formset.save()
        self.assertEqual(Poet.objects.count(), 0)

    def test_change_form_deletion_when_invalid(self):
        """
        Make sure that a change form that is filled out, but marked for
        deletion, doesn't cause validation errors.
        """
        PoetFormSet = modelformset_factory(Poet, fields="__all__", can_delete=True)
        poet = Poet.objects.create(name='test')
        data = {
            'form-TOTAL_FORMS': '1',
            'form-INITIAL_FORMS': '1',
            'form-MAX_NUM_FORMS': '0',
            'form-0-id': str(poet.id),
            'form-0-name': 'x' * 1000,
        }
        formset = PoetFormSet(data, queryset=Poet.objects.all())
        # Make sure this form doesn't pass validation.
        self.assertIs(formset.is_valid(), False)
        self.assertEqual(Poet.objects.count(), 1)

        # Then make sure that it *does* pass validation and delete the object,
        # even though the data isn't actually valid.
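        # (Formsets skip validation entirely for forms flagged for deletion,
        # which is why the over-long value no longer matters.)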
data['form-0-DELETE'] = 'on' formset = PoetFormSet(data, queryset=Poet.objects.all()) self.assertIs(formset.is_valid(), True) formset.save() self.assertEqual(Poet.objects.count(), 0) def test_outdated_deletion(self): poet = Poet.objects.create(name='test') poem = Poem.objects.create(name='Brevity is the soul of wit', poet=poet) PoemFormSet = inlineformset_factory(Poet, Poem, fields="__all__", can_delete=True) # Simulate deletion of an object that doesn't exist in the database data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '2', 'form-0-id': str(poem.pk), 'form-0-name': 'foo', 'form-1-id': str(poem.pk + 1), # doesn't exist 'form-1-name': 'bar', 'form-1-DELETE': 'on', } formset = PoemFormSet(data, instance=poet, prefix="form") # The formset is valid even though poem.pk + 1 doesn't exist, # because it's marked for deletion anyway self.assertTrue(formset.is_valid()) formset.save() # Make sure the save went through correctly self.assertEqual(Poem.objects.get(pk=poem.pk).name, "foo") self.assertEqual(poet.poem_set.count(), 1) self.assertFalse(Poem.objects.filter(pk=poem.pk + 1).exists()) class ModelFormsetTest(TestCase): def test_modelformset_factory_without_fields(self): """ Regression for #19733 """ message = ( "Calling modelformset_factory without defining 'fields' or 'exclude' " "explicitly is prohibited." ) with self.assertRaisesMessage(ImproperlyConfigured, message): modelformset_factory(Author) def test_simple_save(self): qs = Author.objects.all() AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=3) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 3) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_form-0-name">Name:</label>' '<input id="id_form-0-name" type="text" name="form-0-name" maxlength="100">' '<input type="hidden" name="form-0-id" id="id_form-0-id"></p>' ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_form-1-name">Name:</label>' '<input id="id_form-1-name" type="text" name="form-1-name" maxlength="100">' '<input type="hidden" name="form-1-id" id="id_form-1-id"></p>' ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_form-2-name">Name:</label>' ' <input id="id_form-2-name" type="text" name="form-2-name" maxlength="100">' '<input type="hidden" name="form-2-id" id="id_form-2-id"></p>' ) data = { 'form-TOTAL_FORMS': '3', # the number of forms rendered 'form-INITIAL_FORMS': '0', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-name': 'Charles Baudelaire', 'form-1-name': 'Arthur Rimbaud', 'form-2-name': '', } formset = AuthorFormSet(data=data, queryset=qs) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 2) author1, author2 = saved self.assertEqual(author1, Author.objects.get(name='Charles Baudelaire')) self.assertEqual(author2, Author.objects.get(name='Arthur Rimbaud')) authors = list(Author.objects.order_by('name')) self.assertEqual(authors, [author2, author1]) # Gah! We forgot Paul Verlaine. Let's create a formset to edit the # existing authors with an extra form to add him. We *could* pass in a # queryset to restrict the Author objects we edit, but in this case # we'll use it to display them in alphabetical order by name. 
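# An illustrative sketch (not executed here; it reuses AuthorFormSet from
# above): the queryset argument can just as well restrict which rows are
# editable, e.g.
#     formset = AuthorFormSet(queryset=Author.objects.filter(name__startswith='C'))
# would bind initial forms only for the matching authors.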
qs = Author.objects.order_by('name') AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=1, can_delete=False) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 3) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_form-0-name">Name:</label>' '<input id="id_form-0-name" type="text" name="form-0-name" value="Arthur Rimbaud" maxlength="100">' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id"></p>' % author2.id ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_form-1-name">Name:</label>' '<input id="id_form-1-name" type="text" name="form-1-name" value="Charles Baudelaire" maxlength="100">' '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id"></p>' % author1.id ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_form-2-name">Name:</label>' '<input id="id_form-2-name" type="text" name="form-2-name" maxlength="100">' '<input type="hidden" name="form-2-id" id="id_form-2-id"></p>' ) data = { 'form-TOTAL_FORMS': '3', # the number of forms rendered 'form-INITIAL_FORMS': '2', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-id': str(author2.id), 'form-0-name': 'Arthur Rimbaud', 'form-1-id': str(author1.id), 'form-1-name': 'Charles Baudelaire', 'form-2-name': 'Paul Verlaine', } formset = AuthorFormSet(data=data, queryset=qs) self.assertTrue(formset.is_valid()) # Only changed or new objects are returned from formset.save() saved = formset.save() self.assertEqual(len(saved), 1) author3 = saved[0] self.assertEqual(author3, Author.objects.get(name='Paul Verlaine')) authors = list(Author.objects.order_by('name')) self.assertEqual(authors, [author2, author1, author3]) # This probably shouldn't happen, but it will. If an add form was # marked for deletion, make sure we don't save that form. 
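# (Sketch of why the deleted add form is never saved: BaseModelFormSet's
# save_new_objects() skips extra forms that are marked for deletion before
# calling save_new(), so nothing for that form reaches the database.
# Paraphrased from django/forms/models.py; verify method names against the
# source before relying on them.)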
qs = Author.objects.order_by('name') AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=1, can_delete=True) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 4) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_form-0-name">Name:</label>' '<input id="id_form-0-name" type="text" name="form-0-name" ' 'value="Arthur Rimbaud" maxlength="100"></p>' '<p><label for="id_form-0-DELETE">Delete:</label>' '<input type="checkbox" name="form-0-DELETE" id="id_form-0-DELETE">' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id"></p>' % author2.id ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_form-1-name">Name:</label>' '<input id="id_form-1-name" type="text" name="form-1-name" ' 'value="Charles Baudelaire" maxlength="100"></p>' '<p><label for="id_form-1-DELETE">Delete:</label>' '<input type="checkbox" name="form-1-DELETE" id="id_form-1-DELETE">' '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id"></p>' % author1.id ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_form-2-name">Name:</label>' '<input id="id_form-2-name" type="text" name="form-2-name" ' 'value="Paul Verlaine" maxlength="100"></p>' '<p><label for="id_form-2-DELETE">Delete:</label>' '<input type="checkbox" name="form-2-DELETE" id="id_form-2-DELETE">' '<input type="hidden" name="form-2-id" value="%d" id="id_form-2-id"></p>' % author3.id ) self.assertHTMLEqual( formset.forms[3].as_p(), '<p><label for="id_form-3-name">Name:</label>' '<input id="id_form-3-name" type="text" name="form-3-name" maxlength="100"></p>' '<p><label for="id_form-3-DELETE">Delete:</label>' '<input type="checkbox" name="form-3-DELETE" id="id_form-3-DELETE">' '<input type="hidden" name="form-3-id" id="id_form-3-id"></p>' ) data = { 'form-TOTAL_FORMS': '4', # the number of forms rendered 'form-INITIAL_FORMS': '3', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-id': str(author2.id), 'form-0-name': 'Arthur Rimbaud', 'form-1-id': str(author1.id), 'form-1-name': 'Charles Baudelaire', 'form-2-id': str(author3.id), 'form-2-name': 'Paul Verlaine', 'form-3-name': 'Walt Whitman', 'form-3-DELETE': 'on', } formset = AuthorFormSet(data=data, queryset=qs) self.assertTrue(formset.is_valid()) # No objects were changed or saved so nothing will come back. self.assertEqual(formset.save(), []) authors = list(Author.objects.order_by('name')) self.assertEqual(authors, [author2, author1, author3]) # Let's edit a record to ensure save only returns that one record. data = { 'form-TOTAL_FORMS': '4', # the number of forms rendered 'form-INITIAL_FORMS': '3', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-id': str(author2.id), 'form-0-name': 'Walt Whitman', 'form-1-id': str(author1.id), 'form-1-name': 'Charles Baudelaire', 'form-2-id': str(author3.id), 'form-2-name': 'Paul Verlaine', 'form-3-name': '', 'form-3-DELETE': '', } formset = AuthorFormSet(data=data, queryset=qs) self.assertTrue(formset.is_valid()) # One record has changed. 
saved = formset.save() self.assertEqual(len(saved), 1) self.assertEqual(saved[0], Author.objects.get(name='Walt Whitman')) def test_commit_false(self): # Test the behavior of commit=False and save_m2m author1 = Author.objects.create(name='Charles Baudelaire') author2 = Author.objects.create(name='Paul Verlaine') author3 = Author.objects.create(name='Walt Whitman') meeting = AuthorMeeting.objects.create(created=date.today()) meeting.authors.set(Author.objects.all()) # create an Author instance to add to the meeting. author4 = Author.objects.create(name='John Steinbeck') AuthorMeetingFormSet = modelformset_factory(AuthorMeeting, fields="__all__", extra=1, can_delete=True) data = { 'form-TOTAL_FORMS': '2', # the number of forms rendered 'form-INITIAL_FORMS': '1', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-id': str(meeting.id), 'form-0-name': '2nd Tuesday of the Week Meeting', 'form-0-authors': [author2.id, author1.id, author3.id, author4.id], 'form-1-name': '', 'form-1-authors': '', 'form-1-DELETE': '', } formset = AuthorMeetingFormSet(data=data, queryset=AuthorMeeting.objects.all()) self.assertTrue(formset.is_valid()) instances = formset.save(commit=False) for instance in instances: instance.created = date.today() instance.save() formset.save_m2m() self.assertQuerysetEqual(instances[0].authors.all(), [ '<Author: Charles Baudelaire>', '<Author: John Steinbeck>', '<Author: Paul Verlaine>', '<Author: Walt Whitman>', ]) def test_max_num(self): # Test the behavior of max_num with model formsets. It should allow # all existing related objects/inlines for a given object to be # displayed, but not allow the creation of new inlines beyond max_num. Author.objects.create(name='Charles Baudelaire') Author.objects.create(name='Paul Verlaine') Author.objects.create(name='Walt Whitman') qs = Author.objects.order_by('name') AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=None, extra=3) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 6) self.assertEqual(len(formset.extra_forms), 3) AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=4, extra=3) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 4) self.assertEqual(len(formset.extra_forms), 1) AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=0, extra=3) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 3) self.assertEqual(len(formset.extra_forms), 0) AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=None) formset = AuthorFormSet(queryset=qs) self.assertQuerysetEqual(formset.get_queryset(), [ '<Author: Charles Baudelaire>', '<Author: Paul Verlaine>', '<Author: Walt Whitman>', ]) AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=0) formset = AuthorFormSet(queryset=qs) self.assertQuerysetEqual(formset.get_queryset(), [ '<Author: Charles Baudelaire>', '<Author: Paul Verlaine>', '<Author: Walt Whitman>', ]) AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=4) formset = AuthorFormSet(queryset=qs) self.assertQuerysetEqual(formset.get_queryset(), [ '<Author: Charles Baudelaire>', '<Author: Paul Verlaine>', '<Author: Walt Whitman>', ]) def test_min_num(self): # Test the behavior of min_num with model formsets. It should be # added to extra. 
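# (The form counts asserted below follow, roughly, the unbound-formset rule in
# BaseFormSet.total_form_count():
#     total = max(initial_form_count, min_num) + extra  # then clamped to max_num
# A paraphrase of django/forms/formsets.py, not a verbatim quote.)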
qs = Author.objects.none() AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=0) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 0) AuthorFormSet = modelformset_factory(Author, fields="__all__", min_num=1, extra=0) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 1) AuthorFormSet = modelformset_factory(Author, fields="__all__", min_num=1, extra=1) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 2) def test_min_num_with_existing(self): # Test the behavior of min_num with existing objects. Author.objects.create(name='Charles Baudelaire') qs = Author.objects.all() AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=0, min_num=1) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 1) def test_custom_save_method(self): class PoetForm(forms.ModelForm): def save(self, commit=True): # change the name to "Vladimir Mayakovsky" just to be a jerk. author = super().save(commit=False) author.name = "Vladimir Mayakovsky" if commit: author.save() return author PoetFormSet = modelformset_factory(Poet, fields="__all__", form=PoetForm) data = { 'form-TOTAL_FORMS': '3', # the number of forms rendered 'form-INITIAL_FORMS': '0', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-name': 'Walt Whitman', 'form-1-name': 'Charles Baudelaire', 'form-2-name': '', } qs = Poet.objects.all() formset = PoetFormSet(data=data, queryset=qs) self.assertTrue(formset.is_valid()) poets = formset.save() self.assertEqual(len(poets), 2) poet1, poet2 = poets self.assertEqual(poet1.name, 'Vladimir Mayakovsky') self.assertEqual(poet2.name, 'Vladimir Mayakovsky') def test_custom_form(self): """ modelformset_factory() respects fields and exclude parameters of a custom form. """ class PostForm1(forms.ModelForm): class Meta: model = Post fields = ('title', 'posted') class PostForm2(forms.ModelForm): class Meta: model = Post exclude = ('subtitle',) PostFormSet = modelformset_factory(Post, form=PostForm1) formset = PostFormSet() self.assertNotIn("subtitle", formset.forms[0].fields) PostFormSet = modelformset_factory(Post, form=PostForm2) formset = PostFormSet() self.assertNotIn("subtitle", formset.forms[0].fields) def test_custom_queryset_init(self): """ A queryset can be overridden in the formset's __init__() method.
""" Author.objects.create(name='Charles Baudelaire') Author.objects.create(name='Paul Verlaine') class BaseAuthorFormSet(BaseModelFormSet): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.queryset = Author.objects.filter(name__startswith='Charles') AuthorFormSet = modelformset_factory(Author, fields='__all__', formset=BaseAuthorFormSet) formset = AuthorFormSet() self.assertEqual(len(formset.get_queryset()), 1) def test_model_inheritance(self): BetterAuthorFormSet = modelformset_factory(BetterAuthor, fields="__all__") formset = BetterAuthorFormSet() self.assertEqual(len(formset.forms), 1) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_form-0-name">Name:</label>' '<input id="id_form-0-name" type="text" name="form-0-name" maxlength="100"></p>' '<p><label for="id_form-0-write_speed">Write speed:</label>' '<input type="number" name="form-0-write_speed" id="id_form-0-write_speed">' '<input type="hidden" name="form-0-author_ptr" id="id_form-0-author_ptr"></p>' ) data = { 'form-TOTAL_FORMS': '1', # the number of forms rendered 'form-INITIAL_FORMS': '0', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-author_ptr': '', 'form-0-name': 'Ernest Hemingway', 'form-0-write_speed': '10', } formset = BetterAuthorFormSet(data) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) author1, = saved self.assertEqual(author1, BetterAuthor.objects.get(name='Ernest Hemingway')) hemingway_id = BetterAuthor.objects.get(name="Ernest Hemingway").pk formset = BetterAuthorFormSet() self.assertEqual(len(formset.forms), 2) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_form-0-name">Name:</label>' '<input id="id_form-0-name" type="text" name="form-0-name" value="Ernest Hemingway" maxlength="100"></p>' '<p><label for="id_form-0-write_speed">Write speed:</label>' '<input type="number" name="form-0-write_speed" value="10" id="id_form-0-write_speed">' '<input type="hidden" name="form-0-author_ptr" value="%d" id="id_form-0-author_ptr"></p>' % hemingway_id ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_form-1-name">Name:</label>' '<input id="id_form-1-name" type="text" name="form-1-name" maxlength="100"></p>' '<p><label for="id_form-1-write_speed">Write speed:</label>' '<input type="number" name="form-1-write_speed" id="id_form-1-write_speed">' '<input type="hidden" name="form-1-author_ptr" id="id_form-1-author_ptr"></p>' ) data = { 'form-TOTAL_FORMS': '2', # the number of forms rendered 'form-INITIAL_FORMS': '1', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-author_ptr': hemingway_id, 'form-0-name': 'Ernest Hemingway', 'form-0-write_speed': '10', 'form-1-author_ptr': '', 'form-1-name': '', 'form-1-write_speed': '', } formset = BetterAuthorFormSet(data) self.assertTrue(formset.is_valid()) self.assertEqual(formset.save(), []) def test_inline_formsets(self): # We can also create a formset that is tied to a parent model. This is # how the admin system's edit inline functionality works. 
AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=3, fields="__all__") author = Author.objects.create(name='Charles Baudelaire') formset = AuthorBooksFormSet(instance=author) self.assertEqual(len(formset.forms), 3) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_book_set-0-title">Title:</label> <input id="id_book_set-0-title" type="text" ' 'name="book_set-0-title" maxlength="100"><input type="hidden" name="book_set-0-author" value="%d" ' 'id="id_book_set-0-author"><input type="hidden" name="book_set-0-id" id="id_book_set-0-id">' '</p>' % author.id ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_book_set-1-title">Title:</label>' '<input id="id_book_set-1-title" type="text" name="book_set-1-title" maxlength="100">' '<input type="hidden" name="book_set-1-author" value="%d" id="id_book_set-1-author">' '<input type="hidden" name="book_set-1-id" id="id_book_set-1-id"></p>' % author.id ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_book_set-2-title">Title:</label>' '<input id="id_book_set-2-title" type="text" name="book_set-2-title" maxlength="100">' '<input type="hidden" name="book_set-2-author" value="%d" id="id_book_set-2-author">' '<input type="hidden" name="book_set-2-id" id="id_book_set-2-id"></p>' % author.id ) data = { 'book_set-TOTAL_FORMS': '3', # the number of forms rendered 'book_set-INITIAL_FORMS': '0', # the number of forms with initial data 'book_set-MAX_NUM_FORMS': '', # the max number of forms 'book_set-0-title': 'Les Fleurs du Mal', 'book_set-1-title': '', 'book_set-2-title': '', } formset = AuthorBooksFormSet(data, instance=author) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) book1, = saved self.assertEqual(book1, Book.objects.get(title='Les Fleurs du Mal')) self.assertQuerysetEqual(author.book_set.all(), ['<Book: Les Fleurs du Mal>']) # Now that we've added a book to Charles Baudelaire, let's try adding # another one. This time though, an edit form will be available for # every existing book. 
AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=2, fields="__all__") author = Author.objects.get(name='Charles Baudelaire') formset = AuthorBooksFormSet(instance=author) self.assertEqual(len(formset.forms), 3) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_book_set-0-title">Title:</label>' '<input id="id_book_set-0-title" type="text" name="book_set-0-title" ' 'value="Les Fleurs du Mal" maxlength="100">' '<input type="hidden" name="book_set-0-author" value="%d" id="id_book_set-0-author">' '<input type="hidden" name="book_set-0-id" value="%d" id="id_book_set-0-id"></p>' % ( author.id, book1.id, ) ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_book_set-1-title">Title:</label>' '<input id="id_book_set-1-title" type="text" name="book_set-1-title" maxlength="100">' '<input type="hidden" name="book_set-1-author" value="%d" id="id_book_set-1-author">' '<input type="hidden" name="book_set-1-id" id="id_book_set-1-id"></p>' % author.id ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_book_set-2-title">Title:</label>' '<input id="id_book_set-2-title" type="text" name="book_set-2-title" maxlength="100">' '<input type="hidden" name="book_set-2-author" value="%d" id="id_book_set-2-author">' '<input type="hidden" name="book_set-2-id" id="id_book_set-2-id"></p>' % author.id ) data = { 'book_set-TOTAL_FORMS': '3', # the number of forms rendered 'book_set-INITIAL_FORMS': '1', # the number of forms with initial data 'book_set-MAX_NUM_FORMS': '', # the max number of forms 'book_set-0-id': str(book1.id), 'book_set-0-title': 'Les Fleurs du Mal', 'book_set-1-title': 'Les Paradis Artificiels', 'book_set-2-title': '', } formset = AuthorBooksFormSet(data, instance=author) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) book2, = saved self.assertEqual(book2, Book.objects.get(title='Les Paradis Artificiels')) # As you can see, 'Les Paradis Artificiels' is now a book belonging to # Charles Baudelaire. self.assertQuerysetEqual(author.book_set.order_by('title'), [ '<Book: Les Fleurs du Mal>', '<Book: Les Paradis Artificiels>', ]) def test_inline_formsets_save_as_new(self): # The save_as_new parameter lets you re-associate the data to a new # instance. This is used in the admin for save_as functionality. AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=2, fields="__all__") Author.objects.create(name='Charles Baudelaire') # An immutable QueryDict simulates request.POST. data = QueryDict(mutable=True) data.update({ 'book_set-TOTAL_FORMS': '3', # the number of forms rendered 'book_set-INITIAL_FORMS': '2', # the number of forms with initial data 'book_set-MAX_NUM_FORMS': '', # the max number of forms 'book_set-0-id': '1', 'book_set-0-title': 'Les Fleurs du Mal', 'book_set-1-id': '2', 'book_set-1-title': 'Les Paradis Artificiels', 'book_set-2-title': '', }) data._mutable = False formset = AuthorBooksFormSet(data, instance=Author(), save_as_new=True) self.assertTrue(formset.is_valid()) self.assertIs(data._mutable, False) new_author = Author.objects.create(name='Charles Baudelaire') formset = AuthorBooksFormSet(data, instance=new_author, save_as_new=True) saved = formset.save() self.assertEqual(len(saved), 2) book1, book2 = saved self.assertEqual(book1.title, 'Les Fleurs du Mal') self.assertEqual(book2.title, 'Les Paradis Artificiels') # Test using a custom prefix on an inline formset. 
formset = AuthorBooksFormSet(prefix="test") self.assertEqual(len(formset.forms), 2) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_test-0-title">Title:</label>' '<input id="id_test-0-title" type="text" name="test-0-title" maxlength="100">' '<input type="hidden" name="test-0-author" id="id_test-0-author">' '<input type="hidden" name="test-0-id" id="id_test-0-id"></p>' ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_test-1-title">Title:</label>' '<input id="id_test-1-title" type="text" name="test-1-title" maxlength="100">' '<input type="hidden" name="test-1-author" id="id_test-1-author">' '<input type="hidden" name="test-1-id" id="id_test-1-id"></p>' ) def test_inline_formsets_with_custom_pk(self): # Test inline formsets where the inline-edited object has a custom # primary key that is not the fk to the parent object. self.maxDiff = 1024 AuthorBooksFormSet2 = inlineformset_factory( Author, BookWithCustomPK, can_delete=False, extra=1, fields="__all__" ) author = Author.objects.create(pk=1, name='Charles Baudelaire') formset = AuthorBooksFormSet2(instance=author) self.assertEqual(len(formset.forms), 1) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_bookwithcustompk_set-0-my_pk">My pk:</label>' '<input id="id_bookwithcustompk_set-0-my_pk" type="number" ' 'name="bookwithcustompk_set-0-my_pk" step="1"></p>' '<p><label for="id_bookwithcustompk_set-0-title">Title:</label>' '<input id="id_bookwithcustompk_set-0-title" type="text" ' 'name="bookwithcustompk_set-0-title" maxlength="100">' '<input type="hidden" name="bookwithcustompk_set-0-author" ' 'value="1" id="id_bookwithcustompk_set-0-author"></p>' ) data = { 'bookwithcustompk_set-TOTAL_FORMS': '1', # the number of forms rendered 'bookwithcustompk_set-INITIAL_FORMS': '0', # the number of forms with initial data 'bookwithcustompk_set-MAX_NUM_FORMS': '', # the max number of forms 'bookwithcustompk_set-0-my_pk': '77777', 'bookwithcustompk_set-0-title': 'Les Fleurs du Mal', } formset = AuthorBooksFormSet2(data, instance=author) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) book1, = saved self.assertEqual(book1.pk, 77777) book1 = author.bookwithcustompk_set.get() self.assertEqual(book1.title, 'Les Fleurs du Mal') def test_inline_formsets_with_multi_table_inheritance(self): # Test inline formsets where the inline-edited object uses multi-table # inheritance, thus has a non AutoField yet auto-created primary key. 
AuthorBooksFormSet3 = inlineformset_factory(Author, AlternateBook, can_delete=False, extra=1, fields="__all__") author = Author.objects.create(pk=1, name='Charles Baudelaire') formset = AuthorBooksFormSet3(instance=author) self.assertEqual(len(formset.forms), 1) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_alternatebook_set-0-title">Title:</label>' '<input id="id_alternatebook_set-0-title" type="text" ' 'name="alternatebook_set-0-title" maxlength="100"></p>' '<p><label for="id_alternatebook_set-0-notes">Notes:</label>' '<input id="id_alternatebook_set-0-notes" type="text" ' 'name="alternatebook_set-0-notes" maxlength="100">' '<input type="hidden" name="alternatebook_set-0-author" value="1" ' 'id="id_alternatebook_set-0-author">' '<input type="hidden" name="alternatebook_set-0-book_ptr" ' 'id="id_alternatebook_set-0-book_ptr"></p>' ) data = { 'alternatebook_set-TOTAL_FORMS': '1', # the number of forms rendered 'alternatebook_set-INITIAL_FORMS': '0', # the number of forms with initial data 'alternatebook_set-MAX_NUM_FORMS': '', # the max number of forms 'alternatebook_set-0-title': 'Flowers of Evil', 'alternatebook_set-0-notes': 'English translation of Les Fleurs du Mal' } formset = AuthorBooksFormSet3(data, instance=author) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) book1, = saved self.assertEqual(book1.title, 'Flowers of Evil') self.assertEqual(book1.notes, 'English translation of Les Fleurs du Mal') @skipUnlessDBFeature('supports_partially_nullable_unique_constraints') def test_inline_formsets_with_nullable_unique_together(self): # Test inline formsets where the inline-edited object has a # unique_together constraint with a nullable member AuthorBooksFormSet4 = inlineformset_factory( Author, BookWithOptionalAltEditor, can_delete=False, extra=2, fields="__all__" ) author = Author.objects.create(pk=1, name='Charles Baudelaire') data = { 'bookwithoptionalalteditor_set-TOTAL_FORMS': '2', # the number of forms rendered 'bookwithoptionalalteditor_set-INITIAL_FORMS': '0', # the number of forms with initial data 'bookwithoptionalalteditor_set-MAX_NUM_FORMS': '', # the max number of forms 'bookwithoptionalalteditor_set-0-author': '1', 'bookwithoptionalalteditor_set-0-title': 'Les Fleurs du Mal', 'bookwithoptionalalteditor_set-1-author': '1', 'bookwithoptionalalteditor_set-1-title': 'Les Fleurs du Mal', } formset = AuthorBooksFormSet4(data, instance=author) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 2) book1, book2 = saved self.assertEqual(book1.author_id, 1) self.assertEqual(book1.title, 'Les Fleurs du Mal') self.assertEqual(book2.author_id, 1) self.assertEqual(book2.title, 'Les Fleurs du Mal') def test_inline_formsets_with_custom_save_method(self): AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=2, fields="__all__") author = Author.objects.create(pk=1, name='Charles Baudelaire') book1 = Book.objects.create(pk=1, author=author, title='Les Paradis Artificiels') book2 = Book.objects.create(pk=2, author=author, title='Les Fleurs du Mal') book3 = Book.objects.create(pk=3, author=author, title='Flowers of Evil') class PoemForm(forms.ModelForm): def save(self, commit=True): # change the name to "Brooklyn Bridge" just to be a jerk. 
poem = super().save(commit=False) poem.name = "Brooklyn Bridge" if commit: poem.save() return poem PoemFormSet = inlineformset_factory(Poet, Poem, form=PoemForm, fields="__all__") data = { 'poem_set-TOTAL_FORMS': '3', # the number of forms rendered 'poem_set-INITIAL_FORMS': '0', # the number of forms with initial data 'poem_set-MAX_NUM_FORMS': '', # the max number of forms 'poem_set-0-name': 'The Cloud in Trousers', 'poem_set-1-name': 'I', 'poem_set-2-name': '', } poet = Poet.objects.create(name='Vladimir Mayakovsky') formset = PoemFormSet(data=data, instance=poet) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 2) poem1, poem2 = saved self.assertEqual(poem1.name, 'Brooklyn Bridge') self.assertEqual(poem2.name, 'Brooklyn Bridge') # We can provide a custom queryset to our InlineFormSet: custom_qs = Book.objects.order_by('-title') formset = AuthorBooksFormSet(instance=author, queryset=custom_qs) self.assertEqual(len(formset.forms), 5) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_book_set-0-title">Title:</label>' '<input id="id_book_set-0-title" type="text" name="book_set-0-title" ' 'value="Les Paradis Artificiels" maxlength="100">' '<input type="hidden" name="book_set-0-author" value="1" id="id_book_set-0-author">' '<input type="hidden" name="book_set-0-id" value="1" id="id_book_set-0-id"></p>' ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_book_set-1-title">Title:</label>' '<input id="id_book_set-1-title" type="text" name="book_set-1-title" ' 'value="Les Fleurs du Mal" maxlength="100">' '<input type="hidden" name="book_set-1-author" value="1" id="id_book_set-1-author">' '<input type="hidden" name="book_set-1-id" value="2" id="id_book_set-1-id"></p>' ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_book_set-2-title">Title:</label>' '<input id="id_book_set-2-title" type="text" name="book_set-2-title" ' 'value="Flowers of Evil" maxlength="100">' '<input type="hidden" name="book_set-2-author" value="1" id="id_book_set-2-author">' '<input type="hidden" name="book_set-2-id" value="3" id="id_book_set-2-id"></p>' ) self.assertHTMLEqual( formset.forms[3].as_p(), '<p><label for="id_book_set-3-title">Title:</label>' '<input id="id_book_set-3-title" type="text" name="book_set-3-title" maxlength="100">' '<input type="hidden" name="book_set-3-author" value="1" id="id_book_set-3-author">' '<input type="hidden" name="book_set-3-id" id="id_book_set-3-id"></p>' ) self.assertHTMLEqual( formset.forms[4].as_p(), '<p><label for="id_book_set-4-title">Title:</label>' '<input id="id_book_set-4-title" type="text" name="book_set-4-title" maxlength="100">' '<input type="hidden" name="book_set-4-author" value="1" id="id_book_set-4-author">' '<input type="hidden" name="book_set-4-id" id="id_book_set-4-id"></p>' ) data = { 'book_set-TOTAL_FORMS': '5', # the number of forms rendered 'book_set-INITIAL_FORMS': '3', # the number of forms with initial data 'book_set-MAX_NUM_FORMS': '', # the max number of forms 'book_set-0-id': str(book1.id), 'book_set-0-title': 'Les Paradis Artificiels', 'book_set-1-id': str(book2.id), 'book_set-1-title': 'Les Fleurs du Mal', 'book_set-2-id': str(book3.id), 'book_set-2-title': 'Flowers of Evil', 'book_set-3-title': 'Revue des deux mondes', 'book_set-4-title': '', } formset = AuthorBooksFormSet(data, instance=author, queryset=custom_qs) self.assertTrue(formset.is_valid()) custom_qs = Book.objects.filter(title__startswith='F') formset = AuthorBooksFormSet(instance=author, queryset=custom_qs) 
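# (With the narrower queryset only one existing Book matches the filter, so
# the formset below renders one bound initial form plus the two extra blank
# forms, as the assertions that follow check.)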
self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_book_set-0-title">Title:</label>' '<input id="id_book_set-0-title" type="text" name="book_set-0-title" ' 'value="Flowers of Evil" maxlength="100">' '<input type="hidden" name="book_set-0-author" value="1" id="id_book_set-0-author">' '<input type="hidden" name="book_set-0-id" value="3" id="id_book_set-0-id"></p>' ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_book_set-1-title">Title:</label>' '<input id="id_book_set-1-title" type="text" name="book_set-1-title" maxlength="100">' '<input type="hidden" name="book_set-1-author" value="1" id="id_book_set-1-author">' '<input type="hidden" name="book_set-1-id" id="id_book_set-1-id"></p>' ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_book_set-2-title">Title:</label>' '<input id="id_book_set-2-title" type="text" name="book_set-2-title" maxlength="100">' '<input type="hidden" name="book_set-2-author" value="1" id="id_book_set-2-author">' '<input type="hidden" name="book_set-2-id" id="id_book_set-2-id"></p>' ) data = { 'book_set-TOTAL_FORMS': '3', # the number of forms rendered 'book_set-INITIAL_FORMS': '1', # the number of forms with initial data 'book_set-MAX_NUM_FORMS': '', # the max number of forms 'book_set-0-id': str(book3.id), 'book_set-0-title': 'Flowers of Evil', 'book_set-1-title': 'Revue des deux mondes', 'book_set-2-title': '', } formset = AuthorBooksFormSet(data, instance=author, queryset=custom_qs) self.assertTrue(formset.is_valid()) def test_inline_formsets_with_custom_save_method_related_instance(self): """ The ModelForm.save() method should be able to access the related object if it exists in the database (#24395). """ class PoemForm2(forms.ModelForm): def save(self, commit=True): poem = super().save(commit=False) poem.name = "%s by %s" % (poem.name, poem.poet.name) if commit: poem.save() return poem PoemFormSet = inlineformset_factory(Poet, Poem, form=PoemForm2, fields="__all__") data = { 'poem_set-TOTAL_FORMS': '1', 'poem_set-INITIAL_FORMS': '0', 'poem_set-MAX_NUM_FORMS': '', 'poem_set-0-name': 'Le Lac', } poet = Poet() formset = PoemFormSet(data=data, instance=poet) self.assertTrue(formset.is_valid()) # The Poet instance is saved after the formset instantiation. This # happens in admin's changeform_view() when adding a new object and # some inlines in the same request. poet.name = 'Lamartine' poet.save() poem = formset.save()[0] self.assertEqual(poem.name, 'Le Lac by Lamartine') def test_inline_formsets_with_wrong_fk_name(self): """ Regression for #23451 """ message = "fk_name 'title' is not a ForeignKey to 'model_formsets.Author'." 
with self.assertRaisesMessage(ValueError, message): inlineformset_factory(Author, Book, fields="__all__", fk_name='title') def test_custom_pk(self): # We need to ensure that it is displayed CustomPrimaryKeyFormSet = modelformset_factory(CustomPrimaryKey, fields="__all__") formset = CustomPrimaryKeyFormSet() self.assertEqual(len(formset.forms), 1) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_form-0-my_pk">My pk:</label> <input id="id_form-0-my_pk" type="text" ' 'name="form-0-my_pk" maxlength="10"></p>' '<p><label for="id_form-0-some_field">Some field:</label>' '<input id="id_form-0-some_field" type="text" name="form-0-some_field" maxlength="100"></p>' ) # Custom primary keys with ForeignKey, OneToOneField and AutoField ############ place = Place.objects.create(pk=1, name='Giordanos', city='Chicago') FormSet = inlineformset_factory(Place, Owner, extra=2, can_delete=False, fields="__all__") formset = FormSet(instance=place) self.assertEqual(len(formset.forms), 2) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_owner_set-0-name">Name:</label>' '<input id="id_owner_set-0-name" type="text" name="owner_set-0-name" maxlength="100">' '<input type="hidden" name="owner_set-0-place" value="1" id="id_owner_set-0-place">' '<input type="hidden" name="owner_set-0-auto_id" id="id_owner_set-0-auto_id"></p>' ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_owner_set-1-name">Name:</label>' '<input id="id_owner_set-1-name" type="text" name="owner_set-1-name" maxlength="100">' '<input type="hidden" name="owner_set-1-place" value="1" id="id_owner_set-1-place">' '<input type="hidden" name="owner_set-1-auto_id" id="id_owner_set-1-auto_id"></p>' ) data = { 'owner_set-TOTAL_FORMS': '2', 'owner_set-INITIAL_FORMS': '0', 'owner_set-MAX_NUM_FORMS': '', 'owner_set-0-auto_id': '', 'owner_set-0-name': 'Joe Perry', 'owner_set-1-auto_id': '', 'owner_set-1-name': '', } formset = FormSet(data, instance=place) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) owner1, = saved self.assertEqual(owner1.name, 'Joe Perry') self.assertEqual(owner1.place.name, 'Giordanos') formset = FormSet(instance=place) self.assertEqual(len(formset.forms), 3) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_owner_set-0-name">Name:</label>' '<input id="id_owner_set-0-name" type="text" name="owner_set-0-name" value="Joe Perry" maxlength="100">' '<input type="hidden" name="owner_set-0-place" value="1" id="id_owner_set-0-place">' '<input type="hidden" name="owner_set-0-auto_id" value="%d" id="id_owner_set-0-auto_id"></p>' % owner1.auto_id ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_owner_set-1-name">Name:</label>' '<input id="id_owner_set-1-name" type="text" name="owner_set-1-name" maxlength="100">' '<input type="hidden" name="owner_set-1-place" value="1" id="id_owner_set-1-place">' '<input type="hidden" name="owner_set-1-auto_id" id="id_owner_set-1-auto_id"></p>' ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_owner_set-2-name">Name:</label>' '<input id="id_owner_set-2-name" type="text" name="owner_set-2-name" maxlength="100">' '<input type="hidden" name="owner_set-2-place" value="1" id="id_owner_set-2-place">' '<input type="hidden" name="owner_set-2-auto_id" id="id_owner_set-2-auto_id"></p>' ) data = { 'owner_set-TOTAL_FORMS': '3', 'owner_set-INITIAL_FORMS': '1', 'owner_set-MAX_NUM_FORMS': '', 'owner_set-0-auto_id': str(owner1.auto_id), 'owner_set-0-name': 'Joe Perry', 'owner_set-1-auto_id': 
'', 'owner_set-1-name': 'Jack Berry', 'owner_set-2-auto_id': '', 'owner_set-2-name': '', } formset = FormSet(data, instance=place) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) owner2, = saved self.assertEqual(owner2.name, 'Jack Berry') self.assertEqual(owner2.place.name, 'Giordanos') # Ensure a custom primary key that is a ForeignKey or OneToOneField gets rendered for the user to choose. FormSet = modelformset_factory(OwnerProfile, fields="__all__") formset = FormSet() self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_form-0-owner">Owner:</label>' '<select name="form-0-owner" id="id_form-0-owner">' '<option value="" selected>---------</option>' '<option value="%d">Joe Perry at Giordanos</option>' '<option value="%d">Jack Berry at Giordanos</option>' '</select></p>' '<p><label for="id_form-0-age">Age:</label>' '<input type="number" name="form-0-age" id="id_form-0-age" min="0"></p>' % (owner1.auto_id, owner2.auto_id) ) owner1 = Owner.objects.get(name='Joe Perry') FormSet = inlineformset_factory(Owner, OwnerProfile, max_num=1, can_delete=False, fields="__all__") self.assertEqual(FormSet.max_num, 1) formset = FormSet(instance=owner1) self.assertEqual(len(formset.forms), 1) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_ownerprofile-0-age">Age:</label>' '<input type="number" name="ownerprofile-0-age" id="id_ownerprofile-0-age" min="0">' '<input type="hidden" name="ownerprofile-0-owner" value="%d" id="id_ownerprofile-0-owner"></p>' % owner1.auto_id ) data = { 'ownerprofile-TOTAL_FORMS': '1', 'ownerprofile-INITIAL_FORMS': '0', 'ownerprofile-MAX_NUM_FORMS': '1', 'ownerprofile-0-owner': '', 'ownerprofile-0-age': '54', } formset = FormSet(data, instance=owner1) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) profile1, = saved self.assertEqual(profile1.owner, owner1) self.assertEqual(profile1.age, 54) formset = FormSet(instance=owner1) self.assertEqual(len(formset.forms), 1) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_ownerprofile-0-age">Age:</label>' '<input type="number" name="ownerprofile-0-age" value="54" id="id_ownerprofile-0-age" min="0">' '<input type="hidden" name="ownerprofile-0-owner" value="%d" id="id_ownerprofile-0-owner"></p>' % owner1.auto_id ) data = { 'ownerprofile-TOTAL_FORMS': '1', 'ownerprofile-INITIAL_FORMS': '1', 'ownerprofile-MAX_NUM_FORMS': '1', 'ownerprofile-0-owner': str(owner1.auto_id), 'ownerprofile-0-age': '55', } formset = FormSet(data, instance=owner1) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) profile1, = saved self.assertEqual(profile1.owner, owner1) self.assertEqual(profile1.age, 55) def test_unique_true_enforces_max_num_one(self): # ForeignKey with unique=True should enforce max_num=1 place = Place.objects.create(pk=1, name='Giordanos', city='Chicago') FormSet = inlineformset_factory(Place, Location, can_delete=False, fields="__all__") self.assertEqual(FormSet.max_num, 1) formset = FormSet(instance=place) self.assertEqual(len(formset.forms), 1) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_location_set-0-lat">Lat:</label>' '<input id="id_location_set-0-lat" type="text" name="location_set-0-lat" maxlength="100"></p>' '<p><label for="id_location_set-0-lon">Lon:</label> ' '<input id="id_location_set-0-lon" type="text" name="location_set-0-lon" maxlength="100">' '<input type="hidden" name="location_set-0-place" value="1" id="id_location_set-0-place">' '<input type="hidden" name="location_set-0-id" id="id_location_set-0-id"></p>' ) def test_foreign_keys_in_parents(self): self.assertEqual(type(_get_foreign_key(Restaurant, Owner)), models.ForeignKey) self.assertEqual(type(_get_foreign_key(MexicanRestaurant, Owner)), models.ForeignKey) def test_unique_validation(self): FormSet = modelformset_factory(Product, fields="__all__", extra=1) data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-slug': 'car-red', } formset = FormSet(data) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) product1, = saved self.assertEqual(product1.slug, 'car-red') data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-slug': 'car-red', } formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual(formset.errors, [{'slug': ['Product with this Slug already exists.']}]) def test_modelformset_validate_max_flag(self): # If validate_max is set and max_num is less than TOTAL_FORMS in the # data, then the formset fails validation. MAX_NUM_FORMS in the data is # irrelevant here (it's output as a hint for the client but its # value in the returned data is not checked). data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '2', # should be ignored 'form-0-price': '12.00', 'form-0-quantity': '1', 'form-1-price': '24.00', 'form-1-quantity': '2', } FormSet = modelformset_factory(Price, fields="__all__", extra=1, max_num=1, validate_max=True) formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual(formset.non_form_errors(), ['Please submit 1 or fewer forms.']) # Now test the same thing without the validate_max flag to ensure # default behavior is unchanged FormSet = modelformset_factory(Price, fields="__all__", extra=1, max_num=1) formset = FormSet(data) self.assertTrue(formset.is_valid()) def test_unique_together_validation(self): FormSet = modelformset_factory(Price, fields="__all__", extra=1) data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-price': '12.00', 'form-0-quantity': '1', } formset = FormSet(data) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) price1, = saved self.assertEqual(price1.price, Decimal('12.00')) self.assertEqual(price1.quantity, 1) data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-price': '12.00', 'form-0-quantity': '1', } formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual(formset.errors, [{'__all__': ['Price with this Price and Quantity already exists.']}]) def test_unique_together_with_inlineformset_factory(self): # Also see bug #8882. repository = Repository.objects.create(name='Test Repo') FormSet = inlineformset_factory(Repository, Revision, extra=1, fields="__all__") data = { 'revision_set-TOTAL_FORMS': '1', 'revision_set-INITIAL_FORMS': '0', 'revision_set-MAX_NUM_FORMS': '', 'revision_set-0-repository': repository.pk, 'revision_set-0-revision': '146239817507f148d448db38840db7c3cbf47c76', 'revision_set-0-DELETE': '', } formset = FormSet(data, instance=repository) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) revision1, = saved self.assertEqual(revision1.repository, repository) self.assertEqual(revision1.revision, '146239817507f148d448db38840db7c3cbf47c76') # attempt to save the same revision against the same repo.
data = { 'revision_set-TOTAL_FORMS': '1', 'revision_set-INITIAL_FORMS': '0', 'revision_set-MAX_NUM_FORMS': '', 'revision_set-0-repository': repository.pk, 'revision_set-0-revision': '146239817507f148d448db38840db7c3cbf47c76', 'revision_set-0-DELETE': '', } formset = FormSet(data, instance=repository) self.assertFalse(formset.is_valid()) self.assertEqual(formset.errors, [{'__all__': ['Revision with this Repository and Revision already exists.']}]) # unique_together validation with inlineformset_factory and overridden form fields # Also see #9494 FormSet = inlineformset_factory(Repository, Revision, fields=('revision',), extra=1) data = { 'revision_set-TOTAL_FORMS': '1', 'revision_set-INITIAL_FORMS': '0', 'revision_set-MAX_NUM_FORMS': '', 'revision_set-0-repository': repository.pk, 'revision_set-0-revision': '146239817507f148d448db38840db7c3cbf47c76', 'revision_set-0-DELETE': '', } formset = FormSet(data, instance=repository) self.assertFalse(formset.is_valid()) def test_callable_defaults(self): # Use of callable defaults (see bug #7975). person = Person.objects.create(name='Ringo') FormSet = inlineformset_factory(Person, Membership, can_delete=False, extra=1, fields="__all__") formset = FormSet(instance=person) # Django will render a hidden field for model fields that have a callable # default. This is required to ensure the value is tested for change correctly # when determining which extra forms have changed and need to be saved. self.assertEqual(len(formset.forms), 1) # this formset only has one form form = formset.forms[0] now = form.fields['date_joined'].initial() result = form.as_p() result = re.sub(r'[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}(?:\.[0-9]+)?', '__DATETIME__', result) self.assertHTMLEqual( result, '<p><label for="id_membership_set-0-date_joined">Date joined:</label>' '<input type="text" name="membership_set-0-date_joined" ' 'value="__DATETIME__" id="id_membership_set-0-date_joined">' '<input type="hidden" name="initial-membership_set-0-date_joined" value="__DATETIME__" ' 'id="initial-membership_set-0-id_membership_set-0-date_joined"></p>' '<p><label for="id_membership_set-0-karma">Karma:</label>' '<input type="number" name="membership_set-0-karma" id="id_membership_set-0-karma">' '<input type="hidden" name="membership_set-0-person" value="%d" id="id_membership_set-0-person">' '<input type="hidden" name="membership_set-0-id" id="id_membership_set-0-id"></p>' % person.id) # test for validation with callable defaults. Validations rely on hidden fields. data = { 'membership_set-TOTAL_FORMS': '1', 'membership_set-INITIAL_FORMS': '0', 'membership_set-MAX_NUM_FORMS': '', 'membership_set-0-date_joined': now.strftime('%Y-%m-%d %H:%M:%S'), 'initial-membership_set-0-date_joined': now.strftime('%Y-%m-%d %H:%M:%S'), 'membership_set-0-karma': '', } formset = FormSet(data, instance=person) self.assertTrue(formset.is_valid()) # now test for when the data changes one_day_later = now + datetime.timedelta(days=1) filled_data = { 'membership_set-TOTAL_FORMS': '1', 'membership_set-INITIAL_FORMS': '0', 'membership_set-MAX_NUM_FORMS': '', 'membership_set-0-date_joined': one_day_later.strftime('%Y-%m-%d %H:%M:%S'), 'initial-membership_set-0-date_joined': now.strftime('%Y-%m-%d %H:%M:%S'), 'membership_set-0-karma': '', } formset = FormSet(filled_data, instance=person) self.assertFalse(formset.is_valid()) # now test with split datetime fields class MembershipForm(forms.ModelForm): date_joined = forms.SplitDateTimeField(initial=now) class Meta: model = Membership fields = "__all__" def __init__(self, **kwargs): super().__init__(**kwargs) self.fields['date_joined'].widget = forms.SplitDateTimeWidget() FormSet = inlineformset_factory( Person, Membership, form=MembershipForm, can_delete=False, extra=1, fields="__all__", ) data = { 'membership_set-TOTAL_FORMS': '1', 'membership_set-INITIAL_FORMS': '0', 'membership_set-MAX_NUM_FORMS': '', 'membership_set-0-date_joined_0': now.strftime('%Y-%m-%d'), 'membership_set-0-date_joined_1': now.strftime('%H:%M:%S'), 'initial-membership_set-0-date_joined': now.strftime('%Y-%m-%d %H:%M:%S'), 'membership_set-0-karma': '', } formset = FormSet(data, instance=person) self.assertTrue(formset.is_valid()) def test_inlineformset_factory_with_null_fk(self): # inlineformset_factory tests with fk having null=True. see #9462.
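# (Hedged background for the assertions below: BaseInlineFormSet filters its
# queryset on the parent ForeignKey, so with no instance supplied it matches
# nothing, and with instance=team it sees only players whose team FK points at
# team -- Timmy, whose team is NULL, never appears.)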
# create some data that will exhibit the issue team = Team.objects.create(name="Red Vipers") Player(name="Timmy").save() Player(name="Bobby", team=team).save() PlayerInlineFormSet = inlineformset_factory(Team, Player, fields="__all__") formset = PlayerInlineFormSet() self.assertQuerysetEqual(formset.get_queryset(), []) formset = PlayerInlineFormSet(instance=team) players = formset.get_queryset() self.assertEqual(len(players), 1) player1, = players self.assertEqual(player1.team, team) self.assertEqual(player1.name, 'Bobby') def test_inlineformset_with_arrayfield(self): class SimpleArrayField(forms.CharField): """A proxy for django.contrib.postgres.forms.SimpleArrayField.""" def to_python(self, value): value = super().to_python(value) return value.split(',') if value else [] class BookForm(forms.ModelForm): title = SimpleArrayField() class Meta: model = Book fields = ('title',) BookFormSet = inlineformset_factory(Author, Book, form=BookForm) data = { 'book_set-TOTAL_FORMS': '3', 'book_set-INITIAL_FORMS': '0', 'book_set-MAX_NUM_FORMS': '', 'book_set-0-title': 'test1,test2', 'book_set-1-title': 'test1,test2', 'book_set-2-title': 'test3,test4', } author = Author.objects.create(name='test') formset = BookFormSet(data, instance=author) self.assertEqual(formset.errors, [{}, {'__all__': ['Please correct the duplicate values below.']}, {}]) def test_model_formset_with_custom_pk(self): # a formset for a Model that has a custom primary key that still needs to be # added to the formset automatically FormSet = modelformset_factory(ClassyMexicanRestaurant, fields=["tacos_are_yummy"]) self.assertEqual(sorted(FormSet().forms[0].fields), ['tacos_are_yummy', 'the_restaurant']) def test_model_formset_with_initial_model_instance(self): # has_changed should compare model instance and primary key # see #18898 FormSet = modelformset_factory(Poem, fields='__all__') john_milton = Poet(name="John Milton") john_milton.save() data = { 'form-TOTAL_FORMS': 1, 'form-INITIAL_FORMS': 0, 'form-MAX_NUM_FORMS': '', 'form-0-name': '', 'form-0-poet': str(john_milton.id), } formset = FormSet(initial=[{'poet': john_milton}], data=data) self.assertFalse(formset.extra_forms[0].has_changed()) def test_model_formset_with_initial_queryset(self): # has_changed should work with queryset and list of pk's # see #18898 FormSet = modelformset_factory(AuthorMeeting, fields='__all__') Author.objects.create(pk=1, name='Charles Baudelaire') data = { 'form-TOTAL_FORMS': 1, 'form-INITIAL_FORMS': 0, 'form-MAX_NUM_FORMS': '', 'form-0-name': '', 'form-0-created': '', 'form-0-authors': list(Author.objects.values_list('id', flat=True)), } formset = FormSet(initial=[{'authors': Author.objects.all()}], data=data) self.assertFalse(formset.extra_forms[0].has_changed()) def test_prevent_duplicates_from_within_the_same_formset(self): FormSet = modelformset_factory(Product, fields="__all__", extra=2) data = { 'form-TOTAL_FORMS': 2, 'form-INITIAL_FORMS': 0, 'form-MAX_NUM_FORMS': '', 'form-0-slug': 'red_car', 'form-1-slug': 'red_car', } formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual(formset._non_form_errors, ['Please correct the duplicate data for slug.']) FormSet = modelformset_factory(Price, fields="__all__", extra=2) data = { 'form-TOTAL_FORMS': 2, 'form-INITIAL_FORMS': 0, 'form-MAX_NUM_FORMS': '', 'form-0-price': '25', 'form-0-quantity': '7', 'form-1-price': '25', 'form-1-quantity': '7', } formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual( formset._non_form_errors, ['Please correct the duplicate data for price and quantity, which must be unique.'] ) # Only the price field is specified; this should skip any unique checks since # the unique_together is not fulfilled. This will fail with a KeyError if broken. FormSet = modelformset_factory(Price, fields=("price",), extra=2) data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-price': '24', 'form-1-price': '24', } formset = FormSet(data) self.assertTrue(formset.is_valid()) FormSet = inlineformset_factory(Author, Book, extra=0, fields="__all__") author = Author.objects.create(pk=1, name='Charles Baudelaire') Book.objects.create(pk=1, author=author, title='Les Paradis Artificiels') Book.objects.create(pk=2, author=author, title='Les Fleurs du Mal') Book.objects.create(pk=3, author=author, title='Flowers of Evil') book_ids = author.book_set.order_by('id').values_list('id', flat=True) data = { 'book_set-TOTAL_FORMS': '2', 'book_set-INITIAL_FORMS': '2', 'book_set-MAX_NUM_FORMS': '', 'book_set-0-title': 'The 2008 Election', 'book_set-0-author': str(author.id), 'book_set-0-id': str(book_ids[0]), 'book_set-1-title': 'The 2008 Election', 'book_set-1-author': str(author.id), 'book_set-1-id': str(book_ids[1]), } formset = FormSet(data=data, instance=author) self.assertFalse(formset.is_valid()) self.assertEqual(formset._non_form_errors, ['Please correct the duplicate data for title.']) self.assertEqual(formset.errors, [{}, {'__all__': ['Please correct the duplicate values below.']}]) FormSet = modelformset_factory(Post, fields="__all__", extra=2) data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-title': 'blah', 'form-0-slug': 'Morning', 'form-0-subtitle': 'foo', 'form-0-posted': '2009-01-01', 'form-1-title': 'blah', 'form-1-slug': 'Morning in Prague', 'form-1-subtitle': 'rawr', 'form-1-posted': '2009-01-01' } formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual( formset._non_form_errors, ['Please correct the duplicate data for title which must be unique for the date in posted.'] ) self.assertEqual( formset.errors, [{}, {'__all__': ['Please correct the duplicate values below.']}] ) data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-title': 'foo', 'form-0-slug': 'Morning in Prague', 'form-0-subtitle': 'foo', 'form-0-posted': '2009-01-01', 'form-1-title': 'blah', 'form-1-slug': 'Morning in Prague', 'form-1-subtitle': 'rawr', 'form-1-posted': '2009-08-02' } formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual( formset._non_form_errors, ['Please correct the duplicate data for slug which must be unique for the year in posted.'] ) data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-title': 'foo', 'form-0-slug': 'Morning in Prague', 'form-0-subtitle': 'rawr', 'form-0-posted': '2008-08-01', 'form-1-title': 'blah', 'form-1-slug': 'Prague', 'form-1-subtitle': 'rawr', 'form-1-posted': '2009-08-02' } formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual( formset._non_form_errors, ['Please correct the duplicate data for subtitle which must be unique for the month in posted.'] ) def test_prevent_change_outer_model_and_create_invalid_data(self): author = Author.objects.create(name='Charles') other_author = Author.objects.create(name='Walt') AuthorFormSet = modelformset_factory(Author, fields='__all__') data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '2', 'form-MAX_NUM_FORMS': '', 'form-0-id': str(author.id),
'form-0-name': 'Charles', 'form-1-id': str(other_author.id), # A model not in the formset's queryset. 'form-1-name': 'Changed name', } # This formset is only for Walt Whitman and shouldn't accept data for # other_author. formset = AuthorFormSet(data=data, queryset=Author.objects.filter(id__in=(author.id,))) self.assertTrue(formset.is_valid()) formset.save() # The name of other_author shouldn't be changed and new models aren't # created. self.assertQuerysetEqual(Author.objects.all(), ['<Author: Charles>', '<Author: Walt>']) def test_validation_without_id(self): AuthorFormSet = modelformset_factory(Author, fields='__all__') data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '1', 'form-MAX_NUM_FORMS': '', 'form-0-name': 'Charles', } formset = AuthorFormSet(data) self.assertEqual( formset.errors, [{'id': ['This field is required.']}], ) def test_validation_with_child_model_without_id(self): BetterAuthorFormSet = modelformset_factory(BetterAuthor, fields='__all__') data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '1', 'form-MAX_NUM_FORMS': '', 'form-0-name': 'Charles', 'form-0-write_speed': '10', } formset = BetterAuthorFormSet(data) self.assertEqual( formset.errors, [{'author_ptr': ['This field is required.']}], ) def test_validation_with_invalid_id(self): AuthorFormSet = modelformset_factory(Author, fields='__all__') data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '1', 'form-MAX_NUM_FORMS': '', 'form-0-id': 'abc', 'form-0-name': 'Charles', } formset = AuthorFormSet(data) self.assertEqual( formset.errors, [{'id': ['Select a valid choice. That choice is not one of the available choices.']}], ) def test_validation_with_nonexistent_id(self): AuthorFormSet = modelformset_factory(Author, fields='__all__') data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '1', 'form-MAX_NUM_FORMS': '', 'form-0-id': '12345', 'form-0-name': 'Charles', } formset = AuthorFormSet(data) self.assertEqual( formset.errors, [{'id': ['Select a valid choice. 

    def test_initial_form_count_empty_data_raises_validation_error(self):
        AuthorFormSet = modelformset_factory(Author, fields='__all__')
        msg = 'ManagementForm data is missing or has been tampered with'
        with self.assertRaisesMessage(ValidationError, msg):
            AuthorFormSet({}).initial_form_count()


class TestModelFormsetOverridesThroughFormMeta(TestCase):
    def test_modelformset_factory_widgets(self):
        widgets = {
            'name': forms.TextInput(attrs={'class': 'poet'})
        }
        PoetFormSet = modelformset_factory(Poet, fields="__all__", widgets=widgets)
        form = PoetFormSet.form()
        self.assertHTMLEqual(
            str(form['name']),
            '<input id="id_name" maxlength="100" type="text" class="poet" name="name" required>'
        )

    def test_inlineformset_factory_widgets(self):
        widgets = {
            'title': forms.TextInput(attrs={'class': 'book'})
        }
        BookFormSet = inlineformset_factory(Author, Book, widgets=widgets, fields="__all__")
        form = BookFormSet.form()
        self.assertHTMLEqual(
            str(form['title']),
            '<input class="book" id="id_title" maxlength="100" name="title" type="text" required>'
        )

    def test_modelformset_factory_labels_overrides(self):
        BookFormSet = modelformset_factory(Book, fields="__all__", labels={
            'title': 'Name'
        })
        form = BookFormSet.form()
        self.assertHTMLEqual(form['title'].label_tag(), '<label for="id_title">Name:</label>')

    def test_inlineformset_factory_labels_overrides(self):
        BookFormSet = inlineformset_factory(Author, Book, fields="__all__", labels={
            'title': 'Name'
        })
        form = BookFormSet.form()
        self.assertHTMLEqual(form['title'].label_tag(), '<label for="id_title">Name:</label>')

    def test_modelformset_factory_help_text_overrides(self):
        BookFormSet = modelformset_factory(Book, fields="__all__", help_texts={
            'title': 'Choose carefully.'
        })
        form = BookFormSet.form()
        self.assertEqual(form['title'].help_text, 'Choose carefully.')

    def test_inlineformset_factory_help_text_overrides(self):
        BookFormSet = inlineformset_factory(Author, Book, fields="__all__", help_texts={
            'title': 'Choose carefully.'
        })
        form = BookFormSet.form()
        self.assertEqual(form['title'].help_text, 'Choose carefully.')

    def test_modelformset_factory_error_messages_overrides(self):
        author = Author.objects.create(pk=1, name='Charles Baudelaire')
        BookFormSet = modelformset_factory(Book, fields="__all__", error_messages={
            'title': {
                'max_length': 'Title too long!!'
            }
        })
        form = BookFormSet.form(data={'title': 'Foo ' * 30, 'author': author.id})
        form.full_clean()
        self.assertEqual(form.errors, {'title': ['Title too long!!']})
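
    # Editorial sketch, not part of the original suite: error_messages
    # overrides are keyed by error code, so other codes such as 'required'
    # can be customized the same way the 'max_length' message is above. The
    # message text is hypothetical.
    def test_modelformset_factory_required_error_message_sketch(self):
        BookFormSet = modelformset_factory(Book, fields="__all__", error_messages={
            'title': {
                'required': 'A title is mandatory.'
            }
        })
        form = BookFormSet.form(data={'title': '', 'author': ''})
        form.full_clean()
        self.assertEqual(form.errors['title'], ['A title is mandatory.'])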

    def test_inlineformset_factory_error_messages_overrides(self):
        author = Author.objects.create(pk=1, name='Charles Baudelaire')
        BookFormSet = inlineformset_factory(Author, Book, fields="__all__", error_messages={
            'title': {
                'max_length': 'Title too long!!'
            }
        })
        form = BookFormSet.form(data={'title': 'Foo ' * 30, 'author': author.id})
        form.full_clean()
        self.assertEqual(form.errors, {'title': ['Title too long!!']})

    def test_modelformset_factory_field_class_overrides(self):
        author = Author.objects.create(pk=1, name='Charles Baudelaire')
        BookFormSet = modelformset_factory(Book, fields="__all__", field_classes={
            'title': forms.SlugField,
        })
        form = BookFormSet.form(data={'title': 'Foo ' * 30, 'author': author.id})
        self.assertIs(Book._meta.get_field('title').__class__, models.CharField)
        self.assertIsInstance(form.fields['title'], forms.SlugField)

    def test_inlineformset_factory_field_class_overrides(self):
        author = Author.objects.create(pk=1, name='Charles Baudelaire')
        BookFormSet = inlineformset_factory(Author, Book, fields="__all__", field_classes={
            'title': forms.SlugField,
        })
        form = BookFormSet.form(data={'title': 'Foo ' * 30, 'author': author.id})
        self.assertIs(Book._meta.get_field('title').__class__, models.CharField)
        self.assertIsInstance(form.fields['title'], forms.SlugField)

    def test_modelformset_factory_absolute_max(self):
        AuthorFormSet = modelformset_factory(Author, fields='__all__', absolute_max=1500)
        data = {
            'form-TOTAL_FORMS': '1501',
            'form-INITIAL_FORMS': '0',
            'form-MAX_NUM_FORMS': '0',
        }
        formset = AuthorFormSet(data=data)
        self.assertIs(formset.is_valid(), False)
        self.assertEqual(len(formset.forms), 1500)
        self.assertEqual(
            formset.non_form_errors(),
            ['Please submit 1000 or fewer forms.'],
        )

    def test_modelformset_factory_absolute_max_with_max_num(self):
        AuthorFormSet = modelformset_factory(
            Author, fields='__all__', max_num=20, absolute_max=100,
        )
        data = {
            'form-TOTAL_FORMS': '101',
            'form-INITIAL_FORMS': '0',
            'form-MAX_NUM_FORMS': '0',
        }
        formset = AuthorFormSet(data=data)
        self.assertIs(formset.is_valid(), False)
        self.assertEqual(len(formset.forms), 100)
        self.assertEqual(
            formset.non_form_errors(),
            ['Please submit 20 or fewer forms.'],
        )

    def test_inlineformset_factory_absolute_max(self):
        author = Author.objects.create(name='Charles Baudelaire')
        BookFormSet = inlineformset_factory(
            Author, Book, fields='__all__', absolute_max=1500,
        )
        data = {
            'book_set-TOTAL_FORMS': '1501',
            'book_set-INITIAL_FORMS': '0',
            'book_set-MAX_NUM_FORMS': '0',
        }
        formset = BookFormSet(data, instance=author)
        self.assertIs(formset.is_valid(), False)
        self.assertEqual(len(formset.forms), 1500)
        self.assertEqual(
            formset.non_form_errors(),
            ['Please submit 1000 or fewer forms.'],
        )

    def test_inlineformset_factory_absolute_max_with_max_num(self):
        author = Author.objects.create(name='Charles Baudelaire')
        BookFormSet = inlineformset_factory(
            Author, Book, fields='__all__', max_num=20, absolute_max=100,
        )
        data = {
            'book_set-TOTAL_FORMS': '101',
            'book_set-INITIAL_FORMS': '0',
            'book_set-MAX_NUM_FORMS': '0',
        }
        formset = BookFormSet(data, instance=author)
        self.assertIs(formset.is_valid(), False)
        self.assertEqual(len(formset.forms), 100)
        self.assertEqual(
            formset.non_form_errors(),
            ['Please submit 20 or fewer forms.'],
        )
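

# Editorial sketch, not part of the original suite: the per-field override
# arguments exercised one at a time in the class above (widgets, labels,
# help_texts) are independent keyword arguments of modelformset_factory(),
# so they can be combined in a single call. Class and method names are
# hypothetical.
class TestModelFormsetCombinedOverridesSketch(TestCase):
    def test_combined_meta_overrides(self):
        BookFormSet = modelformset_factory(
            Book,
            fields='__all__',
            widgets={'title': forms.TextInput(attrs={'class': 'book'})},
            labels={'title': 'Name'},
            help_texts={'title': 'Choose carefully.'},
        )
        form = BookFormSet.form()
        # Each override takes effect independently of the others.
        self.assertEqual(form.fields['title'].label, 'Name')
        self.assertEqual(form.fields['title'].help_text, 'Choose carefully.')
        self.assertEqual(form.fields['title'].widget.attrs['class'], 'book')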