ab88c4fd5c35afedaed04dc280bf9405eca2b9e6cc9a133bfaef6a1e9be38e74
import logging

from django.contrib.gis.gdal import GDALException
from django.contrib.gis.geos import GEOSException, GEOSGeometry
from django.forms.widgets import Textarea
from django.template import loader
from django.utils import six, translation

# Creating a template context that contains Django settings
# values needed by admin map templates.
geo_context = {'LANGUAGE_BIDI': translation.get_language_bidi()}
logger = logging.getLogger('django.contrib.gis')


class OpenLayersWidget(Textarea):
    """
    Renders an OpenLayers map using the WKT of the geometry.
    """
    def render(self, name, value, attrs=None):
        # Update the template parameters with any attributes passed in.
        if attrs:
            self.params.update(attrs)
            self.params['editable'] = self.params['modifiable']
        else:
            self.params['editable'] = True

        # Defaulting the WKT value to a blank string -- this
        # will be tested in the JavaScript and the appropriate
        # interface will be constructed.
        self.params['wkt'] = ''

        # If a string reaches here (via a validation error on another
        # field) then just reconstruct the Geometry.
        if value and isinstance(value, six.string_types):
            try:
                value = GEOSGeometry(value)
            except (GEOSException, ValueError) as err:
                logger.error("Error creating geometry from value '%s' (%s)", value, err)
                value = None

        if (value and value.geom_type.upper() != self.geom_type and
                self.geom_type != 'GEOMETRY'):
            value = None

        # Constructing the dictionary of the map options.
        self.params['map_options'] = self.map_options()

        # Constructing the JavaScript module name using the name of
        # the GeometryField (passed in via the `attrs` keyword).
        # Use the 'name' attr for the field name (rather than 'field')
        self.params['name'] = name
        # note: we must switch out dashes for underscores since js
        # functions are created using the module variable
        js_safe_name = self.params['name'].replace('-', '_')
        self.params['module'] = 'geodjango_%s' % js_safe_name

        if value:
            # Transforming the geometry to the projection used on the
            # OpenLayers map.
            srid = self.params['srid']
            if value.srid != srid:
                try:
                    ogr = value.ogr
                    ogr.transform(srid)
                    wkt = ogr.wkt
                except GDALException as err:
                    logger.error(
                        "Error transforming geometry from srid '%s' to srid '%s' (%s)",
                        value.srid, srid, err
                    )
                    wkt = ''
            else:
                wkt = value.wkt

            # Setting the parameter WKT with that of the transformed
            # geometry.
            self.params['wkt'] = wkt

        self.params.update(geo_context)
        return loader.render_to_string(self.template, self.params)

    def map_options(self):
        "Builds the map options hash for the OpenLayers template."
        # JavaScript construction utilities for the Bounds and Projection.
        def ol_bounds(extent):
            return 'new OpenLayers.Bounds(%s)' % str(extent)

        def ol_projection(srid):
            return 'new OpenLayers.Projection("EPSG:%s")' % srid

        # An array of the parameter name, the name of their OpenLayers
        # counterpart, and the type of variable they are.
        map_types = [('srid', 'projection', 'srid'),
                     ('display_srid', 'displayProjection', 'srid'),
                     ('units', 'units', str),
                     ('max_resolution', 'maxResolution', float),
                     ('max_extent', 'maxExtent', 'bounds'),
                     ('num_zoom', 'numZoomLevels', int),
                     ('max_zoom', 'maxZoomLevels', int),
                     ('min_zoom', 'minZoomLevel', int),
                     ]

        # Building the map options hash.
        map_options = {}
        for param_name, js_name, option_type in map_types:
            if self.params.get(param_name, False):
                if option_type == 'srid':
                    value = ol_projection(self.params[param_name])
                elif option_type == 'bounds':
                    value = ol_bounds(self.params[param_name])
                elif option_type in (float, int):
                    value = self.params[param_name]
                elif option_type in (str,):
                    value = '"%s"' % self.params[param_name]
                else:
                    raise TypeError
                map_options[js_name] = value
        return map_options
56c909beb47c1349f71d5b31551fd045a7d1f89f6005116c18867dcecffcd12c
# Copyright (c) 2008-2009 Aryeh Leib Taurog, all rights reserved. # Released under the New BSD license. """ This module contains a base type which provides list-style mutations without specific data storage methods. See also http://static.aryehleib.com/oldsite/MutableLists.html Author: Aryeh Leib Taurog. """ from functools import total_ordering from django.utils import six from django.utils.six.moves import range @total_ordering class ListMixin(object): """ A base class which provides complete list interface. Derived classes must call ListMixin's __init__() function and implement the following: function _get_single_external(self, i): Return single item with index i for general use. The index i will always satisfy 0 <= i < len(self). function _get_single_internal(self, i): Same as above, but for use within the class [Optional] Note that if _get_single_internal and _get_single_internal return different types of objects, _set_list must distinguish between the two and handle each appropriately. function _set_list(self, length, items): Recreate the entire object. NOTE: items may be a generator which calls _get_single_internal. Therefore, it is necessary to cache the values in a temporary: temp = list(items) before clobbering the original storage. function _set_single(self, i, value): Set the single item at index i to value [Optional] If left undefined, all mutations will result in rebuilding the object using _set_list. function __len__(self): Return the length int _minlength: The minimum legal length [Optional] int _maxlength: The maximum legal length [Optional] type or tuple _allowed: A type or tuple of allowed item types [Optional] """ _minlength = 0 _maxlength = None # ### Python initialization and special list interface methods ### def __init__(self, *args, **kwargs): if not hasattr(self, '_get_single_internal'): self._get_single_internal = self._get_single_external if not hasattr(self, '_set_single'): self._set_single = self._set_single_rebuild self._assign_extended_slice = self._assign_extended_slice_rebuild super(ListMixin, self).__init__(*args, **kwargs) def __getitem__(self, index): "Get the item(s) at the specified index/slice." if isinstance(index, slice): return [self._get_single_external(i) for i in range(*index.indices(len(self)))] else: index = self._checkindex(index) return self._get_single_external(index) def __delitem__(self, index): "Delete the item(s) at the specified index/slice." if not isinstance(index, six.integer_types + (slice,)): raise TypeError("%s is not a legal index" % index) # calculate new length and dimensions origLen = len(self) if isinstance(index, six.integer_types): index = self._checkindex(index) indexRange = [index] else: indexRange = range(*index.indices(origLen)) newLen = origLen - len(indexRange) newItems = (self._get_single_internal(i) for i in range(origLen) if i not in indexRange) self._rebuild(newLen, newItems) def __setitem__(self, index, val): "Set the item(s) at the specified index/slice." 
if isinstance(index, slice): self._set_slice(index, val) else: index = self._checkindex(index) self._check_allowed((val,)) self._set_single(index, val) # ### Special methods for arithmetic operations ### def __add__(self, other): 'add another list-like object' return self.__class__(list(self) + list(other)) def __radd__(self, other): 'add to another list-like object' return other.__class__(list(other) + list(self)) def __iadd__(self, other): 'add another list-like object to self' self.extend(list(other)) return self def __mul__(self, n): 'multiply' return self.__class__(list(self) * n) def __rmul__(self, n): 'multiply' return self.__class__(list(self) * n) def __imul__(self, n): 'multiply' if n <= 0: del self[:] else: cache = list(self) for i in range(n - 1): self.extend(cache) return self def __eq__(self, other): olen = len(other) for i in range(olen): try: c = self[i] == other[i] except IndexError: # self must be shorter return False if not c: return False return len(self) == olen def __lt__(self, other): olen = len(other) for i in range(olen): try: c = self[i] < other[i] except IndexError: # self must be shorter return True if c: return c elif other[i] < self[i]: return False return len(self) < olen # ### Public list interface Methods ### # ## Non-mutating ## def count(self, val): "Standard list count method" count = 0 for i in self: if val == i: count += 1 return count def index(self, val): "Standard list index method" for i in range(0, len(self)): if self[i] == val: return i raise ValueError('%s not found in object' % str(val)) # ## Mutating ## def append(self, val): "Standard list append method" self[len(self):] = [val] def extend(self, vals): "Standard list extend method" self[len(self):] = vals def insert(self, index, val): "Standard list insert method" if not isinstance(index, six.integer_types): raise TypeError("%s is not a legal index" % index) self[index:index] = [val] def pop(self, index=-1): "Standard list pop method" result = self[index] del self[index] return result def remove(self, val): "Standard list remove method" del self[self.index(val)] def reverse(self): "Standard list reverse method" self[:] = self[-1::-1] def sort(self, cmp=None, key=None, reverse=False): "Standard list sort method" if key: temp = [(key(v), v) for v in self] temp.sort(key=lambda x: x[0], reverse=reverse) self[:] = [v[1] for v in temp] else: temp = list(self) if cmp is not None: temp.sort(cmp=cmp, reverse=reverse) else: temp.sort(reverse=reverse) self[:] = temp # ### Private routines ### def _rebuild(self, newLen, newItems): if newLen and newLen < self._minlength: raise ValueError('Must have at least %d items' % self._minlength) if self._maxlength is not None and newLen > self._maxlength: raise ValueError('Cannot have more than %d items' % self._maxlength) self._set_list(newLen, newItems) def _set_single_rebuild(self, index, value): self._set_slice(slice(index, index + 1, 1), [value]) def _checkindex(self, index, correct=True): length = len(self) if 0 <= index < length: return index if correct and -length <= index < 0: return index + length raise IndexError('invalid index: %s' % str(index)) def _check_allowed(self, items): if hasattr(self, '_allowed'): if False in [isinstance(val, self._allowed) for val in items]: raise TypeError('Invalid type encountered in the arguments.') def _set_slice(self, index, values): "Assign values to a slice of the object" try: iter(values) except TypeError: raise TypeError('can only assign an iterable to a slice') self._check_allowed(values) origLen = len(self) 
valueList = list(values) start, stop, step = index.indices(origLen) # CAREFUL: index.step and step are not the same! # step will never be None if index.step is None: self._assign_simple_slice(start, stop, valueList) else: self._assign_extended_slice(start, stop, step, valueList) def _assign_extended_slice_rebuild(self, start, stop, step, valueList): 'Assign an extended slice by rebuilding entire list' indexList = range(start, stop, step) # extended slice, only allow assigning slice of same size if len(valueList) != len(indexList): raise ValueError('attempt to assign sequence of size %d ' 'to extended slice of size %d' % (len(valueList), len(indexList))) # we're not changing the length of the sequence newLen = len(self) newVals = dict(zip(indexList, valueList)) def newItems(): for i in range(newLen): if i in newVals: yield newVals[i] else: yield self._get_single_internal(i) self._rebuild(newLen, newItems()) def _assign_extended_slice(self, start, stop, step, valueList): 'Assign an extended slice by re-assigning individual items' indexList = range(start, stop, step) # extended slice, only allow assigning slice of same size if len(valueList) != len(indexList): raise ValueError('attempt to assign sequence of size %d ' 'to extended slice of size %d' % (len(valueList), len(indexList))) for i, val in zip(indexList, valueList): self._set_single(i, val) def _assign_simple_slice(self, start, stop, valueList): 'Assign a simple slice; Can assign slice of any length' origLen = len(self) stop = max(start, stop) newLen = origLen - stop + start + len(valueList) def newItems(): for i in range(origLen + 1): if i == start: for val in valueList: yield val if i < origLen: if i < start or i >= stop: yield self._get_single_internal(i) self._rebuild(newLen, newItems())
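For illustration, here is a minimal sketch of a ListMixin subclass backed by a plain Python list, following the contract described in the class docstring above. The SimpleList name is hypothetical, this snippet is not part of the original module, and it assumes the module is importable at its usual Django path.

# A minimal sketch (not part of Django) of the ListMixin subclass contract:
# define __len__, _get_single_external, and _set_list, then the mixin
# supplies the rest of the list interface.
from django.contrib.gis.geos.mutable_list import ListMixin


class SimpleList(ListMixin):
    _allowed = (int,)  # optional: restrict item types

    def __init__(self, items=()):
        self._data = list(items)
        super(SimpleList, self).__init__()

    def __len__(self):
        return len(self._data)

    def _get_single_external(self, index):
        return self._data[index]

    def _set_list(self, length, items):
        # `items` may be a generator reading the old storage, so
        # materialize it before replacing the backing list.
        self._data = list(items)


lst = SimpleList([1, 2, 3])
lst.append(4)
lst[1:3] = [9, 9]
print(list(lst))   # [1, 9, 9, 4]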
c9ea95e2409c91f6bd1377df866159dad3866b3d28ba048b130f7a56f7fd6605
""" This module houses the Geometry Collection objects: GeometryCollection, MultiPoint, MultiLineString, and MultiPolygon """ import json import warnings from ctypes import byref, c_int, c_uint from django.contrib.gis.geos import prototypes as capi from django.contrib.gis.geos.error import GEOSException from django.contrib.gis.geos.geometry import GEOSGeometry, LinearGeometryMixin from django.contrib.gis.geos.libgeos import geos_version_info, get_pointer_arr from django.contrib.gis.geos.linestring import LinearRing, LineString from django.contrib.gis.geos.point import Point from django.contrib.gis.geos.polygon import Polygon from django.utils.deprecation import RemovedInDjango20Warning from django.utils.six.moves import range class GeometryCollection(GEOSGeometry): _typeid = 7 def __init__(self, *args, **kwargs): "Initializes a Geometry Collection from a sequence of Geometry objects." # Checking the arguments if len(args) == 1: # If only one geometry provided or a list of geometries is provided # in the first argument. if isinstance(args[0], (tuple, list)): init_geoms = args[0] else: init_geoms = args else: init_geoms = args # Ensuring that only the permitted geometries are allowed in this collection # this is moved to list mixin super class self._check_allowed(init_geoms) # Creating the geometry pointer array. collection = self._create_collection(len(init_geoms), iter(init_geoms)) super(GeometryCollection, self).__init__(collection, **kwargs) def __iter__(self): "Iterates over each Geometry in the Collection." for i in range(len(self)): yield self[i] def __len__(self): "Returns the number of geometries in this Collection." return self.num_geom # ### Methods for compatibility with ListMixin ### def _create_collection(self, length, items): # Creating the geometry pointer array. geoms = get_pointer_arr(length) for i, g in enumerate(items): # this is a little sloppy, but makes life easier # allow GEOSGeometry types (python wrappers) or pointer types geoms[i] = capi.geom_clone(getattr(g, 'ptr', g)) return capi.create_collection(c_int(self._typeid), byref(geoms), c_uint(length)) def _get_single_internal(self, index): return capi.get_geomn(self.ptr, index) def _get_single_external(self, index): "Returns the Geometry from this Collection at the given index (0-based)." # Checking the index and returning the corresponding GEOS geometry. return GEOSGeometry(capi.geom_clone(self._get_single_internal(index)), srid=self.srid) def _set_list(self, length, items): "Create a new collection, and destroy the contents of the previous pointer." prev_ptr = self.ptr srid = self.srid self.ptr = self._create_collection(length, items) if srid: self.srid = srid capi.destroy_geom(prev_ptr) _set_single = GEOSGeometry._set_single_rebuild _assign_extended_slice = GEOSGeometry._assign_extended_slice_rebuild @property def json(self): if self.__class__.__name__ == 'GeometryCollection': return json.dumps({ 'type': self.__class__.__name__, 'geometries': [ {'type': geom.__class__.__name__, 'coordinates': geom.coords} for geom in self ], }) return super(GeometryCollection, self).json geojson = json @property def kml(self): "Returns the KML for this Geometry Collection." return '<MultiGeometry>%s</MultiGeometry>' % ''.join(g.kml for g in self) @property def tuple(self): "Returns a tuple of all the coordinates in this Geometry Collection" return tuple(g.tuple for g in self) coords = tuple # MultiPoint, MultiLineString, and MultiPolygon class definitions. 
class MultiPoint(GeometryCollection): _allowed = Point _typeid = 4 class MultiLineString(LinearGeometryMixin, GeometryCollection): _allowed = (LineString, LinearRing) _typeid = 5 @property def closed(self): if geos_version_info()['version'] < '3.5': raise GEOSException("MultiLineString.closed requires GEOS >= 3.5.0.") return super(MultiLineString, self).closed class MultiPolygon(GeometryCollection): _allowed = Polygon _typeid = 6 @property def cascaded_union(self): "Returns a cascaded union of this MultiPolygon." warnings.warn( "`cascaded_union` is deprecated, use the `unary_union` property instead.", RemovedInDjango20Warning, 2 ) return GEOSGeometry(capi.geos_cascaded_union(self.ptr), self.srid) # Setting the allowed types here since GeometryCollection is defined before # its subclasses. GeometryCollection._allowed = (Point, LineString, LinearRing, Polygon, MultiPoint, MultiLineString, MultiPolygon)
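A short usage sketch (not part of the original module) showing how the collection types above behave, assuming a working GEOS installation:

from django.contrib.gis.geos import GeometryCollection, MultiPoint, Point

mp = MultiPoint(Point(0, 0), Point(1, 1))   # only Point members are allowed
gc = GeometryCollection(Point(0, 0), mp)    # mixed member types are allowed here

print(len(gc))        # 2 -- number of member geometries
print(gc[1].coords)   # ((0.0, 0.0), (1.0, 1.0)) -- the MultiPoint's coordinates
print(gc.kml)         # '<MultiGeometry>...</MultiGeometry>'
print(gc.json)        # GeoJSON with a "geometries" list, per the json property above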
f3c3bed2e04d8d2cb88f2487268f6a4c7ab6d88c4d976007ff82f4c780e13527
""" This module contains the 'base' GEOSGeometry object -- all GEOS Geometries inherit from this object. """ from __future__ import unicode_literals import json import warnings from ctypes import addressof, byref, c_double from django.contrib.gis import gdal from django.contrib.gis.geometry.regex import hex_regex, json_regex, wkt_regex from django.contrib.gis.geos import prototypes as capi from django.contrib.gis.geos.base import GEOSBase from django.contrib.gis.geos.coordseq import GEOSCoordSeq from django.contrib.gis.geos.error import GEOSException from django.contrib.gis.geos.libgeos import GEOM_PTR from django.contrib.gis.geos.mutable_list import ListMixin from django.contrib.gis.geos.prepared import PreparedGeometry from django.contrib.gis.geos.prototypes.io import ( ewkb_w, wkb_r, wkb_w, wkt_r, wkt_w, ) from django.utils import six from django.utils.deconstruct import deconstructible from django.utils.deprecation import RemovedInDjango20Warning from django.utils.encoding import force_bytes, force_text @deconstructible class GEOSGeometry(GEOSBase, ListMixin): "A class that, generally, encapsulates a GEOS geometry." _GEOS_CLASSES = None ptr_type = GEOM_PTR has_cs = False # Only Point, LineString, LinearRing have coordinate sequences def __init__(self, geo_input, srid=None): """ The base constructor for GEOS geometry objects, and may take the following inputs: * strings: - WKT - HEXEWKB (a PostGIS-specific canonical form) - GeoJSON (requires GDAL) * buffer: - WKB The `srid` keyword is used to specify the Source Reference Identifier (SRID) number for this Geometry. If not set, the SRID will be None. """ if isinstance(geo_input, bytes): geo_input = force_text(geo_input) if isinstance(geo_input, six.string_types): wkt_m = wkt_regex.match(geo_input) if wkt_m: # Handling WKT input. if wkt_m.group('srid'): srid = int(wkt_m.group('srid')) g = wkt_r().read(force_bytes(wkt_m.group('wkt'))) elif hex_regex.match(geo_input): # Handling HEXEWKB input. g = wkb_r().read(force_bytes(geo_input)) elif json_regex.match(geo_input): # Handling GeoJSON input. g = wkb_r().read(gdal.OGRGeometry(geo_input).wkb) else: raise ValueError('String or unicode input unrecognized as WKT EWKT, and HEXEWKB.') elif isinstance(geo_input, GEOM_PTR): # When the input is a pointer to a geometry (GEOM_PTR). g = geo_input elif isinstance(geo_input, six.memoryview): # When the input is a buffer (WKB). g = wkb_r().read(geo_input) elif isinstance(geo_input, GEOSGeometry): g = capi.geom_clone(geo_input.ptr) else: # Invalid geometry type. raise TypeError('Improper geometry input type: %s' % str(type(geo_input))) if g: # Setting the pointer object with a valid pointer. self.ptr = g else: raise GEOSException('Could not initialize GEOS Geometry with given input.') # Post-initialization setup. self._post_init(srid) def _post_init(self, srid): "Helper routine for performing post-initialization setup." # Setting the SRID, if given. if srid and isinstance(srid, int): self.srid = srid # Setting the class type (e.g., Point, Polygon, etc.) if GEOSGeometry._GEOS_CLASSES is None: # Lazy-loaded variable to avoid import conflicts with GEOSGeometry. 
from .linestring import LineString, LinearRing from .point import Point from .polygon import Polygon from .collections import ( GeometryCollection, MultiPoint, MultiLineString, MultiPolygon) GEOSGeometry._GEOS_CLASSES = { 0: Point, 1: LineString, 2: LinearRing, 3: Polygon, 4: MultiPoint, 5: MultiLineString, 6: MultiPolygon, 7: GeometryCollection, } self.__class__ = GEOSGeometry._GEOS_CLASSES[self.geom_typeid] # Setting the coordinate sequence for the geometry (will be None on # geometries that do not have coordinate sequences) self._set_cs() def __del__(self): """ Destroys this Geometry; in other words, frees the memory used by the GEOS C++ object. """ try: capi.destroy_geom(self._ptr) except (AttributeError, TypeError): pass # Some part might already have been garbage collected def __copy__(self): """ Returns a clone because the copy of a GEOSGeometry may contain an invalid pointer location if the original is garbage collected. """ return self.clone() def __deepcopy__(self, memodict): """ The `deepcopy` routine is used by the `Node` class of django.utils.tree; thus, the protocol routine needs to be implemented to return correct copies (clones) of these GEOS objects, which use C pointers. """ return self.clone() def __str__(self): "EWKT is used for the string representation." return self.ewkt def __repr__(self): "Short-hand representation because WKT may be very large." return '<%s object at %s>' % (self.geom_type, hex(addressof(self.ptr))) # Pickling support def __getstate__(self): # The pickled state is simply a tuple of the WKB (in string form) # and the SRID. return bytes(self.wkb), self.srid def __setstate__(self, state): # Instantiating from the tuple state that was pickled. wkb, srid = state ptr = wkb_r().read(six.memoryview(wkb)) if not ptr: raise GEOSException('Invalid Geometry loaded from pickled state.') self.ptr = ptr self._post_init(srid) @classmethod def from_gml(cls, gml_string): return gdal.OGRGeometry.from_gml(gml_string).geos # Comparison operators def __eq__(self, other): """ Equivalence testing, a Geometry may be compared with another Geometry or a WKT representation. """ if isinstance(other, six.string_types): return self.wkt == other elif isinstance(other, GEOSGeometry): return self.equals_exact(other) else: return False def __ne__(self, other): "The not equals operator." return not (self == other) # ### Geometry set-like operations ### # Thanks to Sean Gillies for inspiration: # http://lists.gispython.org/pipermail/community/2007-July/001034.html # g = g1 | g2 def __or__(self, other): "Returns the union of this Geometry and the other." return self.union(other) # g = g1 & g2 def __and__(self, other): "Returns the intersection of this Geometry and the other." return self.intersection(other) # g = g1 - g2 def __sub__(self, other): "Return the difference this Geometry and the other." return self.difference(other) # g = g1 ^ g2 def __xor__(self, other): "Return the symmetric difference of this Geometry and the other." return self.sym_difference(other) # #### Coordinate Sequence Routines #### def _set_cs(self): "Sets the coordinate sequence for this Geometry." if self.has_cs: self._cs = GEOSCoordSeq(capi.get_cs(self.ptr), self.hasz) else: self._cs = None @property def coord_seq(self): "Returns a clone of the coordinate sequence for this Geometry." if self.has_cs: return self._cs.clone() # #### Geometry Info #### @property def geom_type(self): "Returns a string representing the Geometry type, e.g. 
'Polygon'" return capi.geos_type(self.ptr).decode() @property def geom_typeid(self): "Returns an integer representing the Geometry type." return capi.geos_typeid(self.ptr) @property def num_geom(self): "Returns the number of geometries in the Geometry." return capi.get_num_geoms(self.ptr) @property def num_coords(self): "Returns the number of coordinates in the Geometry." return capi.get_num_coords(self.ptr) @property def num_points(self): "Returns the number points, or coordinates, in the Geometry." return self.num_coords @property def dims(self): "Returns the dimension of this Geometry (0=point, 1=line, 2=surface)." return capi.get_dims(self.ptr) def normalize(self): "Converts this Geometry to normal form (or canonical form)." return capi.geos_normalize(self.ptr) # #### Unary predicates #### @property def empty(self): """ Returns a boolean indicating whether the set of points in this Geometry are empty. """ return capi.geos_isempty(self.ptr) @property def hasz(self): "Returns whether the geometry has a 3D dimension." return capi.geos_hasz(self.ptr) @property def ring(self): "Returns whether or not the geometry is a ring." return capi.geos_isring(self.ptr) @property def simple(self): "Returns false if the Geometry not simple." return capi.geos_issimple(self.ptr) @property def valid(self): "This property tests the validity of this Geometry." return capi.geos_isvalid(self.ptr) @property def valid_reason(self): """ Returns a string containing the reason for any invalidity. """ return capi.geos_isvalidreason(self.ptr).decode() # #### Binary predicates. #### def contains(self, other): "Returns true if other.within(this) returns true." return capi.geos_contains(self.ptr, other.ptr) def covers(self, other): """ Return True if the DE-9IM Intersection Matrix for the two geometries is T*****FF*, *T****FF*, ***T**FF*, or ****T*FF*. If either geometry is empty, return False. """ return capi.geos_covers(self.ptr, other.ptr) def crosses(self, other): """ Returns true if the DE-9IM intersection matrix for the two Geometries is T*T****** (for a point and a curve,a point and an area or a line and an area) 0******** (for two curves). """ return capi.geos_crosses(self.ptr, other.ptr) def disjoint(self, other): """ Returns true if the DE-9IM intersection matrix for the two Geometries is FF*FF****. """ return capi.geos_disjoint(self.ptr, other.ptr) def equals(self, other): """ Returns true if the DE-9IM intersection matrix for the two Geometries is T*F**FFF*. """ return capi.geos_equals(self.ptr, other.ptr) def equals_exact(self, other, tolerance=0): """ Returns true if the two Geometries are exactly equal, up to a specified tolerance. """ return capi.geos_equalsexact(self.ptr, other.ptr, float(tolerance)) def intersects(self, other): "Returns true if disjoint returns false." return capi.geos_intersects(self.ptr, other.ptr) def overlaps(self, other): """ Returns true if the DE-9IM intersection matrix for the two Geometries is T*T***T** (for two points or two surfaces) 1*T***T** (for two curves). """ return capi.geos_overlaps(self.ptr, other.ptr) def relate_pattern(self, other, pattern): """ Returns true if the elements in the DE-9IM intersection matrix for the two Geometries match the elements in pattern. 
""" if not isinstance(pattern, six.string_types) or len(pattern) > 9: raise GEOSException('invalid intersection matrix pattern') return capi.geos_relatepattern(self.ptr, other.ptr, force_bytes(pattern)) def touches(self, other): """ Returns true if the DE-9IM intersection matrix for the two Geometries is FT*******, F**T***** or F***T****. """ return capi.geos_touches(self.ptr, other.ptr) def within(self, other): """ Returns true if the DE-9IM intersection matrix for the two Geometries is T*F**F***. """ return capi.geos_within(self.ptr, other.ptr) # #### SRID Routines #### @property def srid(self): "Gets the SRID for the geometry, returns None if no SRID is set." s = capi.geos_get_srid(self.ptr) if s == 0: return None else: return s @srid.setter def srid(self, srid): "Sets the SRID for the geometry." capi.geos_set_srid(self.ptr, 0 if srid is None else srid) def get_srid(self): warnings.warn( "`get_srid()` is deprecated, use the `srid` property instead.", RemovedInDjango20Warning, 2 ) return self.srid def set_srid(self, srid): warnings.warn( "`set_srid()` is deprecated, use the `srid` property instead.", RemovedInDjango20Warning, 2 ) self.srid = srid # #### Output Routines #### @property def ewkt(self): """ Returns the EWKT (SRID + WKT) of the Geometry. """ srid = self.srid return 'SRID=%s;%s' % (srid, self.wkt) if srid else self.wkt @property def wkt(self): "Returns the WKT (Well-Known Text) representation of this Geometry." return wkt_w(dim=3 if self.hasz else 2, trim=True).write(self).decode() @property def hex(self): """ Returns the WKB of this Geometry in hexadecimal form. Please note that the SRID is not included in this representation because it is not a part of the OGC specification (use the `hexewkb` property instead). """ # A possible faster, all-python, implementation: # str(self.wkb).encode('hex') return wkb_w(dim=3 if self.hasz else 2).write_hex(self) @property def hexewkb(self): """ Returns the EWKB of this Geometry in hexadecimal form. This is an extension of the WKB specification that includes SRID value that are a part of this geometry. """ return ewkb_w(dim=3 if self.hasz else 2).write_hex(self) @property def json(self): """ Returns GeoJSON representation of this Geometry. """ return json.dumps({'type': self.__class__.__name__, 'coordinates': self.coords}) geojson = json @property def wkb(self): """ Returns the WKB (Well-Known Binary) representation of this Geometry as a Python buffer. SRID and Z values are not included, use the `ewkb` property instead. """ return wkb_w(3 if self.hasz else 2).write(self) @property def ewkb(self): """ Return the EWKB representation of this Geometry as a Python buffer. This is an extension of the WKB specification that includes any SRID value that are a part of this geometry. """ return ewkb_w(3 if self.hasz else 2).write(self) @property def kml(self): "Returns the KML representation of this Geometry." gtype = self.geom_type return '<%s>%s</%s>' % (gtype, self.coord_seq.kml, gtype) @property def prepared(self): """ Returns a PreparedGeometry corresponding to this geometry -- it is optimized for the contains, intersects, and covers operations. """ return PreparedGeometry(self) # #### GDAL-specific output routines #### @property def ogr(self): "Returns the OGR Geometry for this Geometry." if self.srid: try: return gdal.OGRGeometry(self.wkb, self.srid) except gdal.SRSException: pass return gdal.OGRGeometry(self.wkb) @property def srs(self): "Returns the OSR SpatialReference for SRID of this Geometry." 
if self.srid: try: return gdal.SpatialReference(self.srid) except gdal.SRSException: pass return None @property def crs(self): "Alias for `srs` property." return self.srs def transform(self, ct, clone=False): """ Requires GDAL. Transforms the geometry according to the given transformation object, which may be an integer SRID, and WKT or PROJ.4 string. By default, the geometry is transformed in-place and nothing is returned. However if the `clone` keyword is set, then this geometry will not be modified and a transformed clone will be returned instead. """ srid = self.srid if ct == srid: # short-circuit where source & dest SRIDs match if clone: return self.clone() else: return if isinstance(ct, gdal.CoordTransform): # We don't care about SRID because CoordTransform presupposes # source SRS. srid = None elif srid is None or srid < 0: raise GEOSException("Calling transform() with no SRID set is not supported") # Creating an OGR Geometry, which is then transformed. g = gdal.OGRGeometry(self.wkb, srid) g.transform(ct) # Getting a new GEOS pointer ptr = wkb_r().read(g.wkb) if clone: # User wants a cloned transformed geometry returned. return GEOSGeometry(ptr, srid=g.srid) if ptr: # Reassigning pointer, and performing post-initialization setup # again due to the reassignment. capi.destroy_geom(self.ptr) self.ptr = ptr self._post_init(g.srid) else: raise GEOSException('Transformed WKB was invalid.') # #### Topology Routines #### def _topology(self, gptr): "Helper routine to return Geometry from the given pointer." return GEOSGeometry(gptr, srid=self.srid) @property def boundary(self): "Returns the boundary as a newly allocated Geometry object." return self._topology(capi.geos_boundary(self.ptr)) def buffer(self, width, quadsegs=8): """ Returns a geometry that represents all points whose distance from this Geometry is less than or equal to distance. Calculations are in the Spatial Reference System of this Geometry. The optional third parameter sets the number of segment used to approximate a quarter circle (defaults to 8). (Text from PostGIS documentation at ch. 6.1.3) """ return self._topology(capi.geos_buffer(self.ptr, width, quadsegs)) @property def centroid(self): """ The centroid is equal to the centroid of the set of component Geometries of highest dimension (since the lower-dimension geometries contribute zero "weight" to the centroid). """ return self._topology(capi.geos_centroid(self.ptr)) @property def convex_hull(self): """ Returns the smallest convex Polygon that contains all the points in the Geometry. """ return self._topology(capi.geos_convexhull(self.ptr)) def difference(self, other): """ Returns a Geometry representing the points making up this Geometry that do not make up other. """ return self._topology(capi.geos_difference(self.ptr, other.ptr)) @property def envelope(self): "Return the envelope for this geometry (a polygon)." return self._topology(capi.geos_envelope(self.ptr)) def intersection(self, other): "Returns a Geometry representing the points shared by this Geometry and other." return self._topology(capi.geos_intersection(self.ptr, other.ptr)) @property def point_on_surface(self): "Computes an interior point of this Geometry." return self._topology(capi.geos_pointonsurface(self.ptr)) def relate(self, other): "Returns the DE-9IM intersection matrix for this Geometry and the other." 
return capi.geos_relate(self.ptr, other.ptr).decode() def simplify(self, tolerance=0.0, preserve_topology=False): """ Returns the Geometry, simplified using the Douglas-Peucker algorithm to the specified tolerance (higher tolerance => less points). If no tolerance provided, defaults to 0. By default, this function does not preserve topology - e.g. polygons can be split, collapse to lines or disappear holes can be created or disappear, and lines can cross. By specifying preserve_topology=True, the result will have the same dimension and number of components as the input. This is significantly slower. """ if preserve_topology: return self._topology(capi.geos_preservesimplify(self.ptr, tolerance)) else: return self._topology(capi.geos_simplify(self.ptr, tolerance)) def sym_difference(self, other): """ Returns a set combining the points in this Geometry not in other, and the points in other not in this Geometry. """ return self._topology(capi.geos_symdifference(self.ptr, other.ptr)) @property def unary_union(self): "Return the union of all the elements of this geometry." return self._topology(capi.geos_unary_union(self.ptr)) def union(self, other): "Returns a Geometry representing all the points in this Geometry and other." return self._topology(capi.geos_union(self.ptr, other.ptr)) # #### Other Routines #### @property def area(self): "Returns the area of the Geometry." return capi.geos_area(self.ptr, byref(c_double())) def distance(self, other): """ Returns the distance between the closest points on this Geometry and the other. Units will be in those of the coordinate system of the Geometry. """ if not isinstance(other, GEOSGeometry): raise TypeError('distance() works only on other GEOS Geometries.') return capi.geos_distance(self.ptr, other.ptr, byref(c_double())) @property def extent(self): """ Returns the extent of this geometry as a 4-tuple, consisting of (xmin, ymin, xmax, ymax). """ from .point import Point env = self.envelope if isinstance(env, Point): xmin, ymin = env.tuple xmax, ymax = xmin, ymin else: xmin, ymin = env[0][0] xmax, ymax = env[0][2] return (xmin, ymin, xmax, ymax) @property def length(self): """ Returns the length of this Geometry (e.g., 0 for point, or the circumference of a Polygon). """ return capi.geos_length(self.ptr, byref(c_double())) def clone(self): "Clones this Geometry." return GEOSGeometry(capi.geom_clone(self.ptr), srid=self.srid) class LinearGeometryMixin(object): """ Used for LineString and MultiLineString. """ def interpolate(self, distance): return self._topology(capi.geos_interpolate(self.ptr, distance)) def interpolate_normalized(self, distance): return self._topology(capi.geos_interpolate_normalized(self.ptr, distance)) def project(self, point): from .point import Point if not isinstance(point, Point): raise TypeError('locate_point argument must be a Point') return capi.geos_project(self.ptr, point.ptr) def project_normalized(self, point): from .point import Point if not isinstance(point, Point): raise TypeError('locate_point argument must be a Point') return capi.geos_project_normalized(self.ptr, point.ptr) @property def merged(self): """ Return the line merge of this Geometry. """ return self._topology(capi.geos_linemerge(self.ptr)) @property def closed(self): """ Return whether or not this Geometry is closed. """ return capi.geos_isclosed(self.ptr)
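A brief usage sketch for the GEOSGeometry constructor and a few of the methods defined above. This is illustrative only and assumes a working GEOS installation (and GDAL for the GeoJSON input); the printed values are what the documented behavior implies, not verified output.

from django.contrib.gis.geos import GEOSGeometry

pnt = GEOSGeometry('POINT(5 23)')                  # WKT input
ewkt = GEOSGeometry('SRID=4326;POINT(5 23)')       # EWKT input sets the srid
gj = GEOSGeometry('{"type": "Point", "coordinates": [5.0, 23.0]}')  # GeoJSON (needs GDAL)

print(pnt.geom_type, pnt.srid)   # Point None
print(ewkt.srid)                 # 4326
buffered = pnt.buffer(1.0)       # polygon approximating a circle of radius 1
print(pnt.within(buffered))      # True
print(pnt.ewkt)                  # no 'SRID=...;' prefix because srid is None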
10887b47962bff19794a446de3bfc8edbb31a40303b4c9f316b982be95cd8e82
from django.contrib.gis.geos.geometry import GEOSGeometry, hex_regex, wkt_regex
from django.utils import six


def fromfile(file_h):
    """
    Given a string file name, returns a GEOSGeometry. The file may contain WKB,
    WKT, or HEX.
    """
    # If given a file name, get a real handle.
    if isinstance(file_h, six.string_types):
        with open(file_h, 'rb') as file_h:
            buf = file_h.read()
    else:
        buf = file_h.read()

    # If we get WKB need to wrap in memoryview(), so run through regexes.
    if isinstance(buf, bytes):
        try:
            decoded = buf.decode()
            if wkt_regex.match(decoded) or hex_regex.match(decoded):
                return GEOSGeometry(decoded)
        except UnicodeDecodeError:
            pass
    else:
        return GEOSGeometry(buf)

    return GEOSGeometry(six.memoryview(buf))


def fromstr(string, **kwargs):
    "Given a string value, returns a GEOSGeometry object."
    return GEOSGeometry(string, **kwargs)
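Illustrative usage of the factory helpers above (assumes a working GEOS installation; 'point.wkt' is a hypothetical file path):

from django.contrib.gis.geos import fromstr

p = fromstr('POINT(-95.2 38.9)', srid=4326)   # keyword args are passed to GEOSGeometry
print(p.geom_type, p.srid)                    # Point 4326

# fromfile() accepts either a path or an open file object whose contents are
# WKT, HEX, or WKB, e.g. fromfile('point.wkt') for a hypothetical WKT file.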
f50a5ebce9a4751b9f8eef5f67eebb5a8eaeee025119675044fcf6da00b218d9
""" The GeoDjango GEOS module. Please consult the GeoDjango documentation for more details: https://docs.djangoproject.com/en/dev/ref/contrib/gis/geos/ """ from .collections import ( # NOQA GeometryCollection, MultiLineString, MultiPoint, MultiPolygon, ) from .error import GEOSException # NOQA from .factory import fromfile, fromstr # NOQA from .geometry import GEOSGeometry, hex_regex, wkt_regex # NOQA from .io import WKBReader, WKBWriter, WKTReader, WKTWriter # NOQA from .libgeos import geos_version, geos_version_info # NOQA from .linestring import LinearRing, LineString # NOQA from .point import Point # NOQA from .polygon import Polygon # NOQA try: HAS_GEOS = geos_version_info()['version'] >= '3.3.0' except ImportError: HAS_GEOS = False
9b44f81329fc2af0a76be199b2f82db6a128f601d2f6f820c8afb712040c6769
import warnings
from ctypes import c_uint

from django.contrib.gis.geos import prototypes as capi
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.six.moves import range


class Point(GEOSGeometry):
    _minlength = 2
    _maxlength = 3
    has_cs = True

    def __init__(self, x=None, y=None, z=None, srid=None):
        """
        The Point object may be initialized with either a tuple, or individual
        parameters.

        For Example:
        >>> p = Point((5, 23))  # 2D point, passed in as a tuple
        >>> p = Point(5, 23, 8)  # 3D point, passed in with individual parameters
        """
        if x is None:
            coords = []
        elif isinstance(x, (tuple, list)):
            # Here a tuple or list was passed in under the `x` parameter.
            coords = x
        elif isinstance(x, six.integer_types + (float,)) and isinstance(y, six.integer_types + (float,)):
            # Here X, Y, and (optionally) Z were passed in individually, as parameters.
            if isinstance(z, six.integer_types + (float,)):
                coords = [x, y, z]
            else:
                coords = [x, y]
        else:
            raise TypeError('Invalid parameters given for Point initialization.')

        point = self._create_point(len(coords), coords)

        # Initializing using the address returned from the GEOS
        # createPoint factory.
        super(Point, self).__init__(point, srid=srid)

    def _create_point(self, ndim, coords):
        """
        Create a coordinate sequence, set X, Y, [Z], and create point
        """
        if not ndim:
            return capi.create_point(None)

        if ndim < 2 or ndim > 3:
            raise TypeError('Invalid point dimension: %s' % str(ndim))

        cs = capi.create_cs(c_uint(1), c_uint(ndim))
        i = iter(coords)
        capi.cs_setx(cs, 0, next(i))
        capi.cs_sety(cs, 0, next(i))
        if ndim == 3:
            capi.cs_setz(cs, 0, next(i))

        return capi.create_point(cs)

    def _set_list(self, length, items):
        ptr = self._create_point(length, items)
        if ptr:
            capi.destroy_geom(self.ptr)
            self._ptr = ptr
            self._set_cs()
        else:
            # can this happen?
            raise GEOSException('Geometry resulting from slice deletion was invalid.')

    def _set_single(self, index, value):
        self._cs.setOrdinate(index, 0, value)

    def __iter__(self):
        "Allows iteration over coordinates of this Point."
        for i in range(len(self)):
            yield self[i]

    def __len__(self):
        "Returns the number of dimensions for this Point (either 0, 2 or 3)."
        if self.empty:
            return 0
        if self.hasz:
            return 3
        else:
            return 2

    def _get_single_external(self, index):
        if index == 0:
            return self.x
        elif index == 1:
            return self.y
        elif index == 2:
            return self.z

    _get_single_internal = _get_single_external

    @property
    def x(self):
        "Returns the X component of the Point."
        return self._cs.getOrdinate(0, 0)

    @x.setter
    def x(self, value):
        "Sets the X component of the Point."
        self._cs.setOrdinate(0, 0, value)

    @property
    def y(self):
        "Returns the Y component of the Point."
        return self._cs.getOrdinate(1, 0)

    @y.setter
    def y(self, value):
        "Sets the Y component of the Point."
        self._cs.setOrdinate(1, 0, value)

    @property
    def z(self):
        "Returns the Z component of the Point."
        return self._cs.getOrdinate(2, 0) if self.hasz else None

    @z.setter
    def z(self, value):
        "Sets the Z component of the Point."
        if not self.hasz:
            raise GEOSException('Cannot set Z on 2D Point.')
        self._cs.setOrdinate(2, 0, value)

    def get_x(self):
        warnings.warn(
            "`get_x()` is deprecated, use the `x` property instead.",
            RemovedInDjango20Warning, 2
        )
        return self.x

    def set_x(self, value):
        warnings.warn(
            "`set_x()` is deprecated, use the `x` property instead.",
            RemovedInDjango20Warning, 2
        )
        self.x = value

    def get_y(self):
        warnings.warn(
            "`get_y()` is deprecated, use the `y` property instead.",
            RemovedInDjango20Warning, 2
        )
        return self.y

    def set_y(self, value):
        warnings.warn(
            "`set_y()` is deprecated, use the `y` property instead.",
            RemovedInDjango20Warning, 2
        )
        self.y = value

    def get_z(self):
        warnings.warn(
            "`get_z()` is deprecated, use the `z` property instead.",
            RemovedInDjango20Warning, 2
        )
        return self.z

    def set_z(self, value):
        warnings.warn(
            "`set_z()` is deprecated, use the `z` property instead.",
            RemovedInDjango20Warning, 2
        )
        self.z = value

    # ### Tuple setting and retrieval routines. ###
    @property
    def tuple(self):
        "Returns a tuple of the point."
        return self._cs.tuple

    @tuple.setter
    def tuple(self, tup):
        "Sets the coordinates of the point with the given tuple."
        self._cs[0] = tup

    def get_coords(self):
        warnings.warn(
            "`get_coords()` is deprecated, use the `tuple` property instead.",
            RemovedInDjango20Warning, 2
        )
        return self.tuple

    def set_coords(self, tup):
        warnings.warn(
            "`set_coords()` is deprecated, use the `tuple` property instead.",
            RemovedInDjango20Warning, 2
        )
        self.tuple = tup

    # The tuple and coords properties
    coords = tuple
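A small usage sketch for Point (not part of the original module), assuming GEOS is available:

from django.contrib.gis.geos import Point

p = Point(5, 23, srid=4326)   # individual x, y parameters
print(p.x, p.y, p.z)          # 5.0 23.0 None  (a 2D point has no Z)
p.y = 24
print(p.tuple)                # (5.0, 24.0)

p3d = Point((1, 2, 3))        # a 3D point from a tuple
print(len(p3d), p3d.hasz)     # 3 True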
5a856a2ac13916508d87a647eafc5ed5d3e290643fc63fd02d46d337022f79b8
from ctypes import c_void_p

from django.contrib.gis.geos.error import GEOSException


class GEOSBase(object):
    """
    Base object for GEOS objects that has a pointer access property
    that controls access to the underlying C pointer.
    """
    # Initially the pointer is NULL.
    _ptr = None

    # Default allowed pointer type.
    ptr_type = c_void_p

    # Pointer access property.
    def _get_ptr(self):
        # Raise an exception if the pointer isn't valid; we don't
        # want to be passing NULL pointers to routines --
        # that's very bad.
        if self._ptr:
            return self._ptr
        else:
            raise GEOSException('NULL GEOS %s pointer encountered.' % self.__class__.__name__)

    def _set_ptr(self, ptr):
        # Only allow the pointer to be set with pointers of the
        # compatible type or None (NULL).
        if ptr is None or isinstance(ptr, self.ptr_type):
            self._ptr = ptr
        else:
            raise TypeError('Incompatible pointer type')

    # Property for controlling access to the GEOS object pointers. Using
    # this raises an exception when the pointer is NULL, thus preventing
    # the C library from attempting to access an invalid memory location.
    ptr = property(_get_ptr, _set_ptr)
7eb0dd1350b2a1cae6190e43d513baa559b9b4b3c9756f76306375874bdf3c85
""" This module houses the ctypes initialization procedures, as well as the notice and error handler function callbacks (get called when an error occurs in GEOS). This module also houses GEOS Pointer utilities, including get_pointer_arr(), and GEOM_PTR. """ import logging import os import re from ctypes import CDLL, CFUNCTYPE, POINTER, Structure, c_char_p from ctypes.util import find_library from django.contrib.gis.geos.error import GEOSException from django.core.exceptions import ImproperlyConfigured from django.utils.functional import SimpleLazyObject logger = logging.getLogger('django.contrib.gis') def load_geos(): # Custom library path set? try: from django.conf import settings lib_path = settings.GEOS_LIBRARY_PATH except (AttributeError, EnvironmentError, ImportError, ImproperlyConfigured): lib_path = None # Setting the appropriate names for the GEOS-C library. if lib_path: lib_names = None elif os.name == 'nt': # Windows NT libraries lib_names = ['geos_c', 'libgeos_c-1'] elif os.name == 'posix': # *NIX libraries lib_names = ['geos_c', 'GEOS'] else: raise ImportError('Unsupported OS "%s"' % os.name) # Using the ctypes `find_library` utility to find the path to the GEOS # shared library. This is better than manually specifying each library name # and extension (e.g., libgeos_c.[so|so.1|dylib].). if lib_names: for lib_name in lib_names: lib_path = find_library(lib_name) if lib_path is not None: break # No GEOS library could be found. if lib_path is None: raise ImportError( 'Could not find the GEOS library (tried "%s"). ' 'Try setting GEOS_LIBRARY_PATH in your settings.' % '", "'.join(lib_names) ) # Getting the GEOS C library. The C interface (CDLL) is used for # both *NIX and Windows. # See the GEOS C API source code for more details on the library function calls: # http://geos.refractions.net/ro/doxygen_docs/html/geos__c_8h-source.html _lgeos = CDLL(lib_path) # Here we set up the prototypes for the initGEOS_r and finishGEOS_r # routines. These functions aren't actually called until they are # attached to a GEOS context handle -- this actually occurs in # geos/prototypes/threadsafe.py. _lgeos.initGEOS_r.restype = CONTEXT_PTR _lgeos.finishGEOS_r.argtypes = [CONTEXT_PTR] return _lgeos # The notice and error handler C function callback definitions. # Supposed to mimic the GEOS message handler (C below): # typedef void (*GEOSMessageHandler)(const char *fmt, ...); NOTICEFUNC = CFUNCTYPE(None, c_char_p, c_char_p) def notice_h(fmt, lst): fmt, lst = fmt.decode(), lst.decode() try: warn_msg = fmt % lst except TypeError: warn_msg = fmt logger.warning('GEOS_NOTICE: %s\n', warn_msg) notice_h = NOTICEFUNC(notice_h) ERRORFUNC = CFUNCTYPE(None, c_char_p, c_char_p) def error_h(fmt, lst): fmt, lst = fmt.decode(), lst.decode() try: err_msg = fmt % lst except TypeError: err_msg = fmt logger.error('GEOS_ERROR: %s\n', err_msg) error_h = ERRORFUNC(error_h) # #### GEOS Geometry C data structures, and utility functions. #### # Opaque GEOS geometry structures, used for GEOM_PTR and CS_PTR class GEOSGeom_t(Structure): pass class GEOSPrepGeom_t(Structure): pass class GEOSCoordSeq_t(Structure): pass class GEOSContextHandle_t(Structure): pass # Pointers to opaque GEOS geometry structures. 
GEOM_PTR = POINTER(GEOSGeom_t) PREPGEOM_PTR = POINTER(GEOSPrepGeom_t) CS_PTR = POINTER(GEOSCoordSeq_t) CONTEXT_PTR = POINTER(GEOSContextHandle_t) # Used specifically by the GEOSGeom_createPolygon and GEOSGeom_createCollection # GEOS routines def get_pointer_arr(n): "Gets a ctypes pointer array (of length `n`) for GEOSGeom_t opaque pointer." GeomArr = GEOM_PTR * n return GeomArr() lgeos = SimpleLazyObject(load_geos) class GEOSFuncFactory(object): """ Lazy loading of GEOS functions. """ argtypes = None restype = None errcheck = None def __init__(self, func_name, *args, **kwargs): self.func_name = func_name self.restype = kwargs.pop('restype', self.restype) self.errcheck = kwargs.pop('errcheck', self.errcheck) self.argtypes = kwargs.pop('argtypes', self.argtypes) self.args = args self.kwargs = kwargs self.func = None def __call__(self, *args, **kwargs): if self.func is None: self.func = self.get_func(*self.args, **self.kwargs) return self.func(*args, **kwargs) def get_func(self, *args, **kwargs): from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc func = GEOSFunc(self.func_name) func.argtypes = self.argtypes or [] func.restype = self.restype if self.errcheck: func.errcheck = self.errcheck return func # Returns the string version of the GEOS library. Have to set the restype # explicitly to c_char_p to ensure compatibility across 32 and 64-bit platforms. geos_version = GEOSFuncFactory('GEOSversion', restype=c_char_p) # Regular expression should be able to parse version strings such as # '3.0.0rc4-CAPI-1.3.3', '3.0.0-CAPI-1.4.1', '3.4.0dev-CAPI-1.8.0' or '3.4.0dev-CAPI-1.8.0 r0' version_regex = re.compile( r'^(?P<version>(?P<major>\d+)\.(?P<minor>\d+)\.(?P<subminor>\d+))' r'((rc(?P<release_candidate>\d+))|dev)?-CAPI-(?P<capi_version>\d+\.\d+\.\d+)( r\d+)?$' ) def geos_version_info(): """ Returns a dictionary containing the various version metadata parsed from the GEOS version string, including the version number, whether the version is a release candidate (and what number release candidate), and the C API version. """ ver = geos_version().decode() m = version_regex.match(ver) if not m: raise GEOSException('Could not parse version info string "%s"' % ver) return {key: m.group(key) for key in ( 'version', 'release_candidate', 'capi_version', 'major', 'minor', 'subminor')}
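For illustration, roughly what the version helpers above return for a version string like '3.4.2-CAPI-1.8.2 r3921'; the exact values depend on the installed GEOS library, so the outputs shown are assumptions:

from django.contrib.gis.geos.libgeos import geos_version, geos_version_info

info = geos_version_info()
print(geos_version())        # e.g. b'3.4.2-CAPI-1.8.2 r3921'
print(info['version'])       # e.g. '3.4.2'
print(info['major'], info['minor'], info['subminor'])   # e.g. '3' '4' '2'
print(info['capi_version'])  # e.g. '1.8.2'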
22b6bbe180f91cb24f456cd922b5905bdcc76d0fb2b2b5eea67a2495431f2b9b
from django.contrib.gis.geos import prototypes as capi from django.contrib.gis.geos.coordseq import GEOSCoordSeq from django.contrib.gis.geos.error import GEOSException from django.contrib.gis.geos.geometry import GEOSGeometry, LinearGeometryMixin from django.contrib.gis.geos.point import Point from django.contrib.gis.shortcuts import numpy from django.utils.six.moves import range class LineString(LinearGeometryMixin, GEOSGeometry): _init_func = capi.create_linestring _minlength = 2 has_cs = True def __init__(self, *args, **kwargs): """ Initializes on the given sequence -- may take lists, tuples, NumPy arrays of X,Y pairs, or Point objects. If Point objects are used, ownership is _not_ transferred to the LineString object. Examples: ls = LineString((1, 1), (2, 2)) ls = LineString([(1, 1), (2, 2)]) ls = LineString(array([(1, 1), (2, 2)])) ls = LineString(Point(1, 1), Point(2, 2)) """ # If only one argument provided, set the coords array appropriately if len(args) == 1: coords = args[0] else: coords = args if not (isinstance(coords, (tuple, list)) or numpy and isinstance(coords, numpy.ndarray)): raise TypeError('Invalid initialization input for LineStrings.') # If SRID was passed in with the keyword arguments srid = kwargs.get('srid') ncoords = len(coords) if not ncoords: super(LineString, self).__init__(self._init_func(None), srid=srid) return if ncoords < self._minlength: raise ValueError( '%s requires at least %d points, got %s.' % ( self.__class__.__name__, self._minlength, ncoords, ) ) if isinstance(coords, (tuple, list)): # Getting the number of coords and the number of dimensions -- which # must stay the same, e.g., no LineString((1, 2), (1, 2, 3)). ndim = None # Incrementing through each of the coordinates and verifying for coord in coords: if not isinstance(coord, (tuple, list, Point)): raise TypeError('Each coordinate should be a sequence (list or tuple)') if ndim is None: ndim = len(coord) self._checkdim(ndim) elif len(coord) != ndim: raise TypeError('Dimension mismatch.') numpy_coords = False else: shape = coords.shape # Using numpy's shape. if len(shape) != 2: raise TypeError('Too many dimensions.') self._checkdim(shape[1]) ndim = shape[1] numpy_coords = True # Creating a coordinate sequence object because it is easier to # set the points using GEOSCoordSeq.__setitem__(). cs = GEOSCoordSeq(capi.create_cs(ncoords, ndim), z=bool(ndim == 3)) for i in range(ncoords): if numpy_coords: cs[i] = coords[i, :] elif isinstance(coords[i], Point): cs[i] = coords[i].tuple else: cs[i] = coords[i] # Calling the base geometry initialization with the returned pointer # from the function. super(LineString, self).__init__(self._init_func(cs.ptr), srid=srid) def __iter__(self): "Allows iteration over this LineString." for i in range(len(self)): yield self[i] def __len__(self): "Returns the number of points in this LineString." return len(self._cs) def _get_single_external(self, index): return self._cs[index] _get_single_internal = _get_single_external def _set_list(self, length, items): ndim = self._cs.dims hasz = self._cs.hasz # I don't understand why these are different # create a new coordinate sequence and populate accordingly cs = GEOSCoordSeq(capi.create_cs(length, ndim), z=hasz) for i, c in enumerate(items): cs[i] = c ptr = self._init_func(cs.ptr) if ptr: capi.destroy_geom(self.ptr) self.ptr = ptr self._post_init(self.srid) else: # can this happen? 
raise GEOSException('Geometry resulting from slice deletion was invalid.') def _set_single(self, index, value): self._checkindex(index) self._cs[index] = value def _checkdim(self, dim): if dim not in (2, 3): raise TypeError('Dimension mismatch.') # #### Sequence Properties #### @property def tuple(self): "Returns a tuple version of the geometry from the coordinate sequence." return self._cs.tuple coords = tuple def _listarr(self, func): """ Internal routine that returns a sequence (list) corresponding with the given function. Will return a numpy array if possible. """ lst = [func(i) for i in range(len(self))] if numpy: return numpy.array(lst) # ARRRR! else: return lst @property def array(self): "Returns a numpy array for the LineString." return self._listarr(self._cs.__getitem__) @property def x(self): "Returns a list or numpy array of the X variable." return self._listarr(self._cs.getX) @property def y(self): "Returns a list or numpy array of the Y variable." return self._listarr(self._cs.getY) @property def z(self): "Returns a list or numpy array of the Z variable." if not self.hasz: return None else: return self._listarr(self._cs.getZ) # LinearRings are LineStrings used within Polygons. class LinearRing(LineString): _minlength = 4 _init_func = capi.create_linearring
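A usage sketch for the LineString initialization forms listed in the docstring above (assumes GEOS; the NumPy form additionally requires numpy):

from django.contrib.gis.geos import LineString, Point

ls1 = LineString((1, 1), (2, 2), (3, 3))
ls2 = LineString([(1, 1), (2, 2)])
ls3 = LineString(Point(1, 1), Point(2, 2))   # ownership of the Points is not transferred

print(ls1.tuple)     # ((1.0, 1.0), (2.0, 2.0), (3.0, 3.0))
print(ls2.length)    # distance from (1, 1) to (2, 2), about 1.414
print(ls3[1])        # (2.0, 2.0) -- indexing reads from the coordinate sequence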
895dc054e313a4ef80362cd80ce345cdefe0718b626b4a2fafbc07c7753e49a3
""" Module that holds classes for performing I/O operations on GEOS geometry objects. Specifically, this has Python implementations of WKB/WKT reader and writer classes. """ from django.contrib.gis.geos.geometry import GEOSGeometry from django.contrib.gis.geos.prototypes.io import ( WKBWriter, WKTWriter, _WKBReader, _WKTReader, ) __all__ = ['WKBWriter', 'WKTWriter', 'WKBReader', 'WKTReader'] # Public classes for (WKB|WKT)Reader, which return GEOSGeometry class WKBReader(_WKBReader): def read(self, wkb): "Returns a GEOSGeometry for the given WKB buffer." return GEOSGeometry(super(WKBReader, self).read(wkb)) class WKTReader(_WKTReader): def read(self, wkt): "Returns a GEOSGeometry for the given WKT string." return GEOSGeometry(super(WKTReader, self).read(wkt))
79b8c8ab57f5bcf6a6aad1a88f71625f95bcd81064d6c9e7d9add2a128abc7f7
from .base import GEOSBase
from .prototypes import prepared as capi


class PreparedGeometry(GEOSBase):
    """
    A geometry that is prepared for performing certain operations.
    At the moment this includes the contains, covers, and intersects
    operations.
    """
    ptr_type = capi.PREPGEOM_PTR

    def __init__(self, geom):
        # Keeping a reference to the original geometry object to prevent it
        # from being garbage collected which could then crash the prepared one
        # See #21662
        self._base_geom = geom
        from .geometry import GEOSGeometry
        if not isinstance(geom, GEOSGeometry):
            raise TypeError
        self.ptr = capi.geos_prepare(geom.ptr)

    def __del__(self):
        try:
            capi.prepared_destroy(self._ptr)
        except (AttributeError, TypeError):
            pass  # Some part might already have been garbage collected

    def contains(self, other):
        return capi.prepared_contains(self.ptr, other.ptr)

    def contains_properly(self, other):
        return capi.prepared_contains_properly(self.ptr, other.ptr)

    def covers(self, other):
        return capi.prepared_covers(self.ptr, other.ptr)

    def intersects(self, other):
        return capi.prepared_intersects(self.ptr, other.ptr)

    def crosses(self, other):
        return capi.prepared_crosses(self.ptr, other.ptr)

    def disjoint(self, other):
        return capi.prepared_disjoint(self.ptr, other.ptr)

    def overlaps(self, other):
        return capi.prepared_overlaps(self.ptr, other.ptr)

    def touches(self, other):
        return capi.prepared_touches(self.ptr, other.ptr)

    def within(self, other):
        return capi.prepared_within(self.ptr, other.ptr)
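A usage sketch (not part of the original module): prepared geometries are normally obtained via the GEOSGeometry.prepared property, assuming GEOS is available:

from django.contrib.gis.geos import GEOSGeometry, Point

poly = GEOSGeometry('POLYGON((0 0, 0 10, 10 10, 10 0, 0 0))')
prep = poly.prepared                      # PreparedGeometry wrapping `poly`

print(prep.contains(Point(5, 5)))         # True
print(prep.intersects(Point(50, 50)))     # False
print(prep.covers(Point(0, 0)))           # True -- a boundary point is covered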
c8ac17156c8d543c8e4da19c627bfe4a03028eb64ed043983a492920ff70387a
""" This module houses the GEOSCoordSeq object, which is used internally by GEOSGeometry to house the actual coordinates of the Point, LineString, and LinearRing geometries. """ from ctypes import byref, c_double, c_uint from django.contrib.gis.geos import prototypes as capi from django.contrib.gis.geos.base import GEOSBase from django.contrib.gis.geos.error import GEOSException from django.contrib.gis.geos.libgeos import CS_PTR from django.contrib.gis.shortcuts import numpy from django.utils.six.moves import range class GEOSCoordSeq(GEOSBase): "The internal representation of a list of coordinates inside a Geometry." ptr_type = CS_PTR def __init__(self, ptr, z=False): "Initializes from a GEOS pointer." if not isinstance(ptr, CS_PTR): raise TypeError('Coordinate sequence should initialize with a CS_PTR.') self._ptr = ptr self._z = z def __iter__(self): "Iterates over each point in the coordinate sequence." for i in range(self.size): yield self[i] def __len__(self): "Returns the number of points in the coordinate sequence." return int(self.size) def __str__(self): "Returns the string representation of the coordinate sequence." return str(self.tuple) def __getitem__(self, index): "Returns the coordinate sequence value at the given index." coords = [self.getX(index), self.getY(index)] if self.dims == 3 and self._z: coords.append(self.getZ(index)) return tuple(coords) def __setitem__(self, index, value): "Sets the coordinate sequence value at the given index." # Checking the input value if isinstance(value, (list, tuple)): pass elif numpy and isinstance(value, numpy.ndarray): pass else: raise TypeError('Must set coordinate with a sequence (list, tuple, or numpy array).') # Checking the dims of the input if self.dims == 3 and self._z: n_args = 3 set_3d = True else: n_args = 2 set_3d = False if len(value) != n_args: raise TypeError('Dimension of value does not match.') # Setting the X, Y, Z self.setX(index, value[0]) self.setY(index, value[1]) if set_3d: self.setZ(index, value[2]) # #### Internal Routines #### def _checkindex(self, index): "Checks the given index." sz = self.size if (sz < 1) or (index < 0) or (index >= sz): raise IndexError('invalid GEOS Geometry index: %s' % str(index)) def _checkdim(self, dim): "Checks the given dimension." if dim < 0 or dim > 2: raise GEOSException('invalid ordinate dimension "%d"' % dim) # #### Ordinate getting and setting routines #### def getOrdinate(self, dimension, index): "Returns the value for the given dimension and index." self._checkindex(index) self._checkdim(dimension) return capi.cs_getordinate(self.ptr, index, dimension, byref(c_double())) def setOrdinate(self, dimension, index, value): "Sets the value for the given dimension and index." self._checkindex(index) self._checkdim(dimension) capi.cs_setordinate(self.ptr, index, dimension, value) def getX(self, index): "Get the X value at the index." return self.getOrdinate(0, index) def setX(self, index, value): "Set X with the value at the given index." self.setOrdinate(0, index, value) def getY(self, index): "Get the Y value at the given index." return self.getOrdinate(1, index) def setY(self, index, value): "Set Y with the value at the given index." self.setOrdinate(1, index, value) def getZ(self, index): "Get Z with the value at the given index." return self.getOrdinate(2, index) def setZ(self, index, value): "Set Z with the value at the given index." self.setOrdinate(2, index, value) # ### Dimensions ### @property def size(self): "Returns the size of this coordinate sequence." 
return capi.cs_getsize(self.ptr, byref(c_uint())) @property def dims(self): "Returns the dimensions of this coordinate sequence." return capi.cs_getdims(self.ptr, byref(c_uint())) @property def hasz(self): """ Returns whether this coordinate sequence is 3D. This property value is inherited from the parent Geometry. """ return self._z # ### Other Methods ### def clone(self): "Clones this coordinate sequence." return GEOSCoordSeq(capi.cs_clone(self.ptr), self.hasz) @property def kml(self): "Returns the KML representation for the coordinates." # Getting the substitution string depending on whether the coordinates have # a Z dimension. if self.hasz: substr = '%s,%s,%s ' else: substr = '%s,%s,0 ' return '<coordinates>%s</coordinates>' % \ ''.join(substr % self[i] for i in range(len(self))).strip() @property def tuple(self): "Returns a tuple version of this coordinate sequence." n = self.size if n == 1: return self[0] else: return tuple(self[i] for i in range(n))
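GEOSCoordSeq instances are not created directly; they are reached through a geometry. Note that the coord_seq property of a GEOSGeometry returns a clone, so the sketch below only reads from it; writes go through the geometry itself, which operates on its own coordinate sequence. Coordinates are arbitrary.

from django.contrib.gis.geos import LineString

ls = LineString((0, 0), (1, 1), (2, 0))
cs = ls.coord_seq    # a clone of the underlying coordinate sequence
cs.size              # 3
cs[0]                # (0.0, 0.0)
cs.kml               # roughly '<coordinates>0.0,0.0,0 1.0,1.0,0 2.0,0.0,0</coordinates>'
ls[1] = (3, 4)       # modifying a point goes through the LineString, not the clone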
f969de52def897691ea4df46990263723d48a97e3ee42e8f1d08d9231b0a1a0d
from ctypes import byref, c_uint from django.contrib.gis.geos import prototypes as capi from django.contrib.gis.geos.geometry import GEOSGeometry from django.contrib.gis.geos.libgeos import GEOM_PTR, get_pointer_arr from django.contrib.gis.geos.linestring import LinearRing from django.utils import six from django.utils.six.moves import range class Polygon(GEOSGeometry): _minlength = 1 def __init__(self, *args, **kwargs): """ Initializes on an exterior ring and a sequence of holes (both instances may be either LinearRing instances, or a tuple/list that may be constructed into a LinearRing). Examples of initialization, where shell, hole1, and hole2 are valid LinearRing geometries: >>> from django.contrib.gis.geos import LinearRing, Polygon >>> shell = hole1 = hole2 = LinearRing() >>> poly = Polygon(shell, hole1, hole2) >>> poly = Polygon(shell, (hole1, hole2)) >>> # Example where a tuple parameters are used: >>> poly = Polygon(((0, 0), (0, 10), (10, 10), (0, 10), (0, 0)), ... ((4, 4), (4, 6), (6, 6), (6, 4), (4, 4))) """ if not args: super(Polygon, self).__init__(self._create_polygon(0, None), **kwargs) return # Getting the ext_ring and init_holes parameters from the argument list ext_ring = args[0] init_holes = args[1:] n_holes = len(init_holes) # If initialized as Polygon(shell, (LinearRing, LinearRing)) [for backward-compatibility] if n_holes == 1 and isinstance(init_holes[0], (tuple, list)): if len(init_holes[0]) == 0: init_holes = () n_holes = 0 elif isinstance(init_holes[0][0], LinearRing): init_holes = init_holes[0] n_holes = len(init_holes) polygon = self._create_polygon(n_holes + 1, (ext_ring,) + init_holes) super(Polygon, self).__init__(polygon, **kwargs) def __iter__(self): "Iterates over each ring in the polygon." for i in range(len(self)): yield self[i] def __len__(self): "Returns the number of rings in this Polygon." return self.num_interior_rings + 1 @classmethod def from_bbox(cls, bbox): "Constructs a Polygon from a bounding box (4-tuple)." x0, y0, x1, y1 = bbox for z in bbox: if not isinstance(z, six.integer_types + (float,)): return GEOSGeometry('POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' % (x0, y0, x0, y1, x1, y1, x1, y0, x0, y0)) return Polygon(((x0, y0), (x0, y1), (x1, y1), (x1, y0), (x0, y0))) # ### These routines are needed for list-like operation w/ListMixin ### def _create_polygon(self, length, items): # Instantiate LinearRing objects if necessary, but don't clone them yet # _construct_ring will throw a TypeError if a parameter isn't a valid ring # If we cloned the pointers here, we wouldn't be able to clean up # in case of error. if not length: return capi.create_empty_polygon() rings = [] for r in items: if isinstance(r, GEOM_PTR): rings.append(r) else: rings.append(self._construct_ring(r)) shell = self._clone(rings.pop(0)) n_holes = length - 1 if n_holes: holes = get_pointer_arr(n_holes) for i, r in enumerate(rings): holes[i] = self._clone(r) holes_param = byref(holes) else: holes_param = None return capi.create_polygon(shell, holes_param, c_uint(n_holes)) def _clone(self, g): if isinstance(g, GEOM_PTR): return capi.geom_clone(g) else: return capi.geom_clone(g.ptr) def _construct_ring(self, param, msg=( 'Parameter must be a sequence of LinearRings or objects that can initialize to LinearRings')): "Helper routine for trying to construct a ring from the given parameter." 
if isinstance(param, LinearRing): return param try: ring = LinearRing(param) return ring except TypeError: raise TypeError(msg) def _set_list(self, length, items): # Getting the current pointer, replacing with the newly constructed # geometry, and destroying the old geometry. prev_ptr = self.ptr srid = self.srid self.ptr = self._create_polygon(length, items) if srid: self.srid = srid capi.destroy_geom(prev_ptr) def _get_single_internal(self, index): """ Returns the ring at the specified index. The first index, 0, will always return the exterior ring. Indices > 0 will return the interior ring at the given index (e.g., poly[1] and poly[2] would return the first and second interior ring, respectively). CAREFUL: Internal/External are not the same as Interior/Exterior! _get_single_internal returns a pointer from the existing geometries for use internally by the object's methods. _get_single_external returns a clone of the same geometry for use by external code. """ if index == 0: return capi.get_extring(self.ptr) else: # Getting the interior ring, have to subtract 1 from the index. return capi.get_intring(self.ptr, index - 1) def _get_single_external(self, index): return GEOSGeometry(capi.geom_clone(self._get_single_internal(index)), srid=self.srid) _set_single = GEOSGeometry._set_single_rebuild _assign_extended_slice = GEOSGeometry._assign_extended_slice_rebuild # #### Polygon Properties #### @property def num_interior_rings(self): "Returns the number of interior rings." # Getting the number of rings return capi.get_nrings(self.ptr) def _get_ext_ring(self): "Gets the exterior ring of the Polygon." return self[0] def _set_ext_ring(self, ring): "Sets the exterior ring of the Polygon." self[0] = ring # Properties for the exterior ring/shell. exterior_ring = property(_get_ext_ring, _set_ext_ring) shell = exterior_ring @property def tuple(self): "Gets the tuple for each ring in this Polygon." return tuple(self[i].tuple for i in range(len(self))) coords = tuple @property def kml(self): "Returns the KML representation of this Polygon." inner_kml = ''.join( "<innerBoundaryIs>%s</innerBoundaryIs>" % self[i + 1].kml for i in range(self.num_interior_rings) ) return "<Polygon><outerBoundaryIs>%s</outerBoundaryIs>%s</Polygon>" % (self[0].kml, inner_kml)
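A construction sketch for the Polygon class above, with arbitrary ring coordinates:

from django.contrib.gis.geos import LinearRing, Polygon

shell = LinearRing((0, 0), (0, 10), (10, 10), (10, 0), (0, 0))
hole = LinearRing((4, 4), (4, 6), (6, 6), (6, 4), (4, 4))
poly = Polygon(shell, hole)
len(poly)                  # 2 -- the exterior ring plus one interior ring
poly.num_interior_rings    # 1
poly.exterior_ring         # a clone of `shell` (see _get_single_external above)
Polygon.from_bbox((0.0, 0.0, 10.0, 10.0))   # rectangle built from a 4-tuple extent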
450805f337a58a390f6077a49c446792fecf3b5ae4598adefa6d23852ee097ba
from django.contrib.gis.db.models.fields import ExtentField from django.db.models.aggregates import Aggregate __all__ = ['Collect', 'Extent', 'Extent3D', 'MakeLine', 'Union'] class GeoAggregate(Aggregate): function = None is_extent = False def as_sql(self, compiler, connection): # this will be called again in parent, but it's needed now - before # we get the spatial_aggregate_name connection.ops.check_expression_support(self) self.function = connection.ops.spatial_aggregate_name(self.name) return super(GeoAggregate, self).as_sql(compiler, connection) def as_oracle(self, compiler, connection): if not hasattr(self, 'tolerance'): self.tolerance = 0.05 self.extra['tolerance'] = self.tolerance if not self.is_extent: self.template = '%(function)s(SDOAGGRTYPE(%(expressions)s,%(tolerance)s))' return self.as_sql(compiler, connection) def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = super(GeoAggregate, self).resolve_expression(query, allow_joins, reuse, summarize, for_save) for expr in c.get_source_expressions(): if not hasattr(expr.field, 'geom_type'): raise ValueError('Geospatial aggregates only allowed on geometry fields.') return c def convert_value(self, value, expression, connection, context): return connection.ops.convert_geom(value, self.output_field) class Collect(GeoAggregate): name = 'Collect' class Extent(GeoAggregate): name = 'Extent' is_extent = '2D' def __init__(self, expression, **extra): super(Extent, self).__init__(expression, output_field=ExtentField(), **extra) def convert_value(self, value, expression, connection, context): return connection.ops.convert_extent(value, context.get('transformed_srid')) class Extent3D(GeoAggregate): name = 'Extent3D' is_extent = '3D' def __init__(self, expression, **extra): super(Extent3D, self).__init__(expression, output_field=ExtentField(), **extra) def convert_value(self, value, expression, connection, context): return connection.ops.convert_extent3d(value, context.get('transformed_srid')) class MakeLine(GeoAggregate): name = 'MakeLine' class Union(GeoAggregate): name = 'Union'
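A hedged sketch of these aggregates against a hypothetical City model whose point column is a PointField; the result keys follow the usual <field>__<aggregate> default alias.

from django.contrib.gis.db.models import Collect, Extent, Union

# `City` is a hypothetical model with a PointField named `point`.
City.objects.aggregate(Extent('point'))   # e.g. {'point__extent': (xmin, ymin, xmax, ymax)}
City.objects.aggregate(Union('point'))    # e.g. {'point__union': <MultiPoint geometry>}
City.objects.values('state').annotate(points=Collect('point'))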
160efa3b6bd0c6bc420bf8ee806523dcacde1770f0e2d5745a52c7730372f793
import warnings from django.contrib.gis.db.models.fields import ( GeometryField, LineStringField, PointField, get_srid_info, ) from django.contrib.gis.db.models.lookups import GISLookup from django.contrib.gis.db.models.sql import ( AreaField, DistanceField, GeomField, GMLField, ) from django.contrib.gis.geometry.backend import Geometry from django.contrib.gis.measure import Area, Distance from django.db import connections from django.db.models.constants import LOOKUP_SEP from django.db.models.expressions import RawSQL from django.db.models.fields import Field from django.db.models.query import QuerySet from django.utils import six from django.utils.deprecation import RemovedInDjango20Warning class GeoQuerySet(QuerySet): "The Geographic QuerySet." # ### GeoQuerySet Methods ### def area(self, tolerance=0.05, **kwargs): """ Returns the area of the geographic field in an `area` attribute on each element of this GeoQuerySet. """ # Performing setup here rather than in `_spatial_attribute` so that # we can get the units for `AreaField`. procedure_args, geo_field = self._spatial_setup( 'area', field_name=kwargs.get('field_name')) s = {'procedure_args': procedure_args, 'geo_field': geo_field, 'setup': False, } connection = connections[self.db] backend = connection.ops if backend.oracle: s['procedure_fmt'] = '%(geo_col)s,%(tolerance)s' s['procedure_args']['tolerance'] = tolerance s['select_field'] = AreaField('sq_m') # Oracle returns area in units of meters. elif backend.postgis or backend.spatialite: if backend.geography: # Geography fields support area calculation, returns square meters. s['select_field'] = AreaField('sq_m') elif not geo_field.geodetic(connection): # Getting the area units of the geographic field. s['select_field'] = AreaField(Area.unit_attname(geo_field.units_name(connection))) else: # TODO: Do we want to support raw number areas for geodetic fields? raise Exception('Area on geodetic coordinate systems not supported.') return self._spatial_attribute('area', s, **kwargs) def centroid(self, **kwargs): """ Returns the centroid of the geographic field in a `centroid` attribute on each element of this GeoQuerySet. """ return self._geom_attribute('centroid', **kwargs) def difference(self, geom, **kwargs): """ Returns the spatial difference of the geographic field in a `difference` attribute on each element of this GeoQuerySet. """ return self._geomset_attribute('difference', geom, **kwargs) def distance(self, geom, **kwargs): """ Returns the distance from the given geographic field name to the given geometry in a `distance` attribute on each element of the GeoQuerySet. Keyword Arguments: `spheroid` => If the geometry field is geodetic and PostGIS is the spatial database, then the more accurate spheroid calculation will be used instead of the quicker sphere calculation. `tolerance` => Used only for Oracle. The tolerance is in meters -- a default of 5 centimeters (0.05) is used. """ return self._distance_attribute('distance', geom, **kwargs) def envelope(self, **kwargs): """ Returns a Geometry representing the bounding box of the Geometry field in an `envelope` attribute on each element of the GeoQuerySet. """ return self._geom_attribute('envelope', **kwargs) def force_rhr(self, **kwargs): """ Returns a modified version of the Polygon/MultiPolygon in which all of the vertices follow the Right-Hand-Rule. By default, this is attached as the `force_rhr` attribute on each element of the GeoQuerySet. 
""" return self._geom_attribute('force_rhr', **kwargs) def geojson(self, precision=8, crs=False, bbox=False, **kwargs): """ Returns a GeoJSON representation of the geometry field in a `geojson` attribute on each element of the GeoQuerySet. The `crs` and `bbox` keywords may be set to True if the user wants the coordinate reference system and the bounding box to be included in the GeoJSON representation of the geometry. """ backend = connections[self.db].ops if not backend.geojson: raise NotImplementedError('Only PostGIS and SpatiaLite support GeoJSON serialization.') if not isinstance(precision, six.integer_types): raise TypeError('Precision keyword must be set with an integer.') options = 0 if crs and bbox: options = 3 elif bbox: options = 1 elif crs: options = 2 s = {'desc': 'GeoJSON', 'procedure_args': {'precision': precision, 'options': options}, 'procedure_fmt': '%(geo_col)s,%(precision)s,%(options)s', } return self._spatial_attribute('geojson', s, **kwargs) def geohash(self, precision=20, **kwargs): """ Returns a GeoHash representation of the given field in a `geohash` attribute on each element of the GeoQuerySet. The `precision` keyword may be used to custom the number of _characters_ used in the output GeoHash, the default is 20. """ s = {'desc': 'GeoHash', 'procedure_args': {'precision': precision}, 'procedure_fmt': '%(geo_col)s,%(precision)s', } return self._spatial_attribute('geohash', s, **kwargs) def gml(self, precision=8, version=2, **kwargs): """ Returns GML representation of the given field in a `gml` attribute on each element of the GeoQuerySet. """ backend = connections[self.db].ops s = {'desc': 'GML', 'procedure_args': {'precision': precision}} if backend.postgis: s['procedure_fmt'] = '%(version)s,%(geo_col)s,%(precision)s' s['procedure_args'] = {'precision': precision, 'version': version} if backend.oracle: s['select_field'] = GMLField() return self._spatial_attribute('gml', s, **kwargs) def intersection(self, geom, **kwargs): """ Returns the spatial intersection of the Geometry field in an `intersection` attribute on each element of this GeoQuerySet. """ return self._geomset_attribute('intersection', geom, **kwargs) def kml(self, **kwargs): """ Returns KML representation of the geometry field in a `kml` attribute on each element of this GeoQuerySet. """ s = {'desc': 'KML', 'procedure_fmt': '%(geo_col)s,%(precision)s', 'procedure_args': {'precision': kwargs.pop('precision', 8)}, } return self._spatial_attribute('kml', s, **kwargs) def length(self, **kwargs): """ Returns the length of the geometry field as a `Distance` object stored in a `length` attribute on each element of this GeoQuerySet. """ return self._distance_attribute('length', None, **kwargs) def mem_size(self, **kwargs): """ Returns the memory size (number of bytes) that the geometry field takes in a `mem_size` attribute on each element of this GeoQuerySet. """ return self._spatial_attribute('mem_size', {}, **kwargs) def num_geom(self, **kwargs): """ Returns the number of geometries if the field is a GeometryCollection or Multi* Field in a `num_geom` attribute on each element of this GeoQuerySet; otherwise the sets with None. """ return self._spatial_attribute('num_geom', {}, **kwargs) def num_points(self, **kwargs): """ Returns the number of points in the first linestring in the Geometry field in a `num_points` attribute on each element of this GeoQuerySet; otherwise sets with None. 
""" return self._spatial_attribute('num_points', {}, **kwargs) def perimeter(self, **kwargs): """ Returns the perimeter of the geometry field as a `Distance` object stored in a `perimeter` attribute on each element of this GeoQuerySet. """ return self._distance_attribute('perimeter', None, **kwargs) def point_on_surface(self, **kwargs): """ Returns a Point geometry guaranteed to lie on the surface of the Geometry field in a `point_on_surface` attribute on each element of this GeoQuerySet; otherwise sets with None. """ return self._geom_attribute('point_on_surface', **kwargs) def reverse_geom(self, **kwargs): """ Reverses the coordinate order of the geometry, and attaches as a `reverse` attribute on each element of this GeoQuerySet. """ s = {'select_field': GeomField()} kwargs.setdefault('model_att', 'reverse_geom') if connections[self.db].ops.oracle: s['geo_field_type'] = LineStringField return self._spatial_attribute('reverse', s, **kwargs) def scale(self, x, y, z=0.0, **kwargs): """ Scales the geometry to a new size by multiplying the ordinates with the given x,y,z scale factors. """ if connections[self.db].ops.spatialite: if z != 0.0: raise NotImplementedError('SpatiaLite does not support 3D scaling.') s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s', 'procedure_args': {'x': x, 'y': y}, 'select_field': GeomField(), } else: s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s,%(z)s', 'procedure_args': {'x': x, 'y': y, 'z': z}, 'select_field': GeomField(), } return self._spatial_attribute('scale', s, **kwargs) def snap_to_grid(self, *args, **kwargs): """ Snap all points of the input geometry to the grid. How the geometry is snapped to the grid depends on how many arguments were given: - 1 argument : A single size to snap both the X and Y grids to. - 2 arguments: X and Y sizes to snap the grid to. - 4 arguments: X, Y sizes and the X, Y origins. """ if False in [isinstance(arg, (float,) + six.integer_types) for arg in args]: raise TypeError('Size argument(s) for the grid must be a float or integer values.') nargs = len(args) if nargs == 1: size = args[0] procedure_fmt = '%(geo_col)s,%(size)s' procedure_args = {'size': size} elif nargs == 2: xsize, ysize = args procedure_fmt = '%(geo_col)s,%(xsize)s,%(ysize)s' procedure_args = {'xsize': xsize, 'ysize': ysize} elif nargs == 4: xsize, ysize, xorigin, yorigin = args procedure_fmt = '%(geo_col)s,%(xorigin)s,%(yorigin)s,%(xsize)s,%(ysize)s' procedure_args = {'xsize': xsize, 'ysize': ysize, 'xorigin': xorigin, 'yorigin': yorigin} else: raise ValueError('Must provide 1, 2, or 4 arguments to `snap_to_grid`.') s = {'procedure_fmt': procedure_fmt, 'procedure_args': procedure_args, 'select_field': GeomField(), } return self._spatial_attribute('snap_to_grid', s, **kwargs) def svg(self, relative=False, precision=8, **kwargs): """ Returns SVG representation of the geographic field in a `svg` attribute on each element of this GeoQuerySet. Keyword Arguments: `relative` => If set to True, this will evaluate the path in terms of relative moves (rather than absolute). `precision` => May be used to set the maximum number of decimal digits used in output (defaults to 8). 
""" relative = int(bool(relative)) if not isinstance(precision, six.integer_types): raise TypeError('SVG precision keyword argument must be an integer.') s = { 'desc': 'SVG', 'procedure_fmt': '%(geo_col)s,%(rel)s,%(precision)s', 'procedure_args': { 'rel': relative, 'precision': precision, } } return self._spatial_attribute('svg', s, **kwargs) def sym_difference(self, geom, **kwargs): """ Returns the symmetric difference of the geographic field in a `sym_difference` attribute on each element of this GeoQuerySet. """ return self._geomset_attribute('sym_difference', geom, **kwargs) def translate(self, x, y, z=0.0, **kwargs): """ Translates the geometry to a new location using the given numeric parameters as offsets. """ if connections[self.db].ops.spatialite: if z != 0.0: raise NotImplementedError('SpatiaLite does not support 3D translation.') s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s', 'procedure_args': {'x': x, 'y': y}, 'select_field': GeomField(), } else: s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s,%(z)s', 'procedure_args': {'x': x, 'y': y, 'z': z}, 'select_field': GeomField(), } return self._spatial_attribute('translate', s, **kwargs) def transform(self, srid=4326, **kwargs): """ Transforms the given geometry field to the given SRID. If no SRID is provided, the transformation will default to using 4326 (WGS84). """ if not isinstance(srid, six.integer_types): raise TypeError('An integer SRID must be provided.') field_name = kwargs.get('field_name') self._spatial_setup('transform', field_name=field_name) self.query.add_context('transformed_srid', srid) return self._clone() def union(self, geom, **kwargs): """ Returns the union of the geographic field with the given Geometry in a `union` attribute on each element of this GeoQuerySet. """ return self._geomset_attribute('union', geom, **kwargs) # ### Private API -- Abstracted DRY routines. ### def _spatial_setup(self, att, desc=None, field_name=None, geo_field_type=None): """ Performs set up for executing the spatial function. """ # Does the spatial backend support this? connection = connections[self.db] func = getattr(connection.ops, att, False) if desc is None: desc = att if not func: raise NotImplementedError('%s stored procedure not available on ' 'the %s backend.' % (desc, connection.ops.name)) # Initializing the procedure arguments. procedure_args = {'function': func} # Is there a geographic field in the model to perform this # operation on? geo_field = self._geo_field(field_name) if not geo_field: raise TypeError('%s output only available on GeometryFields.' % func) # If the `geo_field_type` keyword was used, then enforce that # type limitation. if geo_field_type is not None and not isinstance(geo_field, geo_field_type): raise TypeError('"%s" stored procedures may only be called on %ss.' % (func, geo_field_type.__name__)) # Setting the procedure args. procedure_args['geo_col'] = self._geocol_select(geo_field, field_name) return procedure_args, geo_field def _spatial_attribute(self, att, settings, field_name=None, model_att=None): """ DRY routine for calling a spatial stored procedure on a geometry column and attaching its output as an attribute of the model. Arguments: att: The name of the spatial attribute that holds the spatial SQL function to call. settings: Dictionary of internal settings to customize for the spatial procedure. Public Keyword Arguments: field_name: The name of the geographic field to call the spatial function on. May also be a lookup to a geometry field as part of a foreign key relation. 
model_att: The name of the model attribute to attach the output of the spatial function to. """ warnings.warn( "The %s GeoQuerySet method is deprecated. See GeoDjango Functions " "documentation to find the expression-based replacement." % att, RemovedInDjango20Warning, stacklevel=2 ) # Default settings. settings.setdefault('desc', None) settings.setdefault('geom_args', ()) settings.setdefault('geom_field', None) settings.setdefault('procedure_args', {}) settings.setdefault('procedure_fmt', '%(geo_col)s') settings.setdefault('select_params', []) connection = connections[self.db] # Performing setup for the spatial column, unless told not to. if settings.get('setup', True): default_args, geo_field = self._spatial_setup( att, desc=settings['desc'], field_name=field_name, geo_field_type=settings.get('geo_field_type')) for k, v in six.iteritems(default_args): settings['procedure_args'].setdefault(k, v) else: geo_field = settings['geo_field'] # The attribute to attach to the model. if not isinstance(model_att, six.string_types): model_att = att # Special handling for any argument that is a geometry. for name in settings['geom_args']: # Using the field's get_placeholder() routine to get any needed # transformation SQL. geom = geo_field.get_prep_value(settings['procedure_args'][name]) params = geo_field._get_db_prep_lookup('contains', geom, connection=connection) geom_placeholder = geo_field.get_placeholder(geom, None, connection) # Replacing the procedure format with that of any needed # transformation SQL. old_fmt = '%%(%s)s' % name new_fmt = geom_placeholder % '%%s' settings['procedure_fmt'] = settings['procedure_fmt'].replace(old_fmt, new_fmt) settings['select_params'].extend(params) # Getting the format for the stored procedure. fmt = '%%(function)s(%s)' % settings['procedure_fmt'] # If the result of this function needs to be converted. if settings.get('select_field'): select_field = settings['select_field'] if connection.ops.oracle: select_field.empty_strings_allowed = False else: select_field = Field() # Finally, setting the extra selection attribute with # the format string expanded with the stored procedure # arguments. self.query.add_annotation( RawSQL(fmt % settings['procedure_args'], settings['select_params'], select_field), model_att) return self def _distance_attribute(self, func, geom=None, tolerance=0.05, spheroid=False, **kwargs): """ DRY routine for GeoQuerySet distance attribute routines. """ # Setting up the distance procedure arguments. procedure_args, geo_field = self._spatial_setup(func, field_name=kwargs.get('field_name')) # If geodetic defaulting distance attribute to meters (Oracle and # PostGIS spherical distances return meters). Otherwise, use the # units of the geometry field. connection = connections[self.db] geodetic = geo_field.geodetic(connection) geography = geo_field.geography if geodetic: dist_att = 'm' else: dist_att = Distance.unit_attname(geo_field.units_name(connection)) # Shortcut booleans for what distance function we're using and # whether the geometry field is 3D. distance = func == 'distance' length = func == 'length' perimeter = func == 'perimeter' if not (distance or length or perimeter): raise ValueError('Unknown distance function: %s' % func) geom_3d = geo_field.dim == 3 # The field's _get_db_prep_lookup() is used to get any # extra distance parameters. Here we set up the # parameters that will be passed in to field's function. lookup_params = [geom or 'POINT (0 0)', 0] # Getting the spatial backend operations. 
backend = connection.ops # If the spheroid calculation is desired, either by the `spheroid` # keyword or when calculating the length of geodetic field, make # sure the 'spheroid' distance setting string is passed in so we # get the correct spatial stored procedure. if spheroid or (backend.postgis and geodetic and (not geography) and length): lookup_params.append('spheroid') lookup_params = geo_field.get_prep_value(lookup_params) params = geo_field._get_db_prep_lookup('distance_lte', lookup_params, connection=connection) # The `geom_args` flag is set to true if a geometry parameter was # passed in. geom_args = bool(geom) if backend.oracle: if distance: procedure_fmt = '%(geo_col)s,%(geom)s,%(tolerance)s' elif length or perimeter: procedure_fmt = '%(geo_col)s,%(tolerance)s' procedure_args['tolerance'] = tolerance else: # Getting whether this field is in units of degrees since the field may have # been transformed via the `transform` GeoQuerySet method. srid = self.query.get_context('transformed_srid') if srid: u, unit_name, s = get_srid_info(srid, connection) geodetic = unit_name.lower() in geo_field.geodetic_units if geodetic and not connection.features.supports_distance_geodetic: raise ValueError( 'This database does not support linear distance ' 'calculations on geodetic coordinate systems.' ) if distance: if srid: # Setting the `geom_args` flag to false because we want to handle # transformation SQL here, rather than the way done by default # (which will transform to the original SRID of the field rather # than to what was transformed to). geom_args = False procedure_fmt = '%s(%%(geo_col)s, %s)' % (backend.transform, srid) if geom.srid is None or geom.srid == srid: # If the geom parameter srid is None, it is assumed the coordinates # are in the transformed units. A placeholder is used for the # geometry parameter. `GeomFromText` constructor is also needed # to wrap geom placeholder for SpatiaLite. if backend.spatialite: procedure_fmt += ', %s(%%%%s, %s)' % (backend.from_text, srid) else: procedure_fmt += ', %%s' else: # We need to transform the geom to the srid specified in `transform()`, # so wrapping the geometry placeholder in transformation SQL. # SpatiaLite also needs geometry placeholder wrapped in `GeomFromText` # constructor. if backend.spatialite: procedure_fmt += (', %s(%s(%%%%s, %s), %s)' % ( backend.transform, backend.from_text, geom.srid, srid)) else: procedure_fmt += ', %s(%%%%s, %s)' % (backend.transform, srid) else: # `transform()` was not used on this GeoQuerySet. procedure_fmt = '%(geo_col)s,%(geom)s' if not geography and geodetic: # Spherical distance calculation is needed (because the geographic # field is geodetic). However, the PostGIS ST_distance_sphere/spheroid() # procedures may only do queries from point columns to point geometries # some error checking is required. if not backend.geography: if not isinstance(geo_field, PointField): raise ValueError('Spherical distance calculation only supported on PointFields.') if not str(Geometry(six.memoryview(params[0].ewkb)).geom_type) == 'Point': raise ValueError( 'Spherical distance calculation only supported with ' 'Point Geometry parameters' ) # The `function` procedure argument needs to be set differently for # geodetic distance calculations. if spheroid: # Call to distance_spheroid() requires spheroid param as well. 
procedure_fmt += ",'%(spheroid)s'" procedure_args.update({'function': backend.distance_spheroid, 'spheroid': params[1]}) else: procedure_args.update({'function': backend.distance_sphere}) elif length or perimeter: procedure_fmt = '%(geo_col)s' if not geography and geodetic and length: # There's no `length_sphere`, and `length_spheroid` also # works on 3D geometries. procedure_fmt += ",'%(spheroid)s'" procedure_args.update({'function': backend.length_spheroid, 'spheroid': params[1]}) elif geom_3d and connection.features.supports_3d_functions: # Use 3D variants of perimeter and length routines on supported backends. if perimeter: procedure_args.update({'function': backend.perimeter3d}) elif length: procedure_args.update({'function': backend.length3d}) # Setting up the settings for `_spatial_attribute`. s = {'select_field': DistanceField(dist_att), 'setup': False, 'geo_field': geo_field, 'procedure_args': procedure_args, 'procedure_fmt': procedure_fmt, } if geom_args: s['geom_args'] = ('geom',) s['procedure_args']['geom'] = geom elif geom: # The geometry is passed in as a parameter because we handled # transformation conditions in this routine. s['select_params'] = [backend.Adapter(geom)] return self._spatial_attribute(func, s, **kwargs) def _geom_attribute(self, func, tolerance=0.05, **kwargs): """ DRY routine for setting up a GeoQuerySet method that attaches a Geometry attribute (e.g., `centroid`, `point_on_surface`). """ s = {'select_field': GeomField()} if connections[self.db].ops.oracle: s['procedure_fmt'] = '%(geo_col)s,%(tolerance)s' s['procedure_args'] = {'tolerance': tolerance} return self._spatial_attribute(func, s, **kwargs) def _geomset_attribute(self, func, geom, tolerance=0.05, **kwargs): """ DRY routine for setting up a GeoQuerySet method that attaches a Geometry attribute and takes a Geoemtry parameter. This is used for geometry set-like operations (e.g., intersection, difference, union, sym_difference). """ s = { 'geom_args': ('geom',), 'select_field': GeomField(), 'procedure_fmt': '%(geo_col)s,%(geom)s', 'procedure_args': {'geom': geom}, } if connections[self.db].ops.oracle: s['procedure_fmt'] += ',%(tolerance)s' s['procedure_args']['tolerance'] = tolerance return self._spatial_attribute(func, s, **kwargs) def _geocol_select(self, geo_field, field_name): """ Helper routine for constructing the SQL to select the geographic column. Takes into account if the geographic field is in a ForeignKey relation to the current model. """ compiler = self.query.get_compiler(self.db) opts = self.model._meta if geo_field not in opts.fields: # Is this operation going to be on a related geographic field? # If so, it'll have to be added to the select related information # (e.g., if 'location__point' was given as the field name, then # chop the non-relational field and add select_related('location')). # Note: the operation really is defined as "must add select related!" self.query.add_select_related([field_name.rsplit(LOOKUP_SEP, 1)[0]]) # Call pre_sql_setup() so that compiler.select gets populated. compiler.pre_sql_setup() for col, _, _ in compiler.select: if col.output_field == geo_field: return col.as_sql(compiler, compiler.connection)[0] raise ValueError("%r not in compiler's related_select_cols" % geo_field) elif geo_field not in opts.local_fields: # This geographic field is inherited from another model, so we have to # use the db table for the _parent_ model instead. 
parent_model = geo_field.model._meta.concrete_model return self._field_column(compiler, geo_field, parent_model._meta.db_table) else: return self._field_column(compiler, geo_field) # Private API utilities, subject to change. def _geo_field(self, field_name=None): """ Returns the first Geometry field encountered or the one specified via the `field_name` keyword. The `field_name` may be a string specifying the geometry field on this GeoQuerySet's model, or a lookup string to a geometry field via a ForeignKey relation. """ if field_name is None: # Incrementing until the first geographic field is found. for field in self.model._meta.fields: if isinstance(field, GeometryField): return field return False else: # Otherwise, check by the given field name -- which may be # a lookup to a _related_ geographic field. return GISLookup._check_geo_field(self.model._meta, field_name) def _field_column(self, compiler, field, table_alias=None, column=None): """ Helper function that returns the database column for the given field. The table and column are returned (quoted) in the proper format, e.g., `"geoapp_city"."point"`. If `table_alias` is not specified, the database table associated with the model of this `GeoQuerySet` will be used. If `column` is specified, it will be used instead of the value in `field.column`. """ if table_alias is None: table_alias = compiler.query.get_meta().db_table return "%s.%s" % (compiler.quote_name_unless_alias(table_alias), compiler.connection.ops.quote_name(column or field.column))
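A sketch of how these (deprecated) GeoQuerySet methods were typically called, again on a hypothetical City model managed by a GeoManager; every call emits RemovedInDjango20Warning and attaches the computed value as an attribute on each returned object.

from django.contrib.gis.geos import Point

# `City` is a hypothetical model with a PointField named `point`.
for city in City.objects.kml(precision=6):
    city.kml                     # KML string for city.point

qs = City.objects.transform(3857).geojson()             # reproject, then serialize (PostGIS/SpatiaLite)
nearby = City.objects.distance(Point(-95.23, 38.97, srid=4326))
nearby[0].distance               # a Distance object (meters on geodetic fields)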
b9e83eedabbf0f7d152d2e2fbe49e1a61fdf48da77fd386ec5c952ab01491b14
""" The SpatialProxy object allows for lazy-geometries and lazy-rasters. The proxy uses Python descriptors for instantiating and setting Geometry or Raster objects corresponding to geographic model fields. Thanks to Robert Coup for providing this functionality (see #4322). """ from django.db.models.query_utils import DeferredAttribute from django.utils import six class SpatialProxy(DeferredAttribute): def __init__(self, klass, field): """ Proxy initializes on the given Geometry or Raster class (not an instance) and the corresponding field. """ self._field = field self._klass = klass super(SpatialProxy, self).__init__(field.attname, klass) def __get__(self, instance, cls=None): """ This accessor retrieves the geometry or raster, initializing it using the corresponding class specified during initialization and the value of the field. Currently, GEOS or OGR geometries as well as GDALRasters are supported. """ if instance is None: # Accessed on a class, not an instance return self # Getting the value of the field. try: geo_value = instance.__dict__[self._field.attname] except KeyError: geo_value = super(SpatialProxy, self).__get__(instance, cls) if isinstance(geo_value, self._klass): geo_obj = geo_value elif (geo_value is None) or (geo_value == ''): geo_obj = None else: # Otherwise, a geometry or raster object is built using the field's # contents, and the model's corresponding attribute is set. geo_obj = self._klass(geo_value) setattr(instance, self._field.attname, geo_obj) return geo_obj def __set__(self, instance, value): """ This accessor sets the proxied geometry or raster with the corresponding class specified during initialization. To set geometries, values of None, HEXEWKB, or WKT may be used. To set rasters, JSON or dict values may be used. """ # The geographic type of the field. gtype = self._field.geom_type if gtype == 'RASTER' and (value is None or isinstance(value, six.string_types + (dict, self._klass))): # For raster fields, assure input is None or a string, dict, or # raster instance. pass elif isinstance(value, self._klass) and (str(value.geom_type).upper() == gtype or gtype == 'GEOMETRY'): # The geometry type must match that of the field -- unless the # general GeometryField is used. if value.srid is None: # Assigning the field SRID if the geometry has no SRID. value.srid = self._field.srid elif value is None or isinstance(value, six.string_types + (six.memoryview,)): # Set geometries with None, WKT, HEX, or WKB pass else: raise TypeError('Cannot set %s SpatialProxy (%s) with value of type: %s' % ( instance.__class__.__name__, gtype, type(value))) # Setting the objects dictionary with the value, and returning. instance.__dict__[self._field.attname] = value return value
5d02d6aac66b0019f9a9b4219b9a624a50ca3bd8dab8bee557283a866d075e26
from django.db.models import * # NOQA isort:skip from django.db.models import __all__ as models_all # isort:skip from django.contrib.gis.db.models.aggregates import * # NOQA from django.contrib.gis.db.models.aggregates import __all__ as aggregates_all from django.contrib.gis.db.models.fields import ( GeometryCollectionField, GeometryField, LineStringField, MultiLineStringField, MultiPointField, MultiPolygonField, PointField, PolygonField, RasterField, ) from django.contrib.gis.db.models.manager import GeoManager __all__ = models_all + aggregates_all __all__ += [ 'GeometryCollectionField', 'GeometryField', 'LineStringField', 'MultiLineStringField', 'MultiPointField', 'MultiPolygonField', 'PointField', 'PolygonField', 'RasterField', 'GeoManager', ]
4b210aebbf4f675aaa4f6214b3e8d55360f9279ec977b28da469147d4a9461af
import warnings from django.contrib.gis.db.models.query import GeoQuerySet from django.db.models.manager import Manager from django.utils.deprecation import RemovedInDjango20Warning class GeoManager(Manager.from_queryset(GeoQuerySet)): "Overrides Manager to return Geographic QuerySets." # This manager should be used for queries on related fields # so that geometry columns on Oracle and MySQL are selected # properly. use_for_related_fields = True # No need to bother users with the use_for_related_fields # deprecation for this manager which is itself deprecated. silence_use_for_related_fields_deprecation = True def __init__(self, *args, **kwargs): warnings.warn( "The GeoManager class is deprecated. Simply use a normal manager " "once you have replaced all calls to GeoQuerySet methods by annotations.", RemovedInDjango20Warning, stacklevel=2 ) super(GeoManager, self).__init__(*args, **kwargs)
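A minimal model sketch using the deprecated manager; instantiating GeoManager this way triggers the RemovedInDjango20Warning above when the model module is imported.

from django.contrib.gis.db import models

class City(models.Model):          # hypothetical model used in the sketches above
    name = models.CharField(max_length=50)
    point = models.PointField()
    objects = models.GeoManager()  # deprecated: use a plain manager plus annotations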
3a73bf586426e0dcb6ed157a7305a919bef52fe5fa7389cc180ffbfb84d32d51
from django.contrib.gis import forms, gdal from django.contrib.gis.db.models.lookups import ( RasterBandTransform, gis_lookups, ) from django.contrib.gis.db.models.proxy import SpatialProxy from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.geometry.backend import Geometry, GeometryException from django.core.exceptions import ImproperlyConfigured from django.db.models.expressions import Expression from django.db.models.fields import Field from django.utils import six from django.utils.translation import ugettext_lazy as _ # Local cache of the spatial_ref_sys table, which holds SRID data for each # spatial database alias. This cache exists so that the database isn't queried # for SRID info each time a distance query is constructed. _srid_cache = {} def get_srid_info(srid, connection): """ Returns the units, unit name, and spheroid WKT associated with the given SRID from the `spatial_ref_sys` (or equivalent) spatial database table for the given database connection. These results are cached. """ global _srid_cache try: # The SpatialRefSys model for the spatial backend. SpatialRefSys = connection.ops.spatial_ref_sys() except NotImplementedError: # No `spatial_ref_sys` table in spatial backend (e.g., MySQL). return None, None, None if connection.alias not in _srid_cache: # Initialize SRID dictionary for database if it doesn't exist. _srid_cache[connection.alias] = {} if srid not in _srid_cache[connection.alias]: # Use `SpatialRefSys` model to query for spatial reference info. sr = SpatialRefSys.objects.using(connection.alias).get(srid=srid) units, units_name = sr.units spheroid = SpatialRefSys.get_spheroid(sr.wkt) _srid_cache[connection.alias][srid] = (units, units_name, spheroid) return _srid_cache[connection.alias][srid] class GeoSelectFormatMixin(object): def select_format(self, compiler, sql, params): """ Returns the selection format string, depending on the requirements of the spatial backend. For example, Oracle and MySQL require custom selection formats in order to retrieve geometries in OGC WKT. For all other fields a simple '%s' format string is returned. """ connection = compiler.connection srid = compiler.query.get_context('transformed_srid') if srid: sel_fmt = '%s(%%s, %s)' % (connection.ops.transform, srid) else: sel_fmt = '%s' if connection.ops.select: # This allows operations to be done on fields in the SELECT, # overriding their values -- used by the Oracle and MySQL # spatial backends to get database values as WKT, and by the # `transform` method. sel_fmt = connection.ops.select % sel_fmt return sel_fmt % sql, params class BaseSpatialField(Field): """ The Base GIS Field. It's used as a base class for GeometryField and RasterField. Defines properties that are common to all GIS fields such as the characteristics of the spatial reference system of the field. """ description = _("The base GIS field.") empty_strings_allowed = False # Geodetic units. geodetic_units = ('decimal degree', 'degree') def __init__(self, verbose_name=None, srid=4326, spatial_index=True, **kwargs): """ The initialization function for base spatial fields. Takes the following as keyword arguments: srid: The spatial reference system identifier, an OGC standard. Defaults to 4326 (WGS84). spatial_index: Indicates whether to create a spatial index. Defaults to True. Set this instead of 'db_index' for geographic fields since index creation is different for geometry columns. """ # Setting the index flag with the value of the `spatial_index` keyword. 
self.spatial_index = spatial_index # Setting the SRID and getting the units. Unit information must be # easily available in the field instance for distance queries. self.srid = srid # Setting the verbose_name keyword argument with the positional # first parameter, so this works like normal fields. kwargs['verbose_name'] = verbose_name super(BaseSpatialField, self).__init__(**kwargs) def deconstruct(self): name, path, args, kwargs = super(BaseSpatialField, self).deconstruct() # Always include SRID for less fragility; include spatial index if it's # not the default value. kwargs['srid'] = self.srid if self.spatial_index is not True: kwargs['spatial_index'] = self.spatial_index return name, path, args, kwargs def db_type(self, connection): return connection.ops.geo_db_type(self) # The following functions are used to get the units, their name, and # the spheroid corresponding to the SRID of the BaseSpatialField. def _get_srid_info(self, connection): # Get attributes from `get_srid_info`. self._units, self._units_name, self._spheroid = get_srid_info(self.srid, connection) def spheroid(self, connection): if not hasattr(self, '_spheroid'): self._get_srid_info(connection) return self._spheroid def units(self, connection): if not hasattr(self, '_units'): self._get_srid_info(connection) return self._units def units_name(self, connection): if not hasattr(self, '_units_name'): self._get_srid_info(connection) return self._units_name def geodetic(self, connection): """ Returns true if this field's SRID corresponds with a coordinate system that uses non-projected units (e.g., latitude/longitude). """ units_name = self.units_name(connection) # Some backends like MySQL cannot determine units name. In that case, # test if srid is 4326 (WGS84), even if this is over-simplification. return units_name.lower() in self.geodetic_units if units_name else self.srid == 4326 def get_placeholder(self, value, compiler, connection): """ Returns the placeholder for the spatial column for the given value. """ return connection.ops.get_geom_placeholder(self, value, compiler) def get_srid(self, obj): """ Return the default SRID for the given geometry or raster, taking into account the SRID set for the field. For example, if the input geometry or raster doesn't have an SRID, then the SRID of the field will be returned. """ srid = obj.srid # SRID of given geometry. if srid is None or self.srid == -1 or (srid == -1 and self.srid != -1): return self.srid else: return srid def get_db_prep_save(self, value, connection): """ Prepare the value for saving in the database. """ if not value: return None else: return connection.ops.Adapter(self.get_prep_value(value)) def get_raster_prep_value(self, value, is_candidate): """ Return a GDALRaster if conversion is successful, otherwise return None. """ if isinstance(value, gdal.GDALRaster): return value elif is_candidate: try: return gdal.GDALRaster(value) except GDALException: pass elif isinstance(value, dict): try: return gdal.GDALRaster(value) except GDALException: raise ValueError("Couldn't create spatial object from lookup value '%s'." % value) def get_prep_value(self, value): """ Spatial lookup values are either a parameter that is (or may be converted to) a geometry or raster, or a sequence of lookup values that begins with a geometry or raster. This routine sets up the geometry or raster value properly and preserves any other lookup parameters. """ value = super(BaseSpatialField, self).get_prep_value(value) # For IsValid lookups, boolean values are allowed. 
if isinstance(value, (Expression, bool)): return value elif isinstance(value, (tuple, list)): obj = value[0] seq_value = True else: obj = value seq_value = False # When the input is not a geometry or raster, attempt to construct one # from the given string input. if isinstance(obj, Geometry): pass else: # Check if input is a candidate for conversion to raster or geometry. is_candidate = isinstance(obj, (bytes, six.string_types)) or hasattr(obj, '__geo_interface__') # Try to convert the input to raster. raster = self.get_raster_prep_value(obj, is_candidate) if raster: obj = raster elif is_candidate: try: obj = Geometry(obj) except (GeometryException, GDALException): raise ValueError("Couldn't create spatial object from lookup value '%s'." % obj) else: raise ValueError('Cannot use object with type %s for a spatial lookup parameter.' % type(obj).__name__) # Assigning the SRID value. obj.srid = self.get_srid(obj) if seq_value: lookup_val = [obj] lookup_val.extend(value[1:]) return tuple(lookup_val) else: return obj for klass in gis_lookups.values(): BaseSpatialField.register_lookup(klass) class GeometryField(GeoSelectFormatMixin, BaseSpatialField): """ The base Geometry field -- maps to the OpenGIS Specification Geometry type. """ description = _("The base Geometry field -- maps to the OpenGIS Specification Geometry type.") form_class = forms.GeometryField # The OpenGIS Geometry name. geom_type = 'GEOMETRY' def __init__(self, verbose_name=None, dim=2, geography=False, **kwargs): """ The initialization function for geometry fields. In addition to the parameters from BaseSpatialField, it takes the following as keyword arguments: dim: The number of dimensions for this geometry. Defaults to 2. extent: Customize the extent, in a 4-tuple of WGS 84 coordinates, for the geometry field entry in the `USER_SDO_GEOM_METADATA` table. Defaults to (-180.0, -90.0, 180.0, 90.0). tolerance: Define the tolerance, in meters, to use for the geometry field entry in the `USER_SDO_GEOM_METADATA` table. Defaults to 0.05. """ # Setting the dimension of the geometry field. self.dim = dim # Is this a geography rather than a geometry column? self.geography = geography # Oracle-specific private attributes for creating the entry in # `USER_SDO_GEOM_METADATA` self._extent = kwargs.pop('extent', (-180.0, -90.0, 180.0, 90.0)) self._tolerance = kwargs.pop('tolerance', 0.05) super(GeometryField, self).__init__(verbose_name=verbose_name, **kwargs) def deconstruct(self): name, path, args, kwargs = super(GeometryField, self).deconstruct() # Include kwargs if they're not the default values. if self.dim != 2: kwargs['dim'] = self.dim if self.geography is not False: kwargs['geography'] = self.geography return name, path, args, kwargs # ### Routines specific to GeometryField ### def get_distance(self, value, lookup_type, connection): """ Returns a distance number in units of the field. For example, if `D(km=1)` was passed in and the units of the field were in meters, then 1000 would be returned. """ return connection.ops.get_distance(self, value, lookup_type) def from_db_value(self, value, expression, connection, context): if value: if not isinstance(value, Geometry): value = Geometry(value) srid = value.srid if not srid and self.srid != -1: value.srid = self.srid return value # ### Routines overloaded from Field ### def contribute_to_class(self, cls, name, **kwargs): super(GeometryField, self).contribute_to_class(cls, name, **kwargs) # Setup for lazy-instantiated Geometry object. 
setattr(cls, self.attname, SpatialProxy(Geometry, self)) def formfield(self, **kwargs): defaults = {'form_class': self.form_class, 'geom_type': self.geom_type, 'srid': self.srid, } defaults.update(kwargs) if (self.dim > 2 and 'widget' not in kwargs and not getattr(defaults['form_class'].widget, 'supports_3d', False)): defaults['widget'] = forms.Textarea return super(GeometryField, self).formfield(**defaults) def _get_db_prep_lookup(self, lookup_type, value, connection): """ Prepare for the database lookup, and return any spatial parameters necessary for the query. This includes wrapping any geometry parameters with a backend-specific adapter and formatting any distance parameters into the correct units for the coordinate system of the field. Only used by the deprecated GeoQuerySet and to be RemovedInDjango20Warning. """ # Populating the parameters list, and wrapping the Geometry # with the Adapter of the spatial backend. if isinstance(value, (tuple, list)): params = [connection.ops.Adapter(value[0])] # Getting the distance parameter in the units of the field. params += self.get_distance(value[1:], lookup_type, connection) else: params = [connection.ops.Adapter(value)] return params # The OpenGIS Geometry Type Fields class PointField(GeometryField): geom_type = 'POINT' form_class = forms.PointField description = _("Point") class LineStringField(GeometryField): geom_type = 'LINESTRING' form_class = forms.LineStringField description = _("Line string") class PolygonField(GeometryField): geom_type = 'POLYGON' form_class = forms.PolygonField description = _("Polygon") class MultiPointField(GeometryField): geom_type = 'MULTIPOINT' form_class = forms.MultiPointField description = _("Multi-point") class MultiLineStringField(GeometryField): geom_type = 'MULTILINESTRING' form_class = forms.MultiLineStringField description = _("Multi-line string") class MultiPolygonField(GeometryField): geom_type = 'MULTIPOLYGON' form_class = forms.MultiPolygonField description = _("Multi polygon") class GeometryCollectionField(GeometryField): geom_type = 'GEOMETRYCOLLECTION' form_class = forms.GeometryCollectionField description = _("Geometry collection") class ExtentField(GeoSelectFormatMixin, Field): "Used as a return value from an extent aggregate" description = _("Extent Aggregate Field") def get_internal_type(self): return "ExtentField" class RasterField(BaseSpatialField): """ Raster field for GeoDjango -- evaluates into GDALRaster objects. """ description = _("Raster Field") geom_type = 'RASTER' geography = False def _check_connection(self, connection): # Make sure raster fields are used only on backends with raster support. if not connection.features.gis_enabled or not connection.features.supports_raster: raise ImproperlyConfigured('Raster fields require backends with raster support.') def db_type(self, connection): self._check_connection(connection) return super(RasterField, self).db_type(connection) def from_db_value(self, value, expression, connection, context): return connection.ops.parse_raster(value) def get_db_prep_value(self, value, connection, prepared=False): self._check_connection(connection) # Prepare raster for writing to database. if not prepared: value = connection.ops.deconstruct_raster(value) return super(RasterField, self).get_db_prep_value(value, connection, prepared) def contribute_to_class(self, cls, name, **kwargs): super(RasterField, self).contribute_to_class(cls, name, **kwargs) # Setup for lazy-instantiated Raster object. 
For large querysets, the # instantiation of all GDALRasters can potentially be expensive. This # delays the instantiation of the objects to the moment of evaluation # of the raster attribute. setattr(cls, self.attname, SpatialProxy(gdal.GDALRaster, self)) def get_transform(self, name): try: band_index = int(name) return type( 'SpecificRasterBandTransform', (RasterBandTransform, ), {'band_index': band_index} ) except ValueError: pass return super(RasterField, self).get_transform(name)
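A hedged sketch of declaring the spatial fields defined above; srid and spatial_index mirror the BaseSpatialField keyword arguments, and RasterField requires a backend with raster support (PostGIS).

from django.contrib.gis.db import models

class Terrain(models.Model):       # hypothetical model for illustration
    name = models.CharField(max_length=50)
    boundary = models.PolygonField(srid=3857)        # deconstruct() always keeps srid
    outline = models.MultiLineStringField(dim=3)     # 3D fields fall back to a Textarea form widget
    rast = models.RasterField(null=True)             # lazy GDALRaster via SpatialProxy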
b052be4165dd1d7c96dd3be294135eafe9fd5dfc07c5c3e8a9771f0b7b6043f9
from decimal import Decimal from django.contrib.gis.db.models.fields import GeometryField, RasterField from django.contrib.gis.db.models.sql import AreaField from django.contrib.gis.geometry.backend import Geometry from django.contrib.gis.measure import ( Area as AreaMeasure, Distance as DistanceMeasure, ) from django.core.exceptions import FieldError from django.db.models import BooleanField, FloatField, IntegerField, TextField from django.db.models.expressions import Func, Value from django.utils import six NUMERIC_TYPES = six.integer_types + (float, Decimal) class GeoFunc(Func): function = None output_field_class = None geom_param_pos = 0 def __init__(self, *expressions, **extra): if 'output_field' not in extra and self.output_field_class: extra['output_field'] = self.output_field_class() super(GeoFunc, self).__init__(*expressions, **extra) @property def name(self): return self.__class__.__name__ @property def srid(self): expr = self.source_expressions[self.geom_param_pos] if hasattr(expr, 'srid'): return expr.srid try: return expr.field.srid except (AttributeError, FieldError): return None def as_sql(self, compiler, connection): if self.function is None: self.function = connection.ops.spatial_function_name(self.name) if any(isinstance(field, RasterField) for field in self.get_source_fields()): raise TypeError("Geometry functions not supported for raster fields.") return super(GeoFunc, self).as_sql(compiler, connection) def resolve_expression(self, *args, **kwargs): res = super(GeoFunc, self).resolve_expression(*args, **kwargs) base_srid = res.srid if not base_srid: raise TypeError("Geometry functions can only operate on geometric content.") for pos, expr in enumerate(res.source_expressions[1:], start=1): if isinstance(expr, GeomValue) and expr.srid != base_srid: # Automatic SRID conversion so objects are comparable res.source_expressions[pos] = Transform(expr, base_srid).resolve_expression(*args, **kwargs) return res def _handle_param(self, value, param_name='', check_types=None): if not hasattr(value, 'resolve_expression'): if check_types and not isinstance(value, check_types): raise TypeError( "The %s parameter has the wrong type: should be %s." % ( param_name, str(check_types)) ) return value class GeomValue(Value): geography = False @property def srid(self): return self.value.srid def as_sql(self, compiler, connection): return '%s(%%s, %s)' % (connection.ops.from_text, self.srid), [connection.ops.Adapter(self.value)] def as_mysql(self, compiler, connection): return '%s(%%s)' % (connection.ops.from_text), [connection.ops.Adapter(self.value)] def as_postgresql(self, compiler, connection): if self.geography: self.value = connection.ops.Adapter(self.value, geography=self.geography) else: self.value = connection.ops.Adapter(self.value) return super(GeomValue, self).as_sql(compiler, connection) class GeoFuncWithGeoParam(GeoFunc): def __init__(self, expression, geom, *expressions, **extra): if not isinstance(geom, Geometry): raise TypeError("Please provide a geometry object.") if not hasattr(geom, 'srid') or not geom.srid: raise ValueError("Please provide a geometry attribute with a defined SRID.") super(GeoFuncWithGeoParam, self).__init__(expression, GeomValue(geom), *expressions, **extra) class SQLiteDecimalToFloatMixin(object): """ By default, Decimal values are converted to str by the SQLite backend, which is not acceptable by the GIS functions expecting numeric values. 
""" def as_sqlite(self, compiler, connection): for expr in self.get_source_expressions(): if hasattr(expr, 'value') and isinstance(expr.value, Decimal): expr.value = float(expr.value) return super(SQLiteDecimalToFloatMixin, self).as_sql(compiler, connection) class OracleToleranceMixin(object): tolerance = 0.05 def as_oracle(self, compiler, connection): tol = self.extra.get('tolerance', self.tolerance) self.template = "%%(function)s(%%(expressions)s, %s)" % tol return super(OracleToleranceMixin, self).as_sql(compiler, connection) class Area(OracleToleranceMixin, GeoFunc): output_field_class = AreaField arity = 1 def as_sql(self, compiler, connection): if connection.ops.geography: self.output_field.area_att = 'sq_m' else: # Getting the area units of the geographic field. source_fields = self.get_source_fields() if len(source_fields): source_field = source_fields[0] if source_field.geodetic(connection): # TODO: Do we want to support raw number areas for geodetic fields? raise NotImplementedError('Area on geodetic coordinate systems not supported.') units_name = source_field.units_name(connection) if units_name: self.output_field.area_att = AreaMeasure.unit_attname(units_name) return super(Area, self).as_sql(compiler, connection) def as_oracle(self, compiler, connection): self.output_field = AreaField('sq_m') # Oracle returns area in units of meters. return super(Area, self).as_oracle(compiler, connection) class AsGeoJSON(GeoFunc): output_field_class = TextField def __init__(self, expression, bbox=False, crs=False, precision=8, **extra): expressions = [expression] if precision is not None: expressions.append(self._handle_param(precision, 'precision', six.integer_types)) options = 0 if crs and bbox: options = 3 elif bbox: options = 1 elif crs: options = 2 if options: expressions.append(options) super(AsGeoJSON, self).__init__(*expressions, **extra) class AsGML(GeoFunc): geom_param_pos = 1 output_field_class = TextField def __init__(self, expression, version=2, precision=8, **extra): expressions = [version, expression] if precision is not None: expressions.append(self._handle_param(precision, 'precision', six.integer_types)) super(AsGML, self).__init__(*expressions, **extra) class AsKML(AsGML): def as_sqlite(self, compiler, connection): # No version parameter self.source_expressions.pop(0) return super(AsKML, self).as_sql(compiler, connection) class AsSVG(GeoFunc): output_field_class = TextField def __init__(self, expression, relative=False, precision=8, **extra): relative = relative if hasattr(relative, 'resolve_expression') else int(relative) expressions = [ expression, relative, self._handle_param(precision, 'precision', six.integer_types), ] super(AsSVG, self).__init__(*expressions, **extra) class BoundingCircle(GeoFunc): def __init__(self, expression, num_seg=48, **extra): super(BoundingCircle, self).__init__(*[expression, num_seg], **extra) class Centroid(OracleToleranceMixin, GeoFunc): arity = 1 class Difference(OracleToleranceMixin, GeoFuncWithGeoParam): arity = 2 class DistanceResultMixin(object): def source_is_geography(self): return self.get_source_fields()[0].geography and self.srid == 4326 def convert_value(self, value, expression, connection, context): if value is None: return None geo_field = GeometryField(srid=self.srid) # Fake field to get SRID info if geo_field.geodetic(connection): dist_att = 'm' else: units = geo_field.units_name(connection) if units: dist_att = DistanceMeasure.unit_attname(units) else: dist_att = None if dist_att: return DistanceMeasure(**{dist_att: value}) 
return value class Distance(DistanceResultMixin, OracleToleranceMixin, GeoFuncWithGeoParam): output_field_class = FloatField spheroid = None def __init__(self, expr1, expr2, spheroid=None, **extra): expressions = [expr1, expr2] if spheroid is not None: self.spheroid = spheroid expressions += (self._handle_param(spheroid, 'spheroid', bool),) super(Distance, self).__init__(*expressions, **extra) def as_postgresql(self, compiler, connection): geo_field = GeometryField(srid=self.srid) # Fake field to get SRID info if self.source_is_geography(): # Set parameters as geography if base field is geography for pos, expr in enumerate( self.source_expressions[self.geom_param_pos + 1:], start=self.geom_param_pos + 1): if isinstance(expr, GeomValue): expr.geography = True elif geo_field.geodetic(connection): # Geometry fields with geodetic (lon/lat) coordinates need special distance functions if self.spheroid: self.function = 'ST_Distance_Spheroid' # More accurate, resource intensive # Replace boolean param by the real spheroid of the base field self.source_expressions[2] = Value(geo_field._spheroid) else: self.function = 'ST_Distance_Sphere' return super(Distance, self).as_sql(compiler, connection) def as_oracle(self, compiler, connection): if self.spheroid: self.source_expressions.pop(2) return super(Distance, self).as_oracle(compiler, connection) class Envelope(GeoFunc): arity = 1 class ForceRHR(GeoFunc): arity = 1 class GeoHash(GeoFunc): output_field_class = TextField def __init__(self, expression, precision=None, **extra): expressions = [expression] if precision is not None: expressions.append(self._handle_param(precision, 'precision', six.integer_types)) super(GeoHash, self).__init__(*expressions, **extra) class Intersection(OracleToleranceMixin, GeoFuncWithGeoParam): arity = 2 class IsValid(GeoFunc): output_field_class = BooleanField class Length(DistanceResultMixin, OracleToleranceMixin, GeoFunc): output_field_class = FloatField def __init__(self, expr1, spheroid=True, **extra): self.spheroid = spheroid super(Length, self).__init__(expr1, **extra) def as_sql(self, compiler, connection): geo_field = GeometryField(srid=self.srid) # Fake field to get SRID info if geo_field.geodetic(connection) and not connection.features.supports_length_geodetic: raise NotImplementedError("This backend doesn't support Length on geodetic fields") return super(Length, self).as_sql(compiler, connection) def as_postgresql(self, compiler, connection): geo_field = GeometryField(srid=self.srid) # Fake field to get SRID info if self.source_is_geography(): self.source_expressions.append(Value(self.spheroid)) elif geo_field.geodetic(connection): # Geometry fields with geodetic (lon/lat) coordinates need length_spheroid self.function = 'ST_Length_Spheroid' self.source_expressions.append(Value(geo_field._spheroid)) else: dim = min(f.dim for f in self.get_source_fields() if f) if dim > 2: self.function = connection.ops.length3d return super(Length, self).as_sql(compiler, connection) def as_sqlite(self, compiler, connection): geo_field = GeometryField(srid=self.srid) if geo_field.geodetic(connection): if self.spheroid: self.function = 'GeodesicLength' else: self.function = 'GreatCircleLength' return super(Length, self).as_sql(compiler, connection) class MakeValid(GeoFunc): pass class MemSize(GeoFunc): output_field_class = IntegerField arity = 1 class NumGeometries(GeoFunc): output_field_class = IntegerField arity = 1 class NumPoints(GeoFunc): output_field_class = IntegerField arity = 1 def as_sqlite(self, compiler, connection): 
if self.source_expressions[self.geom_param_pos].output_field.geom_type != 'LINESTRING': raise TypeError("SpatiaLite NumPoints can only operate on LineString content") return super(NumPoints, self).as_sql(compiler, connection) class Perimeter(DistanceResultMixin, OracleToleranceMixin, GeoFunc): output_field_class = FloatField arity = 1 def as_postgresql(self, compiler, connection): geo_field = GeometryField(srid=self.srid) # Fake field to get SRID info if geo_field.geodetic(connection) and not self.source_is_geography(): raise NotImplementedError("ST_Perimeter cannot use a non-projected non-geography field.") dim = min(f.dim for f in self.get_source_fields()) if dim > 2: self.function = connection.ops.perimeter3d return super(Perimeter, self).as_sql(compiler, connection) def as_sqlite(self, compiler, connection): geo_field = GeometryField(srid=self.srid) # Fake field to get SRID info if geo_field.geodetic(connection): raise NotImplementedError("Perimeter cannot use a non-projected field.") return super(Perimeter, self).as_sql(compiler, connection) class PointOnSurface(OracleToleranceMixin, GeoFunc): arity = 1 class Reverse(GeoFunc): arity = 1 class Scale(SQLiteDecimalToFloatMixin, GeoFunc): def __init__(self, expression, x, y, z=0.0, **extra): expressions = [ expression, self._handle_param(x, 'x', NUMERIC_TYPES), self._handle_param(y, 'y', NUMERIC_TYPES), ] if z != 0.0: expressions.append(self._handle_param(z, 'z', NUMERIC_TYPES)) super(Scale, self).__init__(*expressions, **extra) class SnapToGrid(SQLiteDecimalToFloatMixin, GeoFunc): def __init__(self, expression, *args, **extra): nargs = len(args) expressions = [expression] if nargs in (1, 2): expressions.extend( [self._handle_param(arg, '', NUMERIC_TYPES) for arg in args] ) elif nargs == 4: # Reverse origin and size param ordering expressions.extend( [self._handle_param(arg, '', NUMERIC_TYPES) for arg in args[2:]] ) expressions.extend( [self._handle_param(arg, '', NUMERIC_TYPES) for arg in args[0:2]] ) else: raise ValueError('Must provide 1, 2, or 4 arguments to `SnapToGrid`.') super(SnapToGrid, self).__init__(*expressions, **extra) class SymDifference(OracleToleranceMixin, GeoFuncWithGeoParam): arity = 2 class Transform(GeoFunc): def __init__(self, expression, srid, **extra): expressions = [ expression, self._handle_param(srid, 'srid', six.integer_types), ] super(Transform, self).__init__(*expressions, **extra) @property def srid(self): # Make srid the resulting srid of the transformation return self.source_expressions[self.geom_param_pos + 1].value def convert_value(self, value, expression, connection, context): value = super(Transform, self).convert_value(value, expression, connection, context) if not connection.ops.postgis and not value.srid: # Some backends do not set the srid on the returning geometry value.srid = self.srid return value class Translate(Scale): def as_sqlite(self, compiler, connection): if len(self.source_expressions) < 4: # Always provide the z parameter for ST_Translate self.source_expressions.append(Value(0)) return super(Translate, self).as_sqlite(compiler, connection) class Union(OracleToleranceMixin, GeoFuncWithGeoParam): arity = 2
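# Editor's note: an illustrative ORM usage sketch, not part of the original
# module. ``City`` and its ``point`` field are hypothetical; a GeoDjango
# backend that supports these functions is assumed.
from django.contrib.gis.geos import Point

pnt = Point(-95.34, 29.72, srid=4326)

qs = (
    City.objects
    .annotate(geojson=AsGeoJSON('point'))        # TextField output
    .annotate(dist=Distance('point', pnt))       # converted to a Distance measure
    .annotate(mercator=Transform('point', 3857))  # reproject to web mercator
)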
24e030b8ac669a7968c988527cc770c5b611cf8498c6dab9cd5e474d68fc26f5
from __future__ import unicode_literals import re from django.core.exceptions import FieldDoesNotExist from django.db.models.constants import LOOKUP_SEP from django.db.models.expressions import Col, Expression from django.db.models.lookups import BuiltinLookup, Lookup, Transform from django.db.models.sql.query import Query from django.utils import six gis_lookups = {} class RasterBandTransform(Transform): def as_sql(self, compiler, connection): return compiler.compile(self.lhs) class GISLookup(Lookup): sql_template = None transform_func = None distance = False band_rhs = None band_lhs = None def __init__(self, *args, **kwargs): super(GISLookup, self).__init__(*args, **kwargs) self.template_params = {} @classmethod def _check_geo_field(cls, opts, lookup): """ Utility for checking the given lookup with the given model options. The lookup is a string either specifying the geographic field, e.g. 'point, 'the_geom', or a related lookup on a geographic field like 'address__point'. If a BaseSpatialField exists according to the given lookup on the model options, it will be returned. Otherwise return None. """ from django.contrib.gis.db.models.fields import BaseSpatialField # This takes into account the situation where the lookup is a # lookup to a related geographic field, e.g., 'address__point'. field_list = lookup.split(LOOKUP_SEP) # Reversing so list operates like a queue of related lookups, # and popping the top lookup. field_list.reverse() fld_name = field_list.pop() try: geo_fld = opts.get_field(fld_name) # If the field list is still around, then it means that the # lookup was for a geometry field across a relationship -- # thus we keep on getting the related model options and the # model field associated with the next field in the list # until there's no more left. while len(field_list): opts = geo_fld.remote_field.model._meta geo_fld = opts.get_field(field_list.pop()) except (FieldDoesNotExist, AttributeError): return False # Finally, make sure we got a Geographic field and return. if isinstance(geo_fld, BaseSpatialField): return geo_fld else: return False def process_band_indices(self, only_lhs=False): """ Extract the lhs band index from the band transform class and the rhs band index from the input tuple. """ # PostGIS band indices are 1-based, so the band index needs to be # increased to be consistent with the GDALRaster band indices. if only_lhs: self.band_rhs = 1 self.band_lhs = self.lhs.band_index + 1 return if isinstance(self.lhs, RasterBandTransform): self.band_lhs = self.lhs.band_index + 1 else: self.band_lhs = 1 self.band_rhs = self.rhs[1] if len(self.rhs) == 1: self.rhs = self.rhs[0] else: self.rhs = (self.rhs[0], ) + self.rhs[2:] def get_db_prep_lookup(self, value, connection): # get_db_prep_lookup is called by process_rhs from super class if isinstance(value, (tuple, list)): # First param is assumed to be the geometric object params = [connection.ops.Adapter(value[0])] + list(value)[1:] else: params = [connection.ops.Adapter(value)] return ('%s', params) def process_rhs(self, compiler, connection): if isinstance(self.rhs, Query): # If rhs is some Query, don't touch it. return super(GISLookup, self).process_rhs(compiler, connection) geom = self.rhs if isinstance(self.rhs, Col): # Make sure the F Expression destination field exists, and # set an `srid` attribute with the same as that of the # destination. 
geo_fld = self.rhs.output_field if not hasattr(geo_fld, 'srid'): raise ValueError('No geographic field found in expression.') self.rhs.srid = geo_fld.srid elif isinstance(self.rhs, Expression): raise ValueError('Complex expressions not supported for spatial fields.') elif isinstance(self.rhs, (list, tuple)): geom = self.rhs[0] # Check if a band index was passed in the query argument. if ((len(self.rhs) == 2 and not self.lookup_name == 'relate') or (len(self.rhs) == 3 and self.lookup_name == 'relate')): self.process_band_indices() elif len(self.rhs) > 2: raise ValueError('Tuple too long for lookup %s.' % self.lookup_name) elif isinstance(self.lhs, RasterBandTransform): self.process_band_indices(only_lhs=True) rhs, rhs_params = super(GISLookup, self).process_rhs(compiler, connection) rhs = connection.ops.get_geom_placeholder(self.lhs.output_field, geom, compiler) return rhs, rhs_params def get_rhs_op(self, connection, rhs): # Unlike BuiltinLookup, the GIS get_rhs_op() implementation should return # an object (SpatialOperator) with an as_sql() method to allow for more # complex computations (where the lhs part can be mixed in). return connection.ops.gis_operators[self.lookup_name] def as_sql(self, compiler, connection): lhs_sql, sql_params = self.process_lhs(compiler, connection) rhs_sql, rhs_params = self.process_rhs(compiler, connection) sql_params.extend(rhs_params) template_params = {'lhs': lhs_sql, 'rhs': rhs_sql, 'value': '%s'} template_params.update(self.template_params) rhs_op = self.get_rhs_op(connection, rhs_sql) return rhs_op.as_sql(connection, self, template_params, sql_params) # ------------------ # Geometry operators # ------------------ class OverlapsLeftLookup(GISLookup): """ The overlaps_left operator returns true if A's bounding box overlaps or is to the left of B's bounding box. """ lookup_name = 'overlaps_left' gis_lookups['overlaps_left'] = OverlapsLeftLookup class OverlapsRightLookup(GISLookup): """ The 'overlaps_right' operator returns true if A's bounding box overlaps or is to the right of B's bounding box. """ lookup_name = 'overlaps_right' gis_lookups['overlaps_right'] = OverlapsRightLookup class OverlapsBelowLookup(GISLookup): """ The 'overlaps_below' operator returns true if A's bounding box overlaps or is below B's bounding box. """ lookup_name = 'overlaps_below' gis_lookups['overlaps_below'] = OverlapsBelowLookup class OverlapsAboveLookup(GISLookup): """ The 'overlaps_above' operator returns true if A's bounding box overlaps or is above B's bounding box. """ lookup_name = 'overlaps_above' gis_lookups['overlaps_above'] = OverlapsAboveLookup class LeftLookup(GISLookup): """ The 'left' operator returns true if A's bounding box is strictly to the left of B's bounding box. """ lookup_name = 'left' gis_lookups['left'] = LeftLookup class RightLookup(GISLookup): """ The 'right' operator returns true if A's bounding box is strictly to the right of B's bounding box. """ lookup_name = 'right' gis_lookups['right'] = RightLookup class StrictlyBelowLookup(GISLookup): """ The 'strictly_below' operator returns true if A's bounding box is strictly below B's bounding box. """ lookup_name = 'strictly_below' gis_lookups['strictly_below'] = StrictlyBelowLookup class StrictlyAboveLookup(GISLookup): """ The 'strictly_above' operator returns true if A's bounding box is strictly above B's bounding box. """ lookup_name = 'strictly_above' gis_lookups['strictly_above'] = StrictlyAboveLookup class SameAsLookup(GISLookup): """ The "~=" operator is the "same as" operator. 
It tests actual geometric equality of two features. So if A and B are the same feature, vertex-by-vertex, the operator returns true. """ lookup_name = 'same_as' gis_lookups['same_as'] = SameAsLookup class ExactLookup(SameAsLookup): # Alias of same_as lookup_name = 'exact' gis_lookups['exact'] = ExactLookup class BBContainsLookup(GISLookup): """ The 'bbcontains' operator returns true if A's bounding box completely contains by B's bounding box. """ lookup_name = 'bbcontains' gis_lookups['bbcontains'] = BBContainsLookup class BBOverlapsLookup(GISLookup): """ The 'bboverlaps' operator returns true if A's bounding box overlaps B's bounding box. """ lookup_name = 'bboverlaps' gis_lookups['bboverlaps'] = BBOverlapsLookup class ContainedLookup(GISLookup): """ The 'contained' operator returns true if A's bounding box is completely contained by B's bounding box. """ lookup_name = 'contained' gis_lookups['contained'] = ContainedLookup # ------------------ # Geometry functions # ------------------ class ContainsLookup(GISLookup): lookup_name = 'contains' gis_lookups['contains'] = ContainsLookup class ContainsProperlyLookup(GISLookup): lookup_name = 'contains_properly' gis_lookups['contains_properly'] = ContainsProperlyLookup class CoveredByLookup(GISLookup): lookup_name = 'coveredby' gis_lookups['coveredby'] = CoveredByLookup class CoversLookup(GISLookup): lookup_name = 'covers' gis_lookups['covers'] = CoversLookup class CrossesLookup(GISLookup): lookup_name = 'crosses' gis_lookups['crosses'] = CrossesLookup class DisjointLookup(GISLookup): lookup_name = 'disjoint' gis_lookups['disjoint'] = DisjointLookup class EqualsLookup(GISLookup): lookup_name = 'equals' gis_lookups['equals'] = EqualsLookup class IntersectsLookup(GISLookup): lookup_name = 'intersects' gis_lookups['intersects'] = IntersectsLookup class IsValidLookup(BuiltinLookup): lookup_name = 'isvalid' def as_sql(self, compiler, connection): if self.lhs.field.geom_type == 'RASTER': raise ValueError('The isvalid lookup is only available on geometry fields.') gis_op = connection.ops.gis_operators[self.lookup_name] sql, params = self.process_lhs(compiler, connection) sql = '%(func)s(%(lhs)s)' % {'func': gis_op.func, 'lhs': sql} if not self.rhs: sql = 'NOT ' + sql return sql, params gis_lookups['isvalid'] = IsValidLookup class OverlapsLookup(GISLookup): lookup_name = 'overlaps' gis_lookups['overlaps'] = OverlapsLookup class RelateLookup(GISLookup): lookup_name = 'relate' sql_template = '%(func)s(%(lhs)s, %(rhs)s, %%s)' pattern_regex = re.compile(r'^[012TF\*]{9}$') def get_db_prep_lookup(self, value, connection): if len(value) != 2: raise ValueError('relate must be passed a two-tuple') # Check the pattern argument backend_op = connection.ops.gis_operators[self.lookup_name] if hasattr(backend_op, 'check_relate_argument'): backend_op.check_relate_argument(value[1]) else: pattern = value[1] if not isinstance(pattern, six.string_types) or not self.pattern_regex.match(pattern): raise ValueError('Invalid intersection matrix pattern "%s".' 
% pattern) return super(RelateLookup, self).get_db_prep_lookup(value, connection) gis_lookups['relate'] = RelateLookup class TouchesLookup(GISLookup): lookup_name = 'touches' gis_lookups['touches'] = TouchesLookup class WithinLookup(GISLookup): lookup_name = 'within' gis_lookups['within'] = WithinLookup class DistanceLookupBase(GISLookup): distance = True sql_template = '%(func)s(%(lhs)s, %(rhs)s) %(op)s %(value)s' def process_rhs(self, compiler, connection): if not isinstance(self.rhs, (tuple, list)) or not 2 <= len(self.rhs) <= 4: raise ValueError("2, 3, or 4-element tuple required for '%s' lookup." % self.lookup_name) elif len(self.rhs) == 4 and not self.rhs[3] == 'spheroid': raise ValueError("For 4-element tuples the last argument must be the 'spheroid' directive.") # Check if the second parameter is a band index. if len(self.rhs) > 2 and not self.rhs[2] == 'spheroid': self.process_band_indices() params = [connection.ops.Adapter(self.rhs[0])] # Getting the distance parameter in the units of the field. dist_param = self.rhs[1] if hasattr(dist_param, 'resolve_expression'): dist_param = dist_param.resolve_expression(compiler.query) sql, expr_params = compiler.compile(dist_param) self.template_params['value'] = sql params.extend(expr_params) else: params += connection.ops.get_distance( self.lhs.output_field, (dist_param,) + self.rhs[2:], self.lookup_name, handle_spheroid=False ) rhs = connection.ops.get_geom_placeholder(self.lhs.output_field, params[0], compiler) return (rhs, params) class DWithinLookup(DistanceLookupBase): lookup_name = 'dwithin' sql_template = '%(func)s(%(lhs)s, %(rhs)s, %%s)' gis_lookups['dwithin'] = DWithinLookup class DistanceGTLookup(DistanceLookupBase): lookup_name = 'distance_gt' gis_lookups['distance_gt'] = DistanceGTLookup class DistanceGTELookup(DistanceLookupBase): lookup_name = 'distance_gte' gis_lookups['distance_gte'] = DistanceGTELookup class DistanceLTLookup(DistanceLookupBase): lookup_name = 'distance_lt' gis_lookups['distance_lt'] = DistanceLTLookup class DistanceLTELookup(DistanceLookupBase): lookup_name = 'distance_lte' gis_lookups['distance_lte'] = DistanceLTELookup
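# Editor's note: a short sketch of the lookups registered above as they are
# written in queryset filters; not part of the original module. ``Zipcode``
# and its ``poly`` field are hypothetical.
from django.contrib.gis.geos import Point
from django.contrib.gis.measure import D

pnt = Point(-104.99, 39.74, srid=4326)

# Distance lookups take a (geometry, distance[, 'spheroid']) tuple
# (see DistanceLookupBase.process_rhs above).
Zipcode.objects.filter(poly__distance_lte=(pnt, D(km=10)))

# The relate lookup takes (geometry, pattern); the pattern is the 9-character
# DE-9IM intersection matrix validated by RelateLookup.pattern_regex.
Zipcode.objects.filter(poly__relate=(pnt, 'T*T***FF*'))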
62f2cbb21951826b4f5302e160adf1f542de36e791e6541a16120ea4f60d5d9f
""" A collection of utility routines and classes used by the spatial backends. """ class SpatialOperator(object): """ Class encapsulating the behavior specific to a GIS operation (used by lookups). """ sql_template = None def __init__(self, op=None, func=None): self.op = op self.func = func @property def default_template(self): if self.func: return '%(func)s(%(lhs)s, %(rhs)s)' else: return '%(lhs)s %(op)s %(rhs)s' def as_sql(self, connection, lookup, template_params, sql_params): sql_template = self.sql_template or lookup.sql_template or self.default_template template_params.update({'op': self.op, 'func': self.func}) return sql_template % template_params, sql_params
0c46b6cadddf08a854e9522671be90358a98ebec333876be886ad845bc3d808a
from django.contrib.gis.db.models.sql.conversion import ( AreaField, DistanceField, GeomField, GMLField, ) __all__ = [ 'AreaField', 'DistanceField', 'GeomField', 'GMLField' ]
806909aca176203ac26fd3bb42f702599dddb0b6017b5a6082ae3e6292ba93e0
""" This module holds simple classes to convert geospatial values from the database. """ from __future__ import unicode_literals from decimal import Decimal from django.contrib.gis.db.models.fields import GeoSelectFormatMixin from django.contrib.gis.geometry.backend import Geometry from django.contrib.gis.measure import Area, Distance class BaseField(object): empty_strings_allowed = True def get_db_converters(self, connection): return [self.from_db_value] def select_format(self, compiler, sql, params): return sql, params class AreaField(BaseField): "Wrapper for Area values." def __init__(self, area_att=None): self.area_att = area_att def from_db_value(self, value, expression, connection, context): if connection.features.interprets_empty_strings_as_nulls and value == '': value = None # If the database returns a Decimal, convert it to a float as expected # by the Python geometric objects. if isinstance(value, Decimal): value = float(value) # If the units are known, convert value into area measure. if value is not None and self.area_att: value = Area(**{self.area_att: value}) return value def get_internal_type(self): return 'AreaField' class DistanceField(BaseField): "Wrapper for Distance values." def __init__(self, distance_att): self.distance_att = distance_att def from_db_value(self, value, expression, connection, context): if value is not None: value = Distance(**{self.distance_att: value}) return value def get_internal_type(self): return 'DistanceField' class GeomField(GeoSelectFormatMixin, BaseField): """ Wrapper for Geometry values. It is a lightweight alternative to using GeometryField (which requires an SQL query upon instantiation). """ # Hacky marker for get_db_converters() geom_type = None def from_db_value(self, value, expression, connection, context): if value is not None: value = Geometry(value) return value def get_internal_type(self): return 'GeometryField' class GMLField(BaseField): """ Wrapper for GML to be used by Oracle to ensure Database.LOB conversion. """ def get_internal_type(self): return 'GMLField' def from_db_value(self, value, expression, connection, context): return value
15d0b364682ed53e13532a9f395f90b84736e51127ecba291c496efe0fce7b4a
from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures from django.db.backends.oracle.features import \ DatabaseFeatures as OracleDatabaseFeatures class DatabaseFeatures(BaseSpatialFeatures, OracleDatabaseFeatures): supports_add_srs_entry = False supports_geometry_field_introspection = False supports_geometry_field_unique_index = False supports_perimeter_geodetic = True
adeb2cdcef49a835322c12a2138408ed728454b66820efb07f1b229d3c171cc4
import sys import cx_Oracle from django.db.backends.oracle.introspection import DatabaseIntrospection from django.utils import six class OracleIntrospection(DatabaseIntrospection): # Associating any OBJECTVAR instances with GeometryField. Of course, # this won't work right on Oracle objects that aren't MDSYS.SDO_GEOMETRY, # but it is the only object type supported within Django anyways. data_types_reverse = DatabaseIntrospection.data_types_reverse.copy() data_types_reverse[cx_Oracle.OBJECT] = 'GeometryField' def get_geometry_type(self, table_name, geo_col): cursor = self.connection.cursor() try: # Querying USER_SDO_GEOM_METADATA to get the SRID and dimension information. try: cursor.execute( 'SELECT "DIMINFO", "SRID" FROM "USER_SDO_GEOM_METADATA" ' 'WHERE "TABLE_NAME"=%s AND "COLUMN_NAME"=%s', (table_name.upper(), geo_col.upper()) ) row = cursor.fetchone() except Exception as msg: new_msg = ( 'Could not find entry in USER_SDO_GEOM_METADATA ' 'corresponding to "%s"."%s"\n' 'Error message: %s.') % (table_name, geo_col, msg) six.reraise(Exception, Exception(new_msg), sys.exc_info()[2]) # TODO: Research way to find a more specific geometry field type for # the column's contents. field_type = 'GeometryField' # Getting the field parameters. field_params = {} dim, srid = row if srid != 4326: field_params['srid'] = srid # Length of object array ( SDO_DIM_ARRAY ) is number of dimensions. dim = len(dim) if dim != 2: field_params['dim'] = dim finally: cursor.close() return field_type, field_params
3928b7692c66cfb11a2ac95ad90a158e99d35a31c7280a4397bb2e620f4370a1
""" The GeometryColumns and SpatialRefSys models for the Oracle spatial backend. It should be noted that Oracle Spatial does not have database tables named according to the OGC standard, so the closest analogs are used. For example, the `USER_SDO_GEOM_METADATA` is used for the GeometryColumns model and the `SDO_COORD_REF_SYS` is used for the SpatialRefSys model. """ from django.contrib.gis.db import models from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class OracleGeometryColumns(models.Model): "Maps to the Oracle USER_SDO_GEOM_METADATA table." table_name = models.CharField(max_length=32) column_name = models.CharField(max_length=1024) srid = models.IntegerField(primary_key=True) # TODO: Add support for `diminfo` column (type MDSYS.SDO_DIM_ARRAY). class Meta: app_label = 'gis' db_table = 'USER_SDO_GEOM_METADATA' managed = False @classmethod def table_name_col(cls): """ Returns the name of the metadata column used to store the feature table name. """ return 'table_name' @classmethod def geom_col_name(cls): """ Returns the name of the metadata column used to store the feature geometry column. """ return 'column_name' def __str__(self): return '%s - %s (SRID: %s)' % (self.table_name, self.column_name, self.srid) class OracleSpatialRefSys(models.Model, SpatialRefSysMixin): "Maps to the Oracle MDSYS.CS_SRS table." cs_name = models.CharField(max_length=68) srid = models.IntegerField(primary_key=True) auth_srid = models.IntegerField() auth_name = models.CharField(max_length=256) wktext = models.CharField(max_length=2046) # Optional geometry representing the bounds of this coordinate # system. By default, all are NULL in the table. cs_bounds = models.PolygonField(null=True) objects = models.GeoManager() class Meta: app_label = 'gis' db_table = 'CS_SRS' managed = False @property def wkt(self): return self.wktext
139273aa0ff470a336078cd0e2e5aacad06c14f913223658da89f79aea0b709e
from django.db.backends.oracle.base import \ DatabaseWrapper as OracleDatabaseWrapper from .features import DatabaseFeatures from .introspection import OracleIntrospection from .operations import OracleOperations from .schema import OracleGISSchemaEditor class DatabaseWrapper(OracleDatabaseWrapper): SchemaEditorClass = OracleGISSchemaEditor # Classes instantiated in __init__(). features_class = DatabaseFeatures introspection_class = OracleIntrospection ops_class = OracleOperations
c0f7d2205eb7a738e445d37dfcad27be72282f7f04e04bbb685cd38d8b33f5aa
""" This module contains the spatial lookup types, and the `get_geo_where_clause` routine for Oracle Spatial. Please note that WKT support is broken on the XE version, and thus this backend will not work on such platforms. Specifically, XE lacks support for an internal JVM, and Java libraries are required to use the WKT constructors. """ import re from django.contrib.gis.db.backends.base.operations import \ BaseSpatialOperations from django.contrib.gis.db.backends.oracle.adapter import OracleSpatialAdapter from django.contrib.gis.db.backends.utils import SpatialOperator from django.contrib.gis.db.models import aggregates from django.contrib.gis.geometry.backend import Geometry from django.contrib.gis.measure import Distance from django.db.backends.oracle.base import Database from django.db.backends.oracle.operations import DatabaseOperations from django.utils import six DEFAULT_TOLERANCE = '0.05' class SDOOperator(SpatialOperator): sql_template = "%(func)s(%(lhs)s, %(rhs)s) = 'TRUE'" class SDODistance(SpatialOperator): sql_template = "SDO_GEOM.SDO_DISTANCE(%%(lhs)s, %%(rhs)s, %s) %%(op)s %%(value)s" % DEFAULT_TOLERANCE class SDODWithin(SpatialOperator): sql_template = "SDO_WITHIN_DISTANCE(%(lhs)s, %(rhs)s, %%s) = 'TRUE'" class SDODisjoint(SpatialOperator): sql_template = "SDO_GEOM.RELATE(%%(lhs)s, 'DISJOINT', %%(rhs)s, %s) = 'DISJOINT'" % DEFAULT_TOLERANCE class SDORelate(SpatialOperator): sql_template = "SDO_RELATE(%(lhs)s, %(rhs)s, 'mask=%(mask)s') = 'TRUE'" def check_relate_argument(self, arg): masks = 'TOUCH|OVERLAPBDYDISJOINT|OVERLAPBDYINTERSECT|EQUAL|INSIDE|COVEREDBY|CONTAINS|COVERS|ANYINTERACT|ON' mask_regex = re.compile(r'^(%s)(\+(%s))*$' % (masks, masks), re.I) if not isinstance(arg, six.string_types) or not mask_regex.match(arg): raise ValueError('Invalid SDO_RELATE mask: "%s"' % arg) def as_sql(self, connection, lookup, template_params, sql_params): template_params['mask'] = sql_params.pop() return super(SDORelate, self).as_sql(connection, lookup, template_params, sql_params) class OracleOperations(BaseSpatialOperations, DatabaseOperations): name = 'oracle' oracle = True disallowed_aggregates = (aggregates.Collect, aggregates.Extent3D, aggregates.MakeLine) Adapter = OracleSpatialAdapter area = 'SDO_GEOM.SDO_AREA' gml = 'SDO_UTIL.TO_GMLGEOMETRY' centroid = 'SDO_GEOM.SDO_CENTROID' difference = 'SDO_GEOM.SDO_DIFFERENCE' distance = 'SDO_GEOM.SDO_DISTANCE' extent = 'SDO_AGGR_MBR' intersection = 'SDO_GEOM.SDO_INTERSECTION' length = 'SDO_GEOM.SDO_LENGTH' num_points = 'SDO_UTIL.GETNUMVERTICES' perimeter = length point_on_surface = 'SDO_GEOM.SDO_POINTONSURFACE' reverse = 'SDO_UTIL.REVERSE_LINESTRING' sym_difference = 'SDO_GEOM.SDO_XOR' transform = 'SDO_CS.TRANSFORM' union = 'SDO_GEOM.SDO_UNION' unionagg = 'SDO_AGGR_UNION' from_text = 'SDO_GEOMETRY' function_names = { 'Area': 'SDO_GEOM.SDO_AREA', 'Centroid': 'SDO_GEOM.SDO_CENTROID', 'Difference': 'SDO_GEOM.SDO_DIFFERENCE', 'Distance': 'SDO_GEOM.SDO_DISTANCE', 'Intersection': 'SDO_GEOM.SDO_INTERSECTION', 'Length': 'SDO_GEOM.SDO_LENGTH', 'NumGeometries': 'SDO_UTIL.GETNUMELEM', 'NumPoints': 'SDO_UTIL.GETNUMVERTICES', 'Perimeter': 'SDO_GEOM.SDO_LENGTH', 'PointOnSurface': 'SDO_GEOM.SDO_POINTONSURFACE', 'Reverse': 'SDO_UTIL.REVERSE_LINESTRING', 'SymDifference': 'SDO_GEOM.SDO_XOR', 'Transform': 'SDO_CS.TRANSFORM', 'Union': 'SDO_GEOM.SDO_UNION', } # We want to get SDO Geometries as WKT because it is much easier to # instantiate GEOS proxies from WKT than SDO_GEOMETRY(...) strings. 
# However, this adversely affects performance (i.e., Java is called # to convert to WKT on every query). If someone wishes to write a # SDO_GEOMETRY(...) parser in Python, let me know =) select = 'SDO_UTIL.TO_WKTGEOMETRY(%s)' gis_operators = { 'contains': SDOOperator(func='SDO_CONTAINS'), 'coveredby': SDOOperator(func='SDO_COVEREDBY'), 'covers': SDOOperator(func='SDO_COVERS'), 'disjoint': SDODisjoint(), 'intersects': SDOOperator(func='SDO_OVERLAPBDYINTERSECT'), # TODO: Is this really the same as ST_Intersects()? 'equals': SDOOperator(func='SDO_EQUAL'), 'exact': SDOOperator(func='SDO_EQUAL'), 'overlaps': SDOOperator(func='SDO_OVERLAPS'), 'same_as': SDOOperator(func='SDO_EQUAL'), 'relate': SDORelate(), # Oracle uses a different syntax, e.g., 'mask=inside+touch' 'touches': SDOOperator(func='SDO_TOUCH'), 'within': SDOOperator(func='SDO_INSIDE'), 'distance_gt': SDODistance(op='>'), 'distance_gte': SDODistance(op='>='), 'distance_lt': SDODistance(op='<'), 'distance_lte': SDODistance(op='<='), 'dwithin': SDODWithin(), } truncate_params = {'relate': None} unsupported_functions = { 'AsGeoJSON', 'AsGML', 'AsKML', 'AsSVG', 'BoundingCircle', 'Envelope', 'ForceRHR', 'GeoHash', 'IsValid', 'MakeValid', 'MemSize', 'Scale', 'SnapToGrid', 'Translate', } def geo_quote_name(self, name): return super(OracleOperations, self).geo_quote_name(name).upper() def get_db_converters(self, expression): converters = super(OracleOperations, self).get_db_converters(expression) internal_type = expression.output_field.get_internal_type() geometry_fields = ( 'PointField', 'GeometryField', 'LineStringField', 'PolygonField', 'MultiPointField', 'MultiLineStringField', 'MultiPolygonField', 'GeometryCollectionField', 'GeomField', 'GMLField', ) if internal_type in geometry_fields: converters.append(self.convert_textfield_value) if hasattr(expression.output_field, 'geom_type'): converters.append(self.convert_geometry) return converters def convert_geometry(self, value, expression, connection, context): if value: value = Geometry(value) if 'transformed_srid' in context: value.srid = context['transformed_srid'] return value def convert_extent(self, clob, srid): if clob: # Generally, Oracle returns a polygon for the extent -- however, # it can return a single point if there's only one Point in the # table. ext_geom = Geometry(clob.read(), srid) gtype = str(ext_geom.geom_type) if gtype == 'Polygon': # Construct the 4-tuple from the coordinates in the polygon. shell = ext_geom.shell ll, ur = shell[0][:2], shell[2][:2] elif gtype == 'Point': ll = ext_geom.coords[:2] ur = ll else: raise Exception('Unexpected geometry type returned for extent: %s' % gtype) xmin, ymin = ll xmax, ymax = ur return (xmin, ymin, xmax, ymax) else: return None def convert_geom(self, value, geo_field): if value: if isinstance(value, Database.LOB): value = value.read() return Geometry(value, geo_field.srid) else: return None def geo_db_type(self, f): """ Returns the geometry database type for Oracle. Unlike other spatial backends, no stored procedure is necessary and it's the same for all geometry types. """ return 'MDSYS.SDO_GEOMETRY' def get_distance(self, f, value, lookup_type, **kwargs): """ Returns the distance parameters given the value and the lookup type. On Oracle, geometry columns with a geodetic coordinate system behave implicitly like a geography column, and thus meters will be used as the distance parameter on them. 
""" if not value: return [] value = value[0] if isinstance(value, Distance): if f.geodetic(self.connection): dist_param = value.m else: dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection))) else: dist_param = value # dwithin lookups on Oracle require a special string parameter # that starts with "distance=". if lookup_type == 'dwithin': dist_param = 'distance=%s' % dist_param return [dist_param] def get_geom_placeholder(self, f, value, compiler): """ Provides a proper substitution value for Geometries that are not in the SRID of the field. Specifically, this routine will substitute in the SDO_CS.TRANSFORM() function call. """ if value is None: return 'NULL' def transform_value(val, srid): return val.srid != srid if hasattr(value, 'as_sql'): if transform_value(value, f.srid): placeholder = '%s(%%s, %s)' % (self.transform, f.srid) else: placeholder = '%s' # No geometry value used for F expression, substitute in # the column name instead. sql, _ = compiler.compile(value) return placeholder % sql else: if transform_value(value, f.srid): return '%s(SDO_GEOMETRY(%%s, %s), %s)' % (self.transform, value.srid, f.srid) else: return 'SDO_GEOMETRY(%%s, %s)' % f.srid def spatial_aggregate_name(self, agg_name): """ Returns the spatial aggregate SQL name. """ agg_name = 'unionagg' if agg_name.lower() == 'union' else agg_name.lower() return getattr(self, agg_name) # Routines for getting the OGC-compliant models. def geometry_columns(self): from django.contrib.gis.db.backends.oracle.models import OracleGeometryColumns return OracleGeometryColumns def spatial_ref_sys(self): from django.contrib.gis.db.backends.oracle.models import OracleSpatialRefSys return OracleSpatialRefSys def modify_insert_params(self, placeholder, params): """Drop out insert parameters for NULL placeholder. Needed for Oracle Spatial backend due to #10888. """ if placeholder == 'NULL': return [] return super(OracleOperations, self).modify_insert_params(placeholder, params)
ae72e35058939a33294d9fa78c8cb4595457bec26f75163e0241c3874b916fb3
from django.contrib.gis.db.models.fields import GeometryField from django.db.backends.oracle.schema import DatabaseSchemaEditor from django.db.backends.utils import truncate_name class OracleGISSchemaEditor(DatabaseSchemaEditor): sql_add_geometry_metadata = (""" INSERT INTO USER_SDO_GEOM_METADATA ("TABLE_NAME", "COLUMN_NAME", "DIMINFO", "SRID") VALUES ( %(table)s, %(column)s, MDSYS.SDO_DIM_ARRAY( MDSYS.SDO_DIM_ELEMENT('LONG', %(dim0)s, %(dim2)s, %(tolerance)s), MDSYS.SDO_DIM_ELEMENT('LAT', %(dim1)s, %(dim3)s, %(tolerance)s) ), %(srid)s )""") sql_add_spatial_index = 'CREATE INDEX %(index)s ON %(table)s(%(column)s) INDEXTYPE IS MDSYS.SPATIAL_INDEX' sql_drop_spatial_index = 'DROP INDEX %(index)s' sql_clear_geometry_table_metadata = 'DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = %(table)s' sql_clear_geometry_field_metadata = ( 'DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = %(table)s ' 'AND COLUMN_NAME = %(column)s' ) def __init__(self, *args, **kwargs): super(OracleGISSchemaEditor, self).__init__(*args, **kwargs) self.geometry_sql = [] def geo_quote_name(self, name): return self.connection.ops.geo_quote_name(name) def column_sql(self, model, field, include_default=False): column_sql = super(OracleGISSchemaEditor, self).column_sql(model, field, include_default) if isinstance(field, GeometryField): db_table = model._meta.db_table self.geometry_sql.append( self.sql_add_geometry_metadata % { 'table': self.geo_quote_name(db_table), 'column': self.geo_quote_name(field.column), 'dim0': field._extent[0], 'dim1': field._extent[1], 'dim2': field._extent[2], 'dim3': field._extent[3], 'tolerance': field._tolerance, 'srid': field.srid, } ) if field.spatial_index: self.geometry_sql.append( self.sql_add_spatial_index % { 'index': self.quote_name(self._create_spatial_index_name(model, field)), 'table': self.quote_name(db_table), 'column': self.quote_name(field.column), } ) return column_sql def create_model(self, model): super(OracleGISSchemaEditor, self).create_model(model) self.run_geometry_sql() def delete_model(self, model): super(OracleGISSchemaEditor, self).delete_model(model) self.execute(self.sql_clear_geometry_table_metadata % { 'table': self.geo_quote_name(model._meta.db_table), }) def add_field(self, model, field): super(OracleGISSchemaEditor, self).add_field(model, field) self.run_geometry_sql() def remove_field(self, model, field): if isinstance(field, GeometryField): self.execute(self.sql_clear_geometry_field_metadata % { 'table': self.geo_quote_name(model._meta.db_table), 'column': self.geo_quote_name(field.column), }) if field.spatial_index: self.execute(self.sql_drop_spatial_index % { 'index': self.quote_name(self._create_spatial_index_name(model, field)), }) super(OracleGISSchemaEditor, self).remove_field(model, field) def run_geometry_sql(self): for sql in self.geometry_sql: self.execute(sql) self.geometry_sql = [] def _create_spatial_index_name(self, model, field): # Oracle doesn't allow object names > 30 characters. Use this scheme # instead of self._create_index_name() for backwards compatibility. return truncate_name('%s_%s_id' % (model._meta.db_table, field.column), 30)
e8901c3fb5b1b4af3d0a2a7b5bfc2eb529f52b4f59d81b4f73b6042a45b868b6
from cx_Oracle import CLOB from django.contrib.gis.db.backends.base.adapter import WKTAdapter from django.contrib.gis.geos import GeometryCollection, Polygon from django.utils.six.moves import range class OracleSpatialAdapter(WKTAdapter): input_size = CLOB def __init__(self, geom): """ Oracle requires that polygon rings are in proper orientation. This affects spatial operations and an invalid orientation may cause failures. Correct orientations are: * Outer ring - counter clockwise * Inner ring(s) - clockwise """ if isinstance(geom, Polygon): self._fix_polygon(geom) elif isinstance(geom, GeometryCollection): self._fix_geometry_collection(geom) self.wkt = geom.wkt self.srid = geom.srid def _fix_polygon(self, poly): # Fix single polygon orientation as described in __init__() if self._isClockwise(poly.exterior_ring): poly.exterior_ring = list(reversed(poly.exterior_ring)) for i in range(1, len(poly)): if not self._isClockwise(poly[i]): poly[i] = list(reversed(poly[i])) return poly def _fix_geometry_collection(self, coll): # Fix polygon orientations in geometry collections as described in # __init__() for i, geom in enumerate(coll): if isinstance(geom, Polygon): coll[i] = self._fix_polygon(geom) def _isClockwise(self, coords): # A modified shoelace algorithm to determine polygon orientation. # See https://en.wikipedia.org/wiki/Shoelace_formula n = len(coords) area = 0.0 for i in range(n): j = (i + 1) % n area += coords[i][0] * coords[j][1] area -= coords[j][0] * coords[i][1] return area < 0.0
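# Editor's note: a small standalone check of the shoelace orientation test
# used by _isClockwise() above; not part of the original module.
def _signed_area(coords):
    # Positive for counter-clockwise rings, negative for clockwise ones.
    area = 0.0
    n = len(coords)
    for i in range(n):
        j = (i + 1) % n
        area += coords[i][0] * coords[j][1] - coords[j][0] * coords[i][1]
    return area / 2.0

_ccw_ring = [(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)]
assert _signed_area(_ccw_ring) > 0          # counter-clockwise: valid outer ring
assert _signed_area(_ccw_ring[::-1]) < 0    # clockwise: would be reversed by _fix_polygon()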
0c2f6d1e1a790654eb2645132cb88079e3a21870b8d0adb2bbb9c7ac2487a37d
import re from functools import partial from django.contrib.gis.db.models import aggregates class BaseSpatialFeatures(object): gis_enabled = True # Does the database contain a SpatialRefSys model to store SRID information? has_spatialrefsys_table = True # Does the backend support the django.contrib.gis.utils.add_srs_entry() utility? supports_add_srs_entry = True # Does the backend introspect GeometryField to its subtypes? supports_geometry_field_introspection = True # Does the backend support storing 3D geometries? supports_3d_storage = False # Reference implementation of 3D functions is: # http://postgis.net/docs/PostGIS_Special_Functions_Index.html#PostGIS_3D_Functions supports_3d_functions = False # Does the database support SRID transform operations? supports_transform = True # Do geometric relationship operations operate on real shapes (or only on bounding boxes)? supports_real_shape_operations = True # Can geometry fields be null? supports_null_geometries = True # Can the the function be applied on geodetic coordinate systems? supports_distance_geodetic = True supports_length_geodetic = True supports_perimeter_geodetic = False # Is the database able to count vertices on polygons (with `num_points`)? supports_num_points_poly = True # The following properties indicate if the database backend support # certain lookups (dwithin, left and right, relate, ...) supports_distances_lookups = True supports_left_right_lookups = False # Does the database have raster support? supports_raster = False # Does the database support a unique index on geometry fields? supports_geometry_field_unique_index = True @property def supports_bbcontains_lookup(self): return 'bbcontains' in self.connection.ops.gis_operators @property def supports_contained_lookup(self): return 'contained' in self.connection.ops.gis_operators @property def supports_crosses_lookup(self): return 'crosses' in self.connection.ops.gis_operators @property def supports_dwithin_lookup(self): return 'dwithin' in self.connection.ops.gis_operators @property def supports_relate_lookup(self): return 'relate' in self.connection.ops.gis_operators @property def supports_isvalid_lookup(self): return 'isvalid' in self.connection.ops.gis_operators # For each of those methods, the class will have a property named # `has_<name>_method` (defined in __init__) which accesses connection.ops # to determine GIS method availability. geoqueryset_methods = ( 'area', 'bounding_circle', 'centroid', 'difference', 'distance', 'distance_spheroid', 'envelope', 'force_rhr', 'geohash', 'gml', 'intersection', 'kml', 'length', 'mem_size', 'num_geom', 'num_points', 'perimeter', 'point_on_surface', 'reverse', 'scale', 'snap_to_grid', 'svg', 'sym_difference', 'transform', 'translate', 'union', 'unionagg', ) # Specifies whether the Collect and Extent aggregates are supported by the database @property def supports_collect_aggr(self): return aggregates.Collect not in self.connection.ops.disallowed_aggregates @property def supports_extent_aggr(self): return aggregates.Extent not in self.connection.ops.disallowed_aggregates @property def supports_make_line_aggr(self): return aggregates.MakeLine not in self.connection.ops.disallowed_aggregates def __init__(self, *args): super(BaseSpatialFeatures, self).__init__(*args) for method in self.geoqueryset_methods: # Add dynamically properties for each GQS method, e.g. has_force_rhr_method, etc. 
setattr(self.__class__, 'has_%s_method' % method, property(partial(BaseSpatialFeatures.has_ops_method, method=method))) def __getattr__(self, name): m = re.match(r'has_(\w*)_function$', name) if m: func_name = m.group(1) return func_name not in self.connection.ops.unsupported_functions raise AttributeError def has_ops_method(self, method): return getattr(self.connection.ops, method, False)
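# Editor's note: illustrative usage of the dynamically generated feature
# flags; not part of the original module and only meaningful inside a
# configured Django project whose default database is GIS-enabled.
from django.db import connection

if connection.features.gis_enabled:
    connection.features.has_transform_method    # property added in __init__ above
    connection.features.has_AsGeoJSON_function  # answered by __getattr__ above
    connection.features.supports_raster         # plain class attribute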
9514d89bdd1814e719dd1cfb09fcb6673da229f6b122297dbf57c1b81570737e
from django.contrib.gis import gdal from django.utils import six from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class SpatialRefSysMixin(object): """ The SpatialRefSysMixin is a class used by the database-dependent SpatialRefSys objects to reduce redundant code. """ @property def srs(self): """ Returns a GDAL SpatialReference object. """ # TODO: Is caching really necessary here? Is complexity worth it? if hasattr(self, '_srs'): # Returning a clone of the cached SpatialReference object. return self._srs.clone() else: # Attempting to cache a SpatialReference object. # Trying to get from WKT first. try: self._srs = gdal.SpatialReference(self.wkt) return self.srs except Exception as e: msg = e try: self._srs = gdal.SpatialReference(self.proj4text) return self.srs except Exception as e: msg = e raise Exception('Could not get OSR SpatialReference from WKT: %s\nError:\n%s' % (self.wkt, msg)) @property def ellipsoid(self): """ Returns a tuple of the ellipsoid parameters: (semimajor axis, semiminor axis, and inverse flattening). """ return self.srs.ellipsoid @property def name(self): "Returns the projection name." return self.srs.name @property def spheroid(self): "Returns the spheroid name for this spatial reference." return self.srs['spheroid'] @property def datum(self): "Returns the datum for this spatial reference." return self.srs['datum'] @property def projected(self): "Is this Spatial Reference projected?" return self.srs.projected @property def local(self): "Is this Spatial Reference local?" return self.srs.local @property def geographic(self): "Is this Spatial Reference geographic?" return self.srs.geographic @property def linear_name(self): "Returns the linear units name." return self.srs.linear_name @property def linear_units(self): "Returns the linear units." return self.srs.linear_units @property def angular_name(self): "Returns the name of the angular units." return self.srs.angular_name @property def angular_units(self): "Returns the angular units." return self.srs.angular_units @property def units(self): "Returns a tuple of the units and the name." if self.projected or self.local: return (self.linear_units, self.linear_name) elif self.geographic: return (self.angular_units, self.angular_name) else: return (None, None) @classmethod def get_units(cls, wkt): """ Return a tuple of (unit_value, unit_name) for the given WKT without using any of the database fields. """ return gdal.SpatialReference(wkt).units @classmethod def get_spheroid(cls, wkt, string=True): """ Class method used by GeometryField on initialization to retrieve the `SPHEROID[..]` parameters from the given WKT. """ srs = gdal.SpatialReference(wkt) sphere_params = srs.ellipsoid sphere_name = srs['spheroid'] if not string: return sphere_name, sphere_params else: # `string` parameter used to place in format acceptable by PostGIS if len(sphere_params) == 3: radius, flattening = sphere_params[0], sphere_params[2] else: radius, flattening = sphere_params return 'SPHEROID["%s",%s,%s]' % (sphere_name, radius, flattening) def __str__(self): """ Returns the string representation, a 'pretty' OGC WKT. """ return six.text_type(self.srs)
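# Editor's note: a hedged sketch of the classmethod helpers above; not part of
# the original module. GDAL must be installed, and the WKT below is the
# standard WGS 84 definition.
WGS84_WKT = (
    'GEOGCS["WGS 84",DATUM["WGS_1984",'
    'SPHEROID["WGS 84",6378137,298.257223563]],'
    'PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
)

units, unit_name = SpatialRefSysMixin.get_units(WGS84_WKT)   # (~0.01745, 'degree')
spheroid_sql = SpatialRefSysMixin.get_spheroid(WGS84_WKT)    # 'SPHEROID["WGS 84",...]'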
0afdc53757468e1827b39926b0656c9655012e0e9464afcd14726374a524191f
class BaseSpatialOperations(object): """ This module holds the base `BaseSpatialBackend` object, which is instantiated by each spatial database backend with the features it has. """ truncate_params = {} # Quick booleans for the type of this spatial backend, and # an attribute for the spatial database version tuple (if applicable) postgis = False spatialite = False mysql = False oracle = False spatial_version = None # How the geometry column should be selected. select = None # Does the spatial database have a geometry or geography type? geography = False geometry = False area = False bounding_circle = False centroid = False difference = False distance = False distance_sphere = False distance_spheroid = False envelope = False force_rhr = False mem_size = False num_geom = False num_points = False perimeter = False perimeter3d = False point_on_surface = False polygonize = False reverse = False scale = False snap_to_grid = False sym_difference = False transform = False translate = False union = False # Aggregates disallowed_aggregates = () geom_func_prefix = '' # Mapping between Django function names and backend names, when names do not # match; used in spatial_function_name(). function_names = {} # Blacklist/set of known unsupported functions of the backend unsupported_functions = { 'Area', 'AsGeoJSON', 'AsGML', 'AsKML', 'AsSVG', 'BoundingCircle', 'Centroid', 'Difference', 'Distance', 'Envelope', 'ForceRHR', 'GeoHash', 'Intersection', 'IsValid', 'Length', 'MakeValid', 'MemSize', 'NumGeometries', 'NumPoints', 'Perimeter', 'PointOnSurface', 'Reverse', 'Scale', 'SnapToGrid', 'SymDifference', 'Transform', 'Translate', 'Union', } # Serialization geohash = False geojson = False gml = False kml = False svg = False # Constructors from_text = False from_wkb = False # Default conversion functions for aggregates; will be overridden if implemented # for the spatial backend. def convert_extent(self, box, srid): raise NotImplementedError('Aggregate extent not implemented for this spatial backend.') def convert_extent3d(self, box, srid): raise NotImplementedError('Aggregate 3D extent not implemented for this spatial backend.') def convert_geom(self, geom_val, geom_field): raise NotImplementedError('Aggregate method not implemented for this spatial backend.') # For quoting column values, rather than columns. def geo_quote_name(self, name): return "'%s'" % name # GeometryField operations def geo_db_type(self, f): """ Returns the database column type for the geometry field on the spatial backend. """ raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_type() method') def get_distance(self, f, value, lookup_type): """ Returns the distance parameters for the given geometry field, lookup value, and lookup type. """ raise NotImplementedError('Distance operations not available on this spatial backend.') def get_geom_placeholder(self, f, value, compiler): """ Returns the placeholder for the given geometry field with the given value. Depending on the spatial backend, the placeholder may contain a stored procedure call to the transformation function of the spatial backend. """ raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_placeholder() method') def check_expression_support(self, expression): if isinstance(expression, self.disallowed_aggregates): raise NotImplementedError( "%s spatial aggregation is not supported by this database backend." 
% expression.name ) super(BaseSpatialOperations, self).check_expression_support(expression) def spatial_aggregate_name(self, agg_name): raise NotImplementedError('Aggregate support not implemented for this spatial backend.') def spatial_function_name(self, func_name): if func_name in self.unsupported_functions: raise NotImplementedError("This backend doesn't support the %s function." % func_name) return self.function_names.get(func_name, self.geom_func_prefix + func_name) # Routines for getting the OGC-compliant models. def geometry_columns(self): raise NotImplementedError('Subclasses of BaseSpatialOperations must provide a geometry_columns() method.') def spatial_ref_sys(self): raise NotImplementedError('subclasses of BaseSpatialOperations must provide a spatial_ref_sys() method')
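# Illustrative sketch (not part of the file above): how a concrete backend
# resolves Django function names through spatial_function_name(). The prefix,
# overrides, and blacklist on the toy class below are made-up stand-ins for
# what a real backend defines.
class ToyOperations(object):
    geom_func_prefix = 'ST_'
    function_names = {'NumPoints': 'ST_NPoints'}
    unsupported_functions = {'MemSize'}

    def spatial_function_name(self, func_name):
        if func_name in self.unsupported_functions:
            raise NotImplementedError("This backend doesn't support the %s function." % func_name)
        return self.function_names.get(func_name, self.geom_func_prefix + func_name)

ops = ToyOperations()
print(ops.spatial_function_name('Centroid'))   # ST_Centroid
print(ops.spatial_function_name('NumPoints'))  # ST_NPoints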
9efe94ef3418f4c7e558ae9e4609b1eb32fa312f09eaffa28daf4b3ffc521f0e
class WKTAdapter(object):
    """
    This provides an adaptor for Geometries sent to the MySQL and Oracle
    database backends.
    """
    def __init__(self, geom):
        self.wkt = geom.wkt
        self.srid = geom.srid

    def __eq__(self, other):
        if not isinstance(other, WKTAdapter):
            return False
        return self.wkt == other.wkt and self.srid == other.srid

    def __hash__(self):
        return hash((self.wkt, self.srid))

    def __str__(self):
        return self.wkt
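# Illustrative sketch (not part of the file above): WKTAdapter only needs an
# object exposing .wkt and .srid, so a small stub stands in for a GEOS
# geometry. Assumes Django is importable; the point coordinates are made up.
from django.contrib.gis.db.backends.base.adapter import WKTAdapter


class StubGeom(object):
    def __init__(self, wkt, srid):
        self.wkt, self.srid = wkt, srid


a = WKTAdapter(StubGeom('POINT (1 2)', 4326))
b = WKTAdapter(StubGeom('POINT (1 2)', 4326))
assert a == b and hash(a) == hash(b)  # equality and hashing use (wkt, srid)
print(str(a))                         # POINT (1 2)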
3b804ebb08a4fd5c37ea512cee675b11de1c50e407903c39bf271a0819d84f6e
from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
from django.db.backends.mysql.features import \
    DatabaseFeatures as MySQLDatabaseFeatures


class DatabaseFeatures(BaseSpatialFeatures, MySQLDatabaseFeatures):
    has_spatialrefsys_table = False
    supports_add_srs_entry = False
    supports_distance_geodetic = False
    supports_length_geodetic = False
    supports_distances_lookups = False
    supports_transform = False
    supports_real_shape_operations = False
    supports_null_geometries = False
    supports_num_points_poly = False
f857f79c3448ed1cf709cdf75250f44e06a38cd1f9add7bdbb448a4ccc2e014c
from MySQLdb.constants import FIELD_TYPE from django.contrib.gis.gdal import OGRGeomType from django.db.backends.mysql.introspection import DatabaseIntrospection class MySQLIntrospection(DatabaseIntrospection): # Updating the data_types_reverse dictionary with the appropriate # type for Geometry fields. data_types_reverse = DatabaseIntrospection.data_types_reverse.copy() data_types_reverse[FIELD_TYPE.GEOMETRY] = 'GeometryField' def get_geometry_type(self, table_name, geo_col): cursor = self.connection.cursor() try: # In order to get the specific geometry type of the field, # we introspect on the table definition using `DESCRIBE`. cursor.execute('DESCRIBE %s' % self.connection.ops.quote_name(table_name)) # Increment over description info until we get to the geometry # column. for column, typ, null, key, default, extra in cursor.fetchall(): if column == geo_col: # Using OGRGeomType to convert from OGC name to Django field. # MySQL does not support 3D or SRIDs, so the field params # are empty. field_type = OGRGeomType(typ).django field_params = {} break finally: cursor.close() return field_type, field_params def supports_spatial_index(self, cursor, table_name): # Supported with MyISAM, or InnoDB on MySQL 5.7.5+ storage_engine = self.get_storage_engine(cursor, table_name) return ( (storage_engine == 'InnoDB' and self.connection.mysql_version >= (5, 7, 5)) or storage_engine == 'MyISAM' )
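# Illustrative sketch (not part of the file above): the storage-engine and
# version test behind supports_spatial_index(), extracted as a plain function.
# The engine names and version tuples are example inputs.
def mysql_supports_spatial_index(storage_engine, mysql_version):
    return (
        (storage_engine == 'InnoDB' and mysql_version >= (5, 7, 5)) or
        storage_engine == 'MyISAM'
    )


print(mysql_supports_spatial_index('InnoDB', (5, 7, 10)))  # True
print(mysql_supports_spatial_index('InnoDB', (5, 6, 30)))  # False
print(mysql_supports_spatial_index('MyISAM', (5, 5, 40)))  # True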
67cadd0ed21307107ce4cf54dc66c9cbbd2cb26f6e017ecc8d1ddd6457bea7cf
from django.db.backends.mysql.base import \
    DatabaseWrapper as MySQLDatabaseWrapper

from .features import DatabaseFeatures
from .introspection import MySQLIntrospection
from .operations import MySQLOperations
from .schema import MySQLGISSchemaEditor


class DatabaseWrapper(MySQLDatabaseWrapper):
    SchemaEditorClass = MySQLGISSchemaEditor
    # Classes instantiated in __init__().
    features_class = DatabaseFeatures
    introspection_class = MySQLIntrospection
    ops_class = MySQLOperations
1df7c823ef0c6d73384a0c93c34190cda2899b15c6699d5be88cbdf9bbc80538
from django.contrib.gis.db.backends.base.adapter import WKTAdapter from django.contrib.gis.db.backends.base.operations import \ BaseSpatialOperations from django.contrib.gis.db.backends.utils import SpatialOperator from django.contrib.gis.db.models import GeometryField, aggregates from django.db.backends.mysql.operations import DatabaseOperations from django.utils.functional import cached_property class MySQLOperations(BaseSpatialOperations, DatabaseOperations): mysql = True name = 'mysql' Adapter = WKTAdapter @cached_property def geom_func_prefix(self): return '' if self.is_mysql_5_5 else 'ST_' @cached_property def is_mysql_5_5(self): return self.connection.mysql_version < (5, 6, 1) @cached_property def is_mysql_5_6(self): return self.connection.mysql_version < (5, 7, 6) @cached_property def uses_invalid_empty_geometry_collection(self): return self.connection.mysql_version >= (5, 7, 5) @cached_property def select(self): return self.geom_func_prefix + 'AsText(%s)' @cached_property def from_wkb(self): return self.geom_func_prefix + 'GeomFromWKB' @cached_property def from_text(self): return self.geom_func_prefix + 'GeomFromText' @cached_property def gis_operators(self): MBREquals = 'MBREqual' if self.is_mysql_5_6 else 'MBREquals' return { 'bbcontains': SpatialOperator(func='MBRContains'), # For consistency w/PostGIS API 'bboverlaps': SpatialOperator(func='MBROverlaps'), # ... 'contained': SpatialOperator(func='MBRWithin'), # ... 'contains': SpatialOperator(func='MBRContains'), 'disjoint': SpatialOperator(func='MBRDisjoint'), 'equals': SpatialOperator(func=MBREquals), 'exact': SpatialOperator(func=MBREquals), 'intersects': SpatialOperator(func='MBRIntersects'), 'overlaps': SpatialOperator(func='MBROverlaps'), 'same_as': SpatialOperator(func=MBREquals), 'touches': SpatialOperator(func='MBRTouches'), 'within': SpatialOperator(func='MBRWithin'), } @cached_property def function_names(self): return {'Length': 'GLength'} if self.is_mysql_5_5 else {} disallowed_aggregates = ( aggregates.Collect, aggregates.Extent, aggregates.Extent3D, aggregates.MakeLine, aggregates.Union, ) @cached_property def unsupported_functions(self): unsupported = { 'AsGeoJSON', 'AsGML', 'AsKML', 'AsSVG', 'BoundingCircle', 'ForceRHR', 'GeoHash', 'IsValid', 'MakeValid', 'MemSize', 'Perimeter', 'PointOnSurface', 'Reverse', 'Scale', 'SnapToGrid', 'Transform', 'Translate', } if self.is_mysql_5_5: unsupported.update({'Difference', 'Distance', 'Intersection', 'SymDifference', 'Union'}) return unsupported def geo_db_type(self, f): return f.geom_type def get_geom_placeholder(self, f, value, compiler): """ The placeholder here has to include MySQL's WKT constructor. Because MySQL does not support spatial transformations, there is no need to modify the placeholder based on the contents of the given value. """ if hasattr(value, 'as_sql'): placeholder, _ = compiler.compile(value) else: placeholder = '%s(%%s)' % self.from_text return placeholder def get_db_converters(self, expression): converters = super(MySQLOperations, self).get_db_converters(expression) if isinstance(expression.output_field, GeometryField) and self.uses_invalid_empty_geometry_collection: converters.append(self.convert_invalid_empty_geometry_collection) return converters # https://dev.mysql.com/doc/refman/en/spatial-function-argument-handling.html # MySQL 5.7.5 adds support for the empty geometry collections, but they are represented with invalid WKT. 
def convert_invalid_empty_geometry_collection(self, value, expression, connection, context): if value == b'GEOMETRYCOLLECTION()': return b'GEOMETRYCOLLECTION EMPTY' return value
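# Illustrative sketch (not part of the file above): how the function prefix and
# the name overrides above flip on the MySQL version; the version tuples are
# example inputs.
def mysql_spatial_names(mysql_version):
    is_mysql_5_5 = mysql_version < (5, 6, 1)
    prefix = '' if is_mysql_5_5 else 'ST_'
    function_names = {'Length': 'GLength'} if is_mysql_5_5 else {}
    return prefix, function_names


print(mysql_spatial_names((5, 5, 40)))  # ('', {'Length': 'GLength'})
print(mysql_spatial_names((5, 7, 20)))  # ('ST_', {})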
bf454995c8c5a006de4827f5cdbc0206007bd25af9e997b738c4f3e4af2e426b
import logging from django.contrib.gis.db.models.fields import GeometryField from django.db.backends.mysql.schema import DatabaseSchemaEditor from django.db.utils import OperationalError logger = logging.getLogger('django.contrib.gis') class MySQLGISSchemaEditor(DatabaseSchemaEditor): sql_add_spatial_index = 'CREATE SPATIAL INDEX %(index)s ON %(table)s(%(column)s)' sql_drop_spatial_index = 'DROP INDEX %(index)s ON %(table)s' def __init__(self, *args, **kwargs): super(MySQLGISSchemaEditor, self).__init__(*args, **kwargs) self.geometry_sql = [] def skip_default(self, field): return ( super(MySQLGISSchemaEditor, self).skip_default(field) or # Geometry fields are stored as BLOB/TEXT and can't have defaults. isinstance(field, GeometryField) ) def column_sql(self, model, field, include_default=False): column_sql = super(MySQLGISSchemaEditor, self).column_sql(model, field, include_default) # MySQL doesn't support spatial indexes on NULL columns if isinstance(field, GeometryField) and field.spatial_index and not field.null: qn = self.connection.ops.quote_name db_table = model._meta.db_table self.geometry_sql.append( self.sql_add_spatial_index % { 'index': qn(self._create_spatial_index_name(model, field)), 'table': qn(db_table), 'column': qn(field.column), } ) return column_sql def create_model(self, model): super(MySQLGISSchemaEditor, self).create_model(model) self.create_spatial_indexes() def add_field(self, model, field): super(MySQLGISSchemaEditor, self).add_field(model, field) self.create_spatial_indexes() def remove_field(self, model, field): if isinstance(field, GeometryField) and field.spatial_index: qn = self.connection.ops.quote_name sql = self.sql_drop_spatial_index % { 'index': qn(self._create_spatial_index_name(model, field)), 'table': qn(model._meta.db_table), } try: self.execute(sql) except OperationalError: logger.error( "Couldn't remove spatial index: %s (may be expected " "if your storage engine doesn't support them).", sql ) super(MySQLGISSchemaEditor, self).remove_field(model, field) def _create_spatial_index_name(self, model, field): return '%s_%s_id' % (model._meta.db_table, field.column) def create_spatial_indexes(self): for sql in self.geometry_sql: try: self.execute(sql) except OperationalError: logger.error( "Cannot create SPATIAL INDEX %s. Only MyISAM and (as of " "MySQL 5.7.5) InnoDB support them.", sql ) self.geometry_sql = []
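# Illustrative sketch (not part of the file above): the statement that
# create_spatial_indexes() ends up executing for an indexed, NOT NULL geometry
# column. The table and column names are made up; the backtick quoting mimics
# MySQL's quote_name().
sql_add_spatial_index = 'CREATE SPATIAL INDEX %(index)s ON %(table)s(%(column)s)'
index_name = '%s_%s_id' % ('geo_city', 'point')  # as in _create_spatial_index_name()
print(sql_add_spatial_index % {
    'index': '`%s`' % index_name,
    'table': '`geo_city`',
    'column': '`point`',
})
# CREATE SPATIAL INDEX `geo_city_point_id` ON `geo_city`(`point`)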
80f4edd6b3daeeec5600e83b592ec6d42342768a5b0ea9df0bbaa688450adba9
from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
from django.db.backends.sqlite3.features import \
    DatabaseFeatures as SQLiteDatabaseFeatures
from django.utils.functional import cached_property


class DatabaseFeatures(BaseSpatialFeatures, SQLiteDatabaseFeatures):
    supports_3d_storage = True
    supports_distance_geodetic = False
    # SpatiaLite can only count vertices in LineStrings
    supports_num_points_poly = False

    @cached_property
    def supports_initspatialmetadata_in_one_transaction(self):
        # SpatiaLite 4.1+ support initializing all metadata in one transaction
        # which can result in a significant performance improvement when
        # creating the database.
        return self.connection.ops.spatial_version >= (4, 1, 0)
8215c1354fb5d30974f462a9bab14210147962d1586c00c8a1f9e17aed9c22cb
from django.contrib.gis.gdal import OGRGeomType from django.db.backends.sqlite3.introspection import ( DatabaseIntrospection, FlexibleFieldLookupDict, ) from django.utils import six class GeoFlexibleFieldLookupDict(FlexibleFieldLookupDict): """ Sublcass that includes updates the `base_data_types_reverse` dict for geometry field types. """ base_data_types_reverse = FlexibleFieldLookupDict.base_data_types_reverse.copy() base_data_types_reverse.update( {'point': 'GeometryField', 'linestring': 'GeometryField', 'polygon': 'GeometryField', 'multipoint': 'GeometryField', 'multilinestring': 'GeometryField', 'multipolygon': 'GeometryField', 'geometrycollection': 'GeometryField', }) class SpatiaLiteIntrospection(DatabaseIntrospection): data_types_reverse = GeoFlexibleFieldLookupDict() def get_geometry_type(self, table_name, geo_col): cursor = self.connection.cursor() try: # Querying the `geometry_columns` table to get additional metadata. cursor.execute('SELECT coord_dimension, srid, geometry_type ' 'FROM geometry_columns ' 'WHERE f_table_name=%s AND f_geometry_column=%s', (table_name, geo_col)) row = cursor.fetchone() if not row: raise Exception('Could not find a geometry column for "%s"."%s"' % (table_name, geo_col)) # OGRGeomType does not require GDAL and makes it easy to convert # from OGC geom type name to Django field. ogr_type = row[2] if isinstance(ogr_type, six.integer_types) and ogr_type > 1000: # SpatiaLite versions >= 4 use the new SFSQL 1.2 offsets # 1000 (Z), 2000 (M), and 3000 (ZM) to indicate the presence of # higher dimensional coordinates (M not yet supported by Django). ogr_type = ogr_type % 1000 + OGRGeomType.wkb25bit field_type = OGRGeomType(ogr_type).django # Getting any GeometryField keyword arguments that are not the default. dim = row[0] srid = row[1] field_params = {} if srid != 4326: field_params['srid'] = srid if (isinstance(dim, six.string_types) and 'Z' in dim) or dim == 3: field_params['dim'] = 3 finally: cursor.close() return field_type, field_params def get_constraints(self, cursor, table_name): constraints = super(SpatiaLiteIntrospection, self).get_constraints(cursor, table_name) cursor.execute('SELECT f_geometry_column ' 'FROM geometry_columns ' 'WHERE f_table_name=%s AND spatial_index_enabled=1', (table_name,)) for row in cursor.fetchall(): constraints['%s__spatial__index' % row[0]] = { "columns": [row[0]], "primary_key": False, "unique": False, "foreign_key": None, "check": False, "index": True, } return constraints
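# Illustrative sketch (not part of the file above): folding SpatiaLite >= 4
# geometry type codes above 1000 back to the base OGC type plus OGR's 2.5D
# flag. The WKB25BIT constant below is assumed to equal OGRGeomType.wkb25bit;
# everything else mirrors get_geometry_type() above.
WKB25BIT = -2147483648  # assumed value of OGRGeomType.wkb25bit

def fold_spatialite_type(ogr_type):
    if isinstance(ogr_type, int) and ogr_type > 1000:
        ogr_type = ogr_type % 1000 + WKB25BIT
    return ogr_type

print(fold_spatialite_type(1003))  # PolygonZ -> 3 + WKB25BIT
print(fold_spatialite_type(6))     # plain MultiLineString stays 6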
7f36f459797ff7e2d2b0f41f7505d1bcd339741dab5d7aca9e90eae6d7d28349
""" The GeometryColumns and SpatialRefSys models for the SpatiaLite backend. """ from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin from django.db import models from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class SpatialiteGeometryColumns(models.Model): """ The 'geometry_columns' table from SpatiaLite. """ f_table_name = models.CharField(max_length=256) f_geometry_column = models.CharField(max_length=256) coord_dimension = models.IntegerField() srid = models.IntegerField(primary_key=True) spatial_index_enabled = models.IntegerField() type = models.IntegerField(db_column='geometry_type') class Meta: app_label = 'gis' db_table = 'geometry_columns' managed = False @classmethod def table_name_col(cls): """ Returns the name of the metadata column used to store the feature table name. """ return 'f_table_name' @classmethod def geom_col_name(cls): """ Returns the name of the metadata column used to store the feature geometry column. """ return 'f_geometry_column' def __str__(self): return "%s.%s - %dD %s field (SRID: %d)" % \ (self.f_table_name, self.f_geometry_column, self.coord_dimension, self.type, self.srid) class SpatialiteSpatialRefSys(models.Model, SpatialRefSysMixin): """ The 'spatial_ref_sys' table from SpatiaLite. """ srid = models.IntegerField(primary_key=True) auth_name = models.CharField(max_length=256) auth_srid = models.IntegerField() ref_sys_name = models.CharField(max_length=256) proj4text = models.CharField(max_length=2048) srtext = models.CharField(max_length=2048) @property def wkt(self): return self.srtext class Meta: app_label = 'gis' db_table = 'spatial_ref_sys' managed = False
c5a09d67c7d23bfe8b1092405c9bad0609d2dc5ff5982471862728a0282f2831
import sys from ctypes.util import find_library from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.db.backends.sqlite3.base import ( Database, DatabaseWrapper as SQLiteDatabaseWrapper, SQLiteCursorWrapper, ) from django.utils import six from .client import SpatiaLiteClient from .features import DatabaseFeatures from .introspection import SpatiaLiteIntrospection from .operations import SpatiaLiteOperations from .schema import SpatialiteSchemaEditor class DatabaseWrapper(SQLiteDatabaseWrapper): SchemaEditorClass = SpatialiteSchemaEditor # Classes instantiated in __init__(). client_class = SpatiaLiteClient features_class = DatabaseFeatures introspection_class = SpatiaLiteIntrospection ops_class = SpatiaLiteOperations def __init__(self, *args, **kwargs): # Before we get too far, make sure pysqlite 2.5+ is installed. if Database.version_info < (2, 5, 0): raise ImproperlyConfigured('Only versions of pysqlite 2.5+ are ' 'compatible with SpatiaLite and GeoDjango.') # Trying to find the location of the SpatiaLite library. # Here we are figuring out the path to the SpatiaLite library # (`libspatialite`). If it's not in the system library path (e.g., it # cannot be found by `ctypes.util.find_library`), then it may be set # manually in the settings via the `SPATIALITE_LIBRARY_PATH` setting. self.spatialite_lib = getattr(settings, 'SPATIALITE_LIBRARY_PATH', find_library('spatialite')) if not self.spatialite_lib: raise ImproperlyConfigured('Unable to locate the SpatiaLite library. ' 'Make sure it is in your library path, or set ' 'SPATIALITE_LIBRARY_PATH in your settings.' ) super(DatabaseWrapper, self).__init__(*args, **kwargs) def get_new_connection(self, conn_params): conn = super(DatabaseWrapper, self).get_new_connection(conn_params) # Enabling extension loading on the SQLite connection. try: conn.enable_load_extension(True) except AttributeError: raise ImproperlyConfigured( 'The pysqlite library does not support C extension loading. ' 'Both SQLite and pysqlite must be configured to allow ' 'the loading of extensions to use SpatiaLite.') # Loading the SpatiaLite library extension on the connection, and returning # the created cursor. cur = conn.cursor(factory=SQLiteCursorWrapper) try: cur.execute("SELECT load_extension(%s)", (self.spatialite_lib,)) except Exception as msg: new_msg = ( 'Unable to load the SpatiaLite library extension ' '"%s" because: %s') % (self.spatialite_lib, msg) six.reraise(ImproperlyConfigured, ImproperlyConfigured(new_msg), sys.exc_info()[2]) cur.close() return conn def prepare_database(self): super(DatabaseWrapper, self).prepare_database() # Check if spatial metadata have been initialized in the database with self.cursor() as cursor: cursor.execute("PRAGMA table_info(geometry_columns);") if cursor.fetchall() == []: arg = "1" if self.features.supports_initspatialmetadata_in_one_transaction else "" cursor.execute("SELECT InitSpatialMetaData(%s)" % arg)
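# Illustrative sketch (not part of the file above): the settings this backend
# reads. The database name and library path are placeholders; set
# SPATIALITE_LIBRARY_PATH only if ctypes.util.find_library('spatialite')
# cannot locate the library on its own.
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.spatialite',
        'NAME': 'geodjango.db',
    }
}
SPATIALITE_LIBRARY_PATH = '/usr/local/lib/libspatialite.so'  # placeholder path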
33da39fdcaafad437da14f53db77d84b1baebc9edf752bdd929a6d8011065e2b
""" SQL functions reference lists: https://web.archive.org/web/20130407175746/http://www.gaia-gis.it/gaia-sins/spatialite-sql-4.0.0.html http://www.gaia-gis.it/gaia-sins/spatialite-sql-4.2.1.html """ import re import sys from django.contrib.gis.db.backends.base.operations import \ BaseSpatialOperations from django.contrib.gis.db.backends.spatialite.adapter import SpatiaLiteAdapter from django.contrib.gis.db.backends.utils import SpatialOperator from django.contrib.gis.db.models import aggregates from django.contrib.gis.geometry.backend import Geometry from django.contrib.gis.measure import Distance from django.core.exceptions import ImproperlyConfigured from django.db.backends.sqlite3.operations import DatabaseOperations from django.utils import six from django.utils.functional import cached_property class SpatiaLiteOperations(BaseSpatialOperations, DatabaseOperations): name = 'spatialite' spatialite = True version_regex = re.compile(r'^(?P<major>\d)\.(?P<minor1>\d)\.(?P<minor2>\d+)') Adapter = SpatiaLiteAdapter area = 'Area' centroid = 'Centroid' collect = 'Collect' contained = 'MbrWithin' difference = 'Difference' distance = 'Distance' envelope = 'Envelope' extent = 'Extent' geojson = 'AsGeoJSON' gml = 'AsGML' intersection = 'Intersection' kml = 'AsKML' length = 'GLength' # OpenGis defines Length, but this conflicts with an SQLite reserved keyword makeline = 'MakeLine' num_geom = 'NumGeometries' num_points = 'NumPoints' point_on_surface = 'PointOnSurface' scale = 'ScaleCoords' svg = 'AsSVG' sym_difference = 'SymDifference' transform = 'Transform' translate = 'ShiftCoords' union = 'GUnion' # OpenGis defines Union, but this conflicts with an SQLite reserved keyword unionagg = 'GUnion' from_text = 'GeomFromText' from_wkb = 'GeomFromWKB' select = 'AsText(%s)' gis_operators = { 'equals': SpatialOperator(func='Equals'), 'disjoint': SpatialOperator(func='Disjoint'), 'dwithin': SpatialOperator(func='PtDistWithin'), 'touches': SpatialOperator(func='Touches'), 'crosses': SpatialOperator(func='Crosses'), 'within': SpatialOperator(func='Within'), 'overlaps': SpatialOperator(func='Overlaps'), 'contains': SpatialOperator(func='Contains'), 'intersects': SpatialOperator(func='Intersects'), 'relate': SpatialOperator(func='Relate'), # Returns true if B's bounding box completely contains A's bounding box. 'contained': SpatialOperator(func='MbrWithin'), # Returns true if A's bounding box completely contains B's bounding box. 'bbcontains': SpatialOperator(func='MbrContains'), # Returns true if A's bounding box overlaps B's bounding box. 
'bboverlaps': SpatialOperator(func='MbrOverlaps'), # These are implemented here as synonyms for Equals 'same_as': SpatialOperator(func='Equals'), 'exact': SpatialOperator(func='Equals'), 'distance_gt': SpatialOperator(func='Distance', op='>'), 'distance_gte': SpatialOperator(func='Distance', op='>='), 'distance_lt': SpatialOperator(func='Distance', op='<'), 'distance_lte': SpatialOperator(func='Distance', op='<='), } disallowed_aggregates = (aggregates.Extent3D,) @cached_property def function_names(self): return { 'Length': 'ST_Length', 'Reverse': 'ST_Reverse', 'Scale': 'ScaleCoords', 'Translate': 'ST_Translate', 'Union': 'ST_Union', } @cached_property def unsupported_functions(self): unsupported = {'BoundingCircle', 'ForceRHR', 'IsValid', 'MakeValid', 'MemSize'} if not self.lwgeom_version(): unsupported.add('GeoHash') return unsupported @cached_property def spatial_version(self): """Determine the version of the SpatiaLite library.""" try: version = self.spatialite_version_tuple()[1:] except Exception as msg: new_msg = ( 'Cannot determine the SpatiaLite version for the "%s" ' 'database (error was "%s"). Was the SpatiaLite initialization ' 'SQL loaded on this database?') % (self.connection.settings_dict['NAME'], msg) six.reraise(ImproperlyConfigured, ImproperlyConfigured(new_msg), sys.exc_info()[2]) if version < (4, 0, 0): raise ImproperlyConfigured('GeoDjango only supports SpatiaLite versions 4.0.0 and above.') return version def convert_extent(self, box, srid): """ Convert the polygon data received from SpatiaLite to min/max values. """ if box is None: return None shell = Geometry(box, srid).shell xmin, ymin = shell[0][:2] xmax, ymax = shell[2][:2] return (xmin, ymin, xmax, ymax) def convert_geom(self, wkt, geo_field): """ Converts geometry WKT returned from a SpatiaLite aggregate. """ if wkt: return Geometry(wkt, geo_field.srid) else: return None def geo_db_type(self, f): """ Returns None because geometry columns are added via the `AddGeometryColumn` stored procedure on SpatiaLite. """ return None def get_distance(self, f, value, lookup_type, **kwargs): """ Returns the distance parameters for the given geometry field, lookup value, and lookup type. SpatiaLite only supports regular cartesian-based queries (no spheroid/sphere calculations for point geometries like PostGIS). """ if not value: return [] value = value[0] if isinstance(value, Distance): if f.geodetic(self.connection): raise ValueError('SpatiaLite does not support distance queries on ' 'geometry fields with a geodetic coordinate system. ' 'Distance objects; use a numeric value of your ' 'distance in degrees instead.') else: dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection))) else: dist_param = value return [dist_param] def get_geom_placeholder(self, f, value, compiler): """ Provides a proper substitution value for Geometries that are not in the SRID of the field. Specifically, this routine will substitute in the Transform() and GeomFromText() function call(s). """ def transform_value(value, srid): return not (value is None or value.srid == srid) if hasattr(value, 'as_sql'): if transform_value(value, f.srid): placeholder = '%s(%%s, %s)' % (self.transform, f.srid) else: placeholder = '%s' # No geometry value used for F expression, substitute in # the column name instead. sql, _ = compiler.compile(value) return placeholder % sql else: if transform_value(value, f.srid): # Adding Transform() to the SQL placeholder. 
return '%s(%s(%%s,%s), %s)' % (self.transform, self.from_text, value.srid, f.srid) else: return '%s(%%s,%s)' % (self.from_text, f.srid) def _get_spatialite_func(self, func): """ Helper routine for calling SpatiaLite functions and returning their result. Any error occurring in this method should be handled by the caller. """ cursor = self.connection._cursor() try: cursor.execute('SELECT %s' % func) row = cursor.fetchone() finally: cursor.close() return row[0] def geos_version(self): "Returns the version of GEOS used by SpatiaLite as a string." return self._get_spatialite_func('geos_version()') def proj4_version(self): "Returns the version of the PROJ.4 library used by SpatiaLite." return self._get_spatialite_func('proj4_version()') def lwgeom_version(self): """Return the version of LWGEOM library used by SpatiaLite.""" return self._get_spatialite_func('lwgeom_version()') def spatialite_version(self): "Returns the SpatiaLite library version as a string." return self._get_spatialite_func('spatialite_version()') def spatialite_version_tuple(self): """ Returns the SpatiaLite version as a tuple (version string, major, minor, subminor). """ version = self.spatialite_version() m = self.version_regex.match(version) if m: major = int(m.group('major')) minor1 = int(m.group('minor1')) minor2 = int(m.group('minor2')) else: raise Exception('Could not parse SpatiaLite version string: %s' % version) return (version, major, minor1, minor2) def spatial_aggregate_name(self, agg_name): """ Returns the spatial aggregate SQL template and function for the given Aggregate instance. """ agg_name = 'unionagg' if agg_name.lower() == 'union' else agg_name.lower() return getattr(self, agg_name) # Routines for getting the OGC-compliant models. def geometry_columns(self): from django.contrib.gis.db.backends.spatialite.models import SpatialiteGeometryColumns return SpatialiteGeometryColumns def spatial_ref_sys(self): from django.contrib.gis.db.backends.spatialite.models import SpatialiteSpatialRefSys return SpatialiteSpatialRefSys def get_db_converters(self, expression): converters = super(SpatiaLiteOperations, self).get_db_converters(expression) if hasattr(expression.output_field, 'geom_type'): converters.append(self.convert_geometry) return converters def convert_geometry(self, value, expression, connection, context): if value: value = Geometry(value) if 'transformed_srid' in context: value.srid = context['transformed_srid'] return value
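# Illustrative sketch (not part of the file above): what
# spatialite_version_tuple() pulls out of a raw version string, using the same
# regex as above; '4.3.0a' is only an example input.
import re

version_regex = re.compile(r'^(?P<major>\d)\.(?P<minor1>\d)\.(?P<minor2>\d+)')
m = version_regex.match('4.3.0a')
print(('4.3.0a', int(m.group('major')), int(m.group('minor1')), int(m.group('minor2'))))
# ('4.3.0a', 4, 3, 0)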
88d662c80355e7d9de37af3a422278b2139d6dc0d7c8207857ca66192918aa68
from django.db.backends.sqlite3.schema import DatabaseSchemaEditor from django.db.utils import DatabaseError class SpatialiteSchemaEditor(DatabaseSchemaEditor): sql_add_geometry_column = ( "SELECT AddGeometryColumn(%(table)s, %(column)s, %(srid)s, " "%(geom_type)s, %(dim)s, %(null)s)" ) sql_add_spatial_index = "SELECT CreateSpatialIndex(%(table)s, %(column)s)" sql_drop_spatial_index = "DROP TABLE idx_%(table)s_%(column)s" sql_recover_geometry_metadata = ( "SELECT RecoverGeometryColumn(%(table)s, %(column)s, %(srid)s, " "%(geom_type)s, %(dim)s)" ) sql_remove_geometry_metadata = "SELECT DiscardGeometryColumn(%(table)s, %(column)s)" sql_discard_geometry_columns = "DELETE FROM %(geom_table)s WHERE f_table_name = %(table)s" sql_update_geometry_columns = ( "UPDATE %(geom_table)s SET f_table_name = %(new_table)s " "WHERE f_table_name = %(old_table)s" ) geometry_tables = [ "geometry_columns", "geometry_columns_auth", "geometry_columns_time", "geometry_columns_statistics", ] def __init__(self, *args, **kwargs): super(SpatialiteSchemaEditor, self).__init__(*args, **kwargs) self.geometry_sql = [] def geo_quote_name(self, name): return self.connection.ops.geo_quote_name(name) def column_sql(self, model, field, include_default=False): from django.contrib.gis.db.models.fields import GeometryField if not isinstance(field, GeometryField): return super(SpatialiteSchemaEditor, self).column_sql(model, field, include_default) # Geometry columns are created by the `AddGeometryColumn` function self.geometry_sql.append( self.sql_add_geometry_column % { "table": self.geo_quote_name(model._meta.db_table), "column": self.geo_quote_name(field.column), "srid": field.srid, "geom_type": self.geo_quote_name(field.geom_type), "dim": field.dim, "null": int(not field.null), } ) if field.spatial_index: self.geometry_sql.append( self.sql_add_spatial_index % { "table": self.quote_name(model._meta.db_table), "column": self.quote_name(field.column), } ) return None, None def remove_geometry_metadata(self, model, field): self.execute( self.sql_remove_geometry_metadata % { "table": self.quote_name(model._meta.db_table), "column": self.quote_name(field.column), } ) self.execute( self.sql_drop_spatial_index % { "table": model._meta.db_table, "column": field.column, } ) def create_model(self, model): super(SpatialiteSchemaEditor, self).create_model(model) # Create geometry columns for sql in self.geometry_sql: self.execute(sql) self.geometry_sql = [] def delete_model(self, model, **kwargs): from django.contrib.gis.db.models.fields import GeometryField # Drop spatial metadata (dropping the table does not automatically remove them) for field in model._meta.local_fields: if isinstance(field, GeometryField): self.remove_geometry_metadata(model, field) # Make sure all geom stuff is gone for geom_table in self.geometry_tables: try: self.execute( self.sql_discard_geometry_columns % { "geom_table": geom_table, "table": self.quote_name(model._meta.db_table), } ) except DatabaseError: pass super(SpatialiteSchemaEditor, self).delete_model(model, **kwargs) def add_field(self, model, field): from django.contrib.gis.db.models.fields import GeometryField if isinstance(field, GeometryField): # Populate self.geometry_sql self.column_sql(model, field) for sql in self.geometry_sql: self.execute(sql) self.geometry_sql = [] else: super(SpatialiteSchemaEditor, self).add_field(model, field) def remove_field(self, model, field): from django.contrib.gis.db.models.fields import GeometryField # NOTE: If the field is a geometry field, the table is just 
recreated, # the parent's remove_field can't be used cause it will skip the # recreation if the field does not have a database type. Geometry fields # do not have a db type cause they are added and removed via stored # procedures. if isinstance(field, GeometryField): self._remake_table(model, delete_field=field) else: super(SpatialiteSchemaEditor, self).remove_field(model, field) def alter_db_table(self, model, old_db_table, new_db_table): from django.contrib.gis.db.models.fields import GeometryField # Remove geometry-ness from temp table for field in model._meta.local_fields: if isinstance(field, GeometryField): self.execute( self.sql_remove_geometry_metadata % { "table": self.quote_name(old_db_table), "column": self.quote_name(field.column), } ) # Alter table super(SpatialiteSchemaEditor, self).alter_db_table(model, old_db_table, new_db_table) # Repoint any straggler names for geom_table in self.geometry_tables: try: self.execute( self.sql_update_geometry_columns % { "geom_table": geom_table, "old_table": self.quote_name(old_db_table), "new_table": self.quote_name(new_db_table), } ) except DatabaseError: pass # Re-add geometry-ness and rename spatial index tables for field in model._meta.local_fields: if isinstance(field, GeometryField): self.execute(self.sql_recover_geometry_metadata % { "table": self.geo_quote_name(new_db_table), "column": self.geo_quote_name(field.column), "srid": field.srid, "geom_type": self.geo_quote_name(field.geom_type), "dim": field.dim, }) if getattr(field, 'spatial_index', False): self.execute(self.sql_rename_table % { "old_table": self.quote_name("idx_%s_%s" % (old_db_table, field.column)), "new_table": self.quote_name("idx_%s_%s" % (new_db_table, field.column)), })
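# Illustrative sketch (not part of the file above): the AddGeometryColumn call
# that column_sql() queues for a 2D, NOT NULL point column; the table and
# column names are made up, and the quoting mimics geo_quote_name().
sql_add_geometry_column = (
    "SELECT AddGeometryColumn(%(table)s, %(column)s, %(srid)s, "
    "%(geom_type)s, %(dim)s, %(null)s)"
)
print(sql_add_geometry_column % {
    'table': "'geo_city'", 'column': "'point'", 'srid': 4326,
    'geom_type': "'POINT'", 'dim': 2, 'null': 1,  # null=1 means NOT NULL here
})
# SELECT AddGeometryColumn('geo_city', 'point', 4326, 'POINT', 2, 1)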
b39e8b2d8e1f446ea51687355b5ef5148ec9bcc46193ea07b248417e370f9afd
from django.contrib.gis.db.backends.base.adapter import WKTAdapter
from django.db.backends.sqlite3.base import Database


class SpatiaLiteAdapter(WKTAdapter):
    "SQLite adaptor for geometry objects."

    def __conform__(self, protocol):
        if protocol is Database.PrepareProtocol:
            return str(self)
8ec0ee8e3e3582493ded10e97b52f751999e669b8c404f67ad09656575eb56a2
from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
from django.db.backends.postgresql.features import \
    DatabaseFeatures as Psycopg2DatabaseFeatures


class DatabaseFeatures(BaseSpatialFeatures, Psycopg2DatabaseFeatures):
    supports_3d_storage = True
    supports_3d_functions = True
    supports_left_right_lookups = True
    supports_raster = True
17252f4b4094c65e8510a65b57eeb126c262825d9ee4a8ace9d31ae544c8317a
from django.contrib.gis.gdal import OGRGeomType from django.db.backends.postgresql.introspection import DatabaseIntrospection class GeoIntrospectionError(Exception): pass class PostGISIntrospection(DatabaseIntrospection): # Reverse dictionary for PostGIS geometry types not populated until # introspection is actually performed. postgis_types_reverse = {} ignored_tables = DatabaseIntrospection.ignored_tables + [ 'geography_columns', 'geometry_columns', 'raster_columns', 'spatial_ref_sys', 'raster_overviews', ] # Overridden from parent to include raster indices in retrieval. # Raster indices have pg_index.indkey value 0 because they are an # expression over the raster column through the ST_ConvexHull function. # So the default query has to be adapted to include raster indices. _get_indexes_query = """ SELECT DISTINCT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary FROM pg_catalog.pg_class c, pg_catalog.pg_class c2, pg_catalog.pg_index idx, pg_catalog.pg_attribute attr, pg_catalog.pg_type t WHERE c.oid = idx.indrelid AND idx.indexrelid = c2.oid AND attr.attrelid = c.oid AND t.oid = attr.atttypid AND ( attr.attnum = idx.indkey[0] OR (t.typname LIKE 'raster' AND idx.indkey = '0') ) AND attr.attnum > 0 AND c.relname = %s""" def get_postgis_types(self): """ Returns a dictionary with keys that are the PostgreSQL object identification integers for the PostGIS geometry and/or geography types (if supported). """ field_types = [ ('geometry', 'GeometryField'), # The value for the geography type is actually a tuple # to pass in the `geography=True` keyword to the field # definition. ('geography', ('GeometryField', {'geography': True})), ] postgis_types = {} # The OID integers associated with the geometry type may # be different across versions; hence, this is why we have # to query the PostgreSQL pg_type table corresponding to the # PostGIS custom data types. oid_sql = 'SELECT "oid" FROM "pg_type" WHERE "typname" = %s' cursor = self.connection.cursor() try: for field_type in field_types: cursor.execute(oid_sql, (field_type[0],)) for result in cursor.fetchall(): postgis_types[result[0]] = field_type[1] finally: cursor.close() return postgis_types def get_field_type(self, data_type, description): if not self.postgis_types_reverse: # If the PostGIS types reverse dictionary is not populated, do so # now. In order to prevent unnecessary requests upon connection # initialization, the `data_types_reverse` dictionary is not updated # with the PostGIS custom types until introspection is actually # performed -- in other words, when this function is called. self.postgis_types_reverse = self.get_postgis_types() self.data_types_reverse.update(self.postgis_types_reverse) return super(PostGISIntrospection, self).get_field_type(data_type, description) def get_geometry_type(self, table_name, geo_col): """ The geometry type OID used by PostGIS does not indicate the particular type of field that a geometry column is (e.g., whether it's a PointField or a PolygonField). 
Thus, this routine queries the PostGIS metadata tables to determine the geometry type, """ cursor = self.connection.cursor() try: try: # First seeing if this geometry column is in the `geometry_columns` cursor.execute('SELECT "coord_dimension", "srid", "type" ' 'FROM "geometry_columns" ' 'WHERE "f_table_name"=%s AND "f_geometry_column"=%s', (table_name, geo_col)) row = cursor.fetchone() if not row: raise GeoIntrospectionError except GeoIntrospectionError: cursor.execute('SELECT "coord_dimension", "srid", "type" ' 'FROM "geography_columns" ' 'WHERE "f_table_name"=%s AND "f_geography_column"=%s', (table_name, geo_col)) row = cursor.fetchone() if not row: raise Exception('Could not find a geometry or geography column for "%s"."%s"' % (table_name, geo_col)) # OGRGeomType does not require GDAL and makes it easy to convert # from OGC geom type name to Django field. field_type = OGRGeomType(row[2]).django # Getting any GeometryField keyword arguments that are not the default. dim = row[0] srid = row[1] field_params = {} if srid != 4326: field_params['srid'] = srid if dim != 2: field_params['dim'] = dim finally: cursor.close() return field_type, field_params
205c42b2bce76417bd66068defd1cae57f328ed50ae3f46fa62776fa4357f87b
import binascii import struct from django.forms import ValidationError from .const import ( GDAL_TO_POSTGIS, GDAL_TO_STRUCT, POSTGIS_HEADER_STRUCTURE, POSTGIS_TO_GDAL, STRUCT_SIZE, ) def pack(structure, data): """ Pack data into hex string with little endian format. """ return binascii.hexlify(struct.pack('<' + structure, *data)).upper() def unpack(structure, data): """ Unpack little endian hexlified binary string into a list. """ return struct.unpack('<' + structure, binascii.unhexlify(data)) def chunk(data, index): """ Split a string into two parts at the input index. """ return data[:index], data[index:] def get_pgraster_srid(data): """ Extract the SRID from a PostGIS raster string. """ if data is None: return # The positional arguments here extract the hex-encoded srid from the # header of the PostGIS raster string. This can be understood through # the POSTGIS_HEADER_STRUCTURE constant definition in the const module. return unpack('i', data[106:114])[0] def from_pgraster(data): """ Convert a PostGIS HEX String into a dictionary. """ if data is None: return # Split raster header from data header, data = chunk(data, 122) header = unpack(POSTGIS_HEADER_STRUCTURE, header) # Parse band data bands = [] pixeltypes = [] while data: # Get pixel type for this band pixeltype, data = chunk(data, 2) pixeltype = unpack('B', pixeltype)[0] # Subtract nodata byte from band nodata value if it exists has_nodata = pixeltype >= 64 if has_nodata: pixeltype -= 64 # Convert datatype from PostGIS to GDAL & get pack type and size pixeltype = POSTGIS_TO_GDAL[pixeltype] pack_type = GDAL_TO_STRUCT[pixeltype] pack_size = 2 * STRUCT_SIZE[pack_type] # Parse band nodata value. The nodata value is part of the # PGRaster string even if the nodata flag is True, so it always # has to be chunked off the data string. nodata, data = chunk(data, pack_size) nodata = unpack(pack_type, nodata)[0] # Chunk and unpack band data (pack size times nr of pixels) band, data = chunk(data, pack_size * header[10] * header[11]) band_result = {'data': binascii.unhexlify(band)} # If the nodata flag is True, set the nodata value. if has_nodata: band_result['nodata_value'] = nodata # Append band data to band list bands.append(band_result) # Store pixeltype of this band in pixeltypes array pixeltypes.append(pixeltype) # Check that all bands have the same pixeltype. # This is required by GDAL. PostGIS rasters could have different pixeltypes # for bands of the same raster. if len(set(pixeltypes)) != 1: raise ValidationError("Band pixeltypes are not all equal.") return { 'srid': int(header[9]), 'width': header[10], 'height': header[11], 'datatype': pixeltypes[0], 'origin': (header[5], header[6]), 'scale': (header[3], header[4]), 'skew': (header[7], header[8]), 'bands': bands, } def to_pgraster(rast): """ Convert a GDALRaster into PostGIS Raster format. """ # Return if the raster is null if rast is None or rast == '': return # Prepare the raster header data as a tuple. The first two numbers are # the endianness and the PostGIS Raster Version, both are fixed by # PostGIS at the moment. rasterheader = ( 1, 0, len(rast.bands), rast.scale.x, rast.scale.y, rast.origin.x, rast.origin.y, rast.skew.x, rast.skew.y, rast.srs.srid, rast.width, rast.height, ) # Hexlify raster header result = pack(POSTGIS_HEADER_STRUCTURE, rasterheader) for band in rast.bands: # The PostGIS raster band header has exactly two elements, a 8BUI byte # and the nodata value. # # The 8BUI stores both the PostGIS pixel data type and a nodata flag. 
# It is composed as the datatype integer plus 64 as a flag for existing # nodata values: # 8BUI_VALUE = PG_PIXEL_TYPE (0-11) + FLAG (0 or 64) # # For example, if the byte value is 71, then the datatype is # 71-64 = 7 (32BSI) and the nodata value is True. structure = 'B' + GDAL_TO_STRUCT[band.datatype()] # Get band pixel type in PostGIS notation pixeltype = GDAL_TO_POSTGIS[band.datatype()] # Set the nodata flag if band.nodata_value is not None: pixeltype += 64 # Pack band header bandheader = pack(structure, (pixeltype, band.nodata_value or 0)) # Hexlify band data band_data_hex = binascii.hexlify(band.data(as_memoryview=True)).upper() # Add packed header and band data to result result += bandheader + band_data_hex # Cast raster to string before passing it to the DB return result.decode()
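# Illustrative sketch (not part of the file above): the little-endian hex
# packing used by pack()/unpack(), re-declared here so the snippet runs on its
# own and round-tripped on a made-up band header. 'Bi' is a stand-in struct
# layout (pixel-type byte plus a 32-bit nodata value); the real code derives
# the layout from GDAL_TO_STRUCT.
import binascii
import struct

def pack(structure, data):
    return binascii.hexlify(struct.pack('<' + structure, *data)).upper()

def unpack(structure, data):
    return struct.unpack('<' + structure, binascii.unhexlify(data))

header = pack('Bi', (71, -9999))  # 7 (32BSI) + 64 nodata flag, nodata value
print(header)                     # uppercase hex, little endian
print(unpack('Bi', header))       # (71, -9999)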
a8cf23a14d0754d0fb4f14f834e13157bbd1d798c121e19cda3a02329fcaf128
""" The GeometryColumns and SpatialRefSys models for the PostGIS backend. """ from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin from django.db import models from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class PostGISGeometryColumns(models.Model): """ The 'geometry_columns' view from PostGIS. See the PostGIS documentation at Ch. 4.3.2. """ f_table_catalog = models.CharField(max_length=256) f_table_schema = models.CharField(max_length=256) f_table_name = models.CharField(max_length=256) f_geometry_column = models.CharField(max_length=256) coord_dimension = models.IntegerField() srid = models.IntegerField(primary_key=True) type = models.CharField(max_length=30) class Meta: app_label = 'gis' db_table = 'geometry_columns' managed = False @classmethod def table_name_col(cls): """ Returns the name of the metadata column used to store the feature table name. """ return 'f_table_name' @classmethod def geom_col_name(cls): """ Returns the name of the metadata column used to store the feature geometry column. """ return 'f_geometry_column' def __str__(self): return "%s.%s - %dD %s field (SRID: %d)" % \ (self.f_table_name, self.f_geometry_column, self.coord_dimension, self.type, self.srid) class PostGISSpatialRefSys(models.Model, SpatialRefSysMixin): """ The 'spatial_ref_sys' table from PostGIS. See the PostGIS documentation at Ch. 4.2.1. """ srid = models.IntegerField(primary_key=True) auth_name = models.CharField(max_length=256) auth_srid = models.IntegerField() srtext = models.CharField(max_length=2048) proj4text = models.CharField(max_length=2048) class Meta: app_label = 'gis' db_table = 'spatial_ref_sys' managed = False @property def wkt(self): return self.srtext
b09584720fd0e02a9d1c6e869778ad0a55935b0aace7fd4043503b71a37bf8c9
from django.db.backends.base.base import NO_DB_ALIAS
from django.db.backends.postgresql.base import \
    DatabaseWrapper as Psycopg2DatabaseWrapper

from .features import DatabaseFeatures
from .introspection import PostGISIntrospection
from .operations import PostGISOperations
from .schema import PostGISSchemaEditor


class DatabaseWrapper(Psycopg2DatabaseWrapper):
    SchemaEditorClass = PostGISSchemaEditor

    def __init__(self, *args, **kwargs):
        super(DatabaseWrapper, self).__init__(*args, **kwargs)
        if kwargs.get('alias', '') != NO_DB_ALIAS:
            self.features = DatabaseFeatures(self)
            self.ops = PostGISOperations(self)
            self.introspection = PostGISIntrospection(self)

    def prepare_database(self):
        super(DatabaseWrapper, self).prepare_database()
        # Check that postgis extension is installed.
        with self.cursor() as cursor:
            cursor.execute("CREATE EXTENSION IF NOT EXISTS postgis")
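# Illustrative sketch (not part of the file above): a minimal DATABASES entry
# for this backend; the connection details are placeholders. prepare_database()
# then issues CREATE EXTENSION IF NOT EXISTS postgis when Django prepares the
# database (for example, during migrate).
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'geodjango',
        'USER': 'geo',
        'HOST': 'localhost',
    }
}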
57a8a0164c63d93cc71eb0c1cc19ddeb7e8d3a7c7313d5ebb1a6fd6b83349f79
import re from django.conf import settings from django.contrib.gis.db.backends.base.operations import \ BaseSpatialOperations from django.contrib.gis.db.backends.utils import SpatialOperator from django.contrib.gis.gdal import GDALRaster from django.contrib.gis.geometry.backend import Geometry from django.contrib.gis.measure import Distance from django.core.exceptions import ImproperlyConfigured from django.db.backends.postgresql.operations import DatabaseOperations from django.db.utils import ProgrammingError from django.utils import six from django.utils.functional import cached_property from .adapter import PostGISAdapter from .models import PostGISGeometryColumns, PostGISSpatialRefSys from .pgraster import from_pgraster, get_pgraster_srid, to_pgraster # Identifier to mark raster lookups as bilateral. BILATERAL = 'bilateral' class PostGISOperator(SpatialOperator): def __init__(self, geography=False, raster=False, **kwargs): # Only a subset of the operators and functions are available for the # geography type. self.geography = geography # Only a subset of the operators and functions are available for the # raster type. Lookups that don't suport raster will be converted to # polygons. If the raster argument is set to BILATERAL, then the # operator cannot handle mixed geom-raster lookups. self.raster = raster super(PostGISOperator, self).__init__(**kwargs) def as_sql(self, connection, lookup, template_params, *args): if lookup.lhs.output_field.geography and not self.geography: raise ValueError('PostGIS geography does not support the "%s" ' 'function/operator.' % (self.func or self.op,)) template_params = self.check_raster(lookup, template_params) return super(PostGISOperator, self).as_sql(connection, lookup, template_params, *args) def check_raster(self, lookup, template_params): # Get rhs value. if isinstance(lookup.rhs, (tuple, list)): rhs_val = lookup.rhs[0] spheroid = lookup.rhs[-1] == 'spheroid' else: rhs_val = lookup.rhs spheroid = False # Check which input is a raster. lhs_is_raster = lookup.lhs.field.geom_type == 'RASTER' rhs_is_raster = isinstance(rhs_val, GDALRaster) # Look for band indices and inject them if provided. if lookup.band_lhs is not None and lhs_is_raster: if not self.func: raise ValueError('Band indices are not allowed for this operator, it works on bbox only.') template_params['lhs'] = '%s, %s' % (template_params['lhs'], lookup.band_lhs) if lookup.band_rhs is not None and rhs_is_raster: if not self.func: raise ValueError('Band indices are not allowed for this operator, it works on bbox only.') template_params['rhs'] = '%s, %s' % (template_params['rhs'], lookup.band_rhs) # Convert rasters to polygons if necessary. if not self.raster or spheroid: # Operators without raster support. if lhs_is_raster: template_params['lhs'] = 'ST_Polygon(%s)' % template_params['lhs'] if rhs_is_raster: template_params['rhs'] = 'ST_Polygon(%s)' % template_params['rhs'] elif self.raster == BILATERAL: # Operators with raster support but don't support mixed (rast-geom) # lookups. 
if lhs_is_raster and not rhs_is_raster: template_params['lhs'] = 'ST_Polygon(%s)' % template_params['lhs'] elif rhs_is_raster and not lhs_is_raster: template_params['rhs'] = 'ST_Polygon(%s)' % template_params['rhs'] return template_params class PostGISDistanceOperator(PostGISOperator): sql_template = '%(func)s(%(lhs)s, %(rhs)s) %(op)s %(value)s' def as_sql(self, connection, lookup, template_params, sql_params): if not lookup.lhs.output_field.geography and lookup.lhs.output_field.geodetic(connection): template_params = self.check_raster(lookup, template_params) sql_template = self.sql_template if len(lookup.rhs) == 3 and lookup.rhs[-1] == 'spheroid': template_params.update({'op': self.op, 'func': 'ST_Distance_Spheroid'}) sql_template = '%(func)s(%(lhs)s, %(rhs)s, %%s) %(op)s %(value)s' # Using distance_spheroid requires the spheroid of the field as # a parameter. sql_params.insert(1, lookup.lhs.output_field._spheroid) else: template_params.update({'op': self.op, 'func': 'ST_Distance_Sphere'}) return sql_template % template_params, sql_params return super(PostGISDistanceOperator, self).as_sql(connection, lookup, template_params, sql_params) class PostGISOperations(BaseSpatialOperations, DatabaseOperations): name = 'postgis' postgis = True geography = True geom_func_prefix = 'ST_' version_regex = re.compile(r'^(?P<major>\d)\.(?P<minor1>\d)\.(?P<minor2>\d+)') Adapter = PostGISAdapter gis_operators = { 'bbcontains': PostGISOperator(op='~', raster=True), 'bboverlaps': PostGISOperator(op='&&', geography=True, raster=True), 'contained': PostGISOperator(op='@', raster=True), 'overlaps_left': PostGISOperator(op='&<', raster=BILATERAL), 'overlaps_right': PostGISOperator(op='&>', raster=BILATERAL), 'overlaps_below': PostGISOperator(op='&<|'), 'overlaps_above': PostGISOperator(op='|&>'), 'left': PostGISOperator(op='<<'), 'right': PostGISOperator(op='>>'), 'strictly_below': PostGISOperator(op='<<|'), 'strictly_above': PostGISOperator(op='|>>'), 'same_as': PostGISOperator(op='~=', raster=BILATERAL), 'exact': PostGISOperator(op='~=', raster=BILATERAL), # alias of same_as 'contains': PostGISOperator(func='ST_Contains', raster=BILATERAL), 'contains_properly': PostGISOperator(func='ST_ContainsProperly', raster=BILATERAL), 'coveredby': PostGISOperator(func='ST_CoveredBy', geography=True, raster=BILATERAL), 'covers': PostGISOperator(func='ST_Covers', geography=True, raster=BILATERAL), 'crosses': PostGISOperator(func='ST_Crosses'), 'disjoint': PostGISOperator(func='ST_Disjoint', raster=BILATERAL), 'equals': PostGISOperator(func='ST_Equals'), 'intersects': PostGISOperator(func='ST_Intersects', geography=True, raster=BILATERAL), 'isvalid': PostGISOperator(func='ST_IsValid'), 'overlaps': PostGISOperator(func='ST_Overlaps', raster=BILATERAL), 'relate': PostGISOperator(func='ST_Relate'), 'touches': PostGISOperator(func='ST_Touches', raster=BILATERAL), 'within': PostGISOperator(func='ST_Within', raster=BILATERAL), 'dwithin': PostGISOperator(func='ST_DWithin', geography=True, raster=BILATERAL), 'distance_gt': PostGISDistanceOperator(func='ST_Distance', op='>', geography=True), 'distance_gte': PostGISDistanceOperator(func='ST_Distance', op='>=', geography=True), 'distance_lt': PostGISDistanceOperator(func='ST_Distance', op='<', geography=True), 'distance_lte': PostGISDistanceOperator(func='ST_Distance', op='<=', geography=True), } unsupported_functions = set() function_names = { 'BoundingCircle': 'ST_MinimumBoundingCircle', 'MemSize': 'ST_Mem_Size', 'NumPoints': 'ST_NPoints', } def __init__(self, connection): 
super(PostGISOperations, self).__init__(connection) prefix = self.geom_func_prefix self.area = prefix + 'Area' self.bounding_circle = prefix + 'MinimumBoundingCircle' self.centroid = prefix + 'Centroid' self.collect = prefix + 'Collect' self.difference = prefix + 'Difference' self.distance = prefix + 'Distance' self.distance_sphere = prefix + 'distance_sphere' self.distance_spheroid = prefix + 'distance_spheroid' self.envelope = prefix + 'Envelope' self.extent = prefix + 'Extent' self.extent3d = prefix + '3DExtent' self.force_rhr = prefix + 'ForceRHR' self.geohash = prefix + 'GeoHash' self.geojson = prefix + 'AsGeoJson' self.gml = prefix + 'AsGML' self.intersection = prefix + 'Intersection' self.isvalid = prefix + 'IsValid' self.kml = prefix + 'AsKML' self.length = prefix + 'Length' self.length3d = prefix + '3DLength' self.length_spheroid = prefix + 'length_spheroid' self.makeline = prefix + 'MakeLine' self.makevalid = prefix + 'MakeValid' self.mem_size = prefix + 'mem_size' self.num_geom = prefix + 'NumGeometries' self.num_points = prefix + 'npoints' self.perimeter = prefix + 'Perimeter' self.perimeter3d = prefix + '3DPerimeter' self.point_on_surface = prefix + 'PointOnSurface' self.polygonize = prefix + 'Polygonize' self.reverse = prefix + 'Reverse' self.scale = prefix + 'Scale' self.snap_to_grid = prefix + 'SnapToGrid' self.svg = prefix + 'AsSVG' self.sym_difference = prefix + 'SymDifference' self.transform = prefix + 'Transform' self.translate = prefix + 'Translate' self.union = prefix + 'Union' self.unionagg = prefix + 'Union' @cached_property def spatial_version(self): """Determine the version of the PostGIS library.""" # Trying to get the PostGIS version because the function # signatures will depend on the version used. The cost # here is a database query to determine the version, which # can be mitigated by setting `POSTGIS_VERSION` with a 3-tuple # comprising user-supplied values for the major, minor, and # subminor revision of PostGIS. if hasattr(settings, 'POSTGIS_VERSION'): version = settings.POSTGIS_VERSION else: # Run a basic query to check the status of the connection so we're # sure we only raise the error below if the problem comes from # PostGIS and not from PostgreSQL itself (see #24862). self._get_postgis_func('version') try: vtup = self.postgis_version_tuple() except ProgrammingError: raise ImproperlyConfigured( 'Cannot determine PostGIS version for database "%s" ' 'using command "SELECT postgis_lib_version()". ' 'GeoDjango requires at least PostGIS version 2.1. ' 'Was the database created from a spatial database ' 'template?' % self.connection.settings_dict['NAME'] ) version = vtup[1:] return version def convert_extent(self, box, srid): """ Returns a 4-tuple extent for the `Extent` aggregate by converting the bounding box text returned by PostGIS (`box` argument), for example: "BOX(-90.0 30.0, -85.0 40.0)". """ if box is None: return None ll, ur = box[4:-1].split(',') xmin, ymin = map(float, ll.split()) xmax, ymax = map(float, ur.split()) return (xmin, ymin, xmax, ymax) def convert_extent3d(self, box3d, srid): """ Returns a 6-tuple extent for the `Extent3D` aggregate by converting the 3d bounding-box text returned by PostGIS (`box3d` argument), for example: "BOX3D(-90.0 30.0 1, -85.0 40.0 2)". 
""" if box3d is None: return None ll, ur = box3d[6:-1].split(',') xmin, ymin, zmin = map(float, ll.split()) xmax, ymax, zmax = map(float, ur.split()) return (xmin, ymin, zmin, xmax, ymax, zmax) def convert_geom(self, hex, geo_field): """ Converts the geometry returned from PostGIS aggregates. """ if hex: return Geometry(hex, srid=geo_field.srid) else: return None def geo_db_type(self, f): """ Return the database field type for the given spatial field. """ if f.geom_type == 'RASTER': return 'raster' # Type-based geometries. # TODO: Support 'M' extension. if f.dim == 3: geom_type = f.geom_type + 'Z' else: geom_type = f.geom_type if f.geography: if f.srid != 4326: raise NotImplementedError('PostGIS only supports geography columns with an SRID of 4326.') return 'geography(%s,%d)' % (geom_type, f.srid) else: return 'geometry(%s,%d)' % (geom_type, f.srid) def get_distance(self, f, dist_val, lookup_type, handle_spheroid=True): """ Retrieve the distance parameters for the given geometry field, distance lookup value, and the distance lookup type. This is the most complex implementation of the spatial backends due to what is supported on geodetic geometry columns vs. what's available on projected geometry columns. In addition, it has to take into account the geography column type. """ # Getting the distance parameter value = dist_val[0] # Shorthand boolean flags. geodetic = f.geodetic(self.connection) geography = f.geography if isinstance(value, Distance): if geography: dist_param = value.m elif geodetic: if lookup_type == 'dwithin': raise ValueError('Only numeric values of degree units are ' 'allowed on geographic DWithin queries.') dist_param = value.m else: dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection))) else: # Assuming the distance is in the units of the field. dist_param = value params = [dist_param] # handle_spheroid *might* be dropped in Django 2.0 as PostGISDistanceOperator # also handles it (#25524). if handle_spheroid and len(dist_val) > 1: option = dist_val[1] if not geography and geodetic and lookup_type != 'dwithin' and option == 'spheroid': # using distance_spheroid requires the spheroid of the field as # a parameter. params.insert(0, f._spheroid) return params def get_geom_placeholder(self, f, value, compiler): """ Provide a proper substitution value for Geometries or rasters that are not in the SRID of the field. Specifically, this routine will substitute in the ST_Transform() function call. """ # Get the srid for this object if value is None: value_srid = None elif f.geom_type == 'RASTER' and isinstance(value, six.string_types): value_srid = get_pgraster_srid(value) else: value_srid = value.srid # Adding Transform() to the SQL placeholder if the value srid # is not equal to the field srid. if value_srid is None or value_srid == f.srid: placeholder = '%s' elif f.geom_type == 'RASTER' and isinstance(value, six.string_types): placeholder = '%s((%%s)::raster, %s)' % (self.transform, f.srid) else: placeholder = '%s(%%s, %s)' % (self.transform, f.srid) if hasattr(value, 'as_sql'): # If this is an F expression, then we don't really want # a placeholder and instead substitute in the column # of the expression. sql, _ = compiler.compile(value) placeholder = placeholder % sql return placeholder def _get_postgis_func(self, func): """ Helper routine for calling PostGIS functions and returning their result. """ # Close out the connection. See #9437. 
with self.connection.temporary_connection() as cursor: cursor.execute('SELECT %s()' % func) return cursor.fetchone()[0] def postgis_geos_version(self): "Returns the version of the GEOS library used with PostGIS." return self._get_postgis_func('postgis_geos_version') def postgis_lib_version(self): "Returns the version number of the PostGIS library used with PostgreSQL." return self._get_postgis_func('postgis_lib_version') def postgis_proj_version(self): "Returns the version of the PROJ.4 library used with PostGIS." return self._get_postgis_func('postgis_proj_version') def postgis_version(self): "Returns PostGIS version number and compile-time options." return self._get_postgis_func('postgis_version') def postgis_full_version(self): "Returns PostGIS version number and compile-time options." return self._get_postgis_func('postgis_full_version') def postgis_version_tuple(self): """ Returns the PostGIS version as a tuple (version string, major, minor, subminor). """ # Getting the PostGIS version version = self.postgis_lib_version() m = self.version_regex.match(version) if m: major = int(m.group('major')) minor1 = int(m.group('minor1')) minor2 = int(m.group('minor2')) else: raise Exception('Could not parse PostGIS version string: %s' % version) return (version, major, minor1, minor2) def proj_version_tuple(self): """ Return the version of PROJ.4 used by PostGIS as a tuple of the major, minor, and subminor release numbers. """ proj_regex = re.compile(r'(\d+)\.(\d+)\.(\d+)') proj_ver_str = self.postgis_proj_version() m = proj_regex.search(proj_ver_str) if m: return tuple(map(int, [m.group(1), m.group(2), m.group(3)])) else: raise Exception('Could not determine PROJ.4 version from PostGIS.') def spatial_aggregate_name(self, agg_name): if agg_name == 'Extent3D': return self.extent3d else: return self.geom_func_prefix + agg_name # Routines for getting the OGC-compliant models. def geometry_columns(self): return PostGISGeometryColumns def spatial_ref_sys(self): return PostGISSpatialRefSys # Methods to convert between PostGIS rasters and dicts that are # readable by GDALRaster. def parse_raster(self, value): return from_pgraster(value) def deconstruct_raster(self, value): return to_pgraster(value)
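# Illustrative sketch: how the text returned by the `Extent` aggregate is turned
# into a 4-tuple, mirroring convert_extent() above. Standalone; the BOX string
# below is a made-up sample value.
def _parse_box_example():
    box = 'BOX(-90.0 30.0,-85.0 40.0)'
    ll, ur = box[4:-1].split(',')
    xmin, ymin = map(float, ll.split())
    xmax, ymax = map(float, ur.split())
    return (xmin, ymin, xmax, ymax)  # (-90.0, 30.0, -85.0, 40.0)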
306a5bfc9dfec6e36149cd0c82544fdf8fc5be80a787642cd9dc0c25ebdc7836
from django.db.backends.postgresql.schema import DatabaseSchemaEditor class PostGISSchemaEditor(DatabaseSchemaEditor): geom_index_type = 'GIST' geom_index_ops_nd = 'GIST_GEOMETRY_OPS_ND' rast_index_wrapper = 'ST_ConvexHull(%s)' sql_alter_column_to_3d = "ALTER COLUMN %(column)s TYPE %(type)s USING ST_Force3D(%(column)s)::%(type)s" sql_alter_column_to_2d = "ALTER COLUMN %(column)s TYPE %(type)s USING ST_Force2D(%(column)s)::%(type)s" def geo_quote_name(self, name): return self.connection.ops.geo_quote_name(name) def _field_should_be_indexed(self, model, field): if getattr(field, 'spatial_index', False): return True return super(PostGISSchemaEditor, self)._field_should_be_indexed(model, field) def _create_index_sql(self, model, fields, suffix="", sql=None): if len(fields) != 1 or not hasattr(fields[0], 'geodetic'): return super(PostGISSchemaEditor, self)._create_index_sql(model, fields, suffix=suffix, sql=sql) field = fields[0] field_column = self.quote_name(field.column) if field.geom_type == 'RASTER': # For raster fields, wrap index creation SQL statement with ST_ConvexHull. # Indexes on raster columns are based on the convex hull of the raster. field_column = self.rast_index_wrapper % field_column elif field.dim > 2 and not field.geography: # Use "nd" ops which are fast on multidimensional cases field_column = "%s %s" % (field_column, self.geom_index_ops_nd) return self.sql_create_index % { "name": self.quote_name('%s_%s_id' % (model._meta.db_table, field.column)), "table": self.quote_name(model._meta.db_table), "using": "USING %s" % self.geom_index_type, "columns": field_column, "extra": '', } def _alter_column_type_sql(self, table, old_field, new_field, new_type): """ Special case when dimension changed. """ if not hasattr(old_field, 'dim') or not hasattr(new_field, 'dim'): return super(PostGISSchemaEditor, self)._alter_column_type_sql( table, old_field, new_field, new_type ) if old_field.dim == 2 and new_field.dim == 3: sql_alter = self.sql_alter_column_to_3d elif old_field.dim == 3 and new_field.dim == 2: sql_alter = self.sql_alter_column_to_2d else: sql_alter = self.sql_alter_column_type return ( ( sql_alter % { "column": self.quote_name(new_field.column), "type": new_type, }, [], ), [], )
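# Illustrative sketch: how _create_index_sql() above fills the index template for
# a geometry column. The template string and identifiers below are hypothetical
# stand-ins for the values Django supplies at runtime.
def _spatial_index_sql_example():
    sql_create_index = 'CREATE INDEX %(name)s ON %(table)s %(using)s (%(columns)s)%(extra)s'
    return sql_create_index % {
        'name': '"myapp_city_point_id"',
        'table': '"myapp_city"',
        'using': 'USING GIST',
        'columns': '"point"',
        'extra': '',
    }
    # -> 'CREATE INDEX "myapp_city_point_id" ON "myapp_city" USING GIST ("point")'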
b7627902e6e0e703b62ab8500eaed0ecd90995f54cc5724327c8b13ad25b3a1a
""" This object provides quoting for GEOS geometries into PostgreSQL/PostGIS. """ from __future__ import unicode_literals from psycopg2 import Binary from psycopg2.extensions import ISQLQuote from django.contrib.gis.db.backends.postgis.pgraster import to_pgraster from django.contrib.gis.geometry.backend import Geometry class PostGISAdapter(object): def __init__(self, obj, geography=False): """ Initialize on the spatial object. """ self.is_geometry = isinstance(obj, (Geometry, PostGISAdapter)) # Getting the WKB (in string form, to allow easy pickling of # the adaptor) and the SRID from the geometry or raster. if self.is_geometry: self.ewkb = bytes(obj.ewkb) self._adapter = Binary(self.ewkb) else: self.ewkb = to_pgraster(obj) self.srid = obj.srid self.geography = geography def __conform__(self, proto): # Does the given protocol conform to what Psycopg2 expects? if proto == ISQLQuote: return self else: raise Exception('Error implementing psycopg2 protocol. Is psycopg2 installed?') def __eq__(self, other): if not isinstance(other, PostGISAdapter): return False return (self.ewkb == other.ewkb) and (self.srid == other.srid) def __hash__(self): return hash((self.ewkb, self.srid)) def __str__(self): return self.getquoted() def prepare(self, conn): """ This method allows escaping the binary in the style required by the server's `standard_conforming_string` setting. """ if self.is_geometry: self._adapter.prepare(conn) def getquoted(self): """ Return a properly quoted string for use in PostgreSQL/PostGIS. """ if self.is_geometry: # Psycopg will figure out whether to use E'\\000' or '\000'. return str('%s(%s)' % ( 'ST_GeogFromWKB' if self.geography else 'ST_GeomFromEWKB', self._adapter.getquoted().decode()) ) else: # For rasters, add explicit type cast to WKB string. return "'%s'::raster" % self.ewkb
ff7b7fccac8e980df07836606185d84a21e26aecdd541c3dc737117950a29922
import argparse from django.contrib.gis import gdal from django.core.management.base import BaseCommand, CommandError from django.utils.inspect import get_func_args class LayerOptionAction(argparse.Action): """ Custom argparse action for the `ogrinspect` `layer_key` keyword option which may be an integer or a string. """ def __call__(self, parser, namespace, value, option_string=None): try: setattr(namespace, self.dest, int(value)) except ValueError: setattr(namespace, self.dest, value) class ListOptionAction(argparse.Action): """ Custom argparse action for `ogrinspect` keywords that require a string list. If the string is 'True'/'true' then the option value will be a boolean instead. """ def __call__(self, parser, namespace, value, option_string=None): if value.lower() == 'true': setattr(namespace, self.dest, True) else: setattr(namespace, self.dest, value.split(',')) class Command(BaseCommand): help = ( 'Inspects the given OGR-compatible data source (e.g., a shapefile) and outputs\n' 'a GeoDjango model with the given model name. For example:\n' ' ./manage.py ogrinspect zipcode.shp Zipcode' ) requires_system_checks = False def add_arguments(self, parser): parser.add_argument('data_source', help='Path to the data source.') parser.add_argument('model_name', help='Name of the model to create.') parser.add_argument( '--blank', dest='blank', action=ListOptionAction, default=False, help='Use a comma separated list of OGR field names to add ' 'the `blank=True` option to the field definition. Set to `true` ' 'to apply to all applicable fields.', ) parser.add_argument( '--decimal', dest='decimal', action=ListOptionAction, default=False, help='Use a comma separated list of OGR float fields to ' 'generate `DecimalField` instead of the default ' '`FloatField`. Set to `true` to apply to all OGR float fields.', ) parser.add_argument( '--geom-name', dest='geom_name', default='geom', help='Specifies the model name for the Geometry Field (defaults to `geom`)' ) parser.add_argument( '--layer', dest='layer_key', action=LayerOptionAction, default=0, help='The key for specifying which layer in the OGR data ' 'source to use. Defaults to 0 (the first layer). May be ' 'an integer or a string identifier for the layer.', ) parser.add_argument( '--multi-geom', action='store_true', dest='multi_geom', default=False, help='Treat the geometry in the data source as a geometry collection.', ) parser.add_argument( '--name-field', dest='name_field', help='Specifies a field name to return for the `__unicode__`/`__str__` function.', ) parser.add_argument( '--no-imports', action='store_false', dest='imports', default=True, help='Do not include `from django.contrib.gis.db import models` statement.', ) parser.add_argument( '--null', dest='null', action=ListOptionAction, default=False, help='Use a comma separated list of OGR field names to add ' 'the `null=True` option to the field definition. Set to `true` ' 'to apply to all applicable fields.', ) parser.add_argument( '--srid', dest='srid', help='The SRID to use for the Geometry Field. If it can be ' 'determined, the SRID of the data source is used.', ) parser.add_argument( '--mapping', action='store_true', dest='mapping', help='Generate mapping dictionary for use with `LayerMapping`.', ) def handle(self, *args, **options): data_source, model_name = options.pop('data_source'), options.pop('model_name') # Getting the OGR DataSource from the string parameter. 
try: ds = gdal.DataSource(data_source) except gdal.GDALException as msg: raise CommandError(msg) # Returning the output of ogrinspect with the given arguments # and options. from django.contrib.gis.utils.ogrinspect import _ogrinspect, mapping # Filter options to params accepted by `_ogrinspect` ogr_options = {k: v for k, v in options.items() if k in get_func_args(_ogrinspect) and v is not None} output = [s for s in _ogrinspect(ds, model_name, **ogr_options)] if options['mapping']: # Constructing the keyword arguments for `mapping`, and # calling it on the data source. kwargs = { 'geom_name': options['geom_name'], 'layer_key': options['layer_key'], 'multi_geom': options['multi_geom'], } mapping_dict = mapping(ds, **kwargs) # This extra legwork is so that the dictionary definition comes # out in the same order as the fields in the model definition. rev_mapping = {v: k for k, v in mapping_dict.items()} output.extend(['', '# Auto-generated `LayerMapping` dictionary for %s model' % model_name, '%s_mapping = {' % model_name.lower()]) output.extend(" '%s' : '%s'," % ( rev_mapping[ogr_fld], ogr_fld) for ogr_fld in ds[options['layer_key']].fields ) output.extend([" '%s' : '%s'," % (options['geom_name'], mapping_dict[options['geom_name']]), '}']) return '\n'.join(output) + '\n'
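# Illustrative sketch: invoking ogrinspect programmatically through call_command.
# The shapefile path is hypothetical; assumes a configured Django project with
# GDAL installed. The generated model and the `zipcode_mapping` LayerMapping
# dictionary are written to stdout.
def _ogrinspect_example():
    from django.core.management import call_command
    return call_command('ogrinspect', 'zipcode.shp', 'Zipcode', srid='4326', mapping=True)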
c96376a82b2a7d74a9dd30950c64189ab313275c1ae49cb5d9aaa4804880546c
from django.core.management.commands.inspectdb import \ Command as InspectDBCommand class Command(InspectDBCommand): db_module = 'django.contrib.gis.db' def get_field_type(self, connection, table_name, row): field_type, field_params, field_notes = super(Command, self).get_field_type(connection, table_name, row) if field_type == 'GeometryField': geo_col = row[0] # Getting a more specific field type and any additional parameters # from the `get_geometry_type` routine for the spatial backend. field_type, geo_params = connection.introspection.get_geometry_type(table_name, geo_col) field_params.update(geo_params) return field_type, field_params, field_notes
18019646798556cb82d4490cd0d1dcd30530c2ef2f02b3d306beb23e33d3f5fe
import json import os from ctypes import addressof, byref, c_double, c_void_p from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.driver import Driver from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.gdal.prototypes import raster as capi from django.contrib.gis.gdal.raster.band import BandList from django.contrib.gis.gdal.raster.const import GDAL_RESAMPLE_ALGORITHMS from django.contrib.gis.gdal.srs import SpatialReference, SRSException from django.contrib.gis.geometry.regex import json_regex from django.utils import six from django.utils.encoding import ( force_bytes, force_text, python_2_unicode_compatible, ) from django.utils.functional import cached_property class TransformPoint(list): indices = { 'origin': (0, 3), 'scale': (1, 5), 'skew': (2, 4), } def __init__(self, raster, prop): x = raster.geotransform[self.indices[prop][0]] y = raster.geotransform[self.indices[prop][1]] list.__init__(self, [x, y]) self._raster = raster self._prop = prop @property def x(self): return self[0] @x.setter def x(self, value): gtf = self._raster.geotransform gtf[self.indices[self._prop][0]] = value self._raster.geotransform = gtf @property def y(self): return self[1] @y.setter def y(self, value): gtf = self._raster.geotransform gtf[self.indices[self._prop][1]] = value self._raster.geotransform = gtf @python_2_unicode_compatible class GDALRaster(GDALBase): """ Wraps a raster GDAL Data Source object. """ def __init__(self, ds_input, write=False): self._write = 1 if write else 0 Driver.ensure_registered() # Preprocess json inputs. This converts json strings to dictionaries, # which are parsed below the same way as direct dictionary inputs. if isinstance(ds_input, six.string_types) and json_regex.match(ds_input): ds_input = json.loads(ds_input) # If input is a valid file path, try setting file as source. if isinstance(ds_input, six.string_types): if not os.path.exists(ds_input): raise GDALException('Unable to read raster source input "{}"'.format(ds_input)) try: # GDALOpen will auto-detect the data source type. 
self._ptr = capi.open_ds(force_bytes(ds_input), self._write) except GDALException as err: raise GDALException('Could not open the datasource at "{}" ({}).'.format(ds_input, err)) elif isinstance(ds_input, dict): # A new raster needs to be created in write mode self._write = 1 # Create driver (in memory by default) driver = Driver(ds_input.get('driver', 'MEM')) # For out of memory drivers, check filename argument if driver.name != 'MEM' and 'name' not in ds_input: raise GDALException('Specify name for creation of raster with driver "{}".'.format(driver.name)) # Check if width and height where specified if 'width' not in ds_input or 'height' not in ds_input: raise GDALException('Specify width and height attributes for JSON or dict input.') # Check if srid was specified if 'srid' not in ds_input: raise GDALException('Specify srid for JSON or dict input.') # Create GDAL Raster self._ptr = capi.create_ds( driver._ptr, force_bytes(ds_input.get('name', '')), ds_input['width'], ds_input['height'], ds_input.get('nr_of_bands', len(ds_input.get('bands', []))), ds_input.get('datatype', 6), None ) # Set band data if provided for i, band_input in enumerate(ds_input.get('bands', [])): band = self.bands[i] band.data(band_input['data']) if 'nodata_value' in band_input: band.nodata_value = band_input['nodata_value'] # Set SRID self.srs = ds_input.get('srid') # Set additional properties if provided if 'origin' in ds_input: self.origin.x, self.origin.y = ds_input['origin'] if 'scale' in ds_input: self.scale.x, self.scale.y = ds_input['scale'] if 'skew' in ds_input: self.skew.x, self.skew.y = ds_input['skew'] elif isinstance(ds_input, c_void_p): # Instantiate the object using an existing pointer to a gdal raster. self._ptr = ds_input else: raise GDALException('Invalid data source input type: "{}".'.format(type(ds_input))) def __del__(self): try: capi.close_ds(self._ptr) except (AttributeError, TypeError): pass # Some part might already have been garbage collected def __str__(self): return self.name def __repr__(self): """ Short-hand representation because WKB may be very large. """ return '<Raster object at %s>' % hex(addressof(self._ptr)) def _flush(self): """ Flush all data from memory into the source file if it exists. The data that needs flushing are geotransforms, coordinate systems, nodata_values and pixel values. This function will be called automatically wherever it is needed. """ # Raise an Exception if the value is being changed in read mode. if not self._write: raise GDALException('Raster needs to be opened in write mode to change values.') capi.flush_ds(self._ptr) @property def name(self): """ Returns the name of this raster. Corresponds to filename for file-based rasters. """ return force_text(capi.get_ds_description(self._ptr)) @cached_property def driver(self): """ Returns the GDAL Driver used for this raster. """ ds_driver = capi.get_ds_driver(self._ptr) return Driver(ds_driver) @property def width(self): """ Width (X axis) in pixels. """ return capi.get_ds_xsize(self._ptr) @property def height(self): """ Height (Y axis) in pixels. """ return capi.get_ds_ysize(self._ptr) @property def srs(self): """ Returns the SpatialReference used in this GDALRaster. """ try: wkt = capi.get_ds_projection_ref(self._ptr) if not wkt: return None return SpatialReference(wkt, srs_type='wkt') except SRSException: return None @srs.setter def srs(self, value): """ Sets the spatial reference used in this GDALRaster. The input can be a SpatialReference or any parameter accepted by the SpatialReference constructor. 
""" if isinstance(value, SpatialReference): srs = value elif isinstance(value, six.integer_types + six.string_types): srs = SpatialReference(value) else: raise ValueError('Could not create a SpatialReference from input.') capi.set_ds_projection_ref(self._ptr, srs.wkt.encode()) self._flush() @property def srid(self): """ Shortcut to access the srid of this GDALRaster. """ return self.srs.srid @srid.setter def srid(self, value): """ Shortcut to set this GDALRaster's srs from an srid. """ self.srs = value @property def geotransform(self): """ Returns the geotransform of the data source. Returns the default geotransform if it does not exist or has not been set previously. The default is [0.0, 1.0, 0.0, 0.0, 0.0, -1.0]. """ # Create empty ctypes double array for data gtf = (c_double * 6)() capi.get_ds_geotransform(self._ptr, byref(gtf)) return list(gtf) @geotransform.setter def geotransform(self, values): "Sets the geotransform for the data source." if sum([isinstance(x, (int, float)) for x in values]) != 6: raise ValueError('Geotransform must consist of 6 numeric values.') # Create ctypes double array with input and write data values = (c_double * 6)(*values) capi.set_ds_geotransform(self._ptr, byref(values)) self._flush() @property def origin(self): """ Coordinates of the raster origin. """ return TransformPoint(self, 'origin') @property def scale(self): """ Pixel scale in units of the raster projection. """ return TransformPoint(self, 'scale') @property def skew(self): """ Skew of pixels (rotation parameters). """ return TransformPoint(self, 'skew') @property def extent(self): """ Returns the extent as a 4-tuple (xmin, ymin, xmax, ymax). """ # Calculate boundary values based on scale and size xval = self.origin.x + self.scale.x * self.width yval = self.origin.y + self.scale.y * self.height # Calculate min and max values xmin = min(xval, self.origin.x) xmax = max(xval, self.origin.x) ymin = min(yval, self.origin.y) ymax = max(yval, self.origin.y) return xmin, ymin, xmax, ymax @property def bands(self): return BandList(self) def warp(self, ds_input, resampling='NearestNeighbour', max_error=0.0): """ Returns a warped GDALRaster with the given input characteristics. The input is expected to be a dictionary containing the parameters of the target raster. Allowed values are width, height, SRID, origin, scale, skew, datatype, driver, and name (filename). By default, the warp functions keeps all parameters equal to the values of the original source raster. For the name of the target raster, the name of the source raster will be used and appended with _copy. + source_driver_name. In addition, the resampling algorithm can be specified with the "resampling" input parameter. The default is NearestNeighbor. For a list of all options consult the GDAL_RESAMPLE_ALGORITHMS constant. """ # Get the parameters defining the geotransform, srid, and size of the raster if 'width' not in ds_input: ds_input['width'] = self.width if 'height' not in ds_input: ds_input['height'] = self.height if 'srid' not in ds_input: ds_input['srid'] = self.srs.srid if 'origin' not in ds_input: ds_input['origin'] = self.origin if 'scale' not in ds_input: ds_input['scale'] = self.scale if 'skew' not in ds_input: ds_input['skew'] = self.skew # Get the driver, name, and datatype of the target raster if 'driver' not in ds_input: ds_input['driver'] = self.driver.name if 'name' not in ds_input: ds_input['name'] = self.name + '_copy.' 
+ self.driver.name if 'datatype' not in ds_input: ds_input['datatype'] = self.bands[0].datatype() # Set the number of bands ds_input['nr_of_bands'] = len(self.bands) # Create target raster target = GDALRaster(ds_input, write=True) # Copy nodata values to warped raster for index, band in enumerate(self.bands): target.bands[index].nodata_value = band.nodata_value # Select resampling algorithm algorithm = GDAL_RESAMPLE_ALGORITHMS[resampling] # Reproject image capi.reproject_image( self._ptr, self.srs.wkt.encode(), target._ptr, target.srs.wkt.encode(), algorithm, 0.0, max_error, c_void_p(), c_void_p(), c_void_p() ) # Make sure all data is written to file target._flush() return target def transform(self, srid, driver=None, name=None, resampling='NearestNeighbour', max_error=0.0): """ Returns a copy of this raster reprojected into the given SRID. """ # Convert the resampling algorithm name into an algorithm id algorithm = GDAL_RESAMPLE_ALGORITHMS[resampling] # Instantiate target spatial reference system target_srs = SpatialReference(srid) # Create warped virtual dataset in the target reference system target = capi.auto_create_warped_vrt( self._ptr, self.srs.wkt.encode(), target_srs.wkt.encode(), algorithm, max_error, c_void_p() ) target = GDALRaster(target) # Construct the target warp dictionary from the virtual raster data = { 'srid': srid, 'width': target.width, 'height': target.height, 'origin': [target.origin.x, target.origin.y], 'scale': [target.scale.x, target.scale.y], 'skew': [target.skew.x, target.skew.y], } # Set the driver and filepath if provided if driver: data['driver'] = driver if name: data['name'] = name # Warp the raster into new srid return self.warp(data, resampling=resampling, max_error=max_error)
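# Illustrative sketch: building a small in-memory raster from a dict, the same
# input format handled by __init__ above. Assumes the GDAL library is installed;
# the values are made up for demonstration.
def _gdalraster_from_dict_example():
    rst = GDALRaster({
        'srid': 4326,
        'width': 4,
        'height': 4,
        'origin': [0, 0],
        'scale': [1, -1],
        'bands': [{'data': range(16), 'nodata_value': 0}],
    })
    return rst.extent  # (0.0, -4.0, 4.0, 0.0)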
0a938c43dadf040767179f5d3755f0b8481a81ef4cedc8c161c33aadbdc167f7
""" GDAL - Constant definitions """ from ctypes import ( c_double, c_float, c_int16, c_int32, c_ubyte, c_uint16, c_uint32, ) # See http://www.gdal.org/gdal_8h.html#a22e22ce0a55036a96f652765793fb7a4 GDAL_PIXEL_TYPES = { 0: 'GDT_Unknown', # Unknown or unspecified type 1: 'GDT_Byte', # Eight bit unsigned integer 2: 'GDT_UInt16', # Sixteen bit unsigned integer 3: 'GDT_Int16', # Sixteen bit signed integer 4: 'GDT_UInt32', # Thirty-two bit unsigned integer 5: 'GDT_Int32', # Thirty-two bit signed integer 6: 'GDT_Float32', # Thirty-two bit floating point 7: 'GDT_Float64', # Sixty-four bit floating point 8: 'GDT_CInt16', # Complex Int16 9: 'GDT_CInt32', # Complex Int32 10: 'GDT_CFloat32', # Complex Float32 11: 'GDT_CFloat64', # Complex Float64 } # A list of gdal datatypes that are integers. GDAL_INTEGER_TYPES = [1, 2, 3, 4, 5] # Lookup values to convert GDAL pixel type indices into ctypes objects. # The GDAL band-io works with ctypes arrays to hold data to be written # or to hold the space for data to be read into. The lookup below helps # selecting the right ctypes object for a given gdal pixel type. GDAL_TO_CTYPES = [ None, c_ubyte, c_uint16, c_int16, c_uint32, c_int32, c_float, c_double, None, None, None, None ] # List of resampling algorithms that can be used to warp a GDALRaster. GDAL_RESAMPLE_ALGORITHMS = { 'NearestNeighbour': 0, 'Bilinear': 1, 'Cubic': 2, 'CubicSpline': 3, 'Lanczos': 4, 'Average': 5, 'Mode': 6, }
1c862baabc299d1b9f5ff677ac358bd2b794abcc95179846cacedd74ec55b8f6
from ctypes import byref, c_double, c_int, c_void_p from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.gdal.prototypes import raster as capi from django.contrib.gis.shortcuts import numpy from django.utils import six from django.utils.encoding import force_text from django.utils.six.moves import range from .const import GDAL_INTEGER_TYPES, GDAL_PIXEL_TYPES, GDAL_TO_CTYPES class GDALBand(GDALBase): """ Wraps a GDAL raster band, needs to be obtained from a GDALRaster object. """ def __init__(self, source, index): self.source = source self._ptr = capi.get_ds_raster_band(source._ptr, index) def _flush(self): """ Call the flush method on the Band's parent raster and force a refresh of the statistics attribute when requested the next time. """ self.source._flush() self._stats_refresh = True @property def description(self): """ Returns the description string of the band. """ return force_text(capi.get_band_description(self._ptr)) @property def width(self): """ Width (X axis) in pixels of the band. """ return capi.get_band_xsize(self._ptr) @property def height(self): """ Height (Y axis) in pixels of the band. """ return capi.get_band_ysize(self._ptr) @property def pixel_count(self): """ Returns the total number of pixels in this band. """ return self.width * self.height _stats_refresh = False def statistics(self, refresh=False, approximate=False): """ Compute statistics on the pixel values of this band. The return value is a tuple with the following structure: (minimum, maximum, mean, standard deviation). If approximate=True, the statistics may be computed based on overviews or a subset of image tiles. If refresh=True, the statistics will be computed from the data directly, and the cache will be updated where applicable. For empty bands (where all pixel values are nodata), all statistics values are returned as None. For raster formats using Persistent Auxiliary Metadata (PAM) services, the statistics might be cached in an auxiliary file. """ # Prepare array with arguments for capi function smin, smax, smean, sstd = c_double(), c_double(), c_double(), c_double() stats_args = [ self._ptr, c_int(approximate), byref(smin), byref(smax), byref(smean), byref(sstd), c_void_p(), c_void_p(), ] if refresh or self._stats_refresh: func = capi.compute_band_statistics else: # Add additional argument to force computation if there is no # existing PAM file to take the values from. force = True stats_args.insert(2, c_int(force)) func = capi.get_band_statistics # Computation of statistics fails for empty bands. try: func(*stats_args) result = smin.value, smax.value, smean.value, sstd.value except GDALException: result = (None, None, None, None) self._stats_refresh = False return result @property def min(self): """ Return the minimum pixel value for this band. """ return self.statistics()[0] @property def max(self): """ Return the maximum pixel value for this band. """ return self.statistics()[1] @property def mean(self): """ Return the mean of all pixel values of this band. """ return self.statistics()[2] @property def std(self): """ Return the standard deviation of all pixel values of this band. """ return self.statistics()[3] @property def nodata_value(self): """ Returns the nodata value for this band, or None if it isn't set. 
""" # Get value and nodata exists flag nodata_exists = c_int() value = capi.get_band_nodata_value(self._ptr, nodata_exists) if not nodata_exists: value = None # If the pixeltype is an integer, convert to int elif self.datatype() in GDAL_INTEGER_TYPES: value = int(value) return value @nodata_value.setter def nodata_value(self, value): """ Sets the nodata value for this band. """ if value is None: if not capi.delete_band_nodata_value: raise ValueError('GDAL >= 2.1 required to delete nodata values.') capi.delete_band_nodata_value(self._ptr) elif not isinstance(value, (int, float)): raise ValueError('Nodata value must be numeric or None.') else: capi.set_band_nodata_value(self._ptr, value) self._flush() def datatype(self, as_string=False): """ Returns the GDAL Pixel Datatype for this band. """ dtype = capi.get_band_datatype(self._ptr) if as_string: dtype = GDAL_PIXEL_TYPES[dtype] return dtype def data(self, data=None, offset=None, size=None, shape=None, as_memoryview=False): """ Reads or writes pixel values for this band. Blocks of data can be accessed by specifying the width, height and offset of the desired block. The same specification can be used to update parts of a raster by providing an array of values. Allowed input data types are bytes, memoryview, list, tuple, and array. """ if not offset: offset = (0, 0) if not size: size = (self.width - offset[0], self.height - offset[1]) if not shape: shape = size if any(x <= 0 for x in size): raise ValueError('Offset too big for this raster.') if size[0] > self.width or size[1] > self.height: raise ValueError('Size is larger than raster.') # Create ctypes type array generator ctypes_array = GDAL_TO_CTYPES[self.datatype()] * (shape[0] * shape[1]) if data is None: # Set read mode access_flag = 0 # Prepare empty ctypes array data_array = ctypes_array() else: # Set write mode access_flag = 1 # Instantiate ctypes array holding the input data if isinstance(data, (bytes, six.memoryview)) or (numpy and isinstance(data, numpy.ndarray)): data_array = ctypes_array.from_buffer_copy(data) else: data_array = ctypes_array(*data) # Access band capi.band_io(self._ptr, access_flag, offset[0], offset[1], size[0], size[1], byref(data_array), shape[0], shape[1], self.datatype(), 0, 0) # Return data as numpy array if possible, otherwise as list if data is None: if as_memoryview: return memoryview(data_array) elif numpy: # reshape() needs a reshape parameter with the height first. return numpy.frombuffer( data_array, dtype=numpy.dtype(data_array) ).reshape(tuple(reversed(size))) else: return list(data_array) else: self._flush() class BandList(list): def __init__(self, source): self.source = source list.__init__(self) def __iter__(self): for idx in range(1, len(self) + 1): yield GDALBand(self.source, idx) def __len__(self): return capi.get_ds_raster_count(self.source._ptr) def __getitem__(self, index): try: return GDALBand(self.source, index + 1) except GDALException: raise GDALException('Unable to get band index %d' % index)
3cb5610cc4dde4e91b41cd79333de50ea8a08194df292db07257aff9588f6ffc
from ctypes import POINTER, c_char_p, c_double, c_int, c_void_p from django.contrib.gis.gdal.envelope import OGREnvelope from django.contrib.gis.gdal.libgdal import lgdal from django.contrib.gis.gdal.prototypes.errcheck import check_envelope from django.contrib.gis.gdal.prototypes.generation import ( const_string_output, double_output, geom_output, int_output, srs_output, string_output, void_output, ) # ### Generation routines specific to this module ### def env_func(f, argtypes): "For getting OGREnvelopes." f.argtypes = argtypes f.restype = None f.errcheck = check_envelope return f def pnt_func(f): "For accessing point information." return double_output(f, [c_void_p, c_int]) def topology_func(f): f.argtypes = [c_void_p, c_void_p] f.restype = c_int f.errcheck = lambda result, func, cargs: bool(result) return f # ### OGR_G ctypes function prototypes ### # GeoJSON routines. from_json = geom_output(lgdal.OGR_G_CreateGeometryFromJson, [c_char_p]) to_json = string_output(lgdal.OGR_G_ExportToJson, [c_void_p], str_result=True, decoding='ascii') to_kml = string_output(lgdal.OGR_G_ExportToKML, [c_void_p, c_char_p], str_result=True, decoding='ascii') # GetX, GetY, GetZ all return doubles. getx = pnt_func(lgdal.OGR_G_GetX) gety = pnt_func(lgdal.OGR_G_GetY) getz = pnt_func(lgdal.OGR_G_GetZ) # Geometry creation routines. from_wkb = geom_output(lgdal.OGR_G_CreateFromWkb, [c_char_p, c_void_p, POINTER(c_void_p), c_int], offset=-2) from_wkt = geom_output(lgdal.OGR_G_CreateFromWkt, [POINTER(c_char_p), c_void_p, POINTER(c_void_p)], offset=-1) from_gml = geom_output(lgdal.OGR_G_CreateFromGML, [c_char_p]) create_geom = geom_output(lgdal.OGR_G_CreateGeometry, [c_int]) clone_geom = geom_output(lgdal.OGR_G_Clone, [c_void_p]) get_geom_ref = geom_output(lgdal.OGR_G_GetGeometryRef, [c_void_p, c_int]) get_boundary = geom_output(lgdal.OGR_G_GetBoundary, [c_void_p]) geom_convex_hull = geom_output(lgdal.OGR_G_ConvexHull, [c_void_p]) geom_diff = geom_output(lgdal.OGR_G_Difference, [c_void_p, c_void_p]) geom_intersection = geom_output(lgdal.OGR_G_Intersection, [c_void_p, c_void_p]) geom_sym_diff = geom_output(lgdal.OGR_G_SymmetricDifference, [c_void_p, c_void_p]) geom_union = geom_output(lgdal.OGR_G_Union, [c_void_p, c_void_p]) # Geometry modification routines. add_geom = void_output(lgdal.OGR_G_AddGeometry, [c_void_p, c_void_p]) import_wkt = void_output(lgdal.OGR_G_ImportFromWkt, [c_void_p, POINTER(c_char_p)]) # Destroys a geometry destroy_geom = void_output(lgdal.OGR_G_DestroyGeometry, [c_void_p], errcheck=False) # Geometry export routines. to_wkb = void_output(lgdal.OGR_G_ExportToWkb, None, errcheck=True) # special handling for WKB. to_wkt = string_output(lgdal.OGR_G_ExportToWkt, [c_void_p, POINTER(c_char_p)], decoding='ascii') to_gml = string_output(lgdal.OGR_G_ExportToGML, [c_void_p], str_result=True, decoding='ascii') get_wkbsize = int_output(lgdal.OGR_G_WkbSize, [c_void_p]) # Geometry spatial-reference related routines. 
assign_srs = void_output(lgdal.OGR_G_AssignSpatialReference, [c_void_p, c_void_p], errcheck=False) get_geom_srs = srs_output(lgdal.OGR_G_GetSpatialReference, [c_void_p]) # Geometry properties get_area = double_output(lgdal.OGR_G_GetArea, [c_void_p]) get_centroid = void_output(lgdal.OGR_G_Centroid, [c_void_p, c_void_p]) get_dims = int_output(lgdal.OGR_G_GetDimension, [c_void_p]) get_coord_dim = int_output(lgdal.OGR_G_GetCoordinateDimension, [c_void_p]) set_coord_dim = void_output(lgdal.OGR_G_SetCoordinateDimension, [c_void_p, c_int], errcheck=False) get_geom_count = int_output(lgdal.OGR_G_GetGeometryCount, [c_void_p]) get_geom_name = const_string_output(lgdal.OGR_G_GetGeometryName, [c_void_p], decoding='ascii') get_geom_type = int_output(lgdal.OGR_G_GetGeometryType, [c_void_p]) get_point_count = int_output(lgdal.OGR_G_GetPointCount, [c_void_p]) get_point = void_output( lgdal.OGR_G_GetPoint, [c_void_p, c_int, POINTER(c_double), POINTER(c_double), POINTER(c_double)], errcheck=False ) geom_close_rings = void_output(lgdal.OGR_G_CloseRings, [c_void_p], errcheck=False) # Topology routines. ogr_contains = topology_func(lgdal.OGR_G_Contains) ogr_crosses = topology_func(lgdal.OGR_G_Crosses) ogr_disjoint = topology_func(lgdal.OGR_G_Disjoint) ogr_equals = topology_func(lgdal.OGR_G_Equals) ogr_intersects = topology_func(lgdal.OGR_G_Intersects) ogr_overlaps = topology_func(lgdal.OGR_G_Overlaps) ogr_touches = topology_func(lgdal.OGR_G_Touches) ogr_within = topology_func(lgdal.OGR_G_Within) # Transformation routines. geom_transform = void_output(lgdal.OGR_G_Transform, [c_void_p, c_void_p]) geom_transform_to = void_output(lgdal.OGR_G_TransformTo, [c_void_p, c_void_p]) # For retrieving the envelope of the geometry. get_envelope = env_func(lgdal.OGR_G_GetEnvelope, [c_void_p, POINTER(OGREnvelope)])
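# Illustrative sketch: the prototypes above are consumed by OGRGeometry; a round
# trip through a couple of them looks like this. Assumes GDAL is installed.
def _ogr_geometry_example():
    from django.contrib.gis.gdal import OGRGeometry
    poly = OGRGeometry('POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')  # parsed via from_wkt
    pnt = OGRGeometry('POINT(0.5 0.5)')
    return poly.contains(pnt), poly.area  # (True, 1.0) via ogr_contains / get_area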
9a64e7b1d88ea8f4cee2d0526b3f34fdee6b38a609be94fd183f1ad10b0a6ce1
""" This module contains functions that generate ctypes prototypes for the GDAL routines. """ from ctypes import c_char_p, c_double, c_int, c_int64, c_void_p from functools import partial from django.contrib.gis.gdal.prototypes.errcheck import ( check_arg_errcode, check_const_string, check_errcode, check_geom, check_geom_offset, check_pointer, check_srs, check_str_arg, check_string, ) class gdal_char_p(c_char_p): pass def double_output(func, argtypes, errcheck=False, strarg=False, cpl=False): "Generates a ctypes function that returns a double value." func.argtypes = argtypes func.restype = c_double if errcheck: func.errcheck = partial(check_arg_errcode, cpl=cpl) if strarg: func.errcheck = check_str_arg return func def geom_output(func, argtypes, offset=None): """ Generates a function that returns a Geometry either by reference or directly (if the return_geom keyword is set to True). """ # Setting the argument types func.argtypes = argtypes if not offset: # When a geometry pointer is directly returned. func.restype = c_void_p func.errcheck = check_geom else: # Error code returned, geometry is returned by-reference. func.restype = c_int def geomerrcheck(result, func, cargs): return check_geom_offset(result, func, cargs, offset) func.errcheck = geomerrcheck return func def int_output(func, argtypes): "Generates a ctypes function that returns an integer value." func.argtypes = argtypes func.restype = c_int return func def int64_output(func, argtypes): "Generates a ctypes function that returns a 64-bit integer value." func.argtypes = argtypes func.restype = c_int64 return func def srs_output(func, argtypes): """ Generates a ctypes prototype for the given function with the given C arguments that returns a pointer to an OGR Spatial Reference System. """ func.argtypes = argtypes func.restype = c_void_p func.errcheck = check_srs return func def const_string_output(func, argtypes, offset=None, decoding=None, cpl=False): func.argtypes = argtypes if offset: func.restype = c_int else: func.restype = c_char_p def _check_const(result, func, cargs): res = check_const_string(result, func, cargs, offset=offset, cpl=cpl) if res and decoding: res = res.decode(decoding) return res func.errcheck = _check_const return func def string_output(func, argtypes, offset=-1, str_result=False, decoding=None): """ Generates a ctypes prototype for the given function with the given argument types that returns a string from a GDAL pointer. The `const` flag indicates whether the allocated pointer should be freed via the GDAL library routine VSIFree -- but only applies only when `str_result` is True. """ func.argtypes = argtypes if str_result: # Use subclass of c_char_p so the error checking routine # can free the memory at the pointer's address. func.restype = gdal_char_p else: # Error code is returned func.restype = c_int # Dynamically defining our error-checking function with the # given offset. def _check_str(result, func, cargs): res = check_string(result, func, cargs, offset=offset, str_result=str_result) if res and decoding: res = res.decode(decoding) return res func.errcheck = _check_str return func def void_output(func, argtypes, errcheck=True, cpl=False): """ For functions that don't only return an error code that needs to be examined. """ if argtypes: func.argtypes = argtypes if errcheck: # `errcheck` keyword may be set to False for routines that # return void, rather than a status code. 
func.restype = c_int func.errcheck = partial(check_errcode, cpl=cpl) else: func.restype = None return func def voidptr_output(func, argtypes, errcheck=True): "For functions that return c_void_p." func.argtypes = argtypes func.restype = c_void_p if errcheck: func.errcheck = check_pointer return func
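# Illustrative sketch: what these generators do to a plain ctypes function
# pointer. The libc call below is only a stand-in so the pattern is runnable
# without GDAL; assumes a POSIX system where the C runtime can be located.
def _int_output_pattern_example():
    import ctypes
    import ctypes.util
    libc = ctypes.CDLL(ctypes.util.find_library('c'))
    c_abs = int_output(libc.abs, [c_int])  # sets argtypes/restype just like the GDAL prototypes
    return c_abs(-5)  # 5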
c59ec0ef12db27dc0e8a898995575c6a336110bf3548f5f858bd0911fff8fb62
""" This module houses the error-checking routines used by the GDAL ctypes prototypes. """ from ctypes import c_void_p, string_at from django.contrib.gis.gdal.error import ( GDALException, SRSException, check_err, ) from django.contrib.gis.gdal.libgdal import lgdal from django.utils import six # Helper routines for retrieving pointers and/or values from # arguments passed in by reference. def arg_byref(args, offset=-1): "Returns the pointer argument's by-reference value." return args[offset]._obj.value def ptr_byref(args, offset=-1): "Returns the pointer argument passed in by-reference." return args[offset]._obj # ### String checking Routines ### def check_const_string(result, func, cargs, offset=None, cpl=False): """ Similar functionality to `check_string`, but does not free the pointer. """ if offset: check_err(result, cpl=cpl) ptr = ptr_byref(cargs, offset) return ptr.value else: return result def check_string(result, func, cargs, offset=-1, str_result=False): """ Checks the string output returned from the given function, and frees the string pointer allocated by OGR. The `str_result` keyword may be used when the result is the string pointer, otherwise the OGR error code is assumed. The `offset` keyword may be used to extract the string pointer passed in by-reference at the given slice offset in the function arguments. """ if str_result: # For routines that return a string. ptr = result if not ptr: s = None else: s = string_at(result) else: # Error-code return specified. check_err(result) ptr = ptr_byref(cargs, offset) # Getting the string value s = ptr.value # Correctly freeing the allocated memory behind GDAL pointer # with the VSIFree routine. if ptr: lgdal.VSIFree(ptr) return s # ### DataSource, Layer error-checking ### # ### Envelope checking ### def check_envelope(result, func, cargs, offset=-1): "Checks a function that returns an OGR Envelope by reference." env = ptr_byref(cargs, offset) return env # ### Geometry error-checking routines ### def check_geom(result, func, cargs): "Checks a function that returns a geometry." # OGR_G_Clone may return an integer, even though the # restype is set to c_void_p if isinstance(result, six.integer_types): result = c_void_p(result) if not result: raise GDALException('Invalid geometry pointer returned from "%s".' % func.__name__) return result def check_geom_offset(result, func, cargs, offset=-1): "Chcks the geometry at the given offset in the C parameter list." check_err(result) geom = ptr_byref(cargs, offset=offset) return check_geom(geom, func, cargs) # ### Spatial Reference error-checking routines ### def check_srs(result, func, cargs): if isinstance(result, six.integer_types): result = c_void_p(result) if not result: raise SRSException('Invalid spatial reference pointer returned from "%s".' % func.__name__) return result # ### Other error-checking routines ### def check_arg_errcode(result, func, cargs, cpl=False): """ The error code is returned in the last argument, by reference. Check its value with `check_err` before returning the result. """ check_err(arg_byref(cargs), cpl=cpl) return result def check_errcode(result, func, cargs, cpl=False): """ Check the error code returned (c_int). """ check_err(result, cpl=cpl) def check_pointer(result, func, cargs): "Makes sure the result pointer is valid." 
if isinstance(result, six.integer_types): result = c_void_p(result) if result: return result else: raise GDALException('Invalid pointer returned from "%s"' % func.__name__) def check_str_arg(result, func, cargs): """ This is for the OSRGet[Angular|Linear]Units functions, which require that the returned string pointer not be freed. This returns both the double and string values. """ dbl = result ptr = cargs[-1]._obj return dbl, ptr.value.decode()
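# Illustrative sketch: the errcheck hooks are plain callables, so their behavior
# can be exercised directly; a null pointer result raises GDALException. The
# stand-in function below only supplies a __name__ for the error message.
def _check_pointer_example():
    def fake_gdal_call():  # stand-in for a ctypes function pointer
        pass
    try:
        check_pointer(None, fake_gdal_call, ())
    except GDALException as err:
        return str(err)  # 'Invalid pointer returned from "fake_gdal_call"'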
5b43f338037f749359e09be965493e0c41b391c41830fa1d831d1b8f6d810d26
""" This module houses the ctypes function prototypes for GDAL DataSource (raster) related data structures. """ from ctypes import POINTER, c_char_p, c_double, c_int, c_void_p from functools import partial from django.contrib.gis.gdal.libgdal import GDAL_VERSION, std_call from django.contrib.gis.gdal.prototypes.generation import ( const_string_output, double_output, int_output, void_output, voidptr_output, ) # For more detail about c function names and definitions see # http://gdal.org/gdal_8h.html # http://gdal.org/gdalwarper_8h.html # Prepare partial functions that use cpl error codes void_output = partial(void_output, cpl=True) const_string_output = partial(const_string_output, cpl=True) double_output = partial(double_output, cpl=True) # Raster Driver Routines register_all = void_output(std_call('GDALAllRegister'), []) get_driver = voidptr_output(std_call('GDALGetDriver'), [c_int]) get_driver_by_name = voidptr_output(std_call('GDALGetDriverByName'), [c_char_p], errcheck=False) get_driver_count = int_output(std_call('GDALGetDriverCount'), []) get_driver_description = const_string_output(std_call('GDALGetDescription'), [c_void_p]) # Raster Data Source Routines create_ds = voidptr_output(std_call('GDALCreate'), [c_void_p, c_char_p, c_int, c_int, c_int, c_int, c_void_p]) open_ds = voidptr_output(std_call('GDALOpen'), [c_char_p, c_int]) if GDAL_VERSION >= (2, 0): close_ds = voidptr_output(std_call('GDALClose'), [c_void_p]) else: close_ds = void_output(std_call('GDALClose'), [c_void_p]) flush_ds = int_output(std_call('GDALFlushCache'), [c_void_p]) copy_ds = voidptr_output( std_call('GDALCreateCopy'), [c_void_p, c_char_p, c_void_p, c_int, POINTER(c_char_p), c_void_p, c_void_p] ) add_band_ds = void_output(std_call('GDALAddBand'), [c_void_p, c_int]) get_ds_description = const_string_output(std_call('GDALGetDescription'), [c_void_p]) get_ds_driver = voidptr_output(std_call('GDALGetDatasetDriver'), [c_void_p]) get_ds_xsize = int_output(std_call('GDALGetRasterXSize'), [c_void_p]) get_ds_ysize = int_output(std_call('GDALGetRasterYSize'), [c_void_p]) get_ds_raster_count = int_output(std_call('GDALGetRasterCount'), [c_void_p]) get_ds_raster_band = voidptr_output(std_call('GDALGetRasterBand'), [c_void_p, c_int]) get_ds_projection_ref = const_string_output(std_call('GDALGetProjectionRef'), [c_void_p]) set_ds_projection_ref = void_output(std_call('GDALSetProjection'), [c_void_p, c_char_p]) get_ds_geotransform = void_output(std_call('GDALGetGeoTransform'), [c_void_p, POINTER(c_double * 6)], errcheck=False) set_ds_geotransform = void_output(std_call('GDALSetGeoTransform'), [c_void_p, POINTER(c_double * 6)]) # Raster Band Routines band_io = void_output( std_call('GDALRasterIO'), [c_void_p, c_int, c_int, c_int, c_int, c_int, c_void_p, c_int, c_int, c_int, c_int, c_int] ) get_band_xsize = int_output(std_call('GDALGetRasterBandXSize'), [c_void_p]) get_band_ysize = int_output(std_call('GDALGetRasterBandYSize'), [c_void_p]) get_band_index = int_output(std_call('GDALGetBandNumber'), [c_void_p]) get_band_description = const_string_output(std_call('GDALGetDescription'), [c_void_p]) get_band_ds = voidptr_output(std_call('GDALGetBandDataset'), [c_void_p]) get_band_datatype = int_output(std_call('GDALGetRasterDataType'), [c_void_p]) get_band_nodata_value = double_output(std_call('GDALGetRasterNoDataValue'), [c_void_p, POINTER(c_int)]) set_band_nodata_value = void_output(std_call('GDALSetRasterNoDataValue'), [c_void_p, c_double]) if GDAL_VERSION >= (2, 1): delete_band_nodata_value = 
void_output(std_call('GDALDeleteRasterNoDataValue'), [c_void_p]) else: delete_band_nodata_value = None get_band_statistics = void_output( std_call('GDALGetRasterStatistics'), [ c_void_p, c_int, c_int, POINTER(c_double), POINTER(c_double), POINTER(c_double), POINTER(c_double), c_void_p, c_void_p, ], ) compute_band_statistics = void_output( std_call('GDALComputeRasterStatistics'), [c_void_p, c_int, POINTER(c_double), POINTER(c_double), POINTER(c_double), POINTER(c_double), c_void_p, c_void_p], ) # Reprojection routine reproject_image = void_output( std_call('GDALReprojectImage'), [c_void_p, c_char_p, c_void_p, c_char_p, c_int, c_double, c_double, c_void_p, c_void_p, c_void_p] ) auto_create_warped_vrt = voidptr_output( std_call('GDALAutoCreateWarpedVRT'), [c_void_p, c_char_p, c_char_p, c_int, c_double, c_void_p] )
a510da1c7fd7da9025d26a013478271b4402ad65e4f3ae0b4860ab976b95cf99
from ctypes import POINTER, c_char_p, c_int, c_void_p from django.contrib.gis.gdal.libgdal import lgdal, std_call from django.contrib.gis.gdal.prototypes.generation import ( const_string_output, double_output, int_output, srs_output, string_output, void_output, ) # Shortcut generation for routines with known parameters. def srs_double(f): """ Creates a function prototype for the OSR routines that take the OSRSpatialReference object and """ return double_output(f, [c_void_p, POINTER(c_int)], errcheck=True) def units_func(f): """ Creates a ctypes function prototype for OSR units functions, e.g., OSRGetAngularUnits, OSRGetLinearUnits. """ return double_output(f, [c_void_p, POINTER(c_char_p)], strarg=True) # Creation & destruction. clone_srs = srs_output(std_call('OSRClone'), [c_void_p]) new_srs = srs_output(std_call('OSRNewSpatialReference'), [c_char_p]) release_srs = void_output(lgdal.OSRRelease, [c_void_p], errcheck=False) destroy_srs = void_output(std_call('OSRDestroySpatialReference'), [c_void_p], errcheck=False) srs_validate = void_output(lgdal.OSRValidate, [c_void_p]) # Getting the semi_major, semi_minor, and flattening functions. semi_major = srs_double(lgdal.OSRGetSemiMajor) semi_minor = srs_double(lgdal.OSRGetSemiMinor) invflattening = srs_double(lgdal.OSRGetInvFlattening) # WKT, PROJ, EPSG, XML importation routines. from_wkt = void_output(lgdal.OSRImportFromWkt, [c_void_p, POINTER(c_char_p)]) from_proj = void_output(lgdal.OSRImportFromProj4, [c_void_p, c_char_p]) from_epsg = void_output(std_call('OSRImportFromEPSG'), [c_void_p, c_int]) from_xml = void_output(lgdal.OSRImportFromXML, [c_void_p, c_char_p]) from_user_input = void_output(std_call('OSRSetFromUserInput'), [c_void_p, c_char_p]) # Morphing to/from ESRI WKT. morph_to_esri = void_output(lgdal.OSRMorphToESRI, [c_void_p]) morph_from_esri = void_output(lgdal.OSRMorphFromESRI, [c_void_p]) # Identifying the EPSG identify_epsg = void_output(lgdal.OSRAutoIdentifyEPSG, [c_void_p]) # Getting the angular_units, linear_units functions linear_units = units_func(lgdal.OSRGetLinearUnits) angular_units = units_func(lgdal.OSRGetAngularUnits) # For exporting to WKT, PROJ.4, "Pretty" WKT, and XML. to_wkt = string_output(std_call('OSRExportToWkt'), [c_void_p, POINTER(c_char_p)], decoding='utf-8') to_proj = string_output(std_call('OSRExportToProj4'), [c_void_p, POINTER(c_char_p)], decoding='ascii') to_pretty_wkt = string_output( std_call('OSRExportToPrettyWkt'), [c_void_p, POINTER(c_char_p), c_int], offset=-2, decoding='utf-8' ) # Memory leak fixed in GDAL 1.5; still exists in 1.4. to_xml = string_output(lgdal.OSRExportToXML, [c_void_p, POINTER(c_char_p), c_char_p], offset=-2, decoding='utf-8') # String attribute retrival routines. get_attr_value = const_string_output(std_call('OSRGetAttrValue'), [c_void_p, c_char_p, c_int], decoding='utf-8') get_auth_name = const_string_output(lgdal.OSRGetAuthorityName, [c_void_p, c_char_p], decoding='ascii') get_auth_code = const_string_output(lgdal.OSRGetAuthorityCode, [c_void_p, c_char_p], decoding='ascii') # SRS Properties isgeographic = int_output(lgdal.OSRIsGeographic, [c_void_p]) islocal = int_output(lgdal.OSRIsLocal, [c_void_p]) isprojected = int_output(lgdal.OSRIsProjected, [c_void_p]) # Coordinate transformation new_ct = srs_output(std_call('OCTNewCoordinateTransformation'), [c_void_p, c_void_p]) destroy_ct = void_output(std_call('OCTDestroyCoordinateTransformation'), [c_void_p], errcheck=False)
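# Illustrative sketch: the OSR prototypes above back SpatialReference and
# CoordTransform; a typical reprojection looks like this. Assumes GDAL is
# installed.
def _spatial_reference_example():
    from django.contrib.gis.gdal import CoordTransform, OGRGeometry, SpatialReference
    wgs84 = SpatialReference(4326)              # created via from_epsg
    web_mercator = SpatialReference(3857)
    ct = CoordTransform(wgs84, web_mercator)    # created via new_ct
    pnt = OGRGeometry('POINT(-104.609 38.255)', wgs84)
    pnt.transform(ct)                           # coordinates now in meters
    return wgs84.geographic, pnt.wkt            # (True, 'POINT (...)')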
cb46d47a65f54b86ee557f89e79abac84a113f7cfd6e14f3c80d50ad65648a8a
from importlib import import_module from django.conf import settings from django.core.exceptions import ImproperlyConfigured geom_backend = getattr(settings, 'GEOMETRY_BACKEND', 'geos') try: module = import_module('django.contrib.gis.geometry.backend.%s' % geom_backend) except ImportError: try: module = import_module(geom_backend) except ImportError: raise ImproperlyConfigured('Could not import user-defined GEOMETRY_BACKEND ' '"%s".' % geom_backend) try: Geometry = module.Geometry GeometryException = module.GeometryException except AttributeError: raise ImproperlyConfigured('Cannot import Geometry from the "%s" ' 'geometry backend.' % geom_backend)
0a3bcefb2fc3d74a224e0e69b5cb0b0bc373d4960c4b5d4eae2240e1fc2b0c1a
from django.contrib.gis.geos import ( GEOSException as GeometryException, GEOSGeometry as Geometry, ) __all__ = ['Geometry', 'GeometryException']
8152c2b579492a688b59cf0d89d86d491864bdfb54ddc1517a296fa2960c8bf3
import threading from django.contrib.gis.geos.libgeos import ( CONTEXT_PTR, error_h, lgeos, notice_h, ) class GEOSContextHandle(object): """ Python object representing a GEOS context handle. """ def __init__(self): # Initializing the context handler for this thread with # the notice and error handler. self.ptr = lgeos.initGEOS_r(notice_h, error_h) def __del__(self): if self.ptr and lgeos: lgeos.finishGEOS_r(self.ptr) # Defining a thread-local object and creating an instance # to hold a reference to GEOSContextHandle for this thread. class GEOSContext(threading.local): handle = None thread_context = GEOSContext() class GEOSFunc(object): """ Class that serves as a wrapper for GEOS C Functions, and will use thread-safe function variants when available. """ def __init__(self, func_name): try: # GEOS thread-safe function signatures end with '_r', and # take an additional context handle parameter. self.cfunc = getattr(lgeos, func_name + '_r') self.threaded = True # Create a reference here to thread_context so it's not # garbage-collected before an attempt to call this object. self.thread_context = thread_context except AttributeError: # Otherwise, use usual function. self.cfunc = getattr(lgeos, func_name) self.threaded = False def __call__(self, *args): if self.threaded: # If a context handle does not exist for this thread, initialize one. if not self.thread_context.handle: self.thread_context.handle = GEOSContextHandle() # Call the threaded GEOS routine with pointer of the context handle # as the first argument. return self.cfunc(self.thread_context.handle.ptr, *args) else: return self.cfunc(*args) def __str__(self): return self.cfunc.__name__ # argtypes property def _get_argtypes(self): return self.cfunc.argtypes def _set_argtypes(self, argtypes): if self.threaded: new_argtypes = [CONTEXT_PTR] new_argtypes.extend(argtypes) self.cfunc.argtypes = new_argtypes else: self.cfunc.argtypes = argtypes argtypes = property(_get_argtypes, _set_argtypes) # restype property def _get_restype(self): return self.cfunc.restype def _set_restype(self, restype): self.cfunc.restype = restype restype = property(_get_restype, _set_restype) # errcheck property def _get_errcheck(self): return self.cfunc.errcheck def _set_errcheck(self, errcheck): self.cfunc.errcheck = errcheck errcheck = property(_get_errcheck, _set_errcheck)
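# Illustrative sketch: because GEOSFunc routes every call through a per-thread
# context handle, GEOS-backed geometries can be used safely from several
# threads at once. Assumes GEOS is installed.
def _threaded_geos_example():
    import threading
    from django.contrib.gis.geos import GEOSGeometry
    results = []

    def work():
        g = GEOSGeometry('POINT(1 2)')
        results.append(g.buffer(1).area)  # each thread lazily gets its own handle

    threads = [threading.Thread(target=work) for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    return results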
87cd5471ab4dca330253f48f805ae47b650ae77271e677835199d8c0ae205ee0
from ctypes import POINTER, c_char_p, c_int, c_size_t, c_ubyte from django.contrib.gis.geos.libgeos import CS_PTR, GEOM_PTR, GEOSFuncFactory from django.contrib.gis.geos.prototypes.errcheck import ( check_geom, check_minus_one, check_sized_string, check_string, check_zero, ) # This is the return type used by binary output (WKB, HEX) routines. c_uchar_p = POINTER(c_ubyte) # We create a simple subclass of c_char_p here because when the response # type is set to c_char_p, you get a _Python_ string and there's no way # to access the string's address inside the error checking function. # In other words, you can't free the memory allocated inside GEOS. Previously, # the return type would just be omitted and the integer address would be # used -- but this allows us to be specific in the function definition and # keeps the reference so it may be free'd. class geos_char_p(c_char_p): pass # ### ctypes factory classes ### class BinConstructor(GEOSFuncFactory): "Generates a prototype for binary construction (HEX, WKB) GEOS routines." argtypes = [c_char_p, c_size_t] restype = GEOM_PTR errcheck = staticmethod(check_geom) # HEX & WKB output class BinOutput(GEOSFuncFactory): "Generates a prototype for the routines that return a sized string." argtypes = [GEOM_PTR, POINTER(c_size_t)] restype = c_uchar_p errcheck = staticmethod(check_sized_string) class GeomOutput(GEOSFuncFactory): "For GEOS routines that return a geometry." restype = GEOM_PTR errcheck = staticmethod(check_geom) def get_func(self, argtypes): self.argtypes = argtypes return super(GeomOutput, self).get_func() class IntFromGeom(GEOSFuncFactory): "Argument is a geometry, return type is an integer." argtypes = [GEOM_PTR] restype = c_int def get_func(self, zero=False): if zero: self.errcheck = check_zero else: self.errcheck = check_minus_one return super(IntFromGeom, self).get_func() class StringFromGeom(GEOSFuncFactory): "Argument is a Geometry, return type is a string." argtypes = [GEOM_PTR] restype = geos_char_p errcheck = staticmethod(check_string) # ### ctypes prototypes ### # Deprecated creation routines from WKB, HEX, WKT from_hex = BinConstructor('GEOSGeomFromHEX_buf') from_wkb = BinConstructor('GEOSGeomFromWKB_buf') from_wkt = GeomOutput('GEOSGeomFromWKT', [c_char_p]) # Deprecated output routines to_hex = BinOutput('GEOSGeomToHEX_buf') to_wkb = BinOutput('GEOSGeomToWKB_buf') to_wkt = StringFromGeom('GEOSGeomToWKT') # The GEOS geometry type, typeid, num_coordinates and number of geometries geos_normalize = IntFromGeom('GEOSNormalize') geos_type = StringFromGeom('GEOSGeomType') geos_typeid = IntFromGeom('GEOSGeomTypeId') get_dims = GEOSFuncFactory('GEOSGeom_getDimensions', argtypes=[GEOM_PTR], restype=c_int) get_num_coords = IntFromGeom('GEOSGetNumCoordinates') get_num_geoms = IntFromGeom('GEOSGetNumGeometries') # Geometry creation factories create_point = GeomOutput('GEOSGeom_createPoint', [CS_PTR]) create_linestring = GeomOutput('GEOSGeom_createLineString', [CS_PTR]) create_linearring = GeomOutput('GEOSGeom_createLinearRing', [CS_PTR]) # Polygon and collection creation routines are special and will not # have their argument types defined. 
create_polygon = GeomOutput('GEOSGeom_createPolygon', None) create_empty_polygon = GeomOutput('GEOSGeom_createEmptyPolygon', None) create_collection = GeomOutput('GEOSGeom_createCollection', None) # Ring routines get_extring = GeomOutput('GEOSGetExteriorRing', [GEOM_PTR]) get_intring = GeomOutput('GEOSGetInteriorRingN', [GEOM_PTR, c_int]) get_nrings = IntFromGeom('GEOSGetNumInteriorRings') # Collection Routines get_geomn = GeomOutput('GEOSGetGeometryN', [GEOM_PTR, c_int]) # Cloning geom_clone = GEOSFuncFactory('GEOSGeom_clone', argtypes=[GEOM_PTR], restype=GEOM_PTR) # Destruction routine. destroy_geom = GEOSFuncFactory('GEOSGeom_destroy', argtypes=[GEOM_PTR]) # SRID routines geos_get_srid = GEOSFuncFactory('GEOSGetSRID', argtypes=[GEOM_PTR], restype=c_int) geos_set_srid = GEOSFuncFactory('GEOSSetSRID', argtypes=[GEOM_PTR, c_int])
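A minimal sketch of how these raw prototypes fit together (assuming GEOS is installed and the bindings above load); the WKT literal is purely illustrative.

ptr = from_wkt(b'POINT (5 23)')   # GEOM_PTR, validated by check_geom
print(geos_type(ptr))             # b'Point'
print(geos_typeid(ptr))           # 0 (the GEOS type id for Point)
print(get_num_coords(ptr))        # 1
destroy_geom(ptr)                 # free the underlying GEOS geometry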
797c7b7c9cf9f3d0a6e129999d405e5820b7fa6a54caf38c505ac099d7c1fbbe
""" This module contains all of the GEOS ctypes function prototypes. Each prototype handles the interaction between the GEOS library and Python via ctypes. """ from django.contrib.gis.geos.prototypes.coordseq import ( # NOQA create_cs, cs_clone, cs_getdims, cs_getordinate, cs_getsize, cs_getx, cs_gety, cs_getz, cs_setordinate, cs_setx, cs_sety, cs_setz, get_cs, ) from django.contrib.gis.geos.prototypes.geom import ( # NOQA create_collection, create_empty_polygon, create_linearring, create_linestring, create_point, create_polygon, destroy_geom, from_hex, from_wkb, from_wkt, geom_clone, geos_get_srid, geos_normalize, geos_set_srid, geos_type, geos_typeid, get_dims, get_extring, get_geomn, get_intring, get_nrings, get_num_coords, get_num_geoms, to_hex, to_wkb, to_wkt, ) from django.contrib.gis.geos.prototypes.misc import * # NOQA from django.contrib.gis.geos.prototypes.predicates import ( # NOQA geos_contains, geos_covers, geos_crosses, geos_disjoint, geos_equals, geos_equalsexact, geos_hasz, geos_intersects, geos_isclosed, geos_isempty, geos_isring, geos_issimple, geos_isvalid, geos_overlaps, geos_relatepattern, geos_touches, geos_within, ) from django.contrib.gis.geos.prototypes.topology import * # NOQA
994a0f7b6ff12691e5c2d41aa2942a35f2c27c1e7694883f5bec05eeacd30fd7
""" This module houses the GEOS ctypes prototype functions for the unary and binary predicate operations on geometries. """ from ctypes import c_char, c_char_p, c_double from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOSFuncFactory from django.contrib.gis.geos.prototypes.errcheck import check_predicate # ## Binary & unary predicate factories ## class UnaryPredicate(GEOSFuncFactory): "For GEOS unary predicate functions." argtypes = [GEOM_PTR] restype = c_char errcheck = staticmethod(check_predicate) class BinaryPredicate(UnaryPredicate): "For GEOS binary predicate functions." argtypes = [GEOM_PTR, GEOM_PTR] # ## Unary Predicates ## geos_hasz = UnaryPredicate('GEOSHasZ') geos_isclosed = UnaryPredicate('GEOSisClosed') geos_isempty = UnaryPredicate('GEOSisEmpty') geos_isring = UnaryPredicate('GEOSisRing') geos_issimple = UnaryPredicate('GEOSisSimple') geos_isvalid = UnaryPredicate('GEOSisValid') # ## Binary Predicates ## geos_contains = BinaryPredicate('GEOSContains') geos_covers = BinaryPredicate('GEOSCovers') geos_crosses = BinaryPredicate('GEOSCrosses') geos_disjoint = BinaryPredicate('GEOSDisjoint') geos_equals = BinaryPredicate('GEOSEquals') geos_equalsexact = BinaryPredicate('GEOSEqualsExact', argtypes=[GEOM_PTR, GEOM_PTR, c_double]) geos_intersects = BinaryPredicate('GEOSIntersects') geos_overlaps = BinaryPredicate('GEOSOverlaps') geos_relatepattern = BinaryPredicate('GEOSRelatePattern', argtypes=[GEOM_PTR, GEOM_PTR, c_char_p]) geos_touches = BinaryPredicate('GEOSTouches') geos_within = BinaryPredicate('GEOSWithin')
a918415c5f057dece16d074b7ef2f8238cd88fcdca5c983e750ca3ab664486ba
""" Error checking functions for GEOS ctypes prototype functions. """ from ctypes import c_void_p, string_at from django.contrib.gis.geos.error import GEOSException from django.contrib.gis.geos.libgeos import GEOSFuncFactory # Getting the `free` routine used to free the memory allocated for # string pointers returned by GEOS. free = GEOSFuncFactory('GEOSFree') free.argtypes = [c_void_p] def last_arg_byref(args): "Returns the last C argument's value by reference." return args[-1]._obj.value def check_dbl(result, func, cargs): "Checks the status code and returns the double value passed in by reference." # Checking the status code if result != 1: return None # Double passed in by reference, return its value. return last_arg_byref(cargs) def check_geom(result, func, cargs): "Error checking on routines that return Geometries." if not result: raise GEOSException('Error encountered checking Geometry returned from GEOS C function "%s".' % func.__name__) return result def check_minus_one(result, func, cargs): "Error checking on routines that should not return -1." if result == -1: raise GEOSException('Error encountered in GEOS C function "%s".' % func.__name__) else: return result def check_predicate(result, func, cargs): "Error checking for unary/binary predicate functions." val = ord(result) # getting the ordinal from the character if val == 1: return True elif val == 0: return False else: raise GEOSException('Error encountered on GEOS C predicate function "%s".' % func.__name__) def check_sized_string(result, func, cargs): """ Error checking for routines that return explicitly sized strings. This frees the memory allocated by GEOS at the result pointer. """ if not result: raise GEOSException('Invalid string pointer returned by GEOS C function "%s"' % func.__name__) # A c_size_t object is passed in by reference for the second # argument on these routines, and its needed to determine the # correct size. s = string_at(result, last_arg_byref(cargs)) # Freeing the memory allocated within GEOS free(result) return s def check_string(result, func, cargs): """ Error checking for routines that return strings. This frees the memory allocated by GEOS at the result pointer. """ if not result: raise GEOSException('Error encountered checking string return value in GEOS C function "%s".' % func.__name__) # Getting the string value at the pointer address. s = string_at(result) # Freeing the memory allocated within GEOS free(result) return s def check_zero(result, func, cargs): "Error checking on routines that should not return 0." if result == 0: raise GEOSException('Error encountered in GEOS C function "%s".' % func.__name__) else: return result
bb8aef6a4f2f8f3233c0d9332ff9d983901b2274054c04dd187687ba6df85696
""" This module houses the GEOS ctypes prototype functions for the topological operations on geometries. """ from ctypes import c_double, c_int from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOSFuncFactory from django.contrib.gis.geos.prototypes.errcheck import ( check_geom, check_minus_one, check_string, ) from django.contrib.gis.geos.prototypes.geom import geos_char_p class Topology(GEOSFuncFactory): "For GEOS unary topology functions." argtypes = [GEOM_PTR] restype = GEOM_PTR errcheck = staticmethod(check_geom) # Topology Routines geos_boundary = Topology('GEOSBoundary') geos_buffer = Topology('GEOSBuffer', argtypes=[GEOM_PTR, c_double, c_int]) geos_centroid = Topology('GEOSGetCentroid') geos_convexhull = Topology('GEOSConvexHull') geos_difference = Topology('GEOSDifference', argtypes=[GEOM_PTR, GEOM_PTR]) geos_envelope = Topology('GEOSEnvelope') geos_intersection = Topology('GEOSIntersection', argtypes=[GEOM_PTR, GEOM_PTR]) geos_linemerge = Topology('GEOSLineMerge') geos_pointonsurface = Topology('GEOSPointOnSurface') geos_preservesimplify = Topology('GEOSTopologyPreserveSimplify', argtypes=[GEOM_PTR, c_double]) geos_simplify = Topology('GEOSSimplify', argtypes=[GEOM_PTR, c_double]) geos_symdifference = Topology('GEOSSymDifference', argtypes=[GEOM_PTR, GEOM_PTR]) geos_union = Topology('GEOSUnion', argtypes=[GEOM_PTR, GEOM_PTR]) geos_cascaded_union = GEOSFuncFactory('GEOSUnionCascaded', argtypes=[GEOM_PTR], restype=GEOM_PTR) geos_unary_union = GEOSFuncFactory('GEOSUnaryUnion', argtypes=[GEOM_PTR], restype=GEOM_PTR) # GEOSRelate returns a string, not a geometry. geos_relate = GEOSFuncFactory( 'GEOSRelate', argtypes=[GEOM_PTR, GEOM_PTR], restype=geos_char_p, errcheck=check_string ) # Linear referencing routines geos_project = GEOSFuncFactory( 'GEOSProject', argtypes=[GEOM_PTR, GEOM_PTR], restype=c_double, errcheck=check_minus_one ) geos_interpolate = Topology('GEOSInterpolate', argtypes=[GEOM_PTR, c_double]) geos_project_normalized = GEOSFuncFactory( 'GEOSProjectNormalized', argtypes=[GEOM_PTR, GEOM_PTR], restype=c_double, errcheck=check_minus_one ) geos_interpolate_normalized = Topology('GEOSInterpolateNormalized', argtypes=[GEOM_PTR, c_double])
5ab6b62ec38773b4f9c7c970b2028964baf80e01e158940e9b7cb7e196d62f42
import threading from ctypes import POINTER, Structure, byref, c_char, c_char_p, c_int, c_size_t from django.contrib.gis.geos.base import GEOSBase from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOSFuncFactory from django.contrib.gis.geos.prototypes.errcheck import ( check_geom, check_sized_string, check_string, ) from django.contrib.gis.geos.prototypes.geom import c_uchar_p, geos_char_p from django.utils import six from django.utils.encoding import force_bytes # ### The WKB/WKT Reader/Writer structures and pointers ### class WKTReader_st(Structure): pass class WKTWriter_st(Structure): pass class WKBReader_st(Structure): pass class WKBWriter_st(Structure): pass WKT_READ_PTR = POINTER(WKTReader_st) WKT_WRITE_PTR = POINTER(WKTWriter_st) WKB_READ_PTR = POINTER(WKBReader_st) WKB_WRITE_PTR = POINTER(WKBReader_st) # WKTReader routines wkt_reader_create = GEOSFuncFactory('GEOSWKTReader_create', restype=WKT_READ_PTR) wkt_reader_destroy = GEOSFuncFactory('GEOSWKTReader_destroy', argtypes=[WKT_READ_PTR]) wkt_reader_read = GEOSFuncFactory( 'GEOSWKTReader_read', argtypes=[WKT_READ_PTR, c_char_p], restype=GEOM_PTR, errcheck=check_geom ) # WKTWriter routines wkt_writer_create = GEOSFuncFactory('GEOSWKTWriter_create', restype=WKT_WRITE_PTR) wkt_writer_destroy = GEOSFuncFactory('GEOSWKTWriter_destroy', argtypes=[WKT_WRITE_PTR]) wkt_writer_write = GEOSFuncFactory( 'GEOSWKTWriter_write', argtypes=[WKT_WRITE_PTR, GEOM_PTR], restype=geos_char_p, errcheck=check_string ) wkt_writer_get_outdim = GEOSFuncFactory( 'GEOSWKTWriter_getOutputDimension', argtypes=[WKT_WRITE_PTR], restype=c_int ) wkt_writer_set_outdim = GEOSFuncFactory( 'GEOSWKTWriter_setOutputDimension', argtypes=[WKT_WRITE_PTR, c_int] ) wkt_writer_set_trim = GEOSFuncFactory('GEOSWKTWriter_setTrim', argtypes=[WKT_WRITE_PTR, c_char]) wkt_writer_set_precision = GEOSFuncFactory('GEOSWKTWriter_setRoundingPrecision', argtypes=[WKT_WRITE_PTR, c_int]) # WKBReader routines wkb_reader_create = GEOSFuncFactory('GEOSWKBReader_create', restype=WKB_READ_PTR) wkb_reader_destroy = GEOSFuncFactory('GEOSWKBReader_destroy', argtypes=[WKB_READ_PTR]) class WKBReadFunc(GEOSFuncFactory): # Although the function definitions take `const unsigned char *` # as their parameter, we use c_char_p here so the function may # take Python strings directly as parameters. Inside Python there # is not a difference between signed and unsigned characters, so # it is not a problem. argtypes = [WKB_READ_PTR, c_char_p, c_size_t] restype = GEOM_PTR errcheck = staticmethod(check_geom) wkb_reader_read = WKBReadFunc('GEOSWKBReader_read') wkb_reader_read_hex = WKBReadFunc('GEOSWKBReader_readHEX') # WKBWriter routines wkb_writer_create = GEOSFuncFactory('GEOSWKBWriter_create', restype=WKB_WRITE_PTR) wkb_writer_destroy = GEOSFuncFactory('GEOSWKBWriter_destroy', argtypes=[WKB_WRITE_PTR]) # WKB Writing prototypes. class WKBWriteFunc(GEOSFuncFactory): argtypes = [WKB_WRITE_PTR, GEOM_PTR, POINTER(c_size_t)] restype = c_uchar_p errcheck = staticmethod(check_sized_string) wkb_writer_write = WKBWriteFunc('GEOSWKBWriter_write') wkb_writer_write_hex = WKBWriteFunc('GEOSWKBWriter_writeHEX') # WKBWriter property getter/setter prototypes. 
class WKBWriterGet(GEOSFuncFactory): argtypes = [WKB_WRITE_PTR] restype = c_int class WKBWriterSet(GEOSFuncFactory): argtypes = [WKB_WRITE_PTR, c_int] wkb_writer_get_byteorder = WKBWriterGet('GEOSWKBWriter_getByteOrder') wkb_writer_set_byteorder = WKBWriterSet('GEOSWKBWriter_setByteOrder') wkb_writer_get_outdim = WKBWriterGet('GEOSWKBWriter_getOutputDimension') wkb_writer_set_outdim = WKBWriterSet('GEOSWKBWriter_setOutputDimension') wkb_writer_get_include_srid = WKBWriterGet('GEOSWKBWriter_getIncludeSRID', restype=c_char) wkb_writer_set_include_srid = WKBWriterSet('GEOSWKBWriter_setIncludeSRID', argtypes=[WKB_WRITE_PTR, c_char]) # ### Base I/O Class ### class IOBase(GEOSBase): "Base class for GEOS I/O objects." def __init__(self): # Getting the pointer with the constructor. self.ptr = self._constructor() # Loading the real destructor function at this point as doing it in # __del__ is too late (import error). self._destructor.func = self._destructor.get_func( *self._destructor.args, **self._destructor.kwargs ) def __del__(self): # Cleaning up with the appropriate destructor. try: self._destructor(self._ptr) except (AttributeError, TypeError): pass # Some part might already have been garbage collected # ### Base WKB/WKT Reading and Writing objects ### # Non-public WKB/WKT reader classes for internal use because # their `read` methods return _pointers_ instead of GEOSGeometry # objects. class _WKTReader(IOBase): _constructor = wkt_reader_create _destructor = wkt_reader_destroy ptr_type = WKT_READ_PTR def read(self, wkt): if not isinstance(wkt, (bytes, six.string_types)): raise TypeError return wkt_reader_read(self.ptr, force_bytes(wkt)) class _WKBReader(IOBase): _constructor = wkb_reader_create _destructor = wkb_reader_destroy ptr_type = WKB_READ_PTR def read(self, wkb): "Returns a _pointer_ to C GEOS Geometry object from the given WKB." if isinstance(wkb, six.memoryview): wkb_s = bytes(wkb) return wkb_reader_read(self.ptr, wkb_s, len(wkb_s)) elif isinstance(wkb, (bytes, six.string_types)): return wkb_reader_read_hex(self.ptr, wkb, len(wkb)) else: raise TypeError # ### WKB/WKT Writer Classes ### class WKTWriter(IOBase): _constructor = wkt_writer_create _destructor = wkt_writer_destroy ptr_type = WKT_WRITE_PTR _trim = False _precision = None def __init__(self, dim=2, trim=False, precision=None): super(WKTWriter, self).__init__() if bool(trim) != self._trim: self.trim = trim if precision is not None: self.precision = precision self.outdim = dim def write(self, geom): "Returns the WKT representation of the given geometry." 
return wkt_writer_write(self.ptr, geom.ptr) @property def outdim(self): return wkt_writer_get_outdim(self.ptr) @outdim.setter def outdim(self, new_dim): if new_dim not in (2, 3): raise ValueError('WKT output dimension must be 2 or 3') wkt_writer_set_outdim(self.ptr, new_dim) @property def trim(self): return self._trim @trim.setter def trim(self, flag): if bool(flag) != self._trim: self._trim = bool(flag) wkt_writer_set_trim(self.ptr, b'\x01' if flag else b'\x00') @property def precision(self): return self._precision @precision.setter def precision(self, precision): if (not isinstance(precision, int) or precision < 0) and precision is not None: raise AttributeError('WKT output rounding precision must be non-negative integer or None.') if precision != self._precision: self._precision = precision wkt_writer_set_precision(self.ptr, -1 if precision is None else precision) class WKBWriter(IOBase): _constructor = wkb_writer_create _destructor = wkb_writer_destroy ptr_type = WKB_WRITE_PTR def __init__(self, dim=2): super(WKBWriter, self).__init__() self.outdim = dim def write(self, geom): "Returns the WKB representation of the given geometry." return six.memoryview(wkb_writer_write(self.ptr, geom.ptr, byref(c_size_t()))) def write_hex(self, geom): "Returns the HEXEWKB representation of the given geometry." return wkb_writer_write_hex(self.ptr, geom.ptr, byref(c_size_t())) # ### WKBWriter Properties ### # Property for getting/setting the byteorder. def _get_byteorder(self): return wkb_writer_get_byteorder(self.ptr) def _set_byteorder(self, order): if order not in (0, 1): raise ValueError('Byte order parameter must be 0 (Big Endian) or 1 (Little Endian).') wkb_writer_set_byteorder(self.ptr, order) byteorder = property(_get_byteorder, _set_byteorder) # Property for getting/setting the output dimension. @property def outdim(self): return wkb_writer_get_outdim(self.ptr) @outdim.setter def outdim(self, new_dim): if new_dim not in (2, 3): raise ValueError('WKB output dimension must be 2 or 3') wkb_writer_set_outdim(self.ptr, new_dim) # Property for getting/setting the include srid flag. @property def srid(self): return bool(ord(wkb_writer_get_include_srid(self.ptr))) @srid.setter def srid(self, include): if include: flag = b'\x01' else: flag = b'\x00' wkb_writer_set_include_srid(self.ptr, flag) # `ThreadLocalIO` object holds instances of the WKT and WKB reader/writer # objects that are local to the thread. The `GEOSGeometry` internals # access these instances by calling the module-level functions, defined # below. class ThreadLocalIO(threading.local): wkt_r = None wkt_w = None wkb_r = None wkb_w = None ewkb_w = None thread_context = ThreadLocalIO() # These module-level routines return the I/O object that is local to the # thread. If the I/O object does not exist yet it will be initialized. 
def wkt_r(): if not thread_context.wkt_r: thread_context.wkt_r = _WKTReader() return thread_context.wkt_r def wkt_w(dim=2, trim=False, precision=None): if not thread_context.wkt_w: thread_context.wkt_w = WKTWriter(dim=dim, trim=trim, precision=precision) else: thread_context.wkt_w.outdim = dim thread_context.wkt_w.trim = trim thread_context.wkt_w.precision = precision return thread_context.wkt_w def wkb_r(): if not thread_context.wkb_r: thread_context.wkb_r = _WKBReader() return thread_context.wkb_r def wkb_w(dim=2): if not thread_context.wkb_w: thread_context.wkb_w = WKBWriter(dim=dim) else: thread_context.wkb_w.outdim = dim return thread_context.wkb_w def ewkb_w(dim=2): if not thread_context.ewkb_w: thread_context.ewkb_w = WKBWriter(dim=dim) thread_context.ewkb_w.srid = True else: thread_context.ewkb_w.outdim = dim return thread_context.ewkb_w
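A minimal usage sketch (assumes GEOS is available). GEOSGeometry uses these thread-local accessors internally, but they can also be exercised directly; the point below is purely illustrative.

from django.contrib.gis.geos import GEOSGeometry

pnt = GEOSGeometry('SRID=4326;POINT (5.0 23.0)')
print(wkt_w(trim=True).write(pnt))    # b'POINT (5 23)'
print(wkb_w(dim=2).write_hex(pnt))    # hex WKB, no SRID embedded
print(ewkb_w(dim=2).write_hex(pnt))   # hex EWKB, SRID flag enabled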
5b53b4be458a67c5f77384e2b932cbf14ecfafc88f3fcdd29c4b91a251129e78
""" This module is for the miscellaneous GEOS routines, particularly the ones that return the area, distance, and length. """ from ctypes import POINTER, c_double, c_int from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOSFuncFactory from django.contrib.gis.geos.prototypes.errcheck import check_dbl, check_string from django.contrib.gis.geos.prototypes.geom import geos_char_p from django.utils.six.moves import range __all__ = ['geos_area', 'geos_distance', 'geos_length', 'geos_isvalidreason'] class DblFromGeom(GEOSFuncFactory): """ Argument is a Geometry, return type is double that is passed in by reference as the last argument. """ restype = c_int # Status code returned errcheck = staticmethod(check_dbl) def get_func(self, num_geom=1): argtypes = [GEOM_PTR for i in range(num_geom)] argtypes += [POINTER(c_double)] self.argtypes = argtypes return super(DblFromGeom, self).get_func() # ### ctypes prototypes ### # Area, distance, and length prototypes. geos_area = DblFromGeom('GEOSArea') geos_distance = DblFromGeom('GEOSDistance', num_geom=2) geos_length = DblFromGeom('GEOSLength') geos_isvalidreason = GEOSFuncFactory( 'GEOSisValidReason', restype=geos_char_p, errcheck=check_string, argtypes=[GEOM_PTR] )
2c9f4b9062cb19b834f41370555191b5538f388df52eec6612adb8bcd0e9dc18
from ctypes import c_char

from django.contrib.gis.geos.libgeos import (
    GEOM_PTR, PREPGEOM_PTR, GEOSFuncFactory,
)
from django.contrib.gis.geos.prototypes.errcheck import check_predicate

# Prepared geometry constructor and destructors.
geos_prepare = GEOSFuncFactory('GEOSPrepare', argtypes=[GEOM_PTR], restype=PREPGEOM_PTR)
prepared_destroy = GEOSFuncFactory('GEOSPreparedGeom_destroy', argtypes=[PREPGEOM_PTR])


# Prepared geometry binary predicate support.
class PreparedPredicate(GEOSFuncFactory):
    argtypes = [PREPGEOM_PTR, GEOM_PTR]
    restype = c_char
    errcheck = staticmethod(check_predicate)


prepared_contains = PreparedPredicate('GEOSPreparedContains')
prepared_contains_properly = PreparedPredicate('GEOSPreparedContainsProperly')
prepared_covers = PreparedPredicate('GEOSPreparedCovers')
prepared_crosses = PreparedPredicate('GEOSPreparedCrosses')
prepared_disjoint = PreparedPredicate('GEOSPreparedDisjoint')
prepared_intersects = PreparedPredicate('GEOSPreparedIntersects')
prepared_overlaps = PreparedPredicate('GEOSPreparedOverlaps')
prepared_touches = PreparedPredicate('GEOSPreparedTouches')
prepared_within = PreparedPredicate('GEOSPreparedWithin')
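A minimal sketch (illustrative): prepare a geometry once, run cheap repeated predicate checks against it, and release the prepared wrapper before the base geometry.

from django.contrib.gis.geos.prototypes.geom import destroy_geom, from_wkt

poly = from_wkt(b'POLYGON ((0 0, 0 10, 10 10, 10 0, 0 0))')
prep = geos_prepare(poly)
pnt = from_wkt(b'POINT (5 5)')
print(prepared_contains(prep, pnt))   # True
prepared_destroy(prep)                # destroy the prepared wrapper first
destroy_geom(pnt)
destroy_geom(poly)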
114f095b9f826e8168060eba8738fe132918a9b124ca32ea9df7c38b6aeeee6b
from ctypes import POINTER, c_double, c_int, c_uint from django.contrib.gis.geos.libgeos import CS_PTR, GEOM_PTR, GEOSFuncFactory from django.contrib.gis.geos.prototypes.errcheck import ( GEOSException, last_arg_byref, ) # ## Error-checking routines specific to coordinate sequences. ## def check_cs_op(result, func, cargs): "Checks the status code of a coordinate sequence operation." if result == 0: raise GEOSException('Could not set value on coordinate sequence') else: return result def check_cs_get(result, func, cargs): "Checking the coordinate sequence retrieval." check_cs_op(result, func, cargs) # Object in by reference, return its value. return last_arg_byref(cargs) # ## Coordinate sequence prototype factory classes. ## class CsInt(GEOSFuncFactory): "For coordinate sequence routines that return an integer." argtypes = [CS_PTR, POINTER(c_uint)] restype = c_int errcheck = staticmethod(check_cs_get) class CsOperation(GEOSFuncFactory): "For coordinate sequence operations." restype = c_int def get_func(self, ordinate=False, get=False): if get: # Get routines have double parameter passed-in by reference. self.errcheck = check_cs_get dbl_param = POINTER(c_double) else: self.errcheck = check_cs_op dbl_param = c_double if ordinate: # Get/Set ordinate routines have an extra uint parameter. self.argtypes = [CS_PTR, c_uint, c_uint, dbl_param] else: self.argtypes = [CS_PTR, c_uint, dbl_param] return super(CsOperation, self).get_func() class CsOutput(GEOSFuncFactory): restype = CS_PTR def get_func(self, argtypes): self.argtypes = argtypes return super(CsOutput, self).get_func() @staticmethod def errcheck(result, func, cargs): if not result: raise GEOSException( 'Error encountered checking Coordinate Sequence returned from GEOS ' 'C function "%s".' % func.__name__ ) return result # ## Coordinate Sequence ctypes prototypes ## # Coordinate Sequence constructors & cloning. cs_clone = CsOutput('GEOSCoordSeq_clone', [CS_PTR]) create_cs = CsOutput('GEOSCoordSeq_create', [c_uint, c_uint]) get_cs = CsOutput('GEOSGeom_getCoordSeq', [GEOM_PTR]) # Getting, setting ordinate cs_getordinate = CsOperation('GEOSCoordSeq_getOrdinate', ordinate=True, get=True) cs_setordinate = CsOperation('GEOSCoordSeq_setOrdinate', ordinate=True) # For getting, x, y, z cs_getx = CsOperation('GEOSCoordSeq_getX', get=True) cs_gety = CsOperation('GEOSCoordSeq_getY', get=True) cs_getz = CsOperation('GEOSCoordSeq_getZ', get=True) # For setting, x, y, z cs_setx = CsOperation('GEOSCoordSeq_setX') cs_sety = CsOperation('GEOSCoordSeq_setY') cs_setz = CsOperation('GEOSCoordSeq_setZ') # These routines return size & dimensions. cs_getsize = CsInt('GEOSCoordSeq_getSize') cs_getdims = CsInt('GEOSCoordSeq_getDimensions')
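A minimal sketch (illustrative): build a 2D coordinate sequence, set and read back an ordinate, then hand ownership of the sequence to a point constructor, after which only the geometry needs destroying.

from ctypes import byref, c_double, c_uint

from django.contrib.gis.geos.prototypes.geom import create_point, destroy_geom

cs = create_cs(c_uint(1), c_uint(2))   # one coordinate, two dimensions
cs_setx(cs, 0, 5.0)
cs_sety(cs, 0, 23.0)
print(cs_getx(cs, 0, byref(c_double())))   # 5.0, via check_cs_get
pnt = create_point(cs)    # GEOSGeom_createPoint takes ownership of cs
destroy_geom(pnt)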
75e3a07fca875ac45475ce3372eb9193773b621b4ee889c013ac4a6e55509ee8
# -*- encoding: utf-8 -*- """ Tests for django.core.servers. """ from __future__ import unicode_literals import contextlib import errno import os import socket from django.test import LiveServerTestCase, override_settings from django.utils._os import upath from django.utils.http import urlencode from django.utils.six import text_type from django.utils.six.moves.urllib.error import HTTPError from django.utils.six.moves.urllib.request import urlopen from .models import Person TEST_ROOT = os.path.dirname(upath(__file__)) TEST_SETTINGS = { 'MEDIA_URL': '/media/', 'MEDIA_ROOT': os.path.join(TEST_ROOT, 'media'), 'STATIC_URL': '/static/', 'STATIC_ROOT': os.path.join(TEST_ROOT, 'static'), } @override_settings(ROOT_URLCONF='servers.urls', **TEST_SETTINGS) class LiveServerBase(LiveServerTestCase): available_apps = [ 'servers', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', ] fixtures = ['testdata.json'] def urlopen(self, url): return urlopen(self.live_server_url + url) class LiveServerAddress(LiveServerBase): @classmethod def setUpClass(cls): super(LiveServerAddress, cls).setUpClass() # put it in a list to prevent descriptor lookups in test cls.live_server_url_test = [cls.live_server_url] def test_live_server_url_is_class_property(self): self.assertIsInstance(self.live_server_url_test[0], text_type) self.assertEqual(self.live_server_url_test[0], self.live_server_url) class LiveServerViews(LiveServerBase): def test_404(self): """ Ensure that the LiveServerTestCase serves 404s. Refs #2879. """ with self.assertRaises(HTTPError) as err: self.urlopen('/') self.assertEqual(err.exception.code, 404, 'Expected 404 response') def test_view(self): """ Ensure that the LiveServerTestCase serves views. Refs #2879. """ with contextlib.closing(self.urlopen('/example_view/')) as f: self.assertEqual(f.read(), b'example view') def test_static_files(self): """ Ensure that the LiveServerTestCase serves static files. Refs #2879. """ with contextlib.closing(self.urlopen('/static/example_static_file.txt')) as f: self.assertEqual(f.read().rstrip(b'\r\n'), b'example static file') def test_no_collectstatic_emulation(self): """ Test that LiveServerTestCase reports a 404 status code when HTTP client tries to access a static file that isn't explicitly put under STATIC_ROOT. """ with self.assertRaises(HTTPError) as err: self.urlopen('/static/another_app/another_app_static_file.txt') self.assertEqual(err.exception.code, 404, 'Expected 404 response') def test_media_files(self): """ Ensure that the LiveServerTestCase serves media files. Refs #2879. """ with contextlib.closing(self.urlopen('/media/example_media_file.txt')) as f: self.assertEqual(f.read().rstrip(b'\r\n'), b'example media file') def test_environ(self): with contextlib.closing(self.urlopen('/environ_view/?%s' % urlencode({'q': 'тест'}))) as f: self.assertIn(b"QUERY_STRING: 'q=%D1%82%D0%B5%D1%81%D1%82'", f.read()) class LiveServerDatabase(LiveServerBase): def test_fixtures_loaded(self): """ Ensure that fixtures are properly loaded and visible to the live server thread. Refs #2879. """ with contextlib.closing(self.urlopen('/model_view/')) as f: self.assertEqual(f.read().splitlines(), [b'jane', b'robert']) def test_database_writes(self): """ Ensure that data written to the database by a view can be read. Refs #2879. 
""" self.urlopen('/create_model_instance/') self.assertQuerysetEqual( Person.objects.all().order_by('pk'), ['jane', 'robert', 'emily'], lambda b: b.name ) class LiveServerPort(LiveServerBase): def test_port_bind(self): """ Each LiveServerTestCase binds to a unique port or fails to start a server thread when run concurrently (#26011). """ TestCase = type(str("TestCase"), (LiveServerBase,), {}) try: TestCase.setUpClass() except socket.error as e: if e.errno == errno.EADDRINUSE: # We're out of ports, LiveServerTestCase correctly fails with # a socket error. return # Unexpected error. raise try: # We've acquired a port, ensure our server threads acquired # different addresses. self.assertNotEqual( self.live_server_url, TestCase.live_server_url, "Acquired duplicate server addresses for server threads: %s" % self.live_server_url ) finally: if hasattr(TestCase, 'server_thread'): TestCase.server_thread.terminate()
22bf2008eb63e37145cd7abd3ac82f0d86dc8a1c78ce7e6024066d0bf6edde77
from django.db import models


class Person(models.Model):
    name = models.CharField(max_length=256)
f3ce23a56d64ec5a9bff0526e0411a72d69d93a379830010e8d7a2236391be70
from django.conf.urls import url

from . import views

urlpatterns = [
    url(r'^example_view/$', views.example_view),
    url(r'^model_view/$', views.model_view),
    url(r'^create_model_instance/$', views.create_model_instance),
    url(r'^environ_view/$', views.environ_view),
]
df54adac5aa2e679f6f62565f6bc0d521f36a1052835371146eac758e8042153
import logging from io import BytesIO from django.core.handlers.wsgi import WSGIRequest from django.core.servers.basehttp import WSGIRequestHandler from django.test import SimpleTestCase from django.test.client import RequestFactory from django.test.utils import patch_logger class Stub(object): def __init__(self, **kwargs): self.__dict__.update(kwargs) def sendall(self, data): self.makefile('wb').write(data) class WSGIRequestHandlerTestCase(SimpleTestCase): def test_log_message(self): # Silence the django.server logger by replacing its StreamHandler with # NullHandler. logger = logging.getLogger('django.server') original_handlers = logger.handlers logger.handlers = [logging.NullHandler()] try: request = WSGIRequest(RequestFactory().get('/').environ) request.makefile = lambda *args, **kwargs: BytesIO() handler = WSGIRequestHandler(request, '192.168.0.2', None) level_status_codes = { 'info': [200, 301, 304], 'warning': [400, 403, 404], 'error': [500, 503], } def _log_level_code(level, status_code): with patch_logger('django.server', level) as messages: handler.log_message('GET %s %s', 'A', str(status_code)) return messages for level, status_codes in level_status_codes.items(): for status_code in status_codes: # The correct level gets the message. messages = _log_level_code(level, status_code) self.assertIn('GET A %d' % status_code, messages[0]) # Incorrect levels shouldn't have any messages. for wrong_level in level_status_codes.keys(): if wrong_level != level: messages = _log_level_code(wrong_level, status_code) self.assertEqual(len(messages), 0) finally: logger.handlers = original_handlers def test_https(self): request = WSGIRequest(RequestFactory().get('/').environ) request.makefile = lambda *args, **kwargs: BytesIO() handler = WSGIRequestHandler(request, '192.168.0.2', None) with patch_logger('django.server', 'error') as messages: handler.log_message("GET %s %s", str('\x16\x03'), "4") self.assertIn( "You're accessing the development server over HTTPS, " "but it only supports HTTP.", messages[0] ) def test_strips_underscore_headers(self): """WSGIRequestHandler ignores headers containing underscores. This follows the lead of nginx and Apache 2.4, and is to avoid ambiguity between dashes and underscores in mapping to WSGI environ, which can have security implications. """ def test_app(environ, start_response): """A WSGI app that just reflects its HTTP environ.""" start_response('200 OK', []) http_environ_items = sorted( '%s:%s' % (k, v) for k, v in environ.items() if k.startswith('HTTP_') ) yield (','.join(http_environ_items)).encode('utf-8') rfile = BytesIO() rfile.write(b"GET / HTTP/1.0\r\n") rfile.write(b"Some-Header: good\r\n") rfile.write(b"Some_Header: bad\r\n") rfile.write(b"Other_Header: bad\r\n") rfile.seek(0) # WSGIRequestHandler closes the output file; we need to make this a # no-op so we can still read its contents. class UnclosableBytesIO(BytesIO): def close(self): pass wfile = UnclosableBytesIO() def makefile(mode, *a, **kw): if mode == 'rb': return rfile elif mode == 'wb': return wfile request = Stub(makefile=makefile) server = Stub(base_environ={}, get_app=lambda: test_app) # We don't need to check stderr, but we don't want it in test output with patch_logger('django.server', 'info'): # instantiating a handler runs the request as side effect WSGIRequestHandler(request, '192.168.0.2', server) wfile.seek(0) body = list(wfile.readlines())[-1] self.assertEqual(body, b'HTTP_SOME_HEADER:good')
e9cb45e8d2b178b14592402dd90066346f7028393bf396c0b64b6b2bb4569f5b
from django.http import HttpResponse

from .models import Person


def example_view(request):
    return HttpResponse('example view')


def model_view(request):
    people = Person.objects.all()
    return HttpResponse('\n'.join(person.name for person in people))


def create_model_instance(request):
    person = Person(name='emily')
    person.save()
    return HttpResponse('')


def environ_view(request):
    return HttpResponse("\n".join("%s: %r" % (k, v) for k, v in request.environ.items()))
a9816ae38279299cf6811924b3fbdf512621a31023459033f76e440280813020
class EmptyRouter(object):
    pass


class TestRouter(object):
    def allow_migrate(self, db, app_label, model_name=None, **hints):
        """
        The Tribble model should be the only one to appear in the
        'other' db.
        """
        if model_name == 'tribble':
            return db == 'other'
        elif db == 'other':
            return False
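A minimal settings sketch showing how a router like TestRouter is activated (assuming this module is importable as migrations.routers, the path Django's own multi-database migration tests use); migrate then consults allow_migrate() for every app/model/database combination. The database names and file paths are illustrative.

DATABASE_ROUTERS = ['migrations.routers.TestRouter']
DATABASES = {
    'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'default.sqlite3'},
    'other': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'other.sqlite3'},
}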
32d84468228ba86476852031d8ecf6de6571713e27217c6ec19e73283e9d8b7f
# -*- coding: utf-8 -*- from django.db import migrations, models from django.db.migrations import operations from django.db.migrations.optimizer import MigrationOptimizer from django.test import SimpleTestCase from .models import EmptyManager, UnicodeModel class OptimizerTests(SimpleTestCase): """ Tests the migration autodetector. """ def optimize(self, operations, app_label): """ Handy shortcut for getting results + number of loops """ optimizer = MigrationOptimizer() return optimizer.optimize(operations, app_label), optimizer._iterations def assertOptimizesTo(self, operations, expected, exact=None, less_than=None, app_label=None): result, iterations = self.optimize(operations, app_label) result = [repr(f.deconstruct()) for f in result] expected = [repr(f.deconstruct()) for f in expected] self.assertEqual(expected, result) if exact is not None and iterations != exact: raise self.failureException( "Optimization did not take exactly %s iterations (it took %s)" % (exact, iterations) ) if less_than is not None and iterations >= less_than: raise self.failureException( "Optimization did not take less than %s iterations (it took %s)" % (less_than, iterations) ) def assertDoesNotOptimize(self, operations, **kwargs): self.assertOptimizesTo(operations, operations, **kwargs) def test_single(self): """ Tests that the optimizer does nothing on a single operation, and that it does it in just one pass. """ self.assertOptimizesTo( [migrations.DeleteModel("Foo")], [migrations.DeleteModel("Foo")], exact=1, ) def test_create_delete_model(self): """ CreateModel and DeleteModel should collapse into nothing. """ self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.DeleteModel("Foo"), ], [], ) def test_create_rename_model(self): """ CreateModel should absorb RenameModels. """ managers = [('objects', EmptyManager())] self.assertOptimizesTo( [ migrations.CreateModel( name="Foo", fields=[("name", models.CharField(max_length=255))], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), migrations.RenameModel("Foo", "Bar"), ], [ migrations.CreateModel( "Bar", [("name", models.CharField(max_length=255))], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ) ], ) def test_rename_model_self(self): """ RenameModels should absorb themselves. """ self.assertOptimizesTo( [ migrations.RenameModel("Foo", "Baa"), migrations.RenameModel("Baa", "Bar"), ], [ migrations.RenameModel("Foo", "Bar"), ], ) def _test_create_alter_foo_delete_model(self, alter_foo): """ CreateModel, AlterModelTable, AlterUniqueTogether/AlterIndexTogether/ AlterOrderWithRespectTo, and DeleteModel should collapse into nothing. """ self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.AlterModelTable("Foo", "woohoo"), alter_foo, migrations.DeleteModel("Foo"), ], [], ) def test_create_alter_unique_delete_model(self): self._test_create_alter_foo_delete_model(migrations.AlterUniqueTogether("Foo", [["a", "b"]])) def test_create_alter_index_delete_model(self): self._test_create_alter_foo_delete_model(migrations.AlterIndexTogether("Foo", [["a", "b"]])) def test_create_alter_owrt_delete_model(self): self._test_create_alter_foo_delete_model(migrations.AlterOrderWithRespectTo("Foo", "a")) def _test_alter_alter_model(self, alter_foo, alter_bar): """ Two AlterUniqueTogether/AlterIndexTogether/AlterOrderWithRespectTo should collapse into the second. 
""" self.assertOptimizesTo( [ alter_foo, alter_bar, ], [ alter_bar, ], ) def test_alter_alter_table_model(self): self._test_alter_alter_model( migrations.AlterModelTable("Foo", "a"), migrations.AlterModelTable("Foo", "b"), ) def test_alter_alter_unique_model(self): self._test_alter_alter_model( migrations.AlterUniqueTogether("Foo", [["a", "b"]]), migrations.AlterUniqueTogether("Foo", [["a", "c"]]), ) def test_alter_alter_index_model(self): self._test_alter_alter_model( migrations.AlterIndexTogether("Foo", [["a", "b"]]), migrations.AlterIndexTogether("Foo", [["a", "c"]]), ) def test_alter_alter_owrt_model(self): self._test_alter_alter_model( migrations.AlterOrderWithRespectTo("Foo", "a"), migrations.AlterOrderWithRespectTo("Foo", "b"), ) def test_optimize_through_create(self): """ We should be able to optimize away create/delete through a create or delete of a different model, but only if the create operation does not mention the model at all. """ # These should work self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())]), migrations.DeleteModel("Foo"), ], [ migrations.CreateModel("Bar", [("size", models.IntegerField())]), ], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())]), migrations.DeleteModel("Bar"), migrations.DeleteModel("Foo"), ], [], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())]), migrations.DeleteModel("Foo"), migrations.DeleteModel("Bar"), ], [], ) # This should not work - FK should block it self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]), migrations.DeleteModel("Foo"), ], [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]), migrations.DeleteModel("Foo"), ], ) # The same operations should be optimized if app_label is specified and # a FK references a model from the other app. self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]), migrations.DeleteModel("Foo"), ], [ migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]), ], app_label="otherapp", ) # But it shouldn't work if a FK references a model with the same # app_label. 
self.assertDoesNotOptimize( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]), migrations.DeleteModel("Foo"), ], app_label="testapp", ) # This should not work - bases should block it self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo", )), migrations.DeleteModel("Foo"), ], [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo", )), migrations.DeleteModel("Foo"), ], ) # The same operations should be optimized if app_label and none of # bases belong to that app. self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo", )), migrations.DeleteModel("Foo"), ], [ migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo", )), ], app_label="otherapp", ) # But it shouldn't work if some of bases belongs to the specified app. self.assertDoesNotOptimize( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo", )), migrations.DeleteModel("Foo"), ], app_label="testapp", ) def test_create_model_add_field(self): """ AddField should optimize into CreateModel. """ managers = [('objects', EmptyManager())] self.assertOptimizesTo( [ migrations.CreateModel( name="Foo", fields=[("name", models.CharField(max_length=255))], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), migrations.AddField("Foo", "age", models.IntegerField()), ], [ migrations.CreateModel( name="Foo", fields=[ ("name", models.CharField(max_length=255)), ("age", models.IntegerField()), ], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), ], ) def test_create_model_add_field_not_through_fk(self): """ AddField should NOT optimize into CreateModel if it's an FK to a model that's between them. """ self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Link", [("url", models.TextField())]), migrations.AddField("Foo", "link", models.ForeignKey("migrations.Link", models.CASCADE)), ], [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Link", [("url", models.TextField())]), migrations.AddField("Foo", "link", models.ForeignKey("migrations.Link", models.CASCADE)), ], ) def test_create_model_add_field_not_through_m2m_through(self): """ AddField should NOT optimize into CreateModel if it's an M2M using a through that's created between them. """ # Note: The middle model is not actually a valid through model, # but that doesn't matter, as we never render it. 
self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("LinkThrough", []), migrations.AddField( "Foo", "link", models.ManyToManyField("migrations.Link", through="migrations.LinkThrough") ), ], [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("LinkThrough", []), migrations.AddField( "Foo", "link", models.ManyToManyField("migrations.Link", through="migrations.LinkThrough") ), ], ) def test_create_model_alter_field(self): """ AlterField should optimize into CreateModel. """ managers = [('objects', EmptyManager())] self.assertOptimizesTo( [ migrations.CreateModel( name="Foo", fields=[("name", models.CharField(max_length=255))], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), migrations.AlterField("Foo", "name", models.IntegerField()), ], [ migrations.CreateModel( name="Foo", fields=[ ("name", models.IntegerField()), ], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), ], ) def test_create_model_rename_field(self): """ RenameField should optimize into CreateModel. """ managers = [('objects', EmptyManager())] self.assertOptimizesTo( [ migrations.CreateModel( name="Foo", fields=[("name", models.CharField(max_length=255))], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), migrations.RenameField("Foo", "name", "title"), ], [ migrations.CreateModel( name="Foo", fields=[ ("title", models.CharField(max_length=255)), ], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), ], ) def test_add_field_rename_field(self): """ RenameField should optimize into AddField """ self.assertOptimizesTo( [ migrations.AddField("Foo", "name", models.CharField(max_length=255)), migrations.RenameField("Foo", "name", "title"), ], [ migrations.AddField("Foo", "title", models.CharField(max_length=255)), ], ) def test_alter_field_rename_field(self): """ RenameField should optimize to the other side of AlterField, and into itself. """ self.assertOptimizesTo( [ migrations.AlterField("Foo", "name", models.CharField(max_length=255)), migrations.RenameField("Foo", "name", "title"), migrations.RenameField("Foo", "title", "nom"), ], [ migrations.RenameField("Foo", "name", "nom"), migrations.AlterField("Foo", "nom", models.CharField(max_length=255)), ], ) def test_create_model_remove_field(self): """ RemoveField should optimize into CreateModel. """ managers = [('objects', EmptyManager())] self.assertOptimizesTo( [ migrations.CreateModel( name="Foo", fields=[ ("name", models.CharField(max_length=255)), ("age", models.IntegerField()), ], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), migrations.RemoveField("Foo", "age"), ], [ migrations.CreateModel( name="Foo", fields=[ ("name", models.CharField(max_length=255)), ], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), ], ) def test_add_field_alter_field(self): """ AlterField should optimize into AddField. 
""" self.assertOptimizesTo( [ migrations.AddField("Foo", "age", models.IntegerField()), migrations.AlterField("Foo", "age", models.FloatField(default=2.4)), ], [ migrations.AddField("Foo", name="age", field=models.FloatField(default=2.4)), ], ) def test_add_field_delete_field(self): """ RemoveField should cancel AddField """ self.assertOptimizesTo( [ migrations.AddField("Foo", "age", models.IntegerField()), migrations.RemoveField("Foo", "age"), ], [], ) def test_alter_field_delete_field(self): """ RemoveField should absorb AlterField """ self.assertOptimizesTo( [ migrations.AlterField("Foo", "age", models.IntegerField()), migrations.RemoveField("Foo", "age"), ], [ migrations.RemoveField("Foo", "age"), ], ) def _test_create_alter_foo_field(self, alter): """ CreateModel, AlterFooTogether/AlterOrderWithRespectTo followed by an add/alter/rename field should optimize to CreateModel and the Alter* """ # AddField self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.AddField("Foo", "c", models.IntegerField()), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("c", models.IntegerField()), ]), alter, ], ) # AlterField self.assertDoesNotOptimize( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.AlterField("Foo", "b", models.CharField(max_length=255)), ], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("c", models.IntegerField()), ]), alter, migrations.AlterField("Foo", "c", models.CharField(max_length=255)), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("c", models.CharField(max_length=255)), ]), alter, ], ) # RenameField self.assertDoesNotOptimize( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.RenameField("Foo", "b", "c"), ], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.RenameField("Foo", "b", "x"), migrations.RenameField("Foo", "x", "c"), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.RenameField("Foo", "b", "c"), ], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("c", models.IntegerField()), ]), alter, migrations.RenameField("Foo", "c", "d"), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("d", models.IntegerField()), ]), alter, ], ) # RemoveField self.assertDoesNotOptimize( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.RemoveField("Foo", "b"), ], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("c", models.IntegerField()), ]), alter, migrations.RemoveField("Foo", "c"), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, ], ) def test_create_alter_unique_field(self): self._test_create_alter_foo_field(migrations.AlterUniqueTogether("Foo", [["a", "b"]])) def test_create_alter_index_field(self): self._test_create_alter_foo_field(migrations.AlterIndexTogether("Foo", [["a", "b"]])) def test_create_alter_owrt_field(self): 
self._test_create_alter_foo_field(migrations.AlterOrderWithRespectTo("Foo", "b")) def test_optimize_through_fields(self): """ Checks that field-level through checking is working. This should manage to collapse model Foo to nonexistence, and model Bar to a single IntegerField called "width". """ self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())]), migrations.AddField("Foo", "age", models.IntegerField()), migrations.AddField("Bar", "width", models.IntegerField()), migrations.AlterField("Foo", "age", models.IntegerField()), migrations.RenameField("Bar", "size", "dimensions"), migrations.RemoveField("Foo", "age"), migrations.RenameModel("Foo", "Phou"), migrations.RemoveField("Bar", "dimensions"), migrations.RenameModel("Phou", "Fou"), migrations.DeleteModel("Fou"), ], [ migrations.CreateModel("Bar", [("width", models.IntegerField())]), ], ) def test_optimize_elidable_operation(self): elidable_operation = operations.base.Operation() elidable_operation.elidable = True self.assertOptimizesTo( [ elidable_operation, migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), elidable_operation, migrations.CreateModel("Bar", [("size", models.IntegerField())]), elidable_operation, migrations.RenameModel("Foo", "Phou"), migrations.DeleteModel("Bar"), elidable_operation, ], [ migrations.CreateModel("Phou", [("name", models.CharField(max_length=255))]), ], )
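A minimal sketch (illustrative) of the API these tests drive: feed a short operation list straight to MigrationOptimizer and inspect the collapsed result; here the create/add/delete sequence optimizes away entirely.

from django.db import migrations, models
from django.db.migrations.optimizer import MigrationOptimizer

ops = [
    migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
    migrations.AddField("Foo", "age", models.IntegerField()),
    migrations.DeleteModel("Foo"),
]
print(MigrationOptimizer().optimize(ops, app_label="testapp"))   # []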
1a27dc3e0df48f25cec3768229c098d942fdbda57f029b7cd3f1ba9096daafe5
from __future__ import unicode_literals

from django.db.migrations.questioner import MigrationQuestioner
from django.test import SimpleTestCase
from django.test.utils import override_settings


class QuestionerTests(SimpleTestCase):
    @override_settings(
        INSTALLED_APPS=['migrations'],
        MIGRATION_MODULES={'migrations': None},
    )
    def test_ask_initial_with_disabled_migrations(self):
        questioner = MigrationQuestioner()
        self.assertIs(False, questioner.ask_initial('migrations'))
a93fe67996ce5459176f014c4669e53b466bdb90cbcf5c6d35a9addc50d125e8
from django.apps.registry import apps as global_apps from django.db import connection from django.db.migrations.exceptions import InvalidMigrationPlan from django.db.migrations.executor import MigrationExecutor from django.db.migrations.graph import MigrationGraph from django.db.migrations.recorder import MigrationRecorder from django.db.utils import DatabaseError from django.test import TestCase, modify_settings, override_settings from .test_base import MigrationTestBase @modify_settings(INSTALLED_APPS={'append': 'migrations2'}) class ExecutorTests(MigrationTestBase): """ Tests the migration executor (full end-to-end running). Bear in mind that if these are failing you should fix the other test failures first, as they may be propagating into here. """ available_apps = ["migrations", "migrations2", "django.contrib.auth", "django.contrib.contenttypes"] @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_run(self): """ Tests running a simple set of migrations. """ executor = MigrationExecutor(connection) # Let's look at the plan first and make sure it's up to scratch plan = executor.migration_plan([("migrations", "0002_second")]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), (executor.loader.graph.nodes["migrations", "0002_second"], False), ], ) # Were the tables there before? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") # Alright, let's try running it executor.migrate([("migrations", "0002_second")]) # Are the tables there now? self.assertTableExists("migrations_author") self.assertTableExists("migrations_book") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Alright, let's undo what we did plan = executor.migration_plan([("migrations", None)]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0002_second"], True), (executor.loader.graph.nodes["migrations", "0001_initial"], True), ], ) executor.migrate([("migrations", None)]) # Are the tables gone? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_run_with_squashed(self): """ Tests running a squashed migration from zero (should ignore what it replaces) """ executor = MigrationExecutor(connection) # Check our leaf node is the squashed one leaves = [key for key in executor.loader.graph.leaf_nodes() if key[0] == "migrations"] self.assertEqual(leaves, [("migrations", "0001_squashed_0002")]) # Check the plan plan = executor.migration_plan([("migrations", "0001_squashed_0002")]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_squashed_0002"], False), ], ) # Were the tables there before? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") # Alright, let's try running it executor.migrate([("migrations", "0001_squashed_0002")]) # Are the tables there now? self.assertTableExists("migrations_author") self.assertTableExists("migrations_book") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Alright, let's undo what we did. Should also just use squashed. plan = executor.migration_plan([("migrations", None)]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_squashed_0002"], True), ], ) executor.migrate([("migrations", None)]) # Are the tables gone? 
self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}) def test_non_atomic_migration(self): """ Applying a non-atomic migration works as expected. """ executor = MigrationExecutor(connection) with self.assertRaisesMessage(RuntimeError, "Abort migration"): executor.migrate([("migrations", "0001_initial")]) self.assertTableExists("migrations_publisher") migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps Publisher = migrations_apps.get_model("migrations", "Publisher") self.assertTrue(Publisher.objects.exists()) self.assertTableNotExists("migrations_book") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_atomic_operation"}) def test_atomic_operation_in_non_atomic_migration(self): """ An atomic operation is properly rolled back inside a non-atomic migration. """ executor = MigrationExecutor(connection) with self.assertRaisesMessage(RuntimeError, "Abort migration"): executor.migrate([("migrations", "0001_initial")]) migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps Editor = migrations_apps.get_model("migrations", "Editor") self.assertFalse(Editor.objects.exists()) @override_settings(MIGRATION_MODULES={ "migrations": "migrations.test_migrations", "migrations2": "migrations2.test_migrations_2", }) def test_empty_plan(self): """ Tests that re-planning a full migration of a fully-migrated set doesn't perform spurious unmigrations and remigrations. There was previously a bug where the executor just always performed the backwards plan for applied migrations - which even for the most recent migration in an app, might include other, dependent apps, and these were being unmigrated. """ # Make the initial plan, check it executor = MigrationExecutor(connection) plan = executor.migration_plan([ ("migrations", "0002_second"), ("migrations2", "0001_initial"), ]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), (executor.loader.graph.nodes["migrations", "0002_second"], False), (executor.loader.graph.nodes["migrations2", "0001_initial"], False), ], ) # Fake-apply all migrations executor.migrate([ ("migrations", "0002_second"), ("migrations2", "0001_initial") ], fake=True) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Now plan a second time and make sure it's empty plan = executor.migration_plan([ ("migrations", "0002_second"), ("migrations2", "0001_initial"), ]) self.assertEqual(plan, []) # The resulting state should include applied migrations. state = executor.migrate([ ("migrations", "0002_second"), ("migrations2", "0001_initial"), ]) self.assertIn(('migrations', 'book'), state.models) self.assertIn(('migrations', 'author'), state.models) self.assertIn(('migrations2', 'otherauthor'), state.models) # Erase all the fake records executor.recorder.record_unapplied("migrations2", "0001_initial") executor.recorder.record_unapplied("migrations", "0002_second") executor.recorder.record_unapplied("migrations", "0001_initial") @override_settings(MIGRATION_MODULES={ "migrations": "migrations.test_migrations", "migrations2": "migrations2.test_migrations_2_no_deps", }) def test_mixed_plan_not_supported(self): """ Although the MigrationExecutor interfaces allows for mixed migration plans (combined forwards and backwards migrations) this is not supported. 
""" # Prepare for mixed plan executor = MigrationExecutor(connection) plan = executor.migration_plan([("migrations", "0002_second")]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), (executor.loader.graph.nodes["migrations", "0002_second"], False), ], ) executor.migrate(None, plan) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() self.assertIn(('migrations', '0001_initial'), executor.loader.applied_migrations) self.assertIn(('migrations', '0002_second'), executor.loader.applied_migrations) self.assertNotIn(('migrations2', '0001_initial'), executor.loader.applied_migrations) # Generate mixed plan plan = executor.migration_plan([ ("migrations", None), ("migrations2", "0001_initial"), ]) msg = ( 'Migration plans with both forwards and backwards migrations are ' 'not supported. Please split your migration process into separate ' 'plans of only forwards OR backwards migrations.' ) with self.assertRaisesMessage(InvalidMigrationPlan, msg) as cm: executor.migrate(None, plan) self.assertEqual( cm.exception.args[1], [ (executor.loader.graph.nodes["migrations", "0002_second"], True), (executor.loader.graph.nodes["migrations", "0001_initial"], True), (executor.loader.graph.nodes["migrations2", "0001_initial"], False), ], ) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() executor.migrate([ ("migrations", None), ("migrations2", None), ]) # Are the tables gone? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") self.assertTableNotExists("migrations2_otherauthor") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_soft_apply(self): """ Tests detection of initial migrations already having been applied. """ state = {"faked": None} def fake_storer(phase, migration=None, fake=None): state["faked"] = fake executor = MigrationExecutor(connection, progress_callback=fake_storer) # Were the tables there before? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") # Run it normally self.assertEqual( executor.migration_plan([("migrations", "0001_initial")]), [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), ], ) executor.migrate([("migrations", "0001_initial")]) # Are the tables there now? self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble") # We shouldn't have faked that one self.assertIs(state["faked"], False) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Fake-reverse that executor.migrate([("migrations", None)], fake=True) # Are the tables still there? 
self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble") # Make sure that was faked self.assertIs(state["faked"], True) # Finally, migrate forwards; this should fake-apply our initial migration executor.loader.build_graph() self.assertEqual( executor.migration_plan([("migrations", "0001_initial")]), [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), ], ) # Applying the migration should raise a database level error # because we haven't given the --fake-initial option with self.assertRaises(DatabaseError): executor.migrate([("migrations", "0001_initial")]) # Reset the faked state state = {"faked": None} # Allow faking of initial CreateModel operations executor.migrate([("migrations", "0001_initial")], fake_initial=True) self.assertIs(state["faked"], True) # And migrate back to clean up the database executor.loader.build_graph() executor.migrate([("migrations", None)]) self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") @override_settings( MIGRATION_MODULES={ "migrations": "migrations.test_migrations_custom_user", "django.contrib.auth": "django.contrib.auth.migrations", }, AUTH_USER_MODEL="migrations.Author", ) def test_custom_user(self): """ Regression test for #22325 - references to a custom user model defined in the same app are not resolved correctly. """ executor = MigrationExecutor(connection) self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") # Migrate forwards executor.migrate([("migrations", "0001_initial")]) self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble") # Make sure the soft-application detection works (#23093) # Change table_names to not return auth_user during this as # it wouldn't be there in a normal run, and ensure migrations.Author # exists in the global app registry temporarily. old_table_names = connection.introspection.table_names connection.introspection.table_names = lambda c: [x for x in old_table_names(c) if x != "auth_user"] migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps global_apps.get_app_config("migrations").models["author"] = migrations_apps.get_model("migrations", "author") try: migration = executor.loader.get_migration("auth", "0001_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], True) finally: connection.introspection.table_names = old_table_names del global_apps.get_app_config("migrations").models["author"] # And migrate back to clean up the database executor.loader.build_graph() executor.migrate([("migrations", None)]) self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") @override_settings( MIGRATION_MODULES={ "migrations": "migrations.test_add_many_to_many_field_initial", }, ) def test_detect_soft_applied_add_field_manytomanyfield(self): """ executor.detect_soft_applied() detects ManyToManyField tables from an AddField operation. This checks the case of AddField in a migration with other operations (0001) and the case of AddField in its own migration (0002). """ tables = [ # from 0001 "migrations_project", "migrations_task", "migrations_project_tasks", # from 0002 "migrations_task_projects", ] executor = MigrationExecutor(connection) # Create the tables for 0001 but make it look like the migration hasn't # been applied. 
executor.migrate([("migrations", "0001_initial")]) executor.migrate([("migrations", None)], fake=True) for table in tables[:3]: self.assertTableExists(table) # Table detection sees 0001 is applied but not 0002. migration = executor.loader.get_migration("migrations", "0001_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], True) migration = executor.loader.get_migration("migrations", "0002_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], False) # Create the tables for both migrations but make it look like neither # has been applied. executor.loader.build_graph() executor.migrate([("migrations", "0001_initial")], fake=True) executor.migrate([("migrations", "0002_initial")]) executor.loader.build_graph() executor.migrate([("migrations", None)], fake=True) # Table detection sees 0002 is applied. migration = executor.loader.get_migration("migrations", "0002_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], True) # Leave the tables for 0001 except the many-to-many table. That missing # table should cause detect_soft_applied() to return False. with connection.schema_editor() as editor: for table in tables[2:]: editor.execute(editor.sql_delete_table % {"table": table}) migration = executor.loader.get_migration("migrations", "0001_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], False) # Cleanup by removing the remaining tables. with connection.schema_editor() as editor: for table in tables[:2]: editor.execute(editor.sql_delete_table % {"table": table}) for table in tables: self.assertTableNotExists(table) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.lookuperror_a", "migrations.migrations_test_apps.lookuperror_b", "migrations.migrations_test_apps.lookuperror_c" ] ) def test_unrelated_model_lookups_forwards(self): """ #24123 - Tests that all models of apps already applied which are unrelated to the first app being applied are part of the initial model state. """ try: executor = MigrationExecutor(connection) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") executor.migrate([("lookuperror_b", "0003_b3")]) self.assertTableExists("lookuperror_b_b3") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Migrate forwards -- This led to a lookup LookupErrors because # lookuperror_b.B2 is already applied executor.migrate([ ("lookuperror_a", "0004_a4"), ("lookuperror_c", "0003_c3"), ]) self.assertTableExists("lookuperror_a_a4") self.assertTableExists("lookuperror_c_c3") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() finally: # Cleanup executor.migrate([ ("lookuperror_a", None), ("lookuperror_b", None), ("lookuperror_c", None), ]) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.lookuperror_a", "migrations.migrations_test_apps.lookuperror_b", "migrations.migrations_test_apps.lookuperror_c" ] ) def test_unrelated_model_lookups_backwards(self): """ #24123 - Tests that all models of apps being unapplied which are unrelated to the first app being unapplied are part of the initial model state. 
""" try: executor = MigrationExecutor(connection) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") executor.migrate([ ("lookuperror_a", "0004_a4"), ("lookuperror_b", "0003_b3"), ("lookuperror_c", "0003_c3"), ]) self.assertTableExists("lookuperror_b_b3") self.assertTableExists("lookuperror_a_a4") self.assertTableExists("lookuperror_c_c3") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Migrate backwards -- This led to a lookup LookupErrors because # lookuperror_b.B2 is not in the initial state (unrelated to app c) executor.migrate([("lookuperror_a", None)]) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() finally: # Cleanup executor.migrate([ ("lookuperror_b", None), ("lookuperror_c", None) ]) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") @override_settings( INSTALLED_APPS=[ 'migrations.migrations_test_apps.mutate_state_a', 'migrations.migrations_test_apps.mutate_state_b', ] ) def test_unrelated_applied_migrations_mutate_state(self): """ #26647 - Unrelated applied migrations should be part of the final state in both directions. """ executor = MigrationExecutor(connection) executor.migrate([ ('mutate_state_b', '0002_add_field'), ]) # Migrate forward. executor.loader.build_graph() state = executor.migrate([ ('mutate_state_a', '0001_initial'), ]) self.assertIn('added', dict(state.models['mutate_state_b', 'b'].fields)) executor.loader.build_graph() # Migrate backward. state = executor.migrate([ ('mutate_state_a', None), ]) self.assertIn('added', dict(state.models['mutate_state_b', 'b'].fields)) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_process_callback(self): """ #24129 - Tests callback process """ call_args_list = [] def callback(*args): call_args_list.append(args) executor = MigrationExecutor(connection, progress_callback=callback) # Were the tables there before? 
self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") executor.migrate([ ("migrations", "0001_initial"), ("migrations", "0002_second"), ]) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() executor.migrate([ ("migrations", None), ("migrations", None), ]) self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") migrations = executor.loader.graph.nodes expected = [ ("render_start", ), ("render_success", ), ("apply_start", migrations['migrations', '0001_initial'], False), ("apply_success", migrations['migrations', '0001_initial'], False), ("apply_start", migrations['migrations', '0002_second'], False), ("apply_success", migrations['migrations', '0002_second'], False), ("render_start", ), ("render_success", ), ("unapply_start", migrations['migrations', '0002_second'], False), ("unapply_success", migrations['migrations', '0002_second'], False), ("unapply_start", migrations['migrations', '0001_initial'], False), ("unapply_success", migrations['migrations', '0001_initial'], False), ] self.assertEqual(call_args_list, expected) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.alter_fk.author_app", "migrations.migrations_test_apps.alter_fk.book_app", ] ) def test_alter_id_type_with_fk(self): try: executor = MigrationExecutor(connection) self.assertTableNotExists("author_app_author") self.assertTableNotExists("book_app_book") # Apply initial migrations executor.migrate([ ("author_app", "0001_initial"), ("book_app", "0001_initial"), ]) self.assertTableExists("author_app_author") self.assertTableExists("book_app_book") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Apply PK type alteration executor.migrate([("author_app", "0002_alter_id")]) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() finally: # We can't simply unapply the migrations here because there is no # implicit cast from VARCHAR to INT on the database level. with connection.schema_editor() as editor: editor.execute(editor.sql_delete_table % {"table": "book_app_book"}) editor.execute(editor.sql_delete_table % {"table": "author_app_author"}) self.assertTableNotExists("author_app_author") self.assertTableNotExists("book_app_book") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_apply_all_replaced_marks_replacement_as_applied(self): """ Applying all replaced migrations marks replacement as applied (#24628). """ recorder = MigrationRecorder(connection) # Place the database in a state where the replaced migrations are # partially applied: 0001 is applied, 0002 is not. recorder.record_applied("migrations", "0001_initial") executor = MigrationExecutor(connection) # Use fake because we don't actually have the first migration # applied, so the second will fail. And there's no need to actually # create/modify tables here, we're just testing the # MigrationRecord, which works the same with or without fake. executor.migrate([("migrations", "0002_second")], fake=True) # Because we've now applied 0001 and 0002 both, their squashed # replacement should be marked as applied. 
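        # (The executor's check_replacements() pass handles this: once every
        # replaced migration is recorded as applied, the squashed replacement
        # is recorded as applied too.)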
self.assertIn( ("migrations", "0001_squashed_0002"), recorder.applied_migrations(), ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_migrate_marks_replacement_applied_even_if_it_did_nothing(self): """ A new squash migration will be marked as applied even if all its replaced migrations were previously already applied (#24628). """ recorder = MigrationRecorder(connection) # Record all replaced migrations as applied recorder.record_applied("migrations", "0001_initial") recorder.record_applied("migrations", "0002_second") executor = MigrationExecutor(connection) executor.migrate([("migrations", "0001_squashed_0002")]) # Because 0001 and 0002 are both applied, even though this migrate run # didn't apply anything new, their squashed replacement should be # marked as applied. self.assertIn( ("migrations", "0001_squashed_0002"), recorder.applied_migrations(), ) class FakeLoader(object): def __init__(self, graph, applied): self.graph = graph self.applied_migrations = applied class FakeMigration(object): """Really all we need is any object with a debug-useful repr.""" def __init__(self, name): self.name = name def __repr__(self): return 'M<%s>' % self.name class ExecutorUnitTests(TestCase): """(More) isolated unit tests for executor methods.""" def test_minimize_rollbacks(self): """ Minimize unnecessary rollbacks in connected apps. When you say "./manage.py migrate appA 0001", rather than migrating to just after appA-0001 in the linearized migration plan (which could roll back migrations in other apps that depend on appA 0001, but don't need to be rolled back since we're not rolling back appA 0001), we migrate to just before appA-0002. """ a1_impl = FakeMigration('a1') a1 = ('a', '1') a2_impl = FakeMigration('a2') a2 = ('a', '2') b1_impl = FakeMigration('b1') b1 = ('b', '1') graph = MigrationGraph() graph.add_node(a1, a1_impl) graph.add_node(a2, a2_impl) graph.add_node(b1, b1_impl) graph.add_dependency(None, b1, a1) graph.add_dependency(None, a2, a1) executor = MigrationExecutor(None) executor.loader = FakeLoader(graph, {a1, b1, a2}) plan = executor.migration_plan({a1}) self.assertEqual(plan, [(a2_impl, True)]) def test_minimize_rollbacks_branchy(self): r""" Minimize rollbacks when target has multiple in-app children. a: 1 <---- 3 <--\ \ \- 2 <--- 4 \ \ b: \- 1 <--- 2 """ a1_impl = FakeMigration('a1') a1 = ('a', '1') a2_impl = FakeMigration('a2') a2 = ('a', '2') a3_impl = FakeMigration('a3') a3 = ('a', '3') a4_impl = FakeMigration('a4') a4 = ('a', '4') b1_impl = FakeMigration('b1') b1 = ('b', '1') b2_impl = FakeMigration('b2') b2 = ('b', '2') graph = MigrationGraph() graph.add_node(a1, a1_impl) graph.add_node(a2, a2_impl) graph.add_node(a3, a3_impl) graph.add_node(a4, a4_impl) graph.add_node(b1, b1_impl) graph.add_node(b2, b2_impl) graph.add_dependency(None, a2, a1) graph.add_dependency(None, a3, a1) graph.add_dependency(None, a4, a2) graph.add_dependency(None, a4, a3) graph.add_dependency(None, b2, b1) graph.add_dependency(None, b1, a1) graph.add_dependency(None, b2, a2) executor = MigrationExecutor(None) executor.loader = FakeLoader(graph, {a1, b1, a2, b2, a3, a4}) plan = executor.migration_plan({a1}) should_be_rolled_back = [b2_impl, a4_impl, a2_impl, a3_impl] exp = [(m, True) for m in should_be_rolled_back] self.assertEqual(plan, exp) def test_backwards_nothing_to_do(self): r""" If the current state satisfies the given target, do nothing. 
a: 1 <--- 2 b: \- 1 c: \- 1 If a1 is applied already and a2 is not, and we're asked to migrate to a1, don't apply or unapply b1 or c1, regardless of their current state. """ a1_impl = FakeMigration('a1') a1 = ('a', '1') a2_impl = FakeMigration('a2') a2 = ('a', '2') b1_impl = FakeMigration('b1') b1 = ('b', '1') c1_impl = FakeMigration('c1') c1 = ('c', '1') graph = MigrationGraph() graph.add_node(a1, a1_impl) graph.add_node(a2, a2_impl) graph.add_node(b1, b1_impl) graph.add_node(c1, c1_impl) graph.add_dependency(None, a2, a1) graph.add_dependency(None, b1, a1) graph.add_dependency(None, c1, a1) executor = MigrationExecutor(None) executor.loader = FakeLoader(graph, {a1, b1}) plan = executor.migration_plan({a1}) self.assertEqual(plan, [])
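
# Illustrative sketch, not part of the original suite: the rollback
# minimization exercised in test_minimize_rollbacks() can be reproduced
# directly with the FakeMigration/FakeLoader helpers above. The helper
# name below is made up for illustration; everything else is defined
# earlier in this file or imported at the top.
def _example_minimal_backwards_plan():
    a1, a2 = ('a', '1'), ('a', '2')
    graph = MigrationGraph()
    graph.add_node(a1, FakeMigration('a1'))
    graph.add_node(a2, FakeMigration('a2'))
    graph.add_dependency(None, a2, a1)
    executor = MigrationExecutor(None)
    # Pretend both migrations are applied; targeting a1 should only roll
    # back a2 and leave a1 in place.
    executor.loader = FakeLoader(graph, {a1, a2})
    return executor.migration_plan({a1})  # -> [(M<a2>, True)]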
7b5e02b03c59efc6591beb0cbe729f13205ebf17331b1f074f1ff6ae8443ab0a
# -*- coding: utf-8 -*- from __future__ import unicode_literals import datetime import importlib import io import os import sys from django.apps import apps from django.core.management import CommandError, call_command from django.db import ( ConnectionHandler, DatabaseError, connection, connections, models, ) from django.db.migrations.exceptions import ( InconsistentMigrationHistory, MigrationSchemaMissing, ) from django.db.migrations.recorder import MigrationRecorder from django.test import ignore_warnings, mock, override_settings from django.utils import six from django.utils.deprecation import RemovedInDjango20Warning from django.utils.encoding import force_text from .models import UnicodeModel, UnserializableModel from .routers import TestRouter from .test_base import MigrationTestBase class MigrateTests(MigrationTestBase): """ Tests running the migrate command. """ multi_db = True @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_migrate(self): """ Tests basic usage of the migrate command. """ # Make sure no tables are created self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableNotExists("migrations_book") # Run the migrations to 0001 only call_command("migrate", "migrations", "0001", verbosity=0) # Make sure the right tables exist self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble") self.assertTableNotExists("migrations_book") # Run migrations all the way call_command("migrate", verbosity=0) # Make sure the right tables exist self.assertTableExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableExists("migrations_book") # Unmigrate everything call_command("migrate", "migrations", "zero", verbosity=0) # Make sure it's all gone self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableNotExists("migrations_book") @override_settings(INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'migrations.migrations_test_apps.migrated_app', ]) def test_migrate_with_system_checks(self): out = six.StringIO() call_command('migrate', skip_checks=False, no_color=True, stdout=out) self.assertIn('Apply all migrations: migrated_app', out.getvalue()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_initial_false"}) def test_migrate_initial_false(self): """ `Migration.initial = False` skips fake-initial detection. 
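        With `initial = False` the --fake-initial flag is ignored, so the
        migration is applied for real and fails with a DatabaseError against
        the already-existing tables; an explicit --fake still works.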
""" # Make sure no tables are created self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") # Run the migrations to 0001 only call_command("migrate", "migrations", "0001", verbosity=0) # Fake rollback call_command("migrate", "migrations", "zero", fake=True, verbosity=0) # Make sure fake-initial detection does not run with self.assertRaises(DatabaseError): call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0) call_command("migrate", "migrations", "0001", fake=True, verbosity=0) # Real rollback call_command("migrate", "migrations", "zero", verbosity=0) # Make sure it's all gone self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableNotExists("migrations_book") @override_settings( MIGRATION_MODULES={"migrations": "migrations.test_migrations"}, DATABASE_ROUTERS=['migrations.routers.TestRouter'], ) def test_migrate_fake_initial(self): """ --fake-initial only works if all tables created in the initial migration of an app exists. Database routers must be obeyed when doing that check. """ # Make sure no tables are created for db in connections: self.assertTableNotExists("migrations_author", using=db) self.assertTableNotExists("migrations_tribble", using=db) # Run the migrations to 0001 only call_command("migrate", "migrations", "0001", verbosity=0) call_command("migrate", "migrations", "0001", verbosity=0, database="other") # Make sure the right tables exist self.assertTableExists("migrations_author") self.assertTableNotExists("migrations_tribble") # Also check the "other" database self.assertTableNotExists("migrations_author", using="other") self.assertTableExists("migrations_tribble", using="other") # Fake a roll-back call_command("migrate", "migrations", "zero", fake=True, verbosity=0) call_command("migrate", "migrations", "zero", fake=True, verbosity=0, database="other") # Make sure the tables still exist self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble", using="other") # Try to run initial migration with self.assertRaises(DatabaseError): call_command("migrate", "migrations", "0001", verbosity=0) # Run initial migration with an explicit --fake-initial out = six.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command("migrate", "migrations", "0001", fake_initial=True, stdout=out, verbosity=1) call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0, database="other") self.assertIn( "migrations.0001_initial... 
faked", out.getvalue().lower() ) # Run migrations all the way call_command("migrate", verbosity=0) call_command("migrate", verbosity=0, database="other") # Make sure the right tables exist self.assertTableExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableExists("migrations_book") self.assertTableNotExists("migrations_author", using="other") self.assertTableNotExists("migrations_tribble", using="other") self.assertTableNotExists("migrations_book", using="other") # Fake a roll-back call_command("migrate", "migrations", "zero", fake=True, verbosity=0) call_command("migrate", "migrations", "zero", fake=True, verbosity=0, database="other") # Make sure the tables still exist self.assertTableExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableExists("migrations_book") # Try to run initial migration with self.assertRaises(DatabaseError): call_command("migrate", "migrations", verbosity=0) # Run initial migration with an explicit --fake-initial with self.assertRaises(DatabaseError): # Fails because "migrations_tribble" does not exist but needs to in # order to make --fake-initial work. call_command("migrate", "migrations", fake_initial=True, verbosity=0) # Fake a apply call_command("migrate", "migrations", fake=True, verbosity=0) call_command("migrate", "migrations", fake=True, verbosity=0, database="other") # Unmigrate everything call_command("migrate", "migrations", "zero", verbosity=0) call_command("migrate", "migrations", "zero", verbosity=0, database="other") # Make sure it's all gone for db in connections: self.assertTableNotExists("migrations_author", using=db) self.assertTableNotExists("migrations_tribble", using=db) self.assertTableNotExists("migrations_book", using=db) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_fake_split_initial"}) def test_migrate_fake_split_initial(self): """ Split initial migrations can be faked with --fake-initial. """ call_command("migrate", "migrations", "0002", verbosity=0) call_command("migrate", "migrations", "zero", fake=True, verbosity=0) out = six.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command("migrate", "migrations", "0002", fake_initial=True, stdout=out, verbosity=1) value = out.getvalue().lower() self.assertIn("migrations.0001_initial... faked", value) self.assertIn("migrations.0002_second... faked", value) # Fake an apply call_command("migrate", "migrations", fake=True, verbosity=0) # Unmigrate everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"}) def test_migrate_conflict_exit(self): """ Makes sure that migrate exits if it detects a conflict. 
""" with self.assertRaisesMessage(CommandError, "Conflicting migrations detected"): call_command("migrate", "migrations") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_showmigrations_list(self): """ Tests --list output of showmigrations command """ out = six.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: True): call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False) self.assertEqual( '\x1b[1mmigrations\n\x1b[0m' ' [ ] 0001_initial\n' ' [ ] 0002_second\n', out.getvalue().lower() ) call_command("migrate", "migrations", "0001", verbosity=0) out = six.StringIO() # Giving the explicit app_label tests for selective `show_list` in the command call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_initial\n' ' [ ] 0002_second\n', out.getvalue().lower() ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}) def test_showmigrations_plan(self): """ Tests --plan output of showmigrations command """ out = six.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[ ] migrations.0001_initial\n" "[ ] migrations.0003_third\n" "[ ] migrations.0002_second\n", out.getvalue().lower() ) out = six.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[ ] migrations.0001_initial\n" "[ ] migrations.0003_third ... (migrations.0001_initial)\n" "[ ] migrations.0002_second ... (migrations.0001_initial, migrations.0003_third)\n", out.getvalue().lower() ) call_command("migrate", "migrations", "0003", verbosity=0) out = six.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[x] migrations.0001_initial\n" "[x] migrations.0003_third\n" "[ ] migrations.0002_second\n", out.getvalue().lower() ) out = six.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[x] migrations.0001_initial\n" "[x] migrations.0003_third ... (migrations.0001_initial)\n" "[ ] migrations.0002_second ... (migrations.0001_initial, migrations.0003_third)\n", out.getvalue().lower() ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"}) def test_showmigrations_plan_no_migrations(self): """ Tests --plan output of showmigrations command without migrations """ out = six.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual("", out.getvalue().lower()) out = six.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual("", out.getvalue().lower()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}) def test_showmigrations_plan_squashed(self): """ Tests --plan output of showmigrations command with squashed migrations. 
""" out = six.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[ ] migrations.1_auto\n" "[ ] migrations.2_auto\n" "[ ] migrations.3_squashed_5\n" "[ ] migrations.6_auto\n" "[ ] migrations.7_auto\n", out.getvalue().lower() ) out = six.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[ ] migrations.1_auto\n" "[ ] migrations.2_auto ... (migrations.1_auto)\n" "[ ] migrations.3_squashed_5 ... (migrations.2_auto)\n" "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n" "[ ] migrations.7_auto ... (migrations.6_auto)\n", out.getvalue().lower() ) call_command("migrate", "migrations", "3_squashed_5", verbosity=0) out = six.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[x] migrations.1_auto\n" "[x] migrations.2_auto\n" "[x] migrations.3_squashed_5\n" "[ ] migrations.6_auto\n" "[ ] migrations.7_auto\n", out.getvalue().lower() ) out = six.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[x] migrations.1_auto\n" "[x] migrations.2_auto ... (migrations.1_auto)\n" "[x] migrations.3_squashed_5 ... (migrations.2_auto)\n" "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n" "[ ] migrations.7_auto ... (migrations.6_auto)\n", out.getvalue().lower() ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_sqlmigrate_forwards(self): """ Makes sure that sqlmigrate does something. """ out = six.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out) output = out.getvalue().lower() index_tx_start = output.find(connection.ops.start_transaction_sql().lower()) index_op_desc_author = output.find('-- create model author') index_create_table = output.find('create table') index_op_desc_tribble = output.find('-- create model tribble') index_op_desc_unique_together = output.find('-- alter unique_together') index_tx_end = output.find(connection.ops.end_transaction_sql().lower()) self.assertGreater(index_tx_start, -1, "Transaction start not found") self.assertGreater( index_op_desc_author, index_tx_start, "Operation description (author) not found or found before transaction start" ) self.assertGreater( index_create_table, index_op_desc_author, "CREATE TABLE not found or found before operation description (author)" ) self.assertGreater( index_op_desc_tribble, index_create_table, "Operation description (tribble) not found or found before CREATE TABLE (author)" ) self.assertGreater( index_op_desc_unique_together, index_op_desc_tribble, "Operation description (unique_together) not found or found before operation description (tribble)" ) self.assertGreater( index_tx_end, index_op_desc_unique_together, "Transaction end not found or found before operation description (unique_together)" ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_sqlmigrate_backwards(self): """ Makes sure that sqlmigrate does something. """ # Cannot generate the reverse SQL unless we've applied the migration. 
call_command("migrate", "migrations", verbosity=0) out = six.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out, backwards=True) output = out.getvalue().lower() index_tx_start = output.find(connection.ops.start_transaction_sql().lower()) index_op_desc_unique_together = output.find('-- alter unique_together') index_op_desc_tribble = output.find('-- create model tribble') index_op_desc_author = output.find('-- create model author') index_drop_table = output.rfind('drop table') index_tx_end = output.find(connection.ops.end_transaction_sql().lower()) self.assertGreater(index_tx_start, -1, "Transaction start not found") self.assertGreater( index_op_desc_unique_together, index_tx_start, "Operation description (unique_together) not found or found before transaction start" ) self.assertGreater( index_op_desc_tribble, index_op_desc_unique_together, "Operation description (tribble) not found or found before operation description (unique_together)" ) self.assertGreater( index_op_desc_author, index_op_desc_tribble, "Operation description (author) not found or found before operation description (tribble)" ) self.assertGreater( index_drop_table, index_op_desc_author, "DROP TABLE not found or found before operation description (author)" ) self.assertGreater( index_tx_end, index_op_desc_unique_together, "Transaction end not found or found before DROP TABLE" ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}) def test_sqlmigrate_for_non_atomic_migration(self): """ Transaction wrappers aren't shown for non-atomic migrations. """ out = six.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out) output = out.getvalue().lower() queries = [q.strip() for q in output.splitlines()] if connection.ops.start_transaction_sql(): self.assertNotIn(connection.ops.start_transaction_sql().lower(), queries) self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.migrated_unapplied_app", "migrations.migrations_test_apps.unmigrated_app"]) def test_regression_22823_unmigrated_fk_to_migrated_model(self): """ https://code.djangoproject.com/ticket/22823 Assuming you have 3 apps, `A`, `B`, and `C`, such that: * `A` has migrations * `B` has a migration we want to apply * `C` has no migrations, but has an FK to `A` When we try to migrate "B", an exception occurs because the "B" was not included in the ProjectState that is used to detect soft-applied migrations. """ call_command("migrate", "migrated_unapplied_app", stdout=six.StringIO()) # unmigrated_app.SillyModel has a foreign key to 'migrations.Tribble', # but that model is only defined in a migration, so the global app # registry never sees it and the reference is left dangling. Remove it # to avoid problems in subsequent tests. del apps._pending_operations[('migrations', 'tribble')] @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_migrate_record_replaced(self): """ Running a single squashed migration should record all of the original replaced migrations as run. 
""" recorder = MigrationRecorder(connection) out = six.StringIO() call_command("migrate", "migrations", verbosity=0) call_command("showmigrations", "migrations", stdout=out, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower() ) applied_migrations = recorder.applied_migrations() self.assertIn(("migrations", "0001_initial"), applied_migrations) self.assertIn(("migrations", "0002_second"), applied_migrations) self.assertIn(("migrations", "0001_squashed_0002"), applied_migrations) # Rollback changes call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_migrate_record_squashed(self): """ Running migrate for a squashed migration should record as run if all of the replaced migrations have been run (#25231). """ recorder = MigrationRecorder(connection) recorder.record_applied("migrations", "0001_initial") recorder.record_applied("migrations", "0002_second") out = six.StringIO() call_command("migrate", "migrations", verbosity=0) call_command("showmigrations", "migrations", stdout=out, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower() ) self.assertIn( ("migrations", "0001_squashed_0002"), recorder.applied_migrations() ) # No changes were actually applied so there is nothing to rollback @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_migrate_inconsistent_history(self): """ Running migrate with some migrations applied before their dependencies should not be allowed. """ recorder = MigrationRecorder(connection) recorder.record_applied("migrations", "0002_second") msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial" with self.assertRaisesMessage(InconsistentMigrationHistory, msg): call_command("migrate") applied_migrations = recorder.applied_migrations() self.assertNotIn(("migrations", "0001_initial"), applied_migrations) class MakeMigrationsTests(MigrationTestBase): """ Tests running the makemigrations command. 
""" def setUp(self): super(MakeMigrationsTests, self).setUp() self._old_models = apps.app_configs['migrations'].models.copy() def tearDown(self): apps.app_configs['migrations'].models = self._old_models apps.all_models['migrations'] = self._old_models apps.clear_cache() super(MakeMigrationsTests, self).tearDown() def test_files_content(self): self.assertTableNotExists("migrations_unicodemodel") apps.register_model('migrations', UnicodeModel) with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", verbosity=0) # Check for empty __init__.py file in migrations folder init_file = os.path.join(migration_dir, "__init__.py") self.assertTrue(os.path.exists(init_file)) with open(init_file, 'r') as fp: content = force_text(fp.read()) self.assertEqual(content, '') # Check for existing 0001_initial.py file in migration folder initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) with io.open(initial_file, 'r', encoding='utf-8') as fp: content = fp.read() self.assertIn('# -*- coding: utf-8 -*-', content) self.assertIn('migrations.CreateModel', content) self.assertIn('initial = True', content) if six.PY3: self.assertIn('úñí©óðé µóðéø', content) # Meta.verbose_name self.assertIn('úñí©óðé µóðéøß', content) # Meta.verbose_name_plural self.assertIn('ÚÑÍ¢ÓÐÉ', content) # title.verbose_name self.assertIn('“Ðjáñgó”', content) # title.default else: # Meta.verbose_name self.assertIn('\\xfa\\xf1\\xed\\xa9\\xf3\\xf0\\xe9 \\xb5\\xf3\\xf0\\xe9\\xf8', content) # Meta.verbose_name_plural self.assertIn('\\xfa\\xf1\\xed\\xa9\\xf3\\xf0\\xe9 \\xb5\\xf3\\xf0\\xe9\\xf8\\xdf', content) self.assertIn('\\xda\\xd1\\xcd\\xa2\\xd3\\xd0\\xc9', content) # title.verbose_name self.assertIn('\\u201c\\xd0j\\xe1\\xf1g\\xf3\\u201d', content) # title.default def test_makemigrations_order(self): """ makemigrations should recognize number-only migrations (0001.py). """ module = 'migrations.test_migrations_order' with self.temporary_migration_module(module=module) as migration_dir: if hasattr(importlib, 'invalidate_caches'): # Python 3 importlib caches os.listdir() on some platforms like # Mac OS X (#23850). importlib.invalidate_caches() call_command('makemigrations', 'migrations', '--empty', '-n', 'a', '-v', '0') self.assertTrue(os.path.exists(os.path.join(migration_dir, '0002_a.py'))) def test_makemigrations_empty_connections(self): empty_connections = ConnectionHandler({'default': {}}) with mock.patch('django.core.management.commands.makemigrations.connections', new=empty_connections): # with no apps out = six.StringIO() call_command('makemigrations', stdout=out) self.assertIn('No changes detected', out.getvalue()) # with an app with self.temporary_migration_module() as migration_dir: call_command('makemigrations', 'migrations', verbosity=0) init_file = os.path.join(migration_dir, '__init__.py') self.assertTrue(os.path.exists(init_file)) def test_makemigrations_consistency_checks_respect_routers(self): """ The history consistency checks in makemigrations respect settings.DATABASE_ROUTERS. 
""" def patched_ensure_schema(migration_recorder): if migration_recorder.connection is connections['other']: raise MigrationSchemaMissing('Patched') else: return mock.DEFAULT self.assertTableNotExists('migrations_unicodemodel') apps.register_model('migrations', UnicodeModel) with mock.patch.object( MigrationRecorder, 'ensure_schema', autospec=True, side_effect=patched_ensure_schema) as ensure_schema: with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", verbosity=0) initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) self.assertEqual(ensure_schema.call_count, 1) # 'default' is checked # Router says not to migrate 'other' so consistency shouldn't # be checked. with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']): call_command('makemigrations', 'migrations', verbosity=0) self.assertEqual(ensure_schema.call_count, 2) # 'default' again # With a router that doesn't prohibit migrating 'other', # consistency is checked. with self.settings(DATABASE_ROUTERS=['migrations.routers.EmptyRouter']): with self.assertRaisesMessage(MigrationSchemaMissing, 'Patched'): call_command('makemigrations', 'migrations', verbosity=0) self.assertEqual(ensure_schema.call_count, 4) # 'default' and 'other' # With a router that doesn't allow migrating on any database, # no consistency checks are made. with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']): with mock.patch.object(TestRouter, 'allow_migrate', return_value=False) as allow_migrate: call_command('makemigrations', 'migrations', verbosity=0) allow_migrate.assert_called_with('other', 'migrations', model_name='UnicodeModel') self.assertEqual(ensure_schema.call_count, 4) def test_failing_migration(self): # If a migration fails to serialize, it shouldn't generate an empty file. #21280 apps.register_model('migrations', UnserializableModel) with self.temporary_migration_module() as migration_dir: with self.assertRaisesMessage(ValueError, 'Cannot serialize'): call_command("makemigrations", "migrations", verbosity=0) initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertFalse(os.path.exists(initial_file)) def test_makemigrations_conflict_exit(self): """ Makes sure that makemigrations exits if it detects a conflict. """ with self.temporary_migration_module(module="migrations.test_migrations_conflict"): with self.assertRaises(CommandError): call_command("makemigrations") def test_makemigrations_merge_no_conflict(self): """ Makes sure that makemigrations exits if in merge mode with no conflicts. """ out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("makemigrations", merge=True, stdout=out) self.assertIn("No conflicts detected to merge.", out.getvalue()) def test_makemigrations_no_app_sys_exit(self): """ Makes sure that makemigrations exits if a non-existent app is specified. """ err = six.StringIO() with self.assertRaises(SystemExit): call_command("makemigrations", "this_app_does_not_exist", stderr=err) self.assertIn("'this_app_does_not_exist' could not be found.", err.getvalue()) def test_makemigrations_empty_no_app_specified(self): """ Makes sure that makemigrations exits if no app is specified with 'empty' mode. """ with self.assertRaises(CommandError): call_command("makemigrations", empty=True) def test_makemigrations_empty_migration(self): """ Makes sure that makemigrations properly constructs an empty migration. 
""" with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", empty=True, verbosity=0) # Check for existing 0001_initial.py file in migration folder initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) with io.open(initial_file, 'r', encoding='utf-8') as fp: content = fp.read() self.assertIn('# -*- coding: utf-8 -*-', content) # Remove all whitespace to check for empty dependencies and operations content = content.replace(' ', '') self.assertIn('dependencies=[\n]', content) self.assertIn('operations=[\n]', content) @override_settings(MIGRATION_MODULES={"migrations": None}) def test_makemigrations_disabled_migrations_for_app(self): """ makemigrations raises a nice error when migrations are disabled for an app. """ msg = ( "Django can't create migrations for app 'migrations' because migrations " "have been disabled via the MIGRATION_MODULES setting." ) with self.assertRaisesMessage(ValueError, msg): call_command("makemigrations", "migrations", empty=True, verbosity=0) def test_makemigrations_no_changes_no_apps(self): """ Makes sure that makemigrations exits when there are no changes and no apps are specified. """ out = six.StringIO() call_command("makemigrations", stdout=out) self.assertIn("No changes detected", out.getvalue()) def test_makemigrations_no_changes(self): """ Makes sure that makemigrations exits when there are no changes to an app. """ out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "migrations", stdout=out) self.assertIn("No changes detected in app 'migrations'", out.getvalue()) def test_makemigrations_no_apps_initial(self): """ makemigrations should detect initial is needed on empty migration modules if no app provided. """ out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_empty"): call_command("makemigrations", stdout=out) self.assertIn("0001_initial.py", out.getvalue()) def test_makemigrations_migrations_announce(self): """ Makes sure that makemigrations announces the migration at the default verbosity level. """ out = six.StringIO() with self.temporary_migration_module(): call_command("makemigrations", "migrations", stdout=out) self.assertIn("Migrations for 'migrations'", out.getvalue()) def test_makemigrations_no_common_ancestor(self): """ Makes sure that makemigrations fails to merge migrations with no common ancestor. """ with self.assertRaises(ValueError) as context: with self.temporary_migration_module(module="migrations.test_migrations_no_ancestor"): call_command("makemigrations", "migrations", merge=True) exception_message = str(context.exception) self.assertIn("Could not find common ancestor of", exception_message) self.assertIn("0002_second", exception_message) self.assertIn("0002_conflicting_second", exception_message) def test_makemigrations_interactive_reject(self): """ Makes sure that makemigrations enters and exits interactive mode properly. 
""" # Monkeypatch interactive questioner to auto reject with mock.patch('django.db.migrations.questioner.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, verbosity=0) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) def test_makemigrations_interactive_accept(self): """ Makes sure that makemigrations enters interactive mode and merges properly. """ # Monkeypatch interactive questioner to auto accept with mock.patch('django.db.migrations.questioner.input', mock.Mock(return_value='y')): out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) @mock.patch('django.db.migrations.utils.datetime') def test_makemigrations_default_merge_name(self, mock_datetime): mock_datetime.datetime.now.return_value = datetime.datetime(2016, 1, 2, 3, 4) with mock.patch('django.db.migrations.questioner.input', mock.Mock(return_value='y')): out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge_20160102_0304.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) def test_makemigrations_non_interactive_not_null_addition(self): """ Tests that non-interactive makemigrations fails when a default is missing on a new not-null field. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_int = models.IntegerField() class Meta: app_label = "migrations" out = six.StringIO() with self.assertRaises(SystemExit): with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) def test_makemigrations_non_interactive_not_null_alteration(self): """ Tests that non-interactive makemigrations fails when a default is missing on a field changed to not-null. """ class Author(models.Model): name = models.CharField(max_length=255) slug = models.SlugField() age = models.IntegerField(default=0) class Meta: app_label = "migrations" out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Alter field slug on author", out.getvalue()) def test_makemigrations_non_interactive_no_model_rename(self): """ Makes sure that makemigrations adds and removes a possible model rename in non-interactive mode. 
""" class RenamedModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Delete model SillyModel", out.getvalue()) self.assertIn("Create model RenamedModel", out.getvalue()) def test_makemigrations_non_interactive_no_field_rename(self): """ Makes sure that makemigrations adds and removes a possible field rename in non-interactive mode. """ class SillyModel(models.Model): silly_rename = models.BooleanField(default=False) class Meta: app_label = "migrations" out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Remove field silly_field from sillymodel", out.getvalue()) self.assertIn("Add field silly_rename to sillymodel", out.getvalue()) def test_makemigrations_handle_merge(self): """ Makes sure that makemigrations properly merges the conflicting migrations with --noinput. """ out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=False, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertIn("Created new merge migration", output) def test_makemigration_merge_dry_run(self): """ Makes sure that makemigrations respects --dry-run option when fixing migration conflicts (#24427). """ out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) def test_makemigration_merge_dry_run_verbosity_3(self): """ Makes sure that `makemigrations --merge --dry-run` writes the merge migration file to stdout with `verbosity == 3` (#24427). 
""" out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, verbosity=3, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) # Additional output caused by verbosity 3 # The complete merge migration file that would be written # '\n#' is to verify no bytestring prefix before # self.assertIn("\n# -*- coding: utf-8 -*-", output) self.assertIn("class Migration(migrations.Migration):", output) self.assertIn("dependencies = [", output) self.assertIn("('migrations', '0002_second')", output) self.assertIn("('migrations', '0002_conflicting_second')", output) self.assertIn("operations = [", output) self.assertIn("]", output) def test_makemigrations_dry_run(self): """ Ticket #22676 -- `makemigrations --dry-run` should not ask for defaults. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_date = models.DateField() # Added field without a default class Meta: app_label = "migrations" out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out) # Output the expected changes directly, without asking for defaults self.assertIn("Add field silly_date to sillymodel", out.getvalue()) def test_makemigrations_dry_run_verbosity_3(self): """ Ticket #22675 -- Allow `makemigrations --dry-run` to output the migrations file to stdout (with verbosity == 3). """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_char = models.CharField(default="") class Meta: app_label = "migrations" out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out, verbosity=3) # Normal --dry-run output self.assertIn("- Add field silly_char to sillymodel", out.getvalue()) # Additional output caused by verbosity 3 # The complete migrations file that would be written self.assertIn("# -*- coding: utf-8 -*-", out.getvalue()) self.assertIn("class Migration(migrations.Migration):", out.getvalue()) self.assertIn("dependencies = [", out.getvalue()) self.assertIn("('migrations', '0001_initial'),", out.getvalue()) self.assertIn("migrations.AddField(", out.getvalue()) self.assertIn("model_name='sillymodel',", out.getvalue()) self.assertIn("name='silly_char',", out.getvalue()) def test_makemigrations_migrations_modules_path_not_exist(self): """ Ticket #22682 -- Makemigrations fails when specifying custom location for migration files (using MIGRATION_MODULES) if the custom path doesn't already exist. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = six.StringIO() migration_module = "migrations.test_migrations_path_doesnt_exist.foo.bar" with self.temporary_migration_module(module=migration_module) as migration_dir: call_command("makemigrations", "migrations", stdout=out) # Migrations file is actually created in the expected path. 
initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) # Command output indicates the migration is created. self.assertIn(" - Create model SillyModel", out.getvalue()) def test_makemigrations_interactive_by_default(self): """ Makes sure that the user is prompted to merge by default if there are conflicts and merge is True. Answer negative to differentiate it from behavior when --noinput is specified. """ # Monkeypatch interactive questioner to auto reject out = six.StringIO() with mock.patch('django.db.migrations.questioner.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') # This will fail if interactive is False by default self.assertFalse(os.path.exists(merge_file)) self.assertNotIn("Created new merge migration", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_no_merge(self): """ Makes sure that makemigrations does not raise a CommandError when an unspecified app has conflicting migrations. """ with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "migrations", merge=False, verbosity=0) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_merge(self): """ Makes sure that makemigrations does not create a merge for an unspecified app even if it has conflicting migrations. """ # Monkeypatch interactive questioner to auto accept with mock.patch('django.db.migrations.questioner.input', mock.Mock(return_value='y')): out = six.StringIO() with self.temporary_migration_module(app_label="migrated_app") as migration_dir: call_command("makemigrations", "migrated_app", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) self.assertIn("No conflicts detected to merge.", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.conflicting_app_with_dependencies"]) def test_makemigrations_merge_dont_output_dependency_operations(self): """ Makes sure that makemigrations --merge does not output any operations from apps that don't belong to a given app. """ # Monkeypatch interactive questioner to auto accept with mock.patch('django.db.migrations.questioner.input', mock.Mock(return_value='N')): out = six.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command( "makemigrations", "conflicting_app_with_dependencies", merge=True, interactive=True, stdout=out ) val = out.getvalue().lower() self.assertIn('merging conflicting_app_with_dependencies\n', val) self.assertIn( ' branch 0002_conflicting_second\n' ' - create model something\n', val ) self.assertIn( ' branch 0002_second\n' ' - delete model tribble\n' ' - remove field silly_field from author\n' ' - add field rating to author\n' ' - create model book\n', val ) def test_makemigrations_with_custom_name(self): """ Makes sure that makemigrations generate a custom migration. 
""" with self.temporary_migration_module() as migration_dir: def cmd(migration_count, migration_name, *args): call_command("makemigrations", "migrations", "--verbosity", "0", "--name", migration_name, *args) migration_file = os.path.join(migration_dir, "%s_%s.py" % (migration_count, migration_name)) # Check for existing migration file in migration folder self.assertTrue(os.path.exists(migration_file)) with io.open(migration_file, "r", encoding="utf-8") as fp: content = fp.read() self.assertIn("# -*- coding: utf-8 -*-", content) content = content.replace(" ", "") return content # generate an initial migration migration_name_0001 = "my_initial_migration" content = cmd("0001", migration_name_0001) self.assertIn("dependencies=[\n]", content) # Python 3 importlib caches os.listdir() on some platforms like # Mac OS X (#23850). if hasattr(importlib, 'invalidate_caches'): importlib.invalidate_caches() # generate an empty migration migration_name_0002 = "my_custom_migration" content = cmd("0002", migration_name_0002, "--empty") self.assertIn("dependencies=[\n('migrations','0001_%s'),\n]" % migration_name_0001, content) self.assertIn("operations=[\n]", content) @ignore_warnings(category=RemovedInDjango20Warning) def test_makemigrations_exit(self): """ makemigrations --exit should exit with sys.exit(1) when there are no changes to an app. """ with self.temporary_migration_module(): call_command("makemigrations", "--exit", "migrations", verbosity=0) with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): with self.assertRaises(SystemExit): call_command("makemigrations", "--exit", "migrations", verbosity=0) def test_makemigrations_check(self): """ makemigrations --check should exit with a non-zero status when there are changes to an app requiring migrations. """ with self.temporary_migration_module(): with self.assertRaises(SystemExit): call_command("makemigrations", "--check", "migrations", verbosity=0) with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "--check", "migrations", verbosity=0) def test_makemigrations_migration_path_output(self): """ makemigrations should print the relative paths to the migrations unless they are outside of the current tree, in which case the absolute path should be shown. """ out = six.StringIO() apps.register_model('migrations', UnicodeModel) with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_migration_path_output_valueerror(self): """ makemigrations prints the absolute path if os.path.relpath() raises a ValueError when it's impossible to obtain a relative path, e.g. on Windows if Django is installed on a different drive than where the migration files are created. """ out = six.StringIO() with self.temporary_migration_module() as migration_dir: with mock.patch('os.path.relpath', side_effect=ValueError): call_command('makemigrations', 'migrations', stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_inconsistent_history(self): """ makemigrations should raise InconsistentMigrationHistory exception if there are some migrations applied before their dependencies. 
""" recorder = MigrationRecorder(connection) recorder.record_applied('migrations', '0002_second') msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial" with self.temporary_migration_module(module="migrations.test_migrations"): with self.assertRaisesMessage(InconsistentMigrationHistory, msg): call_command("makemigrations") @mock.patch('django.db.migrations.questioner.input', return_value='1') @mock.patch('django.db.migrations.questioner.sys.stdin', mock.MagicMock(encoding=sys.getdefaultencoding())) def test_makemigrations_auto_now_add_interactive(self, *args): """ makemigrations prompts the user when adding auto_now_add to an existing model. """ class Entry(models.Model): title = models.CharField(max_length=255) creation_date = models.DateTimeField(auto_now_add=True) class Meta: app_label = 'migrations' # Monkeypatch interactive questioner to auto accept with mock.patch('django.db.migrations.questioner.sys.stdout', new_callable=six.StringIO) as prompt_stdout: out = six.StringIO() with self.temporary_migration_module(module='migrations.test_auto_now_add'): call_command('makemigrations', 'migrations', interactive=True, stdout=out) output = out.getvalue() prompt_output = prompt_stdout.getvalue() self.assertIn("You can accept the default 'timezone.now' by pressing 'Enter'", prompt_output) self.assertIn("Add field creation_date to entry", output) class SquashMigrationsTests(MigrationTestBase): """ Tests running the squashmigrations command. """ def test_squashmigrations_squashes(self): """ Tests that squashmigrations squashes migrations. """ with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=0) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") self.assertTrue(os.path.exists(squashed_migration_file)) def test_squashmigrations_initial_attribute(self): with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=0) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") with io.open(squashed_migration_file, "r", encoding="utf-8") as fp: content = fp.read() self.assertIn("initial = True", content) def test_squashmigrations_optimizes(self): """ Tests that squashmigrations optimizes operations. """ out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, stdout=out) self.assertIn("Optimized from 8 operations to 3 operations.", out.getvalue()) def test_ticket_23799_squashmigrations_no_optimize(self): """ Makes sure that squashmigrations --no-optimize really doesn't optimize operations. """ out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, no_optimize=True, stdout=out) self.assertIn("Skipping optimization", out.getvalue()) def test_squashmigrations_valid_start(self): """ squashmigrations accepts a starting migration. 
""" out = six.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_changes") as migration_dir: call_command("squashmigrations", "migrations", "0002", "0003", interactive=False, verbosity=1, stdout=out) squashed_migration_file = os.path.join(migration_dir, "0002_second_squashed_0003_third.py") with io.open(squashed_migration_file, "r", encoding="utf-8") as fp: content = fp.read() self.assertIn(" ('migrations', '0001_initial')", content) self.assertNotIn("initial = True", content) out = out.getvalue() self.assertNotIn(" - 0001_initial", out) self.assertIn(" - 0002_second", out) self.assertIn(" - 0003_third", out) def test_squashmigrations_invalid_start(self): """ squashmigrations doesn't accept a starting migration after the ending migration. """ with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): msg = ( "The migration 'migrations.0003_third' cannot be found. Maybe " "it comes after the migration 'migrations.0002_second'" ) with self.assertRaisesMessage(CommandError, msg): call_command("squashmigrations", "migrations", "0003", "0002", interactive=False, verbosity=0)
# -*- coding: utf-8 -*- from __future__ import unicode_literals import datetime import decimal import functools import math import os import re import sys import tokenize import unittest import uuid import custom_migration_operations.more_operations import custom_migration_operations.operations from django import get_version from django.conf import settings from django.core.validators import EmailValidator, RegexValidator from django.db import migrations, models from django.db.migrations.writer import ( MigrationWriter, OperationWriter, SettingsReference, ) from django.test import SimpleTestCase, ignore_warnings, mock from django.utils import datetime_safe, six from django.utils._os import upath from django.utils.deconstruct import deconstructible from django.utils.encoding import force_str from django.utils.functional import SimpleLazyObject from django.utils.timezone import FixedOffset, get_default_timezone, utc from django.utils.translation import ugettext_lazy as _ from .models import FoodManager, FoodQuerySet try: import enum except ImportError: enum = None PY36 = sys.version_info >= (3, 6) class Money(decimal.Decimal): def deconstruct(self): return ( '%s.%s' % (self.__class__.__module__, self.__class__.__name__), [six.text_type(self)], {} ) class TestModel1(object): def upload_to(self): return "somewhere dynamic" thing = models.FileField(upload_to=upload_to) class OperationWriterTests(SimpleTestCase): def test_empty_signature(self): operation = custom_migration_operations.operations.TestOperation() buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.TestOperation(\n' '),' ) def test_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation(1, 2) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' '),' ) def test_kwargs_signature(self): operation = custom_migration_operations.operations.KwargsOperation(kwarg1=1) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=1,\n' '),' ) def test_args_kwargs_signature(self): operation = custom_migration_operations.operations.ArgsKwargsOperation(1, 2, kwarg2=4) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsKwargsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' ' kwarg2=4,\n' '),' ) def test_nested_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation( custom_migration_operations.operations.ArgsOperation(1, 2), custom_migration_operations.operations.KwargsOperation(kwarg1=3, kwarg2=4) ) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsOperation(\n' ' arg1=custom_migration_operations.operations.ArgsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' ' ),\n' ' arg2=custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=3,\n' ' 
kwarg2=4,\n' ' ),\n' '),' ) def test_multiline_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation("test\n arg1", "test\narg2") buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, "custom_migration_operations.operations.ArgsOperation(\n" " arg1='test\\n arg1',\n" " arg2='test\\narg2',\n" ")," ) def test_expand_args_signature(self): operation = custom_migration_operations.operations.ExpandArgsOperation([1, 2]) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ExpandArgsOperation(\n' ' arg=[\n' ' 1,\n' ' 2,\n' ' ],\n' '),' ) def test_nested_operation_expand_args_signature(self): operation = custom_migration_operations.operations.ExpandArgsOperation( arg=[ custom_migration_operations.operations.KwargsOperation( kwarg1=1, kwarg2=2, ), ] ) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ExpandArgsOperation(\n' ' arg=[\n' ' custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=1,\n' ' kwarg2=2,\n' ' ),\n' ' ],\n' '),' ) class WriterTests(SimpleTestCase): """ Tests the migration writer (makes migration files from Migration instances) """ def safe_exec(self, string, value=None): l = {} try: exec(force_str(string), globals(), l) except Exception as e: if value: self.fail("Could not exec %r (from value %r): %s" % (string.strip(), value, e)) else: self.fail("Could not exec %r: %s" % (string.strip(), e)) return l def serialize_round_trip(self, value): string, imports = MigrationWriter.serialize(value) return self.safe_exec("%s\ntest_value_result = %s" % ("\n".join(imports), string), value)['test_value_result'] def assertSerializedEqual(self, value): self.assertEqual(self.serialize_round_trip(value), value) def assertSerializedResultEqual(self, value, target): self.assertEqual(MigrationWriter.serialize(value), target) def assertSerializedFieldEqual(self, value): new_value = self.serialize_round_trip(value) self.assertEqual(value.__class__, new_value.__class__) self.assertEqual(value.max_length, new_value.max_length) self.assertEqual(value.null, new_value.null) self.assertEqual(value.unique, new_value.unique) def test_serialize_numbers(self): self.assertSerializedEqual(1) self.assertSerializedEqual(1.2) self.assertTrue(math.isinf(self.serialize_round_trip(float("inf")))) self.assertTrue(math.isinf(self.serialize_round_trip(float("-inf")))) self.assertTrue(math.isnan(self.serialize_round_trip(float("nan")))) self.assertSerializedEqual(decimal.Decimal('1.3')) self.assertSerializedResultEqual( decimal.Decimal('1.3'), ("Decimal('1.3')", {'from decimal import Decimal'}) ) self.assertSerializedEqual(Money('1.3')) self.assertSerializedResultEqual( Money('1.3'), ("migrations.test_writer.Money('1.3')", {'import migrations.test_writer'}) ) def test_serialize_constants(self): self.assertSerializedEqual(None) self.assertSerializedEqual(True) self.assertSerializedEqual(False) def test_serialize_strings(self): self.assertSerializedEqual(b"foobar") string, imports = MigrationWriter.serialize(b"foobar") self.assertEqual(string, "b'foobar'") self.assertSerializedEqual("föobár") string, imports = MigrationWriter.serialize("foobar") 
self.assertEqual(string, "'foobar'") def test_serialize_multiline_strings(self): self.assertSerializedEqual(b"foo\nbar") string, imports = MigrationWriter.serialize(b"foo\nbar") self.assertEqual(string, "b'foo\\nbar'") self.assertSerializedEqual("föo\nbár") string, imports = MigrationWriter.serialize("foo\nbar") self.assertEqual(string, "'foo\\nbar'") def test_serialize_collections(self): self.assertSerializedEqual({1: 2}) self.assertSerializedEqual(["a", 2, True, None]) self.assertSerializedEqual({2, 3, "eighty"}) self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]}) self.assertSerializedEqual(_('Hello')) def test_serialize_builtin_types(self): self.assertSerializedEqual([list, tuple, dict, set, frozenset]) self.assertSerializedResultEqual( [list, tuple, dict, set, frozenset], ("[list, tuple, dict, set, frozenset]", set()) ) def test_serialize_lazy_objects(self): pattern = re.compile(r'^foo$', re.UNICODE) lazy_pattern = SimpleLazyObject(lambda: pattern) self.assertEqual(self.serialize_round_trip(lazy_pattern), pattern) @unittest.skipUnless(enum, "enum34 is required on Python 2") def test_serialize_enums(self): class TextEnum(enum.Enum): A = 'a-value' B = 'value-b' class BinaryEnum(enum.Enum): A = b'a-value' B = b'value-b' class IntEnum(enum.IntEnum): A = 1 B = 2 self.assertSerializedResultEqual( TextEnum.A, ("migrations.test_writer.TextEnum('a-value')", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( BinaryEnum.A, ("migrations.test_writer.BinaryEnum(b'a-value')", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( IntEnum.B, ("migrations.test_writer.IntEnum(2)", {'import migrations.test_writer'}) ) field = models.CharField(default=TextEnum.B, choices=[(m.value, m) for m in TextEnum]) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[" "('a-value', migrations.test_writer.TextEnum('a-value')), " "('value-b', migrations.test_writer.TextEnum('value-b'))], " "default=migrations.test_writer.TextEnum('value-b'))" ) field = models.CharField(default=BinaryEnum.B, choices=[(m.value, m) for m in BinaryEnum]) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[" "(b'a-value', migrations.test_writer.BinaryEnum(b'a-value')), " "(b'value-b', migrations.test_writer.BinaryEnum(b'value-b'))], " "default=migrations.test_writer.BinaryEnum(b'value-b'))" ) field = models.IntegerField(default=IntEnum.A, choices=[(m.value, m) for m in IntEnum]) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.IntegerField(choices=[" "(1, migrations.test_writer.IntEnum(1)), " "(2, migrations.test_writer.IntEnum(2))], " "default=migrations.test_writer.IntEnum(1))" ) def test_serialize_uuid(self): self.assertSerializedEqual(uuid.uuid1()) self.assertSerializedEqual(uuid.uuid4()) uuid_a = uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8') uuid_b = uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2') self.assertSerializedResultEqual( uuid_a, ("uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8')", {'import uuid'}) ) self.assertSerializedResultEqual( uuid_b, ("uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2')", {'import uuid'}) ) field = models.UUIDField(choices=((uuid_a, 'UUID A'), (uuid_b, 'UUID B')), default=uuid_a) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.UUIDField(choices=[" "(uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'), 'UUID A'), " "(uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2'), 'UUID B')], " 
"default=uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'))" ) def test_serialize_functions(self): with self.assertRaisesMessage(ValueError, 'Cannot serialize function: lambda'): self.assertSerializedEqual(lambda x: 42) self.assertSerializedEqual(models.SET_NULL) string, imports = MigrationWriter.serialize(models.SET(42)) self.assertEqual(string, 'models.SET(42)') self.serialize_round_trip(models.SET(42)) def test_serialize_datetime(self): self.assertSerializedEqual(datetime.datetime.utcnow()) self.assertSerializedEqual(datetime.datetime.utcnow) self.assertSerializedEqual(datetime.datetime.today()) self.assertSerializedEqual(datetime.datetime.today) self.assertSerializedEqual(datetime.date.today()) self.assertSerializedEqual(datetime.date.today) self.assertSerializedEqual(datetime.datetime.now().time()) self.assertSerializedEqual(datetime.datetime(2014, 1, 1, 1, 1, tzinfo=get_default_timezone())) self.assertSerializedEqual(datetime.datetime(2013, 12, 31, 22, 1, tzinfo=FixedOffset(180))) self.assertSerializedResultEqual( datetime.datetime(2014, 1, 1, 1, 1), ("datetime.datetime(2014, 1, 1, 1, 1)", {'import datetime'}) ) self.assertSerializedResultEqual( datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc), ( "datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc)", {'import datetime', 'from django.utils.timezone import utc'}, ) ) def test_serialize_datetime_safe(self): self.assertSerializedResultEqual( datetime_safe.date(2014, 3, 31), ("datetime.date(2014, 3, 31)", {'import datetime'}) ) self.assertSerializedResultEqual( datetime_safe.time(10, 25), ("datetime.time(10, 25)", {'import datetime'}) ) self.assertSerializedResultEqual( datetime_safe.datetime(2014, 3, 31, 16, 4, 31), ("datetime.datetime(2014, 3, 31, 16, 4, 31)", {'import datetime'}) ) def test_serialize_fields(self): self.assertSerializedFieldEqual(models.CharField(max_length=255)) self.assertSerializedResultEqual( models.CharField(max_length=255), ("models.CharField(max_length=255)", {"from django.db import models"}) ) self.assertSerializedFieldEqual(models.TextField(null=True, blank=True)) self.assertSerializedResultEqual( models.TextField(null=True, blank=True), ("models.TextField(blank=True, null=True)", {'from django.db import models'}) ) def test_serialize_settings(self): self.assertSerializedEqual(SettingsReference(settings.AUTH_USER_MODEL, "AUTH_USER_MODEL")) self.assertSerializedResultEqual( SettingsReference("someapp.model", "AUTH_USER_MODEL"), ("settings.AUTH_USER_MODEL", {"from django.conf import settings"}) ) def test_serialize_iterators(self): self.assertSerializedResultEqual( ((x, x * x) for x in range(3)), ("((0, 0), (1, 1), (2, 4))", set()) ) def test_serialize_compiled_regex(self): """ Make sure compiled regex can be serialized. """ regex = re.compile(r'^\w+$', re.U) self.assertSerializedEqual(regex) def test_serialize_class_based_validators(self): """ Ticket #22943: Test serialization of class-based validators, including compiled regexes. """ validator = RegexValidator(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator(message='hello')") self.serialize_round_trip(validator) # Test with a compiled regex. 
validator = RegexValidator(regex=re.compile(r'^\w+$', re.U)) string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator(regex=re.compile('^\\\\w+$', 32))") self.serialize_round_trip(validator) # Test a string regex with flag validator = RegexValidator(r'^[0-9]+$', flags=re.U) string = MigrationWriter.serialize(validator)[0] if PY36: self.assertEqual(string, "django.core.validators.RegexValidator('^[0-9]+$', flags=re.RegexFlag(32))") else: self.assertEqual(string, "django.core.validators.RegexValidator('^[0-9]+$', flags=32)") self.serialize_round_trip(validator) # Test message and code validator = RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid') string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid')") self.serialize_round_trip(validator) # Test with a subclass. validator = EmailValidator(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.EmailValidator(message='hello')") self.serialize_round_trip(validator) validator = deconstructible(path="migrations.test_writer.EmailValidator")(EmailValidator)(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "migrations.test_writer.EmailValidator(message='hello')") validator = deconstructible(path="custom.EmailValidator")(EmailValidator)(message="hello") with six.assertRaisesRegex(self, ImportError, "No module named '?custom'?"): MigrationWriter.serialize(validator) validator = deconstructible(path="django.core.validators.EmailValidator2")(EmailValidator)(message="hello") with self.assertRaisesMessage(ValueError, "Could not find object EmailValidator2 in django.core.validators."): MigrationWriter.serialize(validator) def test_serialize_empty_nonempty_tuple(self): """ Ticket #22679: makemigrations generates invalid code for (an empty tuple) default_permissions = () """ empty_tuple = () one_item_tuple = ('a',) many_items_tuple = ('a', 'b', 'c') self.assertSerializedEqual(empty_tuple) self.assertSerializedEqual(one_item_tuple) self.assertSerializedEqual(many_items_tuple) def test_serialize_builtins(self): string, imports = MigrationWriter.serialize(range) self.assertEqual(string, 'range') self.assertEqual(imports, set()) @unittest.skipUnless(six.PY2, "Only applies on Python 2") def test_serialize_direct_function_reference(self): """ Ticket #22436: You cannot use a function straight from its body (e.g. define the method and use it in the same body) """ with self.assertRaises(ValueError): self.serialize_round_trip(TestModel1.thing) def test_serialize_local_function_reference(self): """ Neither py2 or py3 can serialize a reference in a local scope. 
""" class TestModel2(object): def upload_to(self): return "somewhere dynamic" thing = models.FileField(upload_to=upload_to) with self.assertRaises(ValueError): self.serialize_round_trip(TestModel2.thing) def test_serialize_local_function_reference_message(self): """ Make sure user is seeing which module/function is the issue """ class TestModel2(object): def upload_to(self): return "somewhere dynamic" thing = models.FileField(upload_to=upload_to) with self.assertRaisesMessage(ValueError, 'Could not find function upload_to in migrations.test_writer'): self.serialize_round_trip(TestModel2.thing) def test_serialize_managers(self): self.assertSerializedEqual(models.Manager()) self.assertSerializedResultEqual( FoodQuerySet.as_manager(), ('migrations.models.FoodQuerySet.as_manager()', {'import migrations.models'}) ) self.assertSerializedEqual(FoodManager('a', 'b')) self.assertSerializedEqual(FoodManager('x', 'y', c=3, d=4)) def test_serialize_frozensets(self): self.assertSerializedEqual(frozenset()) self.assertSerializedEqual(frozenset("let it go")) def test_serialize_timedelta(self): self.assertSerializedEqual(datetime.timedelta()) self.assertSerializedEqual(datetime.timedelta(minutes=42)) def test_serialize_functools_partial(self): value = functools.partial(datetime.timedelta, 1, seconds=2) result = self.serialize_round_trip(value) self.assertEqual(result.func, value.func) self.assertEqual(result.args, value.args) self.assertEqual(result.keywords, value.keywords) def test_simple_migration(self): """ Tests serializing a simple migration. """ fields = { 'charfield': models.DateTimeField(default=datetime.datetime.utcnow), 'datetimefield': models.DateTimeField(default=datetime.datetime.utcnow), } options = { 'verbose_name': 'My model', 'verbose_name_plural': 'My models', } migration = type(str("Migration"), (migrations.Migration,), { "operations": [ migrations.CreateModel("MyModel", tuple(fields.items()), options, (models.Model,)), migrations.CreateModel("MyModel2", tuple(fields.items()), bases=(models.Model,)), migrations.CreateModel( name="MyModel3", fields=tuple(fields.items()), options=options, bases=(models.Model,) ), migrations.DeleteModel("MyModel"), migrations.AddField("OtherModel", "datetimefield", fields["datetimefield"]), ], "dependencies": [("testapp", "some_other_one")], }) writer = MigrationWriter(migration) output = writer.as_string() # We don't test the output formatting - that's too fragile. # Just make sure it runs for now, and that things look alright. result = self.safe_exec(output) self.assertIn("Migration", result) # In order to preserve compatibility with Python 3.2 unicode literals # prefix shouldn't be added to strings. 
tokens = tokenize.generate_tokens(six.StringIO(str(output)).readline) for token_type, token_source, (srow, scol), __, line in tokens: if token_type == tokenize.STRING: self.assertFalse( token_source.startswith('u'), "Unicode literal prefix found at %d:%d: %r" % ( srow, scol, line.strip() ) ) # Silence warning on Python 2: Not importing directory # 'tests/migrations/migrations_test_apps/without_init_file/migrations': # missing __init__.py @ignore_warnings(category=ImportWarning) def test_migration_path(self): test_apps = [ 'migrations.migrations_test_apps.normal', 'migrations.migrations_test_apps.with_package_model', 'migrations.migrations_test_apps.without_init_file', ] base_dir = os.path.dirname(os.path.dirname(upath(__file__))) for app in test_apps: with self.modify_settings(INSTALLED_APPS={'append': app}): migration = migrations.Migration('0001_initial', app.split('.')[-1]) expected_path = os.path.join(base_dir, *(app.split('.') + ['migrations', '0001_initial.py'])) writer = MigrationWriter(migration) self.assertEqual(writer.path, expected_path) def test_custom_operation(self): migration = type(str("Migration"), (migrations.Migration,), { "operations": [ custom_migration_operations.operations.TestOperation(), custom_migration_operations.operations.CreateModel(), migrations.CreateModel("MyModel", (), {}, (models.Model,)), custom_migration_operations.more_operations.TestOperation() ], "dependencies": [] }) writer = MigrationWriter(migration) output = writer.as_string() result = self.safe_exec(output) self.assertIn("custom_migration_operations", result) self.assertNotEqual( result['custom_migration_operations'].operations.TestOperation, result['custom_migration_operations'].more_operations.TestOperation ) def test_sorted_imports(self): """ #24155 - Tests ordering of imports. """ migration = type(str("Migration"), (migrations.Migration,), { "operations": [ migrations.AddField("mymodel", "myfield", models.DateTimeField( default=datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc), )), ] }) writer = MigrationWriter(migration) output = writer.as_string() self.assertIn( "import datetime\n" "from django.db import migrations, models\n" "from django.utils.timezone import utc\n", output ) def test_migration_file_header_comments(self): """ Test comments at top of file. """ migration = type(str("Migration"), (migrations.Migration,), { "operations": [] }) dt = datetime.datetime(2015, 7, 31, 4, 40, 0, 0, tzinfo=utc) with mock.patch('django.db.migrations.writer.now', lambda: dt): writer = MigrationWriter(migration) output = writer.as_string() self.assertTrue( output.startswith( "# -*- coding: utf-8 -*-\n" "# Generated by Django %(version)s on 2015-07-31 04:40\n" % { 'version': get_version(), } ) ) def test_models_import_omitted(self): """ django.db.models shouldn't be imported if unused. """ migration = type(str("Migration"), (migrations.Migration,), { "operations": [ migrations.AlterModelOptions( name='model', options={'verbose_name': 'model', 'verbose_name_plural': 'models'}, ), ] }) writer = MigrationWriter(migration) output = writer.as_string() self.assertIn("from django.db import migrations\n", output) def test_deconstruct_class_arguments(self): # Yes, it doesn't make sense to use a class as a default for a # CharField. It does make sense for custom fields though, for example # an enumfield that takes the enum class as an argument. 
        class DeconstructibleInstances(object):
            def deconstruct(self):
                return ('DeconstructibleInstances', [], {})

        string = MigrationWriter.serialize(models.CharField(default=DeconstructibleInstances))[0]
        self.assertEqual(string, "models.CharField(default=migrations.test_writer.DeconstructibleInstances)")
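
# Illustrative sketch, not part of the original module: MigrationWriter.serialize()
# round-trips any object exposing the deconstruct() protocol that Money and
# DeconstructibleInstances demonstrate above -- a (path, args, kwargs) triple the
# writer turns back into an import plus a constructor call. The NamedValue class
# below is hypothetical and only restates that contract.
class NamedValue(object):
    def __init__(self, name):
        self.name = name

    def __eq__(self, other):
        # Value equality lets a serialized/deserialized copy compare equal.
        return isinstance(other, NamedValue) and self.name == other.name

    def deconstruct(self):
        # The path must be importable at migration time; the args and kwargs
        # must themselves be serializable by MigrationWriter.
        return (
            '%s.%s' % (self.__class__.__module__, self.__class__.__name__),
            [self.name],
            {},
        )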
from __future__ import unicode_literals from unittest import skipIf from django.db import connection, connections from django.db.migrations.exceptions import ( AmbiguityError, InconsistentMigrationHistory, NodeNotFoundError, ) from django.db.migrations.loader import MigrationLoader from django.db.migrations.recorder import MigrationRecorder from django.test import TestCase, modify_settings, override_settings from django.utils import six class RecorderTests(TestCase): """ Tests recording migrations as applied or not. """ def test_apply(self): """ Tests marking migrations as applied/unapplied. """ recorder = MigrationRecorder(connection) self.assertEqual( set((x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"), set(), ) recorder.record_applied("myapp", "0432_ponies") self.assertEqual( set((x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"), {("myapp", "0432_ponies")}, ) # That should not affect records of another database recorder_other = MigrationRecorder(connections['other']) self.assertEqual( set((x, y) for (x, y) in recorder_other.applied_migrations() if x == "myapp"), set(), ) recorder.record_unapplied("myapp", "0432_ponies") self.assertEqual( set((x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"), set(), ) class LoaderTests(TestCase): """ Tests the disk and database loader, and running through migrations in memory. """ @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) @modify_settings(INSTALLED_APPS={'append': 'basic'}) def test_load(self): """ Makes sure the loader can load the migrations for the test apps, and then render them out to a new Apps. """ # Load and test the plan migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "0002_second")), [ ("migrations", "0001_initial"), ("migrations", "0002_second"), ], ) # Now render it out! project_state = migration_loader.project_state(("migrations", "0002_second")) self.assertEqual(len(project_state.models), 2) author_state = project_state.models["migrations", "author"] self.assertEqual( [x for x, y in author_state.fields], ["id", "name", "slug", "age", "rating"] ) book_state = project_state.models["migrations", "book"] self.assertEqual( [x for x, y in book_state.fields], ["id", "author"] ) # Ensure we've included unmigrated apps in there too self.assertIn("basic", project_state.real_apps) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_unmigdep"}) def test_load_unmigrated_dependency(self): """ Makes sure the loader can load migrations with a dependency on an unmigrated app. """ # Load and test the plan migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "0001_initial")), [ ('contenttypes', '0001_initial'), ('auth', '0001_initial'), ("migrations", "0001_initial"), ], ) # Now render it out! project_state = migration_loader.project_state(("migrations", "0001_initial")) self.assertEqual(len([m for a, m in project_state.models if a == "migrations"]), 1) book_state = project_state.models["migrations", "book"] self.assertEqual( [x for x, y in book_state.fields], ["id", "user"] ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}) def test_run_before(self): """ Makes sure the loader uses Migration.run_before. 
""" # Load and test the plan migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "0002_second")), [ ("migrations", "0001_initial"), ("migrations", "0003_third"), ("migrations", "0002_second"), ], ) @override_settings(MIGRATION_MODULES={ "migrations": "migrations.test_migrations_first", "migrations2": "migrations2.test_migrations_2_first", }) @modify_settings(INSTALLED_APPS={'append': 'migrations2'}) def test_first(self): """ Makes sure the '__first__' migrations build correctly. """ migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "second")), [ ("migrations", "thefirst"), ("migrations2", "0001_initial"), ("migrations2", "0002_second"), ("migrations", "second"), ], ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_name_match(self): "Tests prefix name matching" migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.get_migration_by_prefix("migrations", "0001").name, "0001_initial", ) with self.assertRaises(AmbiguityError): migration_loader.get_migration_by_prefix("migrations", "0") with self.assertRaises(KeyError): migration_loader.get_migration_by_prefix("migrations", "blarg") def test_load_import_error(self): with override_settings(MIGRATION_MODULES={"migrations": "import_error_package"}): with self.assertRaises(ImportError): MigrationLoader(connection) def test_load_module_file(self): with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.file"}): loader = MigrationLoader(connection) self.assertIn( "migrations", loader.unmigrated_apps, "App with migrations module file not in unmigrated apps." ) @skipIf(six.PY2, "PY2 doesn't load empty dirs.") def test_load_empty_dir(self): with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.namespace"}): loader = MigrationLoader(connection) self.assertIn( "migrations", loader.unmigrated_apps, "App missing __init__.py in migrations module not in unmigrated apps." ) @override_settings( INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'], ) def test_marked_as_migrated(self): """ Undefined MIGRATION_MODULES implies default migration module. """ migration_loader = MigrationLoader(connection) self.assertEqual(migration_loader.migrated_apps, {'migrated_app'}) self.assertEqual(migration_loader.unmigrated_apps, set()) @override_settings( INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'], MIGRATION_MODULES={"migrated_app": None}, ) def test_marked_as_unmigrated(self): """ MIGRATION_MODULES allows disabling of migrations for a particular app. """ migration_loader = MigrationLoader(connection) self.assertEqual(migration_loader.migrated_apps, set()) self.assertEqual(migration_loader.unmigrated_apps, {'migrated_app'}) @override_settings( INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'], MIGRATION_MODULES={'migrated_app': 'missing-module'}, ) def test_explicit_missing_module(self): """ If a MIGRATION_MODULES override points to a missing module, the error raised during the importation attempt should be propagated unless `ignore_no_migrations=True`. 
""" with self.assertRaisesMessage(ImportError, 'missing-module'): migration_loader = MigrationLoader(connection) migration_loader = MigrationLoader(connection, ignore_no_migrations=True) self.assertEqual(migration_loader.migrated_apps, set()) self.assertEqual(migration_loader.unmigrated_apps, {'migrated_app'}) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_loading_squashed(self): "Tests loading a squashed migration" migration_loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) # Loading with nothing applied should just give us the one node self.assertEqual( len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]), 1, ) # However, fake-apply one migration and it should now use the old two recorder.record_applied("migrations", "0001_initial") migration_loader.build_graph() self.assertEqual( len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]), 2, ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}) def test_loading_squashed_complex(self): "Tests loading a complex set of squashed migrations" loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) def num_nodes(): plan = set(loader.graph.forwards_plan(('migrations', '7_auto'))) return len(plan - loader.applied_migrations) # Empty database: use squashed migration loader.build_graph() self.assertEqual(num_nodes(), 5) # Starting at 1 or 2 should use the squashed migration too recorder.record_applied("migrations", "1_auto") loader.build_graph() self.assertEqual(num_nodes(), 4) recorder.record_applied("migrations", "2_auto") loader.build_graph() self.assertEqual(num_nodes(), 3) # However, starting at 3 to 5 cannot use the squashed migration recorder.record_applied("migrations", "3_auto") loader.build_graph() self.assertEqual(num_nodes(), 4) recorder.record_applied("migrations", "4_auto") loader.build_graph() self.assertEqual(num_nodes(), 3) # Starting at 5 to 7 we are passed the squashed migrations recorder.record_applied("migrations", "5_auto") loader.build_graph() self.assertEqual(num_nodes(), 2) recorder.record_applied("migrations", "6_auto") loader.build_graph() self.assertEqual(num_nodes(), 1) recorder.record_applied("migrations", "7_auto") loader.build_graph() self.assertEqual(num_nodes(), 0) @override_settings(MIGRATION_MODULES={ "app1": "migrations.test_migrations_squashed_complex_multi_apps.app1", "app2": "migrations.test_migrations_squashed_complex_multi_apps.app2", }) @modify_settings(INSTALLED_APPS={'append': [ "migrations.test_migrations_squashed_complex_multi_apps.app1", "migrations.test_migrations_squashed_complex_multi_apps.app2", ]}) def test_loading_squashed_complex_multi_apps(self): loader = MigrationLoader(connection) loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) expected_plan = { ('app1', '1_auto'), ('app2', '1_squashed_2'), ('app1', '2_squashed_3'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) @override_settings(MIGRATION_MODULES={ "app1": "migrations.test_migrations_squashed_complex_multi_apps.app1", "app2": "migrations.test_migrations_squashed_complex_multi_apps.app2", }) @modify_settings(INSTALLED_APPS={'append': [ "migrations.test_migrations_squashed_complex_multi_apps.app1", "migrations.test_migrations_squashed_complex_multi_apps.app2", ]}) def test_loading_squashed_complex_multi_apps_partially_applied(self): loader = 
MigrationLoader(connection) recorder = MigrationRecorder(connection) recorder.record_applied('app1', '1_auto') recorder.record_applied('app1', '2_auto') loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations expected_plan = { ('app2', '1_squashed_2'), ('app1', '3_auto'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_erroneous"}) def test_loading_squashed_erroneous(self): "Tests loading a complex but erroneous set of squashed migrations" loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) def num_nodes(): plan = set(loader.graph.forwards_plan(('migrations', '7_auto'))) return len(plan - loader.applied_migrations) # Empty database: use squashed migration loader.build_graph() self.assertEqual(num_nodes(), 5) # Starting at 1 or 2 should use the squashed migration too recorder.record_applied("migrations", "1_auto") loader.build_graph() self.assertEqual(num_nodes(), 4) recorder.record_applied("migrations", "2_auto") loader.build_graph() self.assertEqual(num_nodes(), 3) # However, starting at 3 or 4 we'd need to use non-existing migrations msg = ("Migration migrations.6_auto depends on nonexistent node ('migrations', '5_auto'). " "Django tried to replace migration migrations.5_auto with any of " "[migrations.3_squashed_5] but wasn't able to because some of the replaced " "migrations are already applied.") recorder.record_applied("migrations", "3_auto") with self.assertRaisesMessage(NodeNotFoundError, msg): loader.build_graph() recorder.record_applied("migrations", "4_auto") with self.assertRaisesMessage(NodeNotFoundError, msg): loader.build_graph() # Starting at 5 to 7 we are passed the squashed migrations recorder.record_applied("migrations", "5_auto") loader.build_graph() self.assertEqual(num_nodes(), 2) recorder.record_applied("migrations", "6_auto") loader.build_graph() self.assertEqual(num_nodes(), 1) recorder.record_applied("migrations", "7_auto") loader.build_graph() self.assertEqual(num_nodes(), 0) @override_settings( MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}, INSTALLED_APPS=['migrations'], ) def test_check_consistent_history(self): loader = MigrationLoader(connection=None) loader.check_consistent_history(connection) recorder = MigrationRecorder(connection) recorder.record_applied('migrations', '0002_second') msg = ( "Migration migrations.0002_second is applied before its dependency " "migrations.0001_initial on database 'default'." ) with self.assertRaisesMessage(InconsistentMigrationHistory, msg): loader.check_consistent_history(connection) @override_settings( MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed_extra'}, INSTALLED_APPS=['migrations'], ) def test_check_consistent_history_squashed(self): """ MigrationLoader.check_consistent_history() should ignore unapplied squashed migrations that have all of their `replaces` applied. 
""" loader = MigrationLoader(connection=None) recorder = MigrationRecorder(connection) recorder.record_applied('migrations', '0001_initial') recorder.record_applied('migrations', '0002_second') loader.check_consistent_history(connection) recorder.record_applied('migrations', '0003_third') loader.check_consistent_history(connection) @override_settings(MIGRATION_MODULES={ "app1": "migrations.test_migrations_squashed_ref_squashed.app1", "app2": "migrations.test_migrations_squashed_ref_squashed.app2", }) @modify_settings(INSTALLED_APPS={'append': [ "migrations.test_migrations_squashed_ref_squashed.app1", "migrations.test_migrations_squashed_ref_squashed.app2", ]}) def test_loading_squashed_ref_squashed(self): "Tests loading a squashed migration with a new migration referencing it" r""" The sample migrations are structured like this: app_1 1 --> 2 ---------------------*--> 3 *--> 4 \ / / *-------------------*----/--> 2_sq_3 --* \ / / =============== \ ============= / == / ====================== app_2 *--> 1_sq_2 --* / \ / *--> 1 --> 2 --* Where 2_sq_3 is a replacing migration for 2 and 3 in app_1, as 1_sq_2 is a replacing migration for 1 and 2 in app_2. """ loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) # Load with nothing applied: both migrations squashed. loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations expected_plan = { ('app1', '1_auto'), ('app2', '1_squashed_2'), ('app1', '2_squashed_3'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) # Fake-apply a few from app1: unsquashes migration in app1. recorder.record_applied('app1', '1_auto') recorder.record_applied('app1', '2_auto') loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations expected_plan = { ('app2', '1_squashed_2'), ('app1', '3_auto'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) # Fake-apply one from app2: unsquashes migration in app2 too. recorder.record_applied('app2', '1_auto') loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations expected_plan = { ('app2', '2_auto'), ('app1', '3_auto'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan)
import warnings from django.db.migrations.exceptions import ( CircularDependencyError, NodeNotFoundError, ) from django.db.migrations.graph import RECURSION_DEPTH_WARNING, MigrationGraph from django.test import SimpleTestCase from django.utils.encoding import force_text class GraphTests(SimpleTestCase): """ Tests the digraph structure. """ def test_simple_graph(self): """ Tests a basic dependency graph: app_a: 0001 <-- 0002 <--- 0003 <-- 0004 / app_b: 0001 <-- 0002 <-/ """ # Build graph graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) graph.add_node(("app_a", "0002"), None) graph.add_node(("app_a", "0003"), None) graph.add_node(("app_a", "0004"), None) graph.add_node(("app_b", "0001"), None) graph.add_node(("app_b", "0002"), None) graph.add_dependency("app_a.0004", ("app_a", "0004"), ("app_a", "0003")) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002")) graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001")) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_b", "0002")) graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001")) # Test root migration case self.assertEqual( graph.forwards_plan(("app_a", "0001")), [('app_a', '0001')], ) # Test branch B only self.assertEqual( graph.forwards_plan(("app_b", "0002")), [("app_b", "0001"), ("app_b", "0002")], ) # Test whole graph self.assertEqual( graph.forwards_plan(("app_a", "0004")), [ ('app_b', '0001'), ('app_b', '0002'), ('app_a', '0001'), ('app_a', '0002'), ('app_a', '0003'), ('app_a', '0004'), ], ) # Test reverse to b:0002 self.assertEqual( graph.backwards_plan(("app_b", "0002")), [('app_a', '0004'), ('app_a', '0003'), ('app_b', '0002')], ) # Test roots and leaves self.assertEqual( graph.root_nodes(), [('app_a', '0001'), ('app_b', '0001')], ) self.assertEqual( graph.leaf_nodes(), [('app_a', '0004'), ('app_b', '0002')], ) def test_complex_graph(self): r""" Tests a complex dependency graph: app_a: 0001 <-- 0002 <--- 0003 <-- 0004 \ \ / / app_b: 0001 <-\ 0002 <-X / \ \ / app_c: \ 0001 <-- 0002 <- """ # Build graph graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) graph.add_node(("app_a", "0002"), None) graph.add_node(("app_a", "0003"), None) graph.add_node(("app_a", "0004"), None) graph.add_node(("app_b", "0001"), None) graph.add_node(("app_b", "0002"), None) graph.add_node(("app_c", "0001"), None) graph.add_node(("app_c", "0002"), None) graph.add_dependency("app_a.0004", ("app_a", "0004"), ("app_a", "0003")) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002")) graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001")) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_b", "0002")) graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001")) graph.add_dependency("app_a.0004", ("app_a", "0004"), ("app_c", "0002")) graph.add_dependency("app_c.0002", ("app_c", "0002"), ("app_c", "0001")) graph.add_dependency("app_c.0001", ("app_c", "0001"), ("app_b", "0001")) graph.add_dependency("app_c.0002", ("app_c", "0002"), ("app_a", "0002")) # Test branch C only self.assertEqual( graph.forwards_plan(("app_c", "0002")), [('app_b', '0001'), ('app_c', '0001'), ('app_a', '0001'), ('app_a', '0002'), ('app_c', '0002')], ) # Test whole graph self.assertEqual( graph.forwards_plan(("app_a", "0004")), [ ('app_b', '0001'), ('app_c', '0001'), ('app_a', '0001'), ('app_a', '0002'), ('app_c', '0002'), ('app_b', '0002'), ('app_a', '0003'), ('app_a', '0004'), ], ) # Test reverse to b:0001 self.assertEqual( 
graph.backwards_plan(("app_b", "0001")), [ ('app_a', '0004'), ('app_c', '0002'), ('app_c', '0001'), ('app_a', '0003'), ('app_b', '0002'), ('app_b', '0001'), ], ) # Test roots and leaves self.assertEqual( graph.root_nodes(), [('app_a', '0001'), ('app_b', '0001'), ('app_c', '0001')], ) self.assertEqual( graph.leaf_nodes(), [('app_a', '0004'), ('app_b', '0002'), ('app_c', '0002')], ) def test_circular_graph(self): """ Tests a circular dependency graph. """ # Build graph graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) graph.add_node(("app_a", "0002"), None) graph.add_node(("app_a", "0003"), None) graph.add_node(("app_b", "0001"), None) graph.add_node(("app_b", "0002"), None) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002")) graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001")) graph.add_dependency("app_a.0001", ("app_a", "0001"), ("app_b", "0002")) graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001")) graph.add_dependency("app_b.0001", ("app_b", "0001"), ("app_a", "0003")) # Test whole graph with self.assertRaises(CircularDependencyError): graph.forwards_plan(("app_a", "0003"), ) def test_circular_graph_2(self): graph = MigrationGraph() graph.add_node(('A', '0001'), None) graph.add_node(('C', '0001'), None) graph.add_node(('B', '0001'), None) graph.add_dependency('A.0001', ('A', '0001'), ('B', '0001')) graph.add_dependency('B.0001', ('B', '0001'), ('A', '0001')) graph.add_dependency('C.0001', ('C', '0001'), ('B', '0001')) with self.assertRaises(CircularDependencyError): graph.forwards_plan(('C', '0001')) def test_graph_recursive(self): graph = MigrationGraph() root = ("app_a", "1") graph.add_node(root, None) expected = [root] for i in range(2, 750): parent = ("app_a", str(i - 1)) child = ("app_a", str(i)) graph.add_node(child, None) graph.add_dependency(str(i), child, parent) expected.append(child) leaf = expected[-1] forwards_plan = graph.forwards_plan(leaf) self.assertEqual(expected, forwards_plan) backwards_plan = graph.backwards_plan(root) self.assertEqual(expected[::-1], backwards_plan) def test_graph_iterative(self): graph = MigrationGraph() root = ("app_a", "1") graph.add_node(root, None) expected = [root] for i in range(2, 1000): parent = ("app_a", str(i - 1)) child = ("app_a", str(i)) graph.add_node(child, None) graph.add_dependency(str(i), child, parent) expected.append(child) leaf = expected[-1] with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always', RuntimeWarning) forwards_plan = graph.forwards_plan(leaf) self.assertEqual(len(w), 1) self.assertTrue(issubclass(w[-1].category, RuntimeWarning)) self.assertEqual(str(w[-1].message), RECURSION_DEPTH_WARNING) self.assertEqual(expected, forwards_plan) with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always', RuntimeWarning) backwards_plan = graph.backwards_plan(root) self.assertEqual(len(w), 1) self.assertTrue(issubclass(w[-1].category, RuntimeWarning)) self.assertEqual(str(w[-1].message), RECURSION_DEPTH_WARNING) self.assertEqual(expected[::-1], backwards_plan) def test_plan_invalid_node(self): """ Tests for forwards/backwards_plan of nonexistent node. """ graph = MigrationGraph() message = "Node ('app_b', '0001') not a valid node" with self.assertRaisesMessage(NodeNotFoundError, message): graph.forwards_plan(("app_b", "0001")) with self.assertRaisesMessage(NodeNotFoundError, message): graph.backwards_plan(("app_b", "0001")) def test_missing_parent_nodes(self): """ Tests for missing parent nodes. 
""" # Build graph graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) graph.add_node(("app_a", "0002"), None) graph.add_node(("app_a", "0003"), None) graph.add_node(("app_b", "0001"), None) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002")) graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001")) msg = "Migration app_a.0001 dependencies reference nonexistent parent node ('app_b', '0002')" with self.assertRaisesMessage(NodeNotFoundError, msg): graph.add_dependency("app_a.0001", ("app_a", "0001"), ("app_b", "0002")) def test_missing_child_nodes(self): """ Tests for missing child nodes. """ # Build graph graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) msg = "Migration app_a.0002 dependencies reference nonexistent child node ('app_a', '0002')" with self.assertRaisesMessage(NodeNotFoundError, msg): graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001")) def test_validate_consistency(self): """ Tests for missing nodes, using `validate_consistency()` to raise the error. """ # Build graph graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) # Add dependency with missing parent node (skipping validation). graph.add_dependency("app_a.0001", ("app_a", "0001"), ("app_b", "0002"), skip_validation=True) msg = "Migration app_a.0001 dependencies reference nonexistent parent node ('app_b', '0002')" with self.assertRaisesMessage(NodeNotFoundError, msg): graph.validate_consistency() # Add missing parent node and ensure `validate_consistency()` no longer raises error. graph.add_node(("app_b", "0002"), None) graph.validate_consistency() # Add dependency with missing child node (skipping validation). graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"), skip_validation=True) msg = "Migration app_a.0002 dependencies reference nonexistent child node ('app_a', '0002')" with self.assertRaisesMessage(NodeNotFoundError, msg): graph.validate_consistency() # Add missing child node and ensure `validate_consistency()` no longer raises error. graph.add_node(("app_a", "0002"), None) graph.validate_consistency() # Rawly add dummy node. msg = "app_a.0001 (req'd by app_a.0002) is missing!" graph.add_dummy_node( key=("app_a", "0001"), origin="app_a.0002", error_message=msg ) with self.assertRaisesMessage(NodeNotFoundError, msg): graph.validate_consistency() def test_remove_replaced_nodes(self): """ Tests that replaced nodes are properly removed and dependencies remapped. """ # Add some dummy nodes to be replaced. graph = MigrationGraph() graph.add_dummy_node(key=("app_a", "0001"), origin="app_a.0002", error_message="BAD!") graph.add_dummy_node(key=("app_a", "0002"), origin="app_b.0001", error_message="BAD!") graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"), skip_validation=True) # Add some normal parent and child nodes to test dependency remapping. graph.add_node(("app_c", "0001"), None) graph.add_node(("app_b", "0001"), None) graph.add_dependency("app_a.0001", ("app_a", "0001"), ("app_c", "0001"), skip_validation=True) graph.add_dependency("app_b.0001", ("app_b", "0001"), ("app_a", "0002"), skip_validation=True) # Try replacing before replacement node exists. msg = ( "Unable to find replacement node ('app_a', '0001_squashed_0002'). It was either" " never added to the migration graph, or has been removed." 
) with self.assertRaisesMessage(NodeNotFoundError, msg): graph.remove_replaced_nodes( replacement=("app_a", "0001_squashed_0002"), replaced=[("app_a", "0001"), ("app_a", "0002")] ) graph.add_node(("app_a", "0001_squashed_0002"), None) # Ensure `validate_consistency()` still raises an error at this stage. with self.assertRaisesMessage(NodeNotFoundError, "BAD!"): graph.validate_consistency() # Remove the dummy nodes. graph.remove_replaced_nodes( replacement=("app_a", "0001_squashed_0002"), replaced=[("app_a", "0001"), ("app_a", "0002")] ) # Ensure graph is now consistent and dependencies have been remapped graph.validate_consistency() parent_node = graph.node_map[("app_c", "0001")] replacement_node = graph.node_map[("app_a", "0001_squashed_0002")] child_node = graph.node_map[("app_b", "0001")] self.assertIn(parent_node, replacement_node.parents) self.assertIn(replacement_node, parent_node.children) self.assertIn(child_node, replacement_node.children) self.assertIn(replacement_node, child_node.parents) def test_remove_replacement_node(self): """ Tests that a replacement node is properly removed and child dependencies remapped. We assume parent dependencies are already correct. """ # Add some dummy nodes to be replaced. graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) graph.add_node(("app_a", "0002"), None) graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001")) # Try removing replacement node before replacement node exists. msg = ( "Unable to remove replacement node ('app_a', '0001_squashed_0002'). It was" " either never added to the migration graph, or has been removed already." ) with self.assertRaisesMessage(NodeNotFoundError, msg): graph.remove_replacement_node( replacement=("app_a", "0001_squashed_0002"), replaced=[("app_a", "0001"), ("app_a", "0002")] ) graph.add_node(("app_a", "0001_squashed_0002"), None) # Add a child node to test dependency remapping. graph.add_node(("app_b", "0001"), None) graph.add_dependency("app_b.0001", ("app_b", "0001"), ("app_a", "0001_squashed_0002")) # Remove the replacement node. graph.remove_replacement_node( replacement=("app_a", "0001_squashed_0002"), replaced=[("app_a", "0001"), ("app_a", "0002")] ) # Ensure graph is consistent and child dependency has been remapped graph.validate_consistency() replaced_node = graph.node_map[("app_a", "0002")] child_node = graph.node_map[("app_b", "0001")] self.assertIn(child_node, replaced_node.children) self.assertIn(replaced_node, child_node.parents) # Ensure child dependency hasn't also gotten remapped to the other replaced node. other_replaced_node = graph.node_map[("app_a", "0001")] self.assertNotIn(child_node, other_replaced_node.children) self.assertNotIn(other_replaced_node, child_node.parents) def test_infinite_loop(self): """ Tests a complex dependency graph: app_a: 0001 <- \ app_b: 0001 <- x 0002 <- / \ app_c: 0001<- <------------- x 0002 And apply squashing on app_c. 
""" graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) graph.add_node(("app_b", "0001"), None) graph.add_node(("app_b", "0002"), None) graph.add_node(("app_c", "0001_squashed_0002"), None) graph.add_dependency("app_b.0001", ("app_b", "0001"), ("app_c", "0001_squashed_0002")) graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_a", "0001")) graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001")) graph.add_dependency("app_c.0001_squashed_0002", ("app_c", "0001_squashed_0002"), ("app_b", "0002")) with self.assertRaises(CircularDependencyError): graph.forwards_plan(("app_c", "0001_squashed_0002")) def test_stringify(self): graph = MigrationGraph() self.assertEqual(force_text(graph), "Graph: 0 nodes, 0 edges") graph.add_node(("app_a", "0001"), None) graph.add_node(("app_a", "0002"), None) graph.add_node(("app_a", "0003"), None) graph.add_node(("app_b", "0001"), None) graph.add_node(("app_b", "0002"), None) graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001")) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002")) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_b", "0002")) self.assertEqual(force_text(graph), "Graph: 5 nodes, 3 edges") self.assertEqual(repr(graph), "<MigrationGraph: nodes=5, edges=3>")
41ffa367e6b72467855216d936d5adcafe06387f7f9a76e79b9e5db0ada37dd0
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.apps.registry import Apps from django.db import models from django.utils import six from django.utils.encoding import python_2_unicode_compatible class CustomModelBase(models.base.ModelBase): pass class ModelWithCustomBase(six.with_metaclass(CustomModelBase, models.Model)): pass @python_2_unicode_compatible class UnicodeModel(models.Model): title = models.CharField('ÚÑÍ¢ÓÐÉ', max_length=20, default='“Ðjáñgó”') class Meta: # Disable auto loading of this model as we load it on our own apps = Apps() verbose_name = 'úñí©óðé µóðéø' verbose_name_plural = 'úñí©óðé µóðéøß' def __str__(self): return self.title class Unserializable(object): """ An object that migration doesn't know how to serialize. """ pass class UnserializableModel(models.Model): title = models.CharField(max_length=20, default=Unserializable()) class Meta: # Disable auto loading of this model as we load it on our own apps = Apps() class UnmigratedModel(models.Model): """ A model that is in a migration-less app (which this app is if its migrations directory has not been repointed) """ pass class EmptyManager(models.Manager): use_in_migrations = True class FoodQuerySet(models.query.QuerySet): pass class BaseFoodManager(models.Manager): def __init__(self, a, b, c=1, d=2): super(BaseFoodManager, self).__init__() self.args = (a, b, c, d) class FoodManager(BaseFoodManager.from_queryset(FoodQuerySet)): use_in_migrations = True class NoMigrationFoodManager(BaseFoodManager.from_queryset(FoodQuerySet)): pass
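# Usage sketch (illustration only, not used by the fixtures above): FoodManager
# exists so the operation tests can verify that custom managers survive
# migrations. It records its constructor arguments in ``args`` (filling in the
# defaults c=1, d=2), and only FoodManager opts into migration serialization
# via ``use_in_migrations``.
def _food_manager_usage_sketch():
    assert FoodManager("a", "b").args == ("a", "b", 1, 2)
    assert FoodManager("x", "y", 3, 4).args == ("x", "y", 3, 4)
    assert FoodManager.use_in_migrations is True
    assert NoMigrationFoodManager.use_in_migrations is False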
e642b6f1da74c507a13f3bd36030e477504887f8db26ef207fc8d0a777fb8cdd
from __future__ import unicode_literals import unittest from django.db import connection, migrations, models, transaction from django.db.migrations.migration import Migration from django.db.migrations.operations import CreateModel from django.db.migrations.state import ModelState, ProjectState from django.db.models.fields import NOT_PROVIDED from django.db.transaction import atomic from django.db.utils import IntegrityError from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature from .models import FoodManager, FoodQuerySet, UnicodeModel from .test_base import MigrationTestBase try: import sqlparse except ImportError: sqlparse = None class Mixin(object): pass class OperationTestBase(MigrationTestBase): """ Common functions to help test operations. """ def apply_operations(self, app_label, project_state, operations): migration = Migration('name', app_label) migration.operations = operations with connection.schema_editor() as editor: return migration.apply(project_state, editor) def unapply_operations(self, app_label, project_state, operations): migration = Migration('name', app_label) migration.operations = operations with connection.schema_editor() as editor: return migration.unapply(project_state, editor) def make_test_state(self, app_label, operation, **kwargs): """ Makes a test state using set_up_test_model and returns the original state and the state after the migration is applied. """ project_state = self.set_up_test_model(app_label, **kwargs) new_state = project_state.clone() operation.state_forwards(app_label, new_state) return project_state, new_state def set_up_test_model( self, app_label, second_model=False, third_model=False, index=False, multicol_index=False, related_model=False, mti_model=False, proxy_model=False, manager_model=False, unique_together=False, options=False, db_table=None, index_together=False): """ Creates a test model state and database table. 
""" # Delete the tables if they already exist table_names = [ # Start with ManyToMany tables '_pony_stables', '_pony_vans', # Then standard model tables '_pony', '_stable', '_van', ] tables = [(app_label + table_name) for table_name in table_names] with connection.cursor() as cursor: table_names = connection.introspection.table_names(cursor) connection.disable_constraint_checking() sql_delete_table = connection.schema_editor().sql_delete_table with transaction.atomic(): for table in tables: if table in table_names: cursor.execute(sql_delete_table % { "table": connection.ops.quote_name(table), }) connection.enable_constraint_checking() # Make the "current" state model_options = { "swappable": "TEST_SWAP_MODEL", "index_together": [["weight", "pink"]] if index_together else [], "unique_together": [["pink", "weight"]] if unique_together else [], } if options: model_options["permissions"] = [("can_groom", "Can groom")] if db_table: model_options["db_table"] = db_table operations = [migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=3)), ("weight", models.FloatField()), ], options=model_options, )] if index: operations.append(migrations.AddIndex( "Pony", models.Index(fields=["pink"], name="pony_pink_idx") )) if multicol_index: operations.append(migrations.AddIndex( "Pony", models.Index(fields=["pink", "weight"], name="pony_test_idx") )) if second_model: operations.append(migrations.CreateModel( "Stable", [ ("id", models.AutoField(primary_key=True)), ] )) if third_model: operations.append(migrations.CreateModel( "Van", [ ("id", models.AutoField(primary_key=True)), ] )) if related_model: operations.append(migrations.CreateModel( "Rider", [ ("id", models.AutoField(primary_key=True)), ("pony", models.ForeignKey("Pony", models.CASCADE)), ("friend", models.ForeignKey("self", models.CASCADE)) ], )) if mti_model: operations.append(migrations.CreateModel( "ShetlandPony", fields=[ ('pony_ptr', models.OneToOneField( 'Pony', models.CASCADE, auto_created=True, parent_link=True, primary_key=True, to_field='id', serialize=False, )), ("cuteness", models.IntegerField(default=1)), ], bases=['%s.Pony' % app_label], )) if proxy_model: operations.append(migrations.CreateModel( "ProxyPony", fields=[], options={"proxy": True}, bases=['%s.Pony' % app_label], )) if manager_model: operations.append(migrations.CreateModel( "Food", fields=[ ("id", models.AutoField(primary_key=True)), ], managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] )) return self.apply_operations(app_label, ProjectState(), operations) class OperationTests(OperationTestBase): """ Tests running the operations and making sure they do what they say they do. Each test looks at their state changing, and then their database operation - both forwards and backwards. """ def test_create_model(self): """ Tests the CreateModel operation. Most other tests use this operation as part of setup, so check failures here first. 
""" operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) self.assertEqual(operation.describe(), "Create model Pony") # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crmo", editor, project_state, new_state) self.assertTableExists("test_crmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2].keys()), ["fields", "name"]) # And default manager not in set operation = migrations.CreateModel("Foo", fields=[], managers=[("objects", models.Manager())]) definition = operation.deconstruct() self.assertNotIn('managers', definition[2]) def test_create_model_with_duplicate_field_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value pink in CreateModel fields argument.'): migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.TextField()), ("pink", models.IntegerField(default=1)), ], ) def test_create_model_with_duplicate_base(self): message = 'Found duplicate value test_crmo.pony in CreateModel bases argument.' with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.Pony",), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.pony",), ) message = 'Found duplicate value migrations.unicodemodel in CreateModel bases argument.' with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, UnicodeModel,), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.unicodemodel',), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.UnicodeModel',), ) message = "Found duplicate value <class 'django.db.models.base.Model'> in CreateModel bases argument." with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(models.Model, models.Model,), ) message = "Found duplicate value <class 'migrations.test_operations.Mixin'> in CreateModel bases argument." 
with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(Mixin, Mixin,), ) def test_create_model_with_duplicate_manager_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value objects in CreateModel managers argument.'): migrations.CreateModel( "Pony", fields=[], managers=[ ("objects", models.Manager()), ("objects", models.Manager()), ], ) def test_create_model_with_unique_after(self): """ Tests the CreateModel operation directly followed by an AlterUniqueTogether (bug #22844 - sqlite remake issues) """ operation1 = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) operation2 = migrations.CreateModel( "Rider", [ ("id", models.AutoField(primary_key=True)), ("number", models.IntegerField(default=1)), ("pony", models.ForeignKey("test_crmoua.Pony", models.CASCADE)), ], ) operation3 = migrations.AlterUniqueTogether( "Rider", [ ("number", "pony"), ], ) # Test the database alteration project_state = ProjectState() self.assertTableNotExists("test_crmoua_pony") self.assertTableNotExists("test_crmoua_rider") with connection.schema_editor() as editor: new_state = project_state.clone() operation1.state_forwards("test_crmoua", new_state) operation1.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation2.state_forwards("test_crmoua", new_state) operation2.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation3.state_forwards("test_crmoua", new_state) operation3.database_forwards("test_crmoua", editor, project_state, new_state) self.assertTableExists("test_crmoua_pony") self.assertTableExists("test_crmoua_rider") def test_create_model_m2m(self): """ Test the creation of a model with a ManyToMany field and the auto-created "through" model. """ project_state = self.set_up_test_model("test_crmomm") operation = migrations.CreateModel( "Stable", [ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("Pony", related_name="stables")) ] ) # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_crmomm", new_state) # Test the database alteration self.assertTableNotExists("test_crmomm_stable_ponies") with connection.schema_editor() as editor: operation.database_forwards("test_crmomm", editor, project_state, new_state) self.assertTableExists("test_crmomm_stable") self.assertTableExists("test_crmomm_stable_ponies") self.assertColumnNotExists("test_crmomm_stable", "ponies") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_crmomm", "Pony") Stable = new_state.apps.get_model("test_crmomm", "Stable") stable = Stable.objects.create() p1 = Pony.objects.create(pink=False, weight=4.55) p2 = Pony.objects.create(pink=True, weight=5.43) stable.ponies.add(p1, p2) self.assertEqual(stable.ponies.count(), 2) stable.ponies.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmomm", editor, new_state, project_state) self.assertTableNotExists("test_crmomm_stable") self.assertTableNotExists("test_crmomm_stable_ponies") def test_create_model_inheritance(self): """ Tests the CreateModel operation on a multi-table inheritance setup. 
""" project_state = self.set_up_test_model("test_crmoih") # Test the state alteration operation = migrations.CreateModel( "ShetlandPony", [ ('pony_ptr', models.OneToOneField( 'test_crmoih.Pony', models.CASCADE, auto_created=True, primary_key=True, to_field='id', serialize=False, )), ("cuteness", models.IntegerField(default=1)), ], ) new_state = project_state.clone() operation.state_forwards("test_crmoih", new_state) self.assertIn(("test_crmoih", "shetlandpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crmoih_shetlandpony") with connection.schema_editor() as editor: operation.database_forwards("test_crmoih", editor, project_state, new_state) self.assertTableExists("test_crmoih_shetlandpony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmoih", editor, new_state, project_state) self.assertTableNotExists("test_crmoih_shetlandpony") def test_create_proxy_model(self): """ Tests that CreateModel ignores proxy models. """ project_state = self.set_up_test_model("test_crprmo") # Test the state alteration operation = migrations.CreateModel( "ProxyPony", [], options={"proxy": True}, bases=("test_crprmo.Pony", ), ) self.assertEqual(operation.describe(), "Create proxy model ProxyPony") new_state = project_state.clone() operation.state_forwards("test_crprmo", new_state) self.assertIn(("test_crprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crprmo", editor, project_state, new_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crprmo", editor, new_state, project_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2].keys()), ["bases", "fields", "name", "options"]) def test_create_unmanaged_model(self): """ Tests that CreateModel ignores unmanaged models. """ project_state = self.set_up_test_model("test_crummo") # Test the state alteration operation = migrations.CreateModel( "UnmanagedPony", [], options={"proxy": True}, bases=("test_crummo.Pony", ), ) self.assertEqual(operation.describe(), "Create proxy model UnmanagedPony") new_state = project_state.clone() operation.state_forwards("test_crummo", new_state) self.assertIn(("test_crummo", "unmanagedpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crummo", editor, project_state, new_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crummo", editor, new_state, project_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") def test_create_model_managers(self): """ Tests that the managers on a model are set. 
""" project_state = self.set_up_test_model("test_cmoma") # Test the state alteration operation = migrations.CreateModel( "Food", fields=[ ("id", models.AutoField(primary_key=True)), ], managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Create model Food") new_state = project_state.clone() operation.state_forwards("test_cmoma", new_state) self.assertIn(("test_cmoma", "food"), new_state.models) managers = new_state.models["test_cmoma", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) def test_delete_model(self): """ Tests the DeleteModel operation. """ project_state = self.set_up_test_model("test_dlmo") # Test the state alteration operation = migrations.DeleteModel("Pony") self.assertEqual(operation.describe(), "Delete model Pony") new_state = project_state.clone() operation.state_forwards("test_dlmo", new_state) self.assertNotIn(("test_dlmo", "pony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dlmo", editor, project_state, new_state) self.assertTableNotExists("test_dlmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlmo", editor, new_state, project_state) self.assertTableExists("test_dlmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "DeleteModel") self.assertEqual(definition[1], []) self.assertEqual(list(definition[2]), ["name"]) def test_delete_proxy_model(self): """ Tests the DeleteModel operation ignores proxy models. """ project_state = self.set_up_test_model("test_dlprmo", proxy_model=True) # Test the state alteration operation = migrations.DeleteModel("ProxyPony") new_state = project_state.clone() operation.state_forwards("test_dlprmo", new_state) self.assertIn(("test_dlprmo", "proxypony"), project_state.models) self.assertNotIn(("test_dlprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") with connection.schema_editor() as editor: operation.database_forwards("test_dlprmo", editor, project_state, new_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlprmo", editor, new_state, project_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") def test_rename_model(self): """ Tests the RenameModel operation. 
""" project_state = self.set_up_test_model("test_rnmo", related_model=True) # Test the state alteration operation = migrations.RenameModel("Pony", "Horse") self.assertEqual(operation.describe(), "Rename model Pony to Horse") # Test initial state and database self.assertIn(("test_rnmo", "pony"), project_state.models) self.assertNotIn(("test_rnmo", "horse"), project_state.models) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate forwards new_state = project_state.clone() new_state = self.apply_operations("test_rnmo", new_state, [operation]) # Test new state and database self.assertNotIn(("test_rnmo", "pony"), new_state.models) self.assertIn(("test_rnmo", "horse"), new_state.models) # RenameModel also repoints all incoming FKs and M2Ms self.assertEqual("test_rnmo.Horse", new_state.models["test_rnmo", "rider"].fields[1][1].remote_field.model) self.assertTableNotExists("test_rnmo_pony") self.assertTableExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate backwards original_state = self.unapply_operations("test_rnmo", project_state, [operation]) # Test original state and database self.assertIn(("test_rnmo", "pony"), original_state.models) self.assertNotIn(("test_rnmo", "horse"), original_state.models) self.assertEqual("Pony", original_state.models["test_rnmo", "rider"].fields[1][1].remote_field.model) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameModel") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'old_name': "Pony", 'new_name': "Horse"}) def test_rename_model_state_forwards(self): """ RenameModel operations shouldn't trigger the caching of rendered apps on state without prior apps. """ state = ProjectState() state.add_model(ModelState('migrations', 'Foo', [])) operation = migrations.RenameModel('Foo', 'Bar') operation.state_forwards('migrations', state) self.assertNotIn('apps', state.__dict__) self.assertNotIn(('migrations', 'foo'), state.models) self.assertIn(('migrations', 'bar'), state.models) # Now with apps cached. apps = state.apps operation = migrations.RenameModel('Bar', 'Foo') operation.state_forwards('migrations', state) self.assertIs(state.apps, apps) self.assertNotIn(('migrations', 'bar'), state.models) self.assertIn(('migrations', 'foo'), state.models) def test_rename_model_with_self_referential_fk(self): """ Tests the RenameModel operation on model with self referential FK. 
""" project_state = self.set_up_test_model("test_rmwsrf", related_model=True) # Test the state alteration operation = migrations.RenameModel("Rider", "HorseRider") self.assertEqual(operation.describe(), "Rename model Rider to HorseRider") new_state = project_state.clone() operation.state_forwards("test_rmwsrf", new_state) self.assertNotIn(("test_rmwsrf", "rider"), new_state.models) self.assertIn(("test_rmwsrf", "horserider"), new_state.models) # Remember, RenameModel also repoints all incoming FKs and M2Ms self.assertEqual( "test_rmwsrf.HorseRider", new_state.models["test_rmwsrf", "horserider"].fields[2][1].remote_field.model ) # Test the database alteration self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) with connection.schema_editor() as editor: operation.database_forwards("test_rmwsrf", editor, project_state, new_state) self.assertTableNotExists("test_rmwsrf_rider") self.assertTableExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_horserider", "id")) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rmwsrf", editor, new_state, project_state) self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) def test_rename_model_with_superclass_fk(self): """ Tests the RenameModel operation on a model which has a superclass that has a foreign key. 
""" project_state = self.set_up_test_model("test_rmwsc", related_model=True, mti_model=True) # Test the state alteration operation = migrations.RenameModel("ShetlandPony", "LittleHorse") self.assertEqual(operation.describe(), "Rename model ShetlandPony to LittleHorse") new_state = project_state.clone() operation.state_forwards("test_rmwsc", new_state) self.assertNotIn(("test_rmwsc", "shetlandpony"), new_state.models) self.assertIn(("test_rmwsc", "littlehorse"), new_state.models) # RenameModel shouldn't repoint the superclass's relations, only local ones self.assertEqual( project_state.models["test_rmwsc", "rider"].fields[1][1].remote_field.model, new_state.models["test_rmwsc", "rider"].fields[1][1].remote_field.model ) # Before running the migration we have a table for Shetland Pony, not Little Horse self.assertTableExists("test_rmwsc_shetlandpony") self.assertTableNotExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # and the foreign key on rider points to pony, not shetland pony self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_shetlandpony", "id")) with connection.schema_editor() as editor: operation.database_forwards("test_rmwsc", editor, project_state, new_state) # Now we have a little horse table, not shetland pony self.assertTableNotExists("test_rmwsc_shetlandpony") self.assertTableExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # but the Foreign keys still point at pony, not little horse self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_littlehorse", "id")) def test_rename_model_with_self_referential_m2m(self): app_label = "test_rename_model_with_self_referential_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("ReflexivePony", fields=[ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("self")), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("ReflexivePony", "ReflexivePony2"), ]) Pony = project_state.apps.get_model(app_label, "ReflexivePony2") pony = Pony.objects.create() pony.ponies.add(pony) def test_rename_model_with_m2m(self): app_label = "test_rename_model_with_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ("riders", models.ManyToManyField("Rider")), ]), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("Pony", "Pony2"), ]) Pony = project_state.apps.get_model(app_label, "Pony2") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) self.assertEqual(Pony.objects.count(), 2) self.assertEqual(Rider.objects.count(), 2) self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2) def test_rename_m2m_target_model(self): app_label = "test_rename_m2m_target_model" project_state = self.apply_operations(app_label, ProjectState(), 
operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ("riders", models.ManyToManyField("Rider")), ]), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("Rider", "Rider2"), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider2") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) self.assertEqual(Pony.objects.count(), 2) self.assertEqual(Rider.objects.count(), 2) self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2) def test_rename_m2m_through_model(self): app_label = "test_rename_through" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("PonyRider", fields=[ ("id", models.AutoField(primary_key=True)), ("rider", models.ForeignKey("test_rename_through.Rider", models.CASCADE)), ("pony", models.ForeignKey("test_rename_through.Pony", models.CASCADE)), ]), migrations.AddField( "Pony", "riders", models.ManyToManyField("test_rename_through.Rider", through="test_rename_through.PonyRider"), ), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") PonyRider = project_state.apps.get_model(app_label, "PonyRider") pony = Pony.objects.create() rider = Rider.objects.create() PonyRider.objects.create(pony=pony, rider=rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("PonyRider", "PonyRider2"), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") PonyRider = project_state.apps.get_model(app_label, "PonyRider2") pony = Pony.objects.first() rider = Rider.objects.create() PonyRider.objects.create(pony=pony, rider=rider) self.assertEqual(Pony.objects.count(), 1) self.assertEqual(Rider.objects.count(), 2) self.assertEqual(PonyRider.objects.count(), 2) self.assertEqual(pony.riders.count(), 2) def test_add_field(self): """ Tests the AddField operation. 
""" # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=5), ) self.assertEqual(operation.describe(), "Add field height to Pony") project_state, new_state = self.make_test_state("test_adfl", operation) self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4) field = [ f for n, f in new_state.models["test_adfl", "pony"].fields if n == "height" ][0] self.assertEqual(field.default, 5) # Test the database alteration self.assertColumnNotExists("test_adfl_pony", "height") with connection.schema_editor() as editor: operation.database_forwards("test_adfl", editor, project_state, new_state) self.assertColumnExists("test_adfl_pony", "height") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adfl", editor, new_state, project_state) self.assertColumnNotExists("test_adfl_pony", "height") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AddField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"]) def test_add_charfield(self): """ Tests the AddField operation on TextField. """ project_state = self.set_up_test_model("test_adchfl") Pony = project_state.apps.get_model("test_adchfl", "Pony") pony = Pony.objects.create(weight=42) new_state = self.apply_operations("test_adchfl", project_state, [ migrations.AddField( "Pony", "text", models.CharField(max_length=10, default="some text"), ), migrations.AddField( "Pony", "empty", models.CharField(max_length=10, default=""), ), # If not properly quoted digits would be interpreted as an int. migrations.AddField( "Pony", "digits", models.CharField(max_length=10, default="42"), ), # Manual quoting is fragile and could trip on quotes. Refs #xyz. migrations.AddField( "Pony", "quotes", models.CharField(max_length=10, default='"\'"'), ), ]) Pony = new_state.apps.get_model("test_adchfl", "Pony") pony = Pony.objects.get(pk=pony.pk) self.assertEqual(pony.text, "some text") self.assertEqual(pony.empty, "") self.assertEqual(pony.digits, "42") self.assertEqual(pony.quotes, '"\'"') def test_add_textfield(self): """ Tests the AddField operation on TextField. """ project_state = self.set_up_test_model("test_adtxtfl") Pony = project_state.apps.get_model("test_adtxtfl", "Pony") pony = Pony.objects.create(weight=42) new_state = self.apply_operations("test_adtxtfl", project_state, [ migrations.AddField( "Pony", "text", models.TextField(default="some text"), ), migrations.AddField( "Pony", "empty", models.TextField(default=""), ), # If not properly quoted digits would be interpreted as an int. migrations.AddField( "Pony", "digits", models.TextField(default="42"), ), # Manual quoting is fragile and could trip on quotes. Refs #xyz. migrations.AddField( "Pony", "quotes", models.TextField(default='"\'"'), ), ]) Pony = new_state.apps.get_model("test_adtxtfl", "Pony") pony = Pony.objects.get(pk=pony.pk) self.assertEqual(pony.text, "some text") self.assertEqual(pony.empty, "") self.assertEqual(pony.digits, "42") self.assertEqual(pony.quotes, '"\'"') def test_add_binaryfield(self): """ Tests the AddField operation on TextField/BinaryField. 
""" project_state = self.set_up_test_model("test_adbinfl") Pony = project_state.apps.get_model("test_adbinfl", "Pony") pony = Pony.objects.create(weight=42) new_state = self.apply_operations("test_adbinfl", project_state, [ migrations.AddField( "Pony", "blob", models.BinaryField(default=b"some text"), ), migrations.AddField( "Pony", "empty", models.BinaryField(default=b""), ), # If not properly quoted digits would be interpreted as an int. migrations.AddField( "Pony", "digits", models.BinaryField(default=b"42"), ), # Manual quoting is fragile and could trip on quotes. Refs #xyz. migrations.AddField( "Pony", "quotes", models.BinaryField(default=b'"\'"'), ), ]) Pony = new_state.apps.get_model("test_adbinfl", "Pony") pony = Pony.objects.get(pk=pony.pk) # SQLite returns buffer/memoryview, cast to bytes for checking. self.assertEqual(bytes(pony.blob), b"some text") self.assertEqual(bytes(pony.empty), b"") self.assertEqual(bytes(pony.digits), b"42") self.assertEqual(bytes(pony.quotes), b'"\'"') def test_column_name_quoting(self): """ Column names that are SQL keywords shouldn't cause problems when used in migrations (#22168). """ project_state = self.set_up_test_model("test_regr22168") operation = migrations.AddField( "Pony", "order", models.IntegerField(default=0), ) new_state = project_state.clone() operation.state_forwards("test_regr22168", new_state) with connection.schema_editor() as editor: operation.database_forwards("test_regr22168", editor, project_state, new_state) self.assertColumnExists("test_regr22168_pony", "order") def test_add_field_preserve_default(self): """ Tests the AddField operation's state alteration when preserve_default = False. """ project_state = self.set_up_test_model("test_adflpd") # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=4), preserve_default=False, ) new_state = project_state.clone() operation.state_forwards("test_adflpd", new_state) self.assertEqual(len(new_state.models["test_adflpd", "pony"].fields), 4) field = [ f for n, f in new_state.models["test_adflpd", "pony"].fields if n == "height" ][0] self.assertEqual(field.default, NOT_PROVIDED) # Test the database alteration project_state.apps.get_model("test_adflpd", "pony").objects.create( weight=4, ) self.assertColumnNotExists("test_adflpd_pony", "height") with connection.schema_editor() as editor: operation.database_forwards("test_adflpd", editor, project_state, new_state) self.assertColumnExists("test_adflpd_pony", "height") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AddField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name", "preserve_default"]) def test_add_field_m2m(self): """ Tests the AddField operation with a ManyToManyField. 
""" project_state = self.set_up_test_model("test_adflmm", second_model=True) # Test the state alteration operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) new_state = project_state.clone() operation.state_forwards("test_adflmm", new_state) self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4) # Test the database alteration self.assertTableNotExists("test_adflmm_pony_stables") with connection.schema_editor() as editor: operation.database_forwards("test_adflmm", editor, project_state, new_state) self.assertTableExists("test_adflmm_pony_stables") self.assertColumnNotExists("test_adflmm_pony", "stables") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_adflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.stables.create() self.assertEqual(p.stables.count(), 1) p.stables.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adflmm", editor, new_state, project_state) self.assertTableNotExists("test_adflmm_pony_stables") def test_alter_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertFalse(Pony._meta.get_field('stables').blank) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField( "Pony", "stables", models.ManyToManyField(to="Stable", related_name="ponies", blank=True) ) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertTrue(Pony._meta.get_field('stables').blank) def test_repoint_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True, third_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "places", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField("Pony", "places", models.ManyToManyField(to="Van", related_name="ponies")) ]) # Ensure the new field actually works Pony = project_state.apps.get_model("test_alflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.places.create() self.assertEqual(p.places.count(), 1) p.places.all().delete() def test_remove_field_m2m(self): project_state = self.set_up_test_model("test_rmflmm", second_model=True) project_state = self.apply_operations("test_rmflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) self.assertTableExists("test_rmflmm_pony_stables") with_field_state = project_state.clone() operations = [migrations.RemoveField("Pony", "stables")] project_state = self.apply_operations("test_rmflmm", project_state, operations=operations) self.assertTableNotExists("test_rmflmm_pony_stables") # And test reversal self.unapply_operations("test_rmflmm", with_field_state, operations=operations) self.assertTableExists("test_rmflmm_pony_stables") def test_remove_field_m2m_with_through(self): project_state = self.set_up_test_model("test_rmflmmwt", second_model=True) self.assertTableNotExists("test_rmflmmwt_ponystables") project_state = 
self.apply_operations("test_rmflmmwt", project_state, operations=[ migrations.CreateModel("PonyStables", fields=[ ("pony", models.ForeignKey('test_rmflmmwt.Pony', models.CASCADE)), ("stable", models.ForeignKey('test_rmflmmwt.Stable', models.CASCADE)), ]), migrations.AddField( "Pony", "stables", models.ManyToManyField("Stable", related_name="ponies", through='test_rmflmmwt.PonyStables') ) ]) self.assertTableExists("test_rmflmmwt_ponystables") operations = [migrations.RemoveField("Pony", "stables")] self.apply_operations("test_rmflmmwt", project_state, operations=operations) def test_remove_field(self): """ Tests the RemoveField operation. """ project_state = self.set_up_test_model("test_rmfl") # Test the state alteration operation = migrations.RemoveField("Pony", "pink") self.assertEqual(operation.describe(), "Remove field pink from Pony") new_state = project_state.clone() operation.state_forwards("test_rmfl", new_state) self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2) # Test the database alteration self.assertColumnExists("test_rmfl_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_rmfl", editor, project_state, new_state) self.assertColumnNotExists("test_rmfl_pony", "pink") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rmfl", editor, new_state, project_state) self.assertColumnExists("test_rmfl_pony", "pink") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RemoveField") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': 'pink'}) def test_remove_fk(self): """ Tests the RemoveField operation on a foreign key. """ project_state = self.set_up_test_model("test_rfk", related_model=True) self.assertColumnExists("test_rfk_rider", "pony_id") operation = migrations.RemoveField("Rider", "pony") new_state = project_state.clone() operation.state_forwards("test_rfk", new_state) with connection.schema_editor() as editor: operation.database_forwards("test_rfk", editor, project_state, new_state) self.assertColumnNotExists("test_rfk_rider", "pony_id") with connection.schema_editor() as editor: operation.database_backwards("test_rfk", editor, new_state, project_state) self.assertColumnExists("test_rfk_rider", "pony_id") def test_alter_model_table(self): """ Tests the AlterModelTable operation. 
""" project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony_2") self.assertEqual(operation.describe(), "Rename table for Pony to test_almota_pony_2") new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony_2") # Test the database alteration self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableNotExists("test_almota_pony") self.assertTableExists("test_almota_pony_2") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelTable") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'table': "test_almota_pony_2"}) def test_alter_model_table_none(self): """ Tests the AlterModelTable operation if the table name is set to None. """ operation = migrations.AlterModelTable("Pony", None) self.assertEqual(operation.describe(), "Rename table for Pony to (default)") def test_alter_model_table_noop(self): """ Tests the AlterModelTable operation if the table name is not changed. """ project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony") new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony") # Test the database alteration self.assertTableExists("test_almota_pony") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableExists("test_almota_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") def test_alter_model_table_m2m(self): """ AlterModelTable should rename auto-generated M2M tables. """ app_label = "test_talflmltlm2m" pony_db_table = 'pony_foo' project_state = self.set_up_test_model(app_label, second_model=True, db_table=pony_db_table) # Add the M2M field first_state = project_state.clone() operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable")) operation.state_forwards(app_label, first_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, first_state) original_m2m_table = "%s_%s" % (pony_db_table, "stables") new_m2m_table = "%s_%s" % (app_label, "pony_stables") self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) # Rename the Pony db_table which should also rename the m2m table. 
second_state = first_state.clone() operation = migrations.AlterModelTable(name='pony', table=None) operation.state_forwards(app_label, second_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, first_state, second_state) self.assertTableExists(new_m2m_table) self.assertTableNotExists(original_m2m_table) # And test reversal with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, second_state, first_state) self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) def test_alter_field(self): """ Tests the AlterField operation. """ project_state = self.set_up_test_model("test_alfl") # Test the state alteration operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) self.assertEqual(operation.describe(), "Alter field pink on Pony") new_state = project_state.clone() operation.state_forwards("test_alfl", new_state) self.assertIs(project_state.models["test_alfl", "pony"].get_field_by_name("pink").null, False) self.assertIs(new_state.models["test_alfl", "pony"].get_field_by_name("pink").null, True) # Test the database alteration self.assertColumnNotNull("test_alfl_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_alfl", editor, project_state, new_state) self.assertColumnNull("test_alfl_pony", "pink") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alfl", editor, new_state, project_state) self.assertColumnNotNull("test_alfl_pony", "pink") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"]) def test_alter_field_pk(self): """ Tests the AlterField operation on primary keys (for things like PostgreSQL's SERIAL weirdness) """ project_state = self.set_up_test_model("test_alflpk") # Test the state alteration operation = migrations.AlterField("Pony", "id", models.IntegerField(primary_key=True)) new_state = project_state.clone() operation.state_forwards("test_alflpk", new_state) self.assertIsInstance(project_state.models["test_alflpk", "pony"].get_field_by_name("id"), models.AutoField) self.assertIsInstance(new_state.models["test_alflpk", "pony"].get_field_by_name("id"), models.IntegerField) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alflpk", editor, project_state, new_state) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflpk", editor, new_state, project_state) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_pk_fk(self): """ Tests the AlterField operation on primary keys changes any FKs pointing to it. 
""" project_state = self.set_up_test_model("test_alflpkfk", related_model=True) # Test the state alteration operation = migrations.AlterField("Pony", "id", models.FloatField(primary_key=True)) new_state = project_state.clone() operation.state_forwards("test_alflpkfk", new_state) self.assertIsInstance(project_state.models["test_alflpkfk", "pony"].get_field_by_name("id"), models.AutoField) self.assertIsInstance(new_state.models["test_alflpkfk", "pony"].get_field_by_name("id"), models.FloatField) def assertIdTypeEqualsFkType(): with connection.cursor() as cursor: id_type, id_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_pony") if c.name == "id" ][0] fk_type, fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_rider") if c.name == "pony_id" ][0] self.assertEqual(id_type, fk_type) self.assertEqual(id_null, fk_null) assertIdTypeEqualsFkType() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alflpkfk", editor, project_state, new_state) assertIdTypeEqualsFkType() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflpkfk", editor, new_state, project_state) assertIdTypeEqualsFkType() def test_rename_field(self): """ Tests the RenameField operation. """ project_state = self.set_up_test_model("test_rnfl", unique_together=True, index_together=True) # Test the state alteration operation = migrations.RenameField("Pony", "pink", "blue") self.assertEqual(operation.describe(), "Rename field pink on Pony to blue") new_state = project_state.clone() operation.state_forwards("test_rnfl", new_state) self.assertIn("blue", [n for n, f in new_state.models["test_rnfl", "pony"].fields]) self.assertNotIn("pink", [n for n, f in new_state.models["test_rnfl", "pony"].fields]) # Make sure the unique_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) # Make sure the index_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['index_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['index_together'][0]) # Test the database alteration self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") with connection.schema_editor() as editor: operation.database_forwards("test_rnfl", editor, project_state, new_state) self.assertColumnExists("test_rnfl_pony", "blue") self.assertColumnNotExists("test_rnfl_pony", "pink") # Ensure the unique constraint has been ported over with connection.cursor() as cursor: cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") with self.assertRaises(IntegrityError): with atomic(): cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_rnfl_pony") # Ensure the index constraint has been ported over self.assertIndexExists("test_rnfl_pony", ["weight", "blue"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rnfl", editor, new_state, project_state) self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") # Ensure the index constraint has been reset self.assertIndexExists("test_rnfl_pony", ["weight", "pink"]) # And 
deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameField") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'old_name': "pink", 'new_name': "blue"}) def test_alter_unique_together(self): """ Tests the AlterUniqueTogether operation. """ project_state = self.set_up_test_model("test_alunto") # Test the state alteration operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")]) self.assertEqual(operation.describe(), "Alter unique_together for Pony (1 constraint(s))") new_state = project_state.clone() operation.state_forwards("test_alunto", new_state) self.assertEqual(len(project_state.models["test_alunto", "pony"].options.get("unique_together", set())), 0) self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) # Make sure we can insert duplicate rows with connection.cursor() as cursor: cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alunto", editor, project_state, new_state) cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") with self.assertRaises(IntegrityError): with atomic(): cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alunto", editor, new_state, project_state) cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # Test flat unique_together operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight")) operation.state_forwards("test_alunto", new_state) self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterUniqueTogether") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'unique_together': {("pink", "weight")}}) def test_alter_unique_together_remove(self): operation = migrations.AlterUniqueTogether("Pony", None) self.assertEqual(operation.describe(), "Alter unique_together for Pony (0 constraint(s))") def test_add_index(self): """ Test the AddIndex operation. """ project_state = self.set_up_test_model("test_adin") msg = ( "Indexes passed to AddIndex operations require a name argument. " "<Index: fields='pink'> doesn't have one." 
) with self.assertRaisesMessage(ValueError, msg): migrations.AddIndex("Pony", models.Index(fields=["pink"])) index = models.Index(fields=["pink"], name="test_adin_pony_pink_idx") operation = migrations.AddIndex("Pony", index) self.assertEqual(operation.describe(), "Create index test_adin_pony_pink_idx on field(s) pink of model Pony") new_state = project_state.clone() operation.state_forwards("test_adin", new_state) # Test the database alteration self.assertEqual(len(new_state.models["test_adin", "pony"].options['indexes']), 1) self.assertIndexNotExists("test_adin_pony", ["pink"]) with connection.schema_editor() as editor: operation.database_forwards("test_adin", editor, project_state, new_state) self.assertIndexExists("test_adin_pony", ["pink"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adin", editor, new_state, project_state) self.assertIndexNotExists("test_adin_pony", ["pink"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AddIndex") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'index': index}) def test_remove_index(self): """ Test the RemoveIndex operation. """ project_state = self.set_up_test_model("test_rmin", multicol_index=True) self.assertTableExists("test_rmin_pony") self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) operation = migrations.RemoveIndex("Pony", "pony_test_idx") self.assertEqual(operation.describe(), "Remove index pony_test_idx from Pony") new_state = project_state.clone() operation.state_forwards("test_rmin", new_state) # Test the state alteration self.assertEqual(len(new_state.models["test_rmin", "pony"].options['indexes']), 0) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_rmin", editor, project_state, new_state) self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rmin", editor, new_state, project_state) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RemoveIndex") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': "pony_test_idx"}) # Also test a field dropped with index - sqlite remake issue operations = [ migrations.RemoveIndex("Pony", "pony_test_idx"), migrations.RemoveField("Pony", "pink"), ] self.assertColumnExists("test_rmin_pony", "pink") self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # Test database alteration new_state = project_state.clone() self.apply_operations('test_rmin', new_state, operations=operations) self.assertColumnNotExists("test_rmin_pony", "pink") self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"]) # And test reversal self.unapply_operations("test_rmin", project_state, operations=operations) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) def test_alter_field_with_index(self): """ Test AlterField operation with an index to ensure indexes created via Meta.indexes don't get dropped with sqlite3 remake. 
""" project_state = self.set_up_test_model("test_alflin", index=True) operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) new_state = project_state.clone() operation.state_forwards("test_alflin", new_state) # Test the database alteration self.assertColumnNotNull("test_alflin_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_alflin", editor, project_state, new_state) # Ensure that index hasn't been dropped self.assertIndexExists("test_alflin_pony", ["pink"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflin", editor, new_state, project_state) # Ensure the index is still there self.assertIndexExists("test_alflin_pony", ["pink"]) def test_alter_index_together(self): """ Tests the AlterIndexTogether operation. """ project_state = self.set_up_test_model("test_alinto") # Test the state alteration operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")]) self.assertEqual(operation.describe(), "Alter index_together for Pony (1 constraint(s))") new_state = project_state.clone() operation.state_forwards("test_alinto", new_state) self.assertEqual(len(project_state.models["test_alinto", "pony"].options.get("index_together", set())), 0) self.assertEqual(len(new_state.models["test_alinto", "pony"].options.get("index_together", set())), 1) # Make sure there's no matching index self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alinto", editor, project_state, new_state) self.assertIndexExists("test_alinto_pony", ["pink", "weight"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alinto", editor, new_state, project_state) self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterIndexTogether") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'index_together': {("pink", "weight")}}) def test_alter_index_together_remove(self): operation = migrations.AlterIndexTogether("Pony", None) self.assertEqual(operation.describe(), "Alter index_together for Pony (0 constraint(s))") def test_alter_model_options(self): """ Tests the AlterModelOptions operation. 
""" project_state = self.set_up_test_model("test_almoop") # Test the state alteration (no DB alteration to test) operation = migrations.AlterModelOptions("Pony", {"permissions": [("can_groom", "Can groom")]}) self.assertEqual(operation.describe(), "Change Meta options on Pony") new_state = project_state.clone() operation.state_forwards("test_almoop", new_state) self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 0) self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 1) self.assertEqual(new_state.models["test_almoop", "pony"].options["permissions"][0][0], "can_groom") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelOptions") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'options': {"permissions": [("can_groom", "Can groom")]}}) def test_alter_model_options_emptying(self): """ Tests that the AlterModelOptions operation removes keys from the dict (#23121) """ project_state = self.set_up_test_model("test_almoop", options=True) # Test the state alteration (no DB alteration to test) operation = migrations.AlterModelOptions("Pony", {}) self.assertEqual(operation.describe(), "Change Meta options on Pony") new_state = project_state.clone() operation.state_forwards("test_almoop", new_state) self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 1) self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 0) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelOptions") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'options': {}}) def test_alter_order_with_respect_to(self): """ Tests the AlterOrderWithRespectTo operation. 
""" project_state = self.set_up_test_model("test_alorwrtto", related_model=True) # Test the state alteration operation = migrations.AlterOrderWithRespectTo("Rider", "pony") self.assertEqual(operation.describe(), "Set order_with_respect_to on Rider to pony") new_state = project_state.clone() operation.state_forwards("test_alorwrtto", new_state) self.assertIsNone( project_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None) ) self.assertEqual( new_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None), "pony" ) # Make sure there's no matching index self.assertColumnNotExists("test_alorwrtto_rider", "_order") # Create some rows before alteration rendered_state = project_state.apps pony = rendered_state.get_model("test_alorwrtto", "Pony").objects.create(weight=50) rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=1) rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=2) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alorwrtto", editor, project_state, new_state) self.assertColumnExists("test_alorwrtto_rider", "_order") # Check for correct value in rows updated_riders = new_state.apps.get_model("test_alorwrtto", "Rider").objects.all() self.assertEqual(updated_riders[0]._order, 0) self.assertEqual(updated_riders[1]._order, 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alorwrtto", editor, new_state, project_state) self.assertColumnNotExists("test_alorwrtto_rider", "_order") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterOrderWithRespectTo") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Rider", 'order_with_respect_to': "pony"}) def test_alter_model_managers(self): """ Tests that the managers on a model are set. """ project_state = self.set_up_test_model("test_almoma") # Test the state alteration operation = migrations.AlterModelManagers( "Pony", managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Change managers on Pony") managers = project_state.models["test_almoma", "pony"].managers self.assertEqual(managers, []) new_state = project_state.clone() operation.state_forwards("test_almoma", new_state) self.assertIn(("test_almoma", "pony"), new_state.models) managers = new_state.models["test_almoma", "pony"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) rendered_state = new_state.apps model = rendered_state.get_model('test_almoma', 'pony') self.assertIsInstance(model.food_qs, models.Manager) self.assertIsInstance(model.food_mgr, FoodManager) self.assertIsInstance(model.food_mgr_kwargs, FoodManager) def test_alter_model_managers_emptying(self): """ Tests that the managers on a model are set. 
""" project_state = self.set_up_test_model("test_almomae", manager_model=True) # Test the state alteration operation = migrations.AlterModelManagers("Food", managers=[]) self.assertEqual(operation.describe(), "Change managers on Food") self.assertIn(("test_almomae", "food"), project_state.models) managers = project_state.models["test_almomae", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) new_state = project_state.clone() operation.state_forwards("test_almomae", new_state) managers = new_state.models["test_almomae", "food"].managers self.assertEqual(managers, []) def test_alter_fk(self): """ Tests that creating and then altering an FK works correctly and deals with the pending SQL (#23091) """ project_state = self.set_up_test_model("test_alfk") # Test adding and then altering the FK in one go create_operation = migrations.CreateModel( name="Rider", fields=[ ("id", models.AutoField(primary_key=True)), ("pony", models.ForeignKey("Pony", models.CASCADE)), ], ) create_state = project_state.clone() create_operation.state_forwards("test_alfk", create_state) alter_operation = migrations.AlterField( model_name='Rider', name='pony', field=models.ForeignKey("Pony", models.CASCADE, editable=False), ) alter_state = create_state.clone() alter_operation.state_forwards("test_alfk", alter_state) with connection.schema_editor() as editor: create_operation.database_forwards("test_alfk", editor, project_state, create_state) alter_operation.database_forwards("test_alfk", editor, create_state, alter_state) def test_alter_fk_non_fk(self): """ Tests that altering an FK to a non-FK works (#23244) """ # Test the state alteration operation = migrations.AlterField( model_name="Rider", name="pony", field=models.FloatField(), ) project_state, new_state = self.make_test_state("test_afknfk", operation, related_model=True) # Test the database alteration self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") with connection.schema_editor() as editor: operation.database_forwards("test_afknfk", editor, project_state, new_state) self.assertColumnExists("test_afknfk_rider", "pony") self.assertColumnNotExists("test_afknfk_rider", "pony_id") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_afknfk", editor, new_state, project_state) self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") @unittest.skipIf(sqlparse is None and connection.features.requires_sqlparse_for_splitting, "Missing sqlparse") def test_run_sql(self): """ Tests the RunSQL operation. 
""" project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( # Use a multi-line string with a comment to test splitting on SQLite and MySQL respectively "CREATE TABLE i_love_ponies (id int, special_thing varchar(15));\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'i love ponies'); -- this is magic!\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (2, 'i love django');\n" "UPDATE i_love_ponies SET special_thing = 'Ponies' WHERE special_thing LIKE '%%ponies';" "UPDATE i_love_ponies SET special_thing = 'Django' WHERE special_thing LIKE '%django';", # Run delete queries to test for parameter substitution failure # reported in #23426 "DELETE FROM i_love_ponies WHERE special_thing LIKE '%Django%';" "DELETE FROM i_love_ponies WHERE special_thing LIKE '%%Ponies%%';" "DROP TABLE i_love_ponies", state_operations=[migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])], ) self.assertEqual(operation.describe(), "Raw SQL operation") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_runsql", new_state) self.assertEqual(len(new_state.models["test_runsql", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test SQL collection with connection.schema_editor(collect_sql=True) as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%ponies';", "\n".join(editor.collected_sql)) operation.database_backwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%Ponies%%';", "\n".join(editor.collected_sql)) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 2) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Django'") self.assertEqual(cursor.fetchall()[0][0], 1) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Ponies'") self.assertEqual(cursor.fetchall()[0][0], 1) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunSQL") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["reverse_sql", "sql", "state_operations"]) # And elidable reduction self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunSQL('SELECT 1 FROM void;', elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_sql_params(self): """ #23426 - RunSQL should accept parameters. 
""" project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( ["CREATE TABLE i_love_ponies (id int, special_thing varchar(15));"], ["DROP TABLE i_love_ponies"], ) param_operation = migrations.RunSQL( # forwards ( "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'Django');", ["INSERT INTO i_love_ponies (id, special_thing) VALUES (2, %s);", ['Ponies']], ("INSERT INTO i_love_ponies (id, special_thing) VALUES (%s, %s);", (3, 'Python',)), ), # backwards [ "DELETE FROM i_love_ponies WHERE special_thing = 'Django';", ["DELETE FROM i_love_ponies WHERE special_thing = 'Ponies';", None], ("DELETE FROM i_love_ponies WHERE id = %s OR special_thing = %s;", [3, 'Python']), ] ) # Make sure there's no table self.assertTableNotExists("i_love_ponies") new_state = project_state.clone() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) # Test parameter passing with connection.schema_editor() as editor: param_operation.database_forwards("test_runsql", editor, project_state, new_state) # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 3) with connection.schema_editor() as editor: param_operation.database_backwards("test_runsql", editor, new_state, project_state) with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") def test_run_sql_params_invalid(self): """ #23426 - RunSQL should fail when a list of statements with an incorrect number of tuples is given. """ project_state = self.set_up_test_model("test_runsql") new_state = project_state.clone() operation = migrations.RunSQL( # forwards [ ["INSERT INTO foo (bar) VALUES ('buz');"] ], # backwards ( ("DELETE FROM foo WHERE bar = 'buz';", 'invalid', 'parameter count'), ), ) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 1"): operation.database_forwards("test_runsql", editor, project_state, new_state) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 3"): operation.database_backwards("test_runsql", editor, new_state, project_state) def test_run_sql_noop(self): """ #24098 - Tests no-op RunSQL operations. 
""" operation = migrations.RunSQL(migrations.RunSQL.noop, migrations.RunSQL.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, None, None) operation.database_backwards("test_runsql", editor, None, None) def test_run_python(self): """ Tests the RunPython operation """ project_state = self.set_up_test_model("test_runpython", mti_model=True) # Create the operation def inner_method(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.create(pink=1, weight=3.55) Pony.objects.create(weight=5) def inner_method_reverse(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.filter(pink=1, weight=3.55).delete() Pony.objects.filter(weight=5).delete() operation = migrations.RunPython(inner_method, reverse_code=inner_method_reverse) self.assertEqual(operation.describe(), "Raw Python operation") # Test the state alteration does nothing new_state = project_state.clone() operation.state_forwards("test_runpython", new_state) self.assertEqual(new_state, project_state) # Test the database alteration self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) # Now test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) # Now test we can't use a string with self.assertRaises(ValueError): migrations.RunPython("print 'ahahaha'") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code", "reverse_code"]) # Also test reversal fails, with an operation identical to above but without reverse_code set no_reverse_operation = migrations.RunPython(inner_method) self.assertFalse(no_reverse_operation.reversible) with connection.schema_editor() as editor: no_reverse_operation.database_forwards("test_runpython", editor, project_state, new_state) with self.assertRaises(NotImplementedError): no_reverse_operation.database_backwards("test_runpython", editor, new_state, project_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) def create_ponies(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") pony1 = Pony.objects.create(pink=1, weight=3.55) self.assertIsNot(pony1.pk, None) pony2 = Pony.objects.create(weight=5) self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = migrations.RunPython(create_ponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 4) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code"]) def create_shetlandponies(models, schema_editor): ShetlandPony = models.get_model("test_runpython", "ShetlandPony") pony1 = ShetlandPony.objects.create(weight=4.0) self.assertIsNot(pony1.pk, None) pony2 = ShetlandPony.objects.create(weight=5.0) 
self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = migrations.RunPython(create_shetlandponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 6) self.assertEqual(project_state.apps.get_model("test_runpython", "ShetlandPony").objects.count(), 2) # And elidable reduction self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunPython(inner_method, elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_python_atomic(self): """ Tests the RunPython operation correctly handles the "atomic" keyword """ project_state = self.set_up_test_model("test_runpythonatomic", mti_model=True) def inner_method(models, schema_editor): Pony = models.get_model("test_runpythonatomic", "Pony") Pony.objects.create(pink=1, weight=3.55) raise ValueError("Adrian hates ponies.") atomic_migration = Migration("test", "test_runpythonatomic") atomic_migration.operations = [migrations.RunPython(inner_method)] non_atomic_migration = Migration("test", "test_runpythonatomic") non_atomic_migration.operations = [migrations.RunPython(inner_method, atomic=False)] # If we're a fully-transactional database, both versions should rollback if connection.features.can_rollback_ddl: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) # Otherwise, the non-atomic operation should leave a row there else: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1) # And deconstruction definition = non_atomic_migration.operations[0].deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["atomic", "code"]) def test_run_python_related_assignment(self): """ #24282 - Tests that model changes to a FK reverse side update the model on the FK side as well. 
""" def inner_method(models, schema_editor): Author = models.get_model("test_authors", "Author") Book = models.get_model("test_books", "Book") author = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author) create_author = migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey("test_authors.Author", models.CASCADE)) ], options={}, ) add_hometown = migrations.AddField( "Author", "hometown", models.CharField(max_length=100), ) create_old_man = migrations.RunPython(inner_method, inner_method) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_authors", new_state) create_author.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_books", new_state) create_book.database_forwards("test_books", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: add_hometown.state_forwards("test_authors", new_state) add_hometown.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_old_man.state_forwards("test_books", new_state) create_old_man.database_forwards("test_books", editor, project_state, new_state) def test_model_with_bigautofield(self): """ A model with BigAutoField can be created. 
""" def create_data(models, schema_editor): Author = models.get_model("test_author", "Author") Book = models.get_model("test_book", "Book") author1 = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author1) Book.objects.create(id=2 ** 33, title="A farewell to arms", author=author1) author2 = Author.objects.create(id=2 ** 33, name="Remarque") Book.objects.create(title="All quiet on the western front", author=author2) Book.objects.create(title="Arc de Triomphe", author=author2) create_author = migrations.CreateModel( "Author", [ ("id", models.BigAutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.BigAutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey(to="test_author.Author", on_delete=models.CASCADE)) ], options={}, ) fill_data = migrations.RunPython(create_data) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_author", new_state) create_author.database_forwards("test_author", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_book", new_state) create_book.database_forwards("test_book", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_data.state_forwards("fill_data", new_state) fill_data.database_forwards("fill_data", editor, project_state, new_state) def test_autofield_foreignfield_growth(self): """ A field may be migrated from AutoField to BigAutoField. """ def create_initial_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog = Blog.objects.create(name="web development done right") Article.objects.create(name="Frameworks", blog=blog) Article.objects.create(name="Programming Languages", blog=blog) def create_big_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog2 = Blog.objects.create(name="Frameworks", id=2 ** 33) Article.objects.create(name="Django", blog=blog2) Article.objects.create(id=2 ** 33, name="Django2", blog=blog2) create_blog = migrations.CreateModel( "Blog", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_article = migrations.CreateModel( "Article", [ ("id", models.AutoField(primary_key=True)), ("blog", models.ForeignKey(to="test_blog.Blog", on_delete=models.CASCADE)), ("name", models.CharField(max_length=100)), ("data", models.TextField(default="")), ], options={}, ) fill_initial_data = migrations.RunPython(create_initial_data, create_initial_data) fill_big_data = migrations.RunPython(create_big_data, create_big_data) grow_article_id = migrations.AlterField("Article", "id", models.BigAutoField(primary_key=True)) grow_blog_id = migrations.AlterField("Blog", "id", models.BigAutoField(primary_key=True)) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_blog.state_forwards("test_blog", new_state) create_blog.database_forwards("test_blog", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: 
create_article.state_forwards("test_article", new_state) create_article.database_forwards("test_article", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_initial_data.state_forwards("fill_initial_data", new_state) fill_initial_data.database_forwards("fill_initial_data", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_article_id.state_forwards("test_article", new_state) grow_article_id.database_forwards("test_article", editor, project_state, new_state) state = new_state.clone() article = state.apps.get_model("test_article.Article") self.assertIsInstance(article._meta.pk, models.BigAutoField) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_blog_id.state_forwards("test_blog", new_state) grow_blog_id.database_forwards("test_blog", editor, project_state, new_state) state = new_state.clone() blog = state.apps.get_model("test_blog.Blog") self.assertIsInstance(blog._meta.pk, models.BigAutoField) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_big_data.state_forwards("fill_big_data", new_state) fill_big_data.database_forwards("fill_big_data", editor, project_state, new_state) def test_run_python_noop(self): """ #24098 - Tests no-op RunPython operations. """ project_state = ProjectState() new_state = project_state.clone() operation = migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) operation.database_backwards("test_runpython", editor, new_state, project_state) @unittest.skipIf(sqlparse is None and connection.features.requires_sqlparse_for_splitting, "Missing sqlparse") def test_separate_database_and_state(self): """ Tests the SeparateDatabaseAndState operation. 
""" project_state = self.set_up_test_model("test_separatedatabaseandstate") # Create the operation database_operation = migrations.RunSQL( "CREATE TABLE i_love_ponies (id int, special_thing int);", "DROP TABLE i_love_ponies;" ) state_operation = migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))]) operation = migrations.SeparateDatabaseAndState( state_operations=[state_operation], database_operations=[database_operation] ) self.assertEqual(operation.describe(), "Custom state/database change combination") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_separatedatabaseandstate", new_state) self.assertEqual(len(new_state.models["test_separatedatabaseandstate", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_separatedatabaseandstate", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_separatedatabaseandstate", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "SeparateDatabaseAndState") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["database_operations", "state_operations"]) def test_separate_database_and_state2(self): """ A complex SeparateDatabaseAndState operation: Multiple operations both for state and database. Verify the state dependencies within each list and that state ops don't affect the database. """ app_label = "test_separatedatabaseandstate2" project_state = self.set_up_test_model(app_label) # Create the operation database_operations = [ migrations.CreateModel( "ILovePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveponies"}, ), migrations.CreateModel( "ILoveMorePonies", # We use IntegerField and not AutoField because # the model is going to be deleted immediately # and with an AutoField this fails on Oracle [("id", models.IntegerField(primary_key=True))], options={"db_table": "ilovemoreponies"}, ), migrations.DeleteModel("ILoveMorePonies"), migrations.CreateModel( "ILoveEvenMorePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveevenmoreponies"}, ), ] state_operations = [ migrations.CreateModel( "SomethingElse", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingelse"}, ), migrations.DeleteModel("SomethingElse"), migrations.CreateModel( "SomethingCompletelyDifferent", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingcompletelydifferent"}, ), ] operation = migrations.SeparateDatabaseAndState( state_operations=state_operations, database_operations=database_operations, ) # Test the state alteration new_state = project_state.clone() operation.state_forwards(app_label, new_state) def assertModelsAndTables(after_db): # Check that tables and models exist, or don't, as they should: self.assertNotIn((app_label, "somethingelse"), new_state.models) self.assertEqual(len(new_state.models[app_label, "somethingcompletelydifferent"].fields), 1) self.assertNotIn((app_label, "iloveponiesonies"), new_state.models) self.assertNotIn((app_label, "ilovemoreponies"), new_state.models) self.assertNotIn((app_label, "iloveevenmoreponies"), 
new_state.models) self.assertTableNotExists("somethingelse") self.assertTableNotExists("somethingcompletelydifferent") self.assertTableNotExists("ilovemoreponies") if after_db: self.assertTableExists("iloveponies") self.assertTableExists("iloveevenmoreponies") else: self.assertTableNotExists("iloveponies") self.assertTableNotExists("iloveevenmoreponies") assertModelsAndTables(after_db=False) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) assertModelsAndTables(after_db=True) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) assertModelsAndTables(after_db=False) class SwappableOperationTests(OperationTestBase): """ Tests that key operations ignore swappable models (we don't want to replicate all of them here, as the functionality is in a common base class anyway) """ available_apps = [ "migrations", "django.contrib.auth", "django.contrib.contenttypes", ] @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_create_ignore_swapped(self): """ Tests that the CreateTable operation ignores swapped models. """ operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], options={ "swappable": "TEST_SWAP_MODEL", }, ) # Test the state alteration (it should still be there!) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crigsw", new_state) self.assertEqual(new_state.models["test_crigsw", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crigsw", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crigsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crigsw", editor, project_state, new_state) self.assertTableNotExists("test_crigsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crigsw", editor, new_state, project_state) self.assertTableNotExists("test_crigsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_delete_ignore_swapped(self): """ Tests the DeleteModel operation ignores swapped models. """ operation = migrations.DeleteModel("Pony") project_state, new_state = self.make_test_state("test_dligsw", operation) # Test the database alteration self.assertTableNotExists("test_dligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dligsw", editor, project_state, new_state) self.assertTableNotExists("test_dligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dligsw", editor, new_state, project_state) self.assertTableNotExists("test_dligsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_add_field_ignore_swapped(self): """ Tests the AddField operation. 
""" # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=5), ) project_state, new_state = self.make_test_state("test_adfligsw", operation) # Test the database alteration self.assertTableNotExists("test_adfligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_adfligsw", editor, project_state, new_state) self.assertTableNotExists("test_adfligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adfligsw", editor, new_state, project_state) self.assertTableNotExists("test_adfligsw_pony") @override_settings(TEST_SWAP_MODEL='migrations.SomeFakeModel') def test_indexes_ignore_swapped(self): """ Add/RemoveIndex operations ignore swapped models. """ operation = migrations.AddIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state('test_adinigsw', operation) with connection.schema_editor() as editor: # No database queries should be run for swapped models operation.database_forwards('test_adinigsw', editor, project_state, new_state) operation.database_backwards('test_adinigsw', editor, new_state, project_state) operation = migrations.RemoveIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state("test_rminigsw", operation) with connection.schema_editor() as editor: operation.database_forwards('test_rminigsw', editor, project_state, new_state) operation.database_backwards('test_rminigsw', editor, new_state, project_state) class TestCreateModel(SimpleTestCase): def test_references_model_mixin(self): CreateModel('name', [], bases=(Mixin, models.Model)).references_model('other_model')
6aa2b777d6820187dfb64b10a7d9edfff5c50b431de70a73e31d32f119cadafa
import os
import shutil
import tempfile
from contextlib import contextmanager
from importlib import import_module

from django.apps import apps
from django.db import connections
from django.db.migrations.recorder import MigrationRecorder
from django.test import TransactionTestCase
from django.test.utils import extend_sys_path
from django.utils.module_loading import module_dir


class MigrationTestBase(TransactionTestCase):
    """
    Contains an extended set of asserts for testing migrations and schema operations.
    """

    available_apps = ["migrations"]

    def tearDown(self):
        # Reset applied-migrations state.
        for db in connections:
            recorder = MigrationRecorder(connections[db])
            recorder.migration_qs.filter(app='migrations').delete()

    def get_table_description(self, table, using='default'):
        with connections[using].cursor() as cursor:
            return connections[using].introspection.get_table_description(cursor, table)

    def assertTableExists(self, table, using='default'):
        with connections[using].cursor() as cursor:
            self.assertIn(table, connections[using].introspection.table_names(cursor))

    def assertTableNotExists(self, table, using='default'):
        with connections[using].cursor() as cursor:
            self.assertNotIn(table, connections[using].introspection.table_names(cursor))

    def assertColumnExists(self, table, column, using='default'):
        self.assertIn(column, [c.name for c in self.get_table_description(table, using=using)])

    def assertColumnNotExists(self, table, column, using='default'):
        self.assertNotIn(column, [c.name for c in self.get_table_description(table, using=using)])

    def _get_column_allows_null(self, table, column, using):
        return [c.null_ok for c in self.get_table_description(table, using=using) if c.name == column][0]

    def assertColumnNull(self, table, column, using='default'):
        self.assertEqual(self._get_column_allows_null(table, column, using), True)

    def assertColumnNotNull(self, table, column, using='default'):
        self.assertEqual(self._get_column_allows_null(table, column, using), False)

    def assertIndexExists(self, table, columns, value=True, using='default'):
        with connections[using].cursor() as cursor:
            self.assertEqual(
                value,
                any(
                    c["index"]
                    for c in connections[using].introspection.get_constraints(cursor, table).values()
                    if c['columns'] == list(columns)
                ),
            )

    def assertIndexNotExists(self, table, columns):
        return self.assertIndexExists(table, columns, False)

    def assertFKExists(self, table, columns, to, value=True, using='default'):
        with connections[using].cursor() as cursor:
            self.assertEqual(
                value,
                any(
                    c["foreign_key"] == to
                    for c in connections[using].introspection.get_constraints(cursor, table).values()
                    if c['columns'] == list(columns)
                ),
            )

    def assertFKNotExists(self, table, columns, to, value=True):
        return self.assertFKExists(table, columns, to, False)

    @contextmanager
    def temporary_migration_module(self, app_label='migrations', module=None):
        """
        Allows testing management commands in a temporary migrations module.

        Wrap all invocations to makemigrations and squashmigrations with this
        context manager in order to avoid creating migration files in your
        source tree inadvertently.

        Takes the application label that will be passed to makemigrations or
        squashmigrations and the Python path to a migrations module.

        The migrations module is used as a template for creating the temporary
        migrations module. If it isn't provided, the application's migrations
        module is used, if it exists.

        Returns the filesystem path to the temporary migrations module.
        """
        temp_dir = tempfile.mkdtemp()
        try:
            target_dir = tempfile.mkdtemp(dir=temp_dir)
            with open(os.path.join(target_dir, '__init__.py'), 'w'):
                pass
            target_migrations_dir = os.path.join(target_dir, 'migrations')

            if module is None:
                module = apps.get_app_config(app_label).name + '.migrations'

            try:
                source_migrations_dir = module_dir(import_module(module))
            except (ImportError, ValueError):
                pass
            else:
                shutil.copytree(source_migrations_dir, target_migrations_dir)

            with extend_sys_path(temp_dir):
                new_module = os.path.basename(target_dir) + '.migrations'
                with self.settings(MIGRATION_MODULES={app_label: new_module}):
                    yield target_migrations_dir
        finally:
            shutil.rmtree(temp_dir)
1f5b832d781fc2a7aa123155bc3c515405d98d0a7e7b2d6805a3b85fd1ee3833
import unittest

from django.db import connection, migrations, models
from django.db.migrations.state import ProjectState
from django.test import override_settings

from .test_operations import OperationTestBase

try:
    import sqlparse
except ImportError:
    sqlparse = None


class AgnosticRouter(object):
    """
    A router that doesn't have an opinion regarding migrating.
    """
    def allow_migrate(self, db, app_label, **hints):
        return None


class MigrateNothingRouter(object):
    """
    A router that doesn't allow migrating.
    """
    def allow_migrate(self, db, app_label, **hints):
        return False


class MigrateEverythingRouter(object):
    """
    A router that always allows migrating.
    """
    def allow_migrate(self, db, app_label, **hints):
        return True


class MigrateWhenFooRouter(object):
    """
    A router that allows migrating depending on a hint.
    """
    def allow_migrate(self, db, app_label, **hints):
        return hints.get('foo', False)


class MultiDBOperationTests(OperationTestBase):
    multi_db = True

    def _test_create_model(self, app_label, should_run):
        """
        Tests that CreateModel honours multi-db settings.
        """
        operation = migrations.CreateModel(
            "Pony",
            [("id", models.AutoField(primary_key=True))],
        )
        # Test the state alteration
        project_state = ProjectState()
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        # Test the database alteration
        self.assertTableNotExists("%s_pony" % app_label)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        if should_run:
            self.assertTableExists("%s_pony" % app_label)
        else:
            self.assertTableNotExists("%s_pony" % app_label)
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        self.assertTableNotExists("%s_pony" % app_label)

    @override_settings(DATABASE_ROUTERS=[AgnosticRouter()])
    def test_create_model(self):
        """
        Test when router doesn't have an opinion (i.e. CreateModel should run).
        """
        self._test_create_model("test_mltdb_crmo", should_run=True)

    @override_settings(DATABASE_ROUTERS=[MigrateNothingRouter()])
    def test_create_model2(self):
        """
        Test when router returns False (i.e. CreateModel shouldn't run).
        """
        self._test_create_model("test_mltdb_crmo2", should_run=False)

    @override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter()])
    def test_create_model3(self):
        """
        Test when router returns True (i.e. CreateModel should run).
        """
        self._test_create_model("test_mltdb_crmo3", should_run=True)

    def test_create_model4(self):
        """
        Test multiple routers.
        """
        with override_settings(DATABASE_ROUTERS=[AgnosticRouter(), AgnosticRouter()]):
            self._test_create_model("test_mltdb_crmo4", should_run=True)
        with override_settings(DATABASE_ROUTERS=[MigrateNothingRouter(), MigrateEverythingRouter()]):
            self._test_create_model("test_mltdb_crmo4", should_run=False)
        with override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter(), MigrateNothingRouter()]):
            self._test_create_model("test_mltdb_crmo4", should_run=True)

    def _test_run_sql(self, app_label, should_run, hints=None):
        with override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter()]):
            project_state = self.set_up_test_model(app_label)

        sql = """
        INSERT INTO {0}_pony (pink, weight) VALUES (1, 3.55);
        INSERT INTO {0}_pony (pink, weight) VALUES (3, 5.0);
        """.format(app_label)

        operation = migrations.RunSQL(sql, hints=hints or {})
        # Test the state alteration does nothing
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(new_state, project_state)
        # Test the database alteration
        self.assertEqual(project_state.apps.get_model(app_label, "Pony").objects.count(), 0)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        Pony = project_state.apps.get_model(app_label, "Pony")
        if should_run:
            self.assertEqual(Pony.objects.count(), 2)
        else:
            self.assertEqual(Pony.objects.count(), 0)

    @unittest.skipIf(sqlparse is None and connection.features.requires_sqlparse_for_splitting, "Missing sqlparse")
    @override_settings(DATABASE_ROUTERS=[MigrateNothingRouter()])
    def test_run_sql(self):
        self._test_run_sql("test_mltdb_runsql", should_run=False)

    @unittest.skipIf(sqlparse is None and connection.features.requires_sqlparse_for_splitting, "Missing sqlparse")
    @override_settings(DATABASE_ROUTERS=[MigrateWhenFooRouter()])
    def test_run_sql2(self):
        self._test_run_sql("test_mltdb_runsql2", should_run=False)
        self._test_run_sql("test_mltdb_runsql2", should_run=True, hints={'foo': True})

    def _test_run_python(self, app_label, should_run, hints=None):
        with override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter()]):
            project_state = self.set_up_test_model(app_label)

        # Create the operation
        def inner_method(models, schema_editor):
            Pony = models.get_model(app_label, "Pony")
            Pony.objects.create(pink=1, weight=3.55)
            Pony.objects.create(weight=5)

        operation = migrations.RunPython(inner_method, hints=hints or {})
        # Test the state alteration does nothing
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(new_state, project_state)
        # Test the database alteration
        self.assertEqual(project_state.apps.get_model(app_label, "Pony").objects.count(), 0)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        Pony = project_state.apps.get_model(app_label, "Pony")
        if should_run:
            self.assertEqual(Pony.objects.count(), 2)
        else:
            self.assertEqual(Pony.objects.count(), 0)

    @override_settings(DATABASE_ROUTERS=[MigrateNothingRouter()])
    def test_run_python(self):
        self._test_run_python("test_mltdb_runpython", should_run=False)

    @override_settings(DATABASE_ROUTERS=[MigrateWhenFooRouter()])
    def test_run_python2(self):
        self._test_run_python("test_mltdb_runpython2", should_run=False)
        self._test_run_python("test_mltdb_runpython2", should_run=True, hints={'foo': True})
b4ab4c3c60264c15fa3da8243ced419c6641829e9b756cd91c4138df9090f251
# -*- coding: utf-8 -*-
import functools
import re

from django.apps import apps
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser
from django.core.validators import RegexValidator, validate_slug
from django.db import connection, models
from django.db.migrations.autodetector import MigrationAutodetector
from django.db.migrations.graph import MigrationGraph
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.questioner import MigrationQuestioner
from django.db.migrations.state import ModelState, ProjectState
from django.test import TestCase, mock, override_settings
from django.test.utils import isolate_lru_cache

from .models import FoodManager, FoodQuerySet


class DeconstructibleObject(object):
    """
    A custom deconstructible object.
    """

    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs

    def deconstruct(self):
        return (
            self.__module__ + '.' + self.__class__.__name__,
            self.args,
            self.kwargs
        )


class AutodetectorTests(TestCase):
    """
    Tests the migration autodetector.
    """
    author_empty = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True))])
    author_name = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200)),
    ])
    author_name_null = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, null=True)),
    ])
    author_name_longer = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=400)),
    ])
    author_name_renamed = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("names", models.CharField(max_length=200)),
    ])
    author_name_default = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default='Ada Lovelace')),
    ])
    author_dates_of_birth_auto_now = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("date_of_birth", models.DateField(auto_now=True)),
        ("date_time_of_birth", models.DateTimeField(auto_now=True)),
        ("time_of_birth", models.TimeField(auto_now=True)),
    ])
    author_dates_of_birth_auto_now_add = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("date_of_birth", models.DateField(auto_now_add=True)),
        ("date_time_of_birth", models.DateTimeField(auto_now_add=True)),
        ("time_of_birth", models.TimeField(auto_now_add=True)),
    ])
    author_name_deconstructible_1 = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=DeconstructibleObject())),
    ])
    author_name_deconstructible_2 = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=DeconstructibleObject())),
    ])
    author_name_deconstructible_3 = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=models.IntegerField())),
    ])
    author_name_deconstructible_4 = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=models.IntegerField())),
    ])
    author_name_deconstructible_list_1 = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])),
    ])
    author_name_deconstructible_list_2 = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])),
    ])
    author_name_deconstructible_list_3 = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 999])),
    ])
    author_name_deconstructible_tuple_1 = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))),
    ])
    author_name_deconstructible_tuple_2 = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))),
    ])
    author_name_deconstructible_tuple_3 = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 999))),
    ])
    author_name_deconstructible_dict_1 = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default={
            'item': DeconstructibleObject(), 'otheritem': 123
        })),
    ])
    author_name_deconstructible_dict_2 = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default={
            'item': DeconstructibleObject(), 'otheritem': 123
        })),
    ])
    author_name_deconstructible_dict_3 = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default={
            'item': DeconstructibleObject(), 'otheritem': 999
        })),
    ])
    author_name_nested_deconstructible_1 = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=DeconstructibleObject(
            DeconstructibleObject(1),
            (DeconstructibleObject('t1'), DeconstructibleObject('t2'),),
            a=DeconstructibleObject('A'),
            b=DeconstructibleObject(B=DeconstructibleObject('c')),
        ))),
    ])
    author_name_nested_deconstructible_2 = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=DeconstructibleObject(
            DeconstructibleObject(1),
            (DeconstructibleObject('t1'), DeconstructibleObject('t2'),),
            a=DeconstructibleObject('A'),
            b=DeconstructibleObject(B=DeconstructibleObject('c')),
        ))),
    ])
    author_name_nested_deconstructible_changed_arg = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=DeconstructibleObject(
            DeconstructibleObject(1),
            (DeconstructibleObject('t1'), DeconstructibleObject('t2-changed'),),
            a=DeconstructibleObject('A'),
            b=DeconstructibleObject(B=DeconstructibleObject('c')),
        ))),
    ])
    author_name_nested_deconstructible_extra_arg = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=DeconstructibleObject(
            DeconstructibleObject(1),
            (DeconstructibleObject('t1'), DeconstructibleObject('t2'),),
            None,
            a=DeconstructibleObject('A'),
            b=DeconstructibleObject(B=DeconstructibleObject('c')),
        ))),
    ])
    author_name_nested_deconstructible_changed_kwarg = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=DeconstructibleObject(
            DeconstructibleObject(1),
            (DeconstructibleObject('t1'), DeconstructibleObject('t2'),),
            a=DeconstructibleObject('A'),
            b=DeconstructibleObject(B=DeconstructibleObject('c-changed')),
        ))),
    ])
    author_name_nested_deconstructible_extra_kwarg = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200, default=DeconstructibleObject(
            DeconstructibleObject(1),
            (DeconstructibleObject('t1'), DeconstructibleObject('t2'),),
            a=DeconstructibleObject('A'),
            b=DeconstructibleObject(B=DeconstructibleObject('c')),
            c=None,
        ))),
    ])
    author_custom_pk = ModelState("testapp", "Author", [("pk_field", models.IntegerField(primary_key=True))])
    author_with_biography_non_blank = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField()),
        ("biography", models.TextField()),
    ])
    author_with_biography_blank = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(blank=True)),
        ("biography", models.TextField(blank=True)),
    ])
    author_with_book = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200)),
        ("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
    ])
    author_with_book_order_wrt = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200)),
        ("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
    ], options={"order_with_respect_to": "book"})
    author_renamed_with_book = ModelState("testapp", "Writer", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200)),
        ("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
    ])
    author_with_publisher_string = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200)),
        ("publisher_name", models.CharField(max_length=200)),
    ])
    author_with_publisher = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200)),
        ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
    ])
    author_with_user = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200)),
        ("user", models.ForeignKey("auth.User", models.CASCADE)),
    ])
    author_with_custom_user = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200)),
        ("user", models.ForeignKey("thirdapp.CustomUser", models.CASCADE)),
    ])
    author_proxy = ModelState("testapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",))
    author_proxy_options = ModelState("testapp", "AuthorProxy", [], {
        "proxy": True,
        "verbose_name": "Super Author",
    }, ("testapp.author", ))
    author_proxy_notproxy = ModelState("testapp", "AuthorProxy", [], {}, ("testapp.author", ))
    author_proxy_third = ModelState("thirdapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author", ))
    author_proxy_third_notproxy = ModelState("thirdapp", "AuthorProxy", [], {}, ("testapp.author", ))
    author_proxy_proxy = ModelState("testapp", "AAuthorProxyProxy", [], {"proxy": True}, ("testapp.authorproxy", ))
    author_unmanaged = ModelState("testapp", "AuthorUnmanaged", [], {"managed": False}, ("testapp.author", ))
    author_unmanaged_managed = ModelState("testapp", "AuthorUnmanaged", [], {}, ("testapp.author", ))
    author_unmanaged_default_pk = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True))])
    author_unmanaged_custom_pk = ModelState("testapp", "Author", [
        ("pk_field", models.IntegerField(primary_key=True)),
    ])
    author_with_m2m = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("publishers", models.ManyToManyField("testapp.Publisher")),
    ])
    author_with_m2m_blank = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("publishers", models.ManyToManyField("testapp.Publisher", blank=True)),
    ])
    author_with_m2m_through = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("publishers", models.ManyToManyField("testapp.Publisher", through="testapp.Contract")),
    ])
    author_with_renamed_m2m_through = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("publishers", models.ManyToManyField("testapp.Publisher", through="testapp.Deal")),
    ])
    author_with_former_m2m = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("publishers", models.CharField(max_length=100)),
    ])
    author_with_options = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
    ], {
        "permissions": [('can_hire', 'Can hire')],
        "verbose_name": "Authi",
    })
    author_with_db_table_options = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
    ], {"db_table": "author_one"})
    author_with_new_db_table_options = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
    ], {"db_table": "author_two"})
    author_renamed_with_db_table_options = ModelState("testapp", "NewAuthor", [
        ("id", models.AutoField(primary_key=True)),
    ], {"db_table": "author_one"})
    author_renamed_with_new_db_table_options = ModelState("testapp", "NewAuthor", [
        ("id", models.AutoField(primary_key=True)),
    ], {"db_table": "author_three"})
    contract = ModelState("testapp", "Contract", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
    ])
    contract_renamed = ModelState("testapp", "Deal", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
    ])
    publisher = ModelState("testapp", "Publisher", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=100)),
    ])
    publisher_with_author = ModelState("testapp", "Publisher", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("name", models.CharField(max_length=100)),
    ])
    publisher_with_aardvark_author = ModelState("testapp", "Publisher", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Aardvark", models.CASCADE)),
        ("name", models.CharField(max_length=100)),
    ])
    publisher_with_book = ModelState("testapp", "Publisher", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("otherapp.Book", models.CASCADE)),
        ("name", models.CharField(max_length=100)),
    ])
    other_pony = ModelState("otherapp", "Pony", [
        ("id", models.AutoField(primary_key=True)),
    ])
    other_pony_food = ModelState("otherapp", "Pony", [
        ("id", models.AutoField(primary_key=True)),
    ], managers=[
        ('food_qs', FoodQuerySet.as_manager()),
        ('food_mgr', FoodManager('a', 'b')),
        ('food_mgr_kwargs', FoodManager('x', 'y', 3, 4)),
    ])
    other_stable = ModelState("otherapp", "Stable", [("id", models.AutoField(primary_key=True))])
    third_thing = ModelState("thirdapp", "Thing", [("id", models.AutoField(primary_key=True))])
    book = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ])
    book_proxy_fk = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("thirdapp.AuthorProxy", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ])
    book_proxy_proxy_fk = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.AAuthorProxyProxy", models.CASCADE)),
    ])
    book_migrations_fk = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("migrations.UnmigratedModel", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ])
    book_with_no_author = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("title", models.CharField(max_length=200)),
    ])
    book_with_author_renamed = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Writer", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ])
    book_with_field_and_author_renamed = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("writer", models.ForeignKey("testapp.Writer", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ])
    book_with_multiple_authors = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("authors", models.ManyToManyField("testapp.Author")),
        ("title", models.CharField(max_length=200)),
    ])
    book_with_multiple_authors_through_attribution = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("authors", models.ManyToManyField("testapp.Author", through="otherapp.Attribution")),
        ("title", models.CharField(max_length=200)),
    ])
    book_indexes = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ], {
        "indexes": [models.Index(fields=["author", "title"], name="book_title_author_idx")],
    })
    book_unordered_indexes = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ], {
        "indexes": [models.Index(fields=["title", "author"], name="book_author_title_idx")],
    })
    book_foo_together = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ], {
        "index_together": {("author", "title")},
        "unique_together": {("author", "title")},
    })
    book_foo_together_2 = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ], {
        "index_together": {("title", "author")},
        "unique_together": {("title", "author")},
    })
    book_foo_together_3 = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("newfield", models.IntegerField()),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ], {
        "index_together": {("title", "newfield")},
        "unique_together": {("title", "newfield")},
    })
    book_foo_together_4 = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("newfield2", models.IntegerField()),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ], {
        "index_together": {("title", "newfield2")},
        "unique_together": {("title", "newfield2")},
    })
    attribution = ModelState("otherapp", "Attribution", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
    ])
    edition = ModelState("thirdapp", "Edition", [
        ("id", models.AutoField(primary_key=True)),
        ("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
    ])
    custom_user = ModelState("thirdapp", "CustomUser", [
        ("id", models.AutoField(primary_key=True)),
        ("username", models.CharField(max_length=255)),
    ], bases=(AbstractBaseUser, ))
    custom_user_no_inherit = ModelState("thirdapp", "CustomUser", [
        ("id", models.AutoField(primary_key=True)),
        ("username", models.CharField(max_length=255)),
    ])
    aardvark = ModelState("thirdapp", "Aardvark", [("id", models.AutoField(primary_key=True))])
    aardvark_testapp = ModelState("testapp", "Aardvark", [("id", models.AutoField(primary_key=True))])
    aardvark_based_on_author = ModelState("testapp", "Aardvark", [], bases=("testapp.Author", ))
    aardvark_pk_fk_author = ModelState("testapp", "Aardvark", [
        ("id", models.OneToOneField("testapp.Author", models.CASCADE, primary_key=True)),
    ])
    knight = ModelState("eggs", "Knight", [("id", models.AutoField(primary_key=True))])
    rabbit = ModelState("eggs", "Rabbit", [
        ("id", models.AutoField(primary_key=True)),
        ("knight", models.ForeignKey("eggs.Knight", models.CASCADE)),
        ("parent", models.ForeignKey("eggs.Rabbit", models.CASCADE)),
    ], {
        "unique_together": {("parent", "knight")},
        "indexes": [models.Index(fields=["parent", "knight"], name='rabbit_circular_fk_index')],
    })

    def repr_changes(self, changes, include_dependencies=False):
        output = ""
        for app_label, migrations in sorted(changes.items()):
            output += " %s:\n" % app_label
            for migration in migrations:
                output += " %s\n" % migration.name
                for operation in migration.operations:
                    output += " %s\n" % operation
                if include_dependencies:
                    output += " Dependencies:\n"
                    if migration.dependencies:
                        for dep in migration.dependencies:
                            output += " %s\n" % (dep,)
                    else:
                        output += " None\n"
        return output

    def assertNumberMigrations(self, changes, app_label, number):
        if len(changes.get(app_label, [])) != number:
            self.fail("Incorrect number of migrations (%s) for %s (expected %s)\n%s" % (
                len(changes.get(app_label, [])), app_label, number, self.repr_changes(changes),
            ))

    def assertMigrationDependencies(self, changes, app_label, position, dependencies):
        if not changes.get(app_label):
            self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes)))
        if len(changes[app_label]) < position + 1:
            self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes)))
        migration = changes[app_label][position]
        if set(migration.dependencies) != set(dependencies):
            self.fail("Migration dependencies mismatch for %s.%s (expected %s):\n%s" % (
                app_label, migration.name, dependencies,
                self.repr_changes(changes, include_dependencies=True),
            ))

    def assertOperationTypes(self, changes, app_label, position, types):
        if not changes.get(app_label):
            self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes)))
        if len(changes[app_label]) < position + 1:
            self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes)))
        migration = changes[app_label][position]
        real_types = [operation.__class__.__name__ for operation in migration.operations]
        if types != real_types:
            self.fail("Operation type mismatch for %s.%s (expected %s):\n%s" % (
                app_label,
                migration.name, types, self.repr_changes(changes),
            ))

    def assertOperationAttributes(self, changes, app_label, position, operation_position, **attrs):
        if not changes.get(app_label):
            self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes)))
        if len(changes[app_label]) < position + 1:
            self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes)))
        migration = changes[app_label][position]
        if len(migration.operations) < operation_position + 1:
            self.fail("No operation at index %s for %s.%s\n%s" % (
                operation_position, app_label, migration.name, self.repr_changes(changes),
            ))
        operation = migration.operations[operation_position]
        for attr, value in attrs.items():
            if getattr(operation, attr, None) != value:
                self.fail("Attribute mismatch for %s.%s op #%s, %s (expected %r, got %r):\n%s" % (
                    app_label, migration.name, operation_position, attr, value,
                    getattr(operation, attr, None), self.repr_changes(changes),
                ))

    def assertOperationFieldAttributes(self, changes, app_label, position, operation_position, **attrs):
        if not changes.get(app_label):
            self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes)))
        if len(changes[app_label]) < position + 1:
            self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes)))
        migration = changes[app_label][position]
        if len(migration.operations) < operation_position + 1:
            self.fail("No operation at index %s for %s.%s\n%s" % (
                operation_position, app_label, migration.name, self.repr_changes(changes),
            ))
        operation = migration.operations[operation_position]
        if not hasattr(operation, 'field'):
            self.fail("No field attribute for %s.%s op #%s." % (
                app_label, migration.name, operation_position,
            ))
        field = operation.field
        for attr, value in attrs.items():
            if getattr(field, attr, None) != value:
                self.fail("Field attribute mismatch for %s.%s op #%s, field.%s (expected %r, got %r):\n%s" % (
                    app_label, migration.name, operation_position, attr, value,
                    getattr(field, attr, None), self.repr_changes(changes),
                ))

    def make_project_state(self, model_states):
        "Shortcut to make ProjectStates from lists of predefined models"
        project_state = ProjectState()
        for model_state in model_states:
            project_state.add_model(model_state.clone())
        return project_state

    def get_changes(self, before_states, after_states, questioner=None):
        return MigrationAutodetector(
            self.make_project_state(before_states),
            self.make_project_state(after_states),
            questioner,
        )._detect_changes()

    def test_arrange_for_graph(self):
        """Tests auto-naming of migrations for graph matching."""
        # Make a fake graph
        graph = MigrationGraph()
        graph.add_node(("testapp", "0001_initial"), None)
        graph.add_node(("testapp", "0002_foobar"), None)
        graph.add_node(("otherapp", "0001_initial"), None)
        graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("testapp", "0001_initial"))
        graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("otherapp", "0001_initial"))
        # Use project state to make a new migration change set
        before = self.make_project_state([])
        after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable])
        autodetector = MigrationAutodetector(before, after)
        changes = autodetector._detect_changes()
        # Run through arrange_for_graph
        changes = autodetector.arrange_for_graph(changes, graph)
        # Make sure there's a new name, deps match, etc.
self.assertEqual(changes["testapp"][0].name, "0003_author") self.assertEqual(changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]) self.assertEqual(changes["otherapp"][0].name, "0002_pony_stable") self.assertEqual(changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]) def test_trim_apps(self): """ Tests that trim does not remove dependencies but does remove unwanted apps. """ # Use project state to make a new migration change set before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable, self.third_thing]) autodetector = MigrationAutodetector(before, after, MigrationQuestioner({"ask_initial": True})) changes = autodetector._detect_changes() # Run through arrange_for_graph graph = MigrationGraph() changes = autodetector.arrange_for_graph(changes, graph) changes["testapp"][0].dependencies.append(("otherapp", "0001_initial")) changes = autodetector._trim_to_apps(changes, {"testapp"}) # Make sure there's the right set of migrations self.assertEqual(changes["testapp"][0].name, "0001_initial") self.assertEqual(changes["otherapp"][0].name, "0001_initial") self.assertNotIn("thirdapp", changes) def test_custom_migration_name(self): """Tests custom naming of migrations for graph matching.""" # Make a fake graph graph = MigrationGraph() graph.add_node(("testapp", "0001_initial"), None) graph.add_node(("testapp", "0002_foobar"), None) graph.add_node(("otherapp", "0001_initial"), None) graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("testapp", "0001_initial")) # Use project state to make a new migration change set before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable]) autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes() # Run through arrange_for_graph migration_name = 'custom_name' changes = autodetector.arrange_for_graph(changes, graph, migration_name) # Make sure there's a new name, deps match, etc. self.assertEqual(changes["testapp"][0].name, "0003_%s" % migration_name) self.assertEqual(changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]) self.assertEqual(changes["otherapp"][0].name, "0002_%s" % migration_name) self.assertEqual(changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]) def test_new_model(self): """Tests autodetection of new models.""" changes = self.get_changes([], [self.other_pony_food]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Pony") self.assertEqual([name for name, mgr in changes['otherapp'][0].operations[0].managers], ['food_qs', 'food_mgr', 'food_mgr_kwargs']) def test_old_model(self): """Tests deletion of old models.""" changes = self.get_changes([self.author_empty], []) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["DeleteModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author") def test_add_field(self): """Tests autodetection of new fields.""" changes = self.get_changes([self.author_empty], [self.author_name]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name") @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_date_fields_with_auto_now_not_asking_for_default(self, mocked_ask_method): changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"]) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now=True) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_date_fields_with_auto_now_add_not_asking_for_null_addition(self, mocked_ask_method): changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now_add]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"]) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_auto_now_add_addition') def test_add_date_fields_with_auto_now_add_asking_for_default(self, mocked_ask_method): changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now_add]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"]) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True) self.assertEqual(mocked_ask_method.call_count, 3) def test_remove_field(self): """Tests autodetection of removed fields.""" changes = self.get_changes([self.author_name], [self.author_empty]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RemoveField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name") def test_alter_field(self): """Tests autodetection of new fields.""" changes = self.get_changes([self.author_name], [self.author_name_longer]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True) def test_supports_functools_partial(self): def _content_file_name(instance, filename, key, **kwargs): return '{}/{}'.format(instance, filename) def content_file_name(key, **kwargs): return functools.partial(_content_file_name, key, **kwargs) # An unchanged partial reference. 
before = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('file'))), ])] after = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('file'))), ])] changes = self.get_changes(before, after) self.assertNumberMigrations(changes, 'testapp', 0) # A changed partial reference. args_changed = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('other-file'))), ])] changes = self.get_changes(before, args_changed) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['AlterField']) # Can't use assertOperationFieldAttributes because we need the # deconstructed version, i.e., the exploded func/args/keywords rather # than the partial: we don't care if it's not the same instance of the # partial, only if it's the same source function, args, and keywords. value = changes['testapp'][0].operations[0].field.upload_to self.assertEqual( (_content_file_name, ('other-file',), {}), (value.func, value.args, value.keywords) ) kwargs_changed = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('file', spam='eggs'))), ])] changes = self.get_changes(before, kwargs_changed) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['AlterField']) value = changes['testapp'][0].operations[0].field.upload_to self.assertEqual( (_content_file_name, ('file',), {'spam': 'eggs'}), (value.func, value.args, value.keywords) ) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration', side_effect=AssertionError("Should not have prompted for not null addition")) def test_alter_field_to_not_null_with_default(self, mocked_ask_method): """ #23609 - Tests autodetection of nullable to non-nullable alterations. """ changes = self.get_changes([self.author_name_null], [self.author_name_default]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default='Ada Lovelace') @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration', return_value=models.NOT_PROVIDED) def test_alter_field_to_not_null_without_default(self, mocked_ask_method): """ #23609 - Tests autodetection of nullable to non-nullable alterations. """ changes = self.get_changes([self.author_name_null], [self.author_name]) self.assertEqual(mocked_ask_method.call_count, 1) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default=models.NOT_PROVIDED) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration', return_value='Some Name') def test_alter_field_to_not_null_oneoff_default(self, mocked_ask_method): """ #23609 - Tests autodetection of nullable to non-nullable alterations. 
""" changes = self.get_changes([self.author_name_null], [self.author_name]) self.assertEqual(mocked_ask_method.call_count, 1) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=False) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default="Some Name") def test_rename_field(self): """Tests autodetection of renamed fields.""" changes = self.get_changes( [self.author_name], [self.author_name_renamed], MigrationQuestioner({"ask_rename": True}) ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="name", new_name="names") def test_rename_model(self): """Tests autodetection of renamed models.""" changes = self.get_changes( [self.author_with_book, self.book], [self.author_renamed_with_book, self.book_with_author_renamed], MigrationQuestioner({"ask_rename_model": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="Author", new_name="Writer") # Now that RenameModel handles related fields too, there should be # no AlterField for the related field. self.assertNumberMigrations(changes, 'otherapp', 0) def test_rename_m2m_through_model(self): """ Tests autodetection of renamed models that are used in M2M relations as through models. """ changes = self.get_changes( [self.author_with_m2m_through, self.publisher, self.contract], [self.author_with_renamed_m2m_through, self.publisher, self.contract_renamed], MigrationQuestioner({'ask_rename_model': True}) ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['RenameModel']) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name='Contract', new_name='Deal') def test_rename_model_with_renamed_rel_field(self): """ Tests autodetection of renamed models while simultaneously renaming one of the fields that relate to the renamed model. """ changes = self.get_changes( [self.author_with_book, self.book], [self.author_renamed_with_book, self.book_with_field_and_author_renamed], MigrationQuestioner({"ask_rename": True, "ask_rename_model": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="Author", new_name="Writer") # Right number/type of migrations for related field rename? # Alter is already taken care of. self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["RenameField"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, old_name="author", new_name="writer") def test_rename_model_with_fks_in_different_position(self): """ #24537 - Tests that the order of fields in a model does not influence the RenameModel detection. 
""" before = [ ModelState("testapp", "EntityA", [ ("id", models.AutoField(primary_key=True)), ]), ModelState("testapp", "EntityB", [ ("id", models.AutoField(primary_key=True)), ("some_label", models.CharField(max_length=255)), ("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)), ]), ] after = [ ModelState("testapp", "EntityA", [ ("id", models.AutoField(primary_key=True)), ]), ModelState("testapp", "RenamedEntityB", [ ("id", models.AutoField(primary_key=True)), ("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)), ("some_label", models.CharField(max_length=255)), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({"ask_rename_model": True})) self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="EntityB", new_name="RenamedEntityB") def test_fk_dependency(self): """Tests that having a ForeignKey automatically adds a dependency.""" # Note that testapp (author) has no dependencies, # otherapp (book) depends on testapp (author), # thirdapp (edition) depends on otherapp (book) changes = self.get_changes([], [self.author_name, self.book, self.edition]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, []) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book") self.assertMigrationDependencies(changes, 'otherapp', 0, [("testapp", "auto_1")]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="Edition") self.assertMigrationDependencies(changes, 'thirdapp', 0, [("otherapp", "auto_1")]) def test_proxy_fk_dependency(self): """Tests that FK dependencies still work on proxy models.""" # Note that testapp (author) has no dependencies, # otherapp (book) depends on testapp (authorproxy) changes = self.get_changes([], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, []) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book") self.assertMigrationDependencies(changes, 'otherapp', 0, [("thirdapp", "auto_1")]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="AuthorProxy") self.assertMigrationDependencies(changes, 'thirdapp', 0, [("testapp", "auto_1")]) def test_same_app_no_fk_dependency(self): """ Tests that a migration with a FK between two models of the same app does not have a dependency to itself. 
""" changes = self.get_changes([], [self.author_with_publisher, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel", "AddField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher") self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher") self.assertMigrationDependencies(changes, 'testapp', 0, []) def test_circular_fk_dependency(self): """ Tests that having a circular ForeignKey dependency automatically resolves the situation into 2 migrations on one side and 1 on the other. """ changes = self.get_changes([], [self.author_with_book, self.book, self.publisher_with_book]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher") self.assertMigrationDependencies(changes, 'testapp', 0, [("otherapp", "auto_1")]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 2) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'otherapp', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'otherapp', 0, []) self.assertMigrationDependencies(changes, 'otherapp', 1, [("otherapp", "auto_1"), ("testapp", "auto_1")]) # both split migrations should be `initial` self.assertTrue(changes['otherapp'][0].initial) self.assertTrue(changes['otherapp'][1].initial) def test_same_app_circular_fk_dependency(self): """ Tests that a migration with a FK between two models of the same app does not have a dependency to itself. """ changes = self.get_changes([], [self.author_with_publisher, self.publisher_with_author]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel", "AddField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher") self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher") self.assertMigrationDependencies(changes, 'testapp', 0, []) def test_same_app_circular_fk_dependency_with_unique_together_and_indexes(self): """ #22275 - Tests that a migration with circular FK dependency does not try to create unique together constraint and indexes before creating all required fields first. """ changes = self.get_changes([], [self.knight, self.rabbit]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'eggs', 1) self.assertOperationTypes( changes, 'eggs', 0, ["CreateModel", "CreateModel", "AddIndex", "AlterUniqueTogether"] ) self.assertNotIn("unique_together", changes['eggs'][0].operations[0].options) self.assertNotIn("unique_together", changes['eggs'][0].operations[1].options) self.assertMigrationDependencies(changes, 'eggs', 0, []) def test_alter_db_table_add(self): """Tests detection for adding db_table in model's options.""" changes = self.get_changes([self.author_empty], [self.author_with_db_table_options]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table="author_one") def test_alter_db_table_change(self): """Tests detection for changing db_table in model's options'.""" changes = self.get_changes([self.author_with_db_table_options], [self.author_with_new_db_table_options]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table="author_two") def test_alter_db_table_remove(self): """Tests detection for removing db_table in model's options.""" changes = self.get_changes([self.author_with_db_table_options], [self.author_empty]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table=None) def test_alter_db_table_no_changes(self): """ Tests that alter_db_table doesn't generate a migration if no changes have been made. """ changes = self.get_changes([self.author_with_db_table_options], [self.author_with_db_table_options]) # Right number of migrations? self.assertEqual(len(changes), 0) def test_keep_db_table_with_model_change(self): """ Tests when model changes but db_table stays as-is, autodetector must not create more than one operation. """ changes = self.get_changes( [self.author_with_db_table_options], [self.author_renamed_with_db_table_options], MigrationQuestioner({"ask_rename_model": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor") def test_alter_db_table_with_model_change(self): """ Tests when model and db_table changes, autodetector must create two operations. """ changes = self.get_changes( [self.author_with_db_table_options], [self.author_renamed_with_new_db_table_options], MigrationQuestioner({"ask_rename_model": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel", "AlterModelTable"]) self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor") self.assertOperationAttributes(changes, "testapp", 0, 1, name="newauthor", table="author_three") def test_identical_regex_doesnt_alter(self): from_state = ModelState( "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[ RegexValidator( re.compile('^[-a-zA-Z0-9_]+\\Z'), "Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens.", 'invalid' ) ]))] ) to_state = ModelState( "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[validate_slug]))] ) changes = self.get_changes([from_state], [to_state]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, "testapp", 0) def test_different_regex_does_alter(self): from_state = ModelState( "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[ RegexValidator( re.compile('^[a-z]+\\Z', 32), "Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens.", 'invalid' ) ]))] ) to_state = ModelState( "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[validate_slug]))] ) changes = self.get_changes([from_state], [to_state]) self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterField"]) def test_empty_foo_together(self): """ #23452 - Empty unique/index_together shouldn't generate a migration. """ # Explicitly testing for not specified, since this is the case after # a CreateModel operation w/o any definition on the original model model_state_not_specified = ModelState("a", "model", [("id", models.AutoField(primary_key=True))]) # Explicitly testing for None, since this was the issue in #23452 after # a AlterFooTogether operation with e.g. () as value model_state_none = ModelState("a", "model", [ ("id", models.AutoField(primary_key=True)) ], { "index_together": None, "unique_together": None, }) # Explicitly testing for the empty set, since we now always have sets. # During removal (('col1', 'col2'),) --> () this becomes set([]) model_state_empty = ModelState("a", "model", [ ("id", models.AutoField(primary_key=True)) ], { "index_together": set(), "unique_together": set(), }) def test(from_state, to_state, msg): changes = self.get_changes([from_state], [to_state]) if len(changes) > 0: ops = ', '.join(o.__class__.__name__ for o in changes['a'][0].operations) self.fail('Created operation(s) %s from %s' % (ops, msg)) tests = ( (model_state_not_specified, model_state_not_specified, '"not specified" to "not specified"'), (model_state_not_specified, model_state_none, '"not specified" to "None"'), (model_state_not_specified, model_state_empty, '"not specified" to "empty"'), (model_state_none, model_state_not_specified, '"None" to "not specified"'), (model_state_none, model_state_none, '"None" to "None"'), (model_state_none, model_state_empty, '"None" to "empty"'), (model_state_empty, model_state_not_specified, '"empty" to "not specified"'), (model_state_empty, model_state_none, '"empty" to "None"'), (model_state_empty, model_state_empty, '"empty" to "empty"'), ) for t in tests: test(*t) def test_create_model_with_indexes(self): """Test creation of new model with indexes already defined.""" author = ModelState('otherapp', 'Author', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=200)), ], {'indexes': [models.Index(fields=['name'], name='create_model_with_indexes_idx')]}) changes = self.get_changes([], [author]) added_index = models.Index(fields=['name'], name='create_model_with_indexes_idx') # Right number of migrations? self.assertEqual(len(changes['otherapp']), 1) # Right number of actions? migration = changes['otherapp'][0] self.assertEqual(len(migration.operations), 2) # Right actions order? 
self.assertOperationTypes(changes, 'otherapp', 0, ['CreateModel', 'AddIndex']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='Author') self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='author', index=added_index) def test_add_indexes(self): """Test change detection of new indexes.""" changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_indexes]) self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['AddIndex']) added_index = models.Index(fields=['author', 'title'], name='book_title_author_idx') self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', index=added_index) def test_remove_indexes(self): """Test change detection of removed indexes.""" changes = self.get_changes([self.author_empty, self.book_indexes], [self.author_empty, self.book]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveIndex']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', name='book_title_author_idx') def test_order_fields_indexes(self): """Test change detection of reordering of fields in indexes.""" changes = self.get_changes( [self.author_empty, self.book_indexes], [self.author_empty, self.book_unordered_indexes] ) self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveIndex', 'AddIndex']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', name='book_title_author_idx') added_index = models.Index(fields=['title', 'author'], name='book_author_title_idx') self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='book', index=added_index) def test_add_foo_together(self): """Tests index/unique_together detection.""" changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_foo_together]) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together={("author", "title")}) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together={("author", "title")}) def test_remove_foo_together(self): """Tests index/unique_together detection.""" changes = self.get_changes([self.author_empty, self.book_foo_together], [self.author_empty, self.book]) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together=set()) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together=set()) def test_foo_together_remove_fk(self): """Tests unique_together and field removal detection & ordering""" changes = self.get_changes( [self.author_empty, self.book_foo_together], [self.author_empty, self.book_with_no_author] ) # Right number/type of migrations? 
self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, [ "AlterUniqueTogether", "AlterIndexTogether", "RemoveField" ]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together=set()) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together=set()) self.assertOperationAttributes(changes, "otherapp", 0, 2, model_name="book", name="author") def test_foo_together_no_changes(self): """ Tests that index/unique_together doesn't generate a migration if no changes have been made. """ changes = self.get_changes( [self.author_empty, self.book_foo_together], [self.author_empty, self.book_foo_together] ) # Right number of migrations? self.assertEqual(len(changes), 0) def test_foo_together_ordering(self): """ Tests that index/unique_together also triggers on ordering changes. """ changes = self.get_changes( [self.author_empty, self.book_foo_together], [self.author_empty, self.book_foo_together_2] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together={("title", "author")}) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together={("title", "author")}) def test_add_field_and_foo_together(self): """ Tests that added fields will be created before using them in index/unique_together. """ changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_foo_together_3]) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AddField", "AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", unique_together={("title", "newfield")}) self.assertOperationAttributes(changes, "otherapp", 0, 2, name="book", index_together={("title", "newfield")}) def test_create_model_and_unique_together(self): author = ModelState("otherapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ]) book_with_author = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("otherapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("title", "author")}, "unique_together": {("title", "author")}, }) changes = self.get_changes([self.book_with_no_author], [author, book_with_author]) # Right number of migrations? self.assertEqual(len(changes['otherapp']), 1) # Right number of actions? migration = changes['otherapp'][0] self.assertEqual(len(migration.operations), 4) # Right actions order? self.assertOperationTypes( changes, 'otherapp', 0, ['CreateModel', 'AddField', 'AlterUniqueTogether', 'AlterIndexTogether'] ) def test_remove_field_and_foo_together(self): """ Tests that removed fields will be removed after updating index/unique_together. """ changes = self.get_changes( [self.author_empty, self.book_foo_together_3], [self.author_empty, self.book_foo_together] ) # Right number/type of migrations? 
self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["RemoveField", "AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, model_name="book", name="newfield") self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", unique_together={("author", "title")}) self.assertOperationAttributes(changes, "otherapp", 0, 2, name="book", index_together={("author", "title")}) def test_rename_field_and_foo_together(self): """ Tests that removed fields will be removed after updating index/unique_together. """ changes = self.get_changes( [self.author_empty, self.book_foo_together_3], [self.author_empty, self.book_foo_together_4], MigrationQuestioner({"ask_rename": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["RenameField", "AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", unique_together={ ("title", "newfield2") }) self.assertOperationAttributes(changes, "otherapp", 0, 2, name="book", index_together={("title", "newfield2")}) def test_proxy(self): """Tests that the autodetector correctly deals with proxy models.""" # First, we test adding a proxy model changes = self.get_changes([self.author_empty], [self.author_empty, self.author_proxy]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"]) self.assertOperationAttributes( changes, "testapp", 0, 0, name="AuthorProxy", options={"proxy": True, "indexes": []} ) # Now, we test turning a proxy model into a non-proxy model # It should delete the proxy then make the real one changes = self.get_changes( [self.author_empty, self.author_proxy], [self.author_empty, self.author_proxy_notproxy] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel", "CreateModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="AuthorProxy") self.assertOperationAttributes(changes, "testapp", 0, 1, name="AuthorProxy", options={}) def test_proxy_custom_pk(self): """ #23415 - The autodetector must correctly deal with custom FK on proxy models. """ # First, we test the default pk field name changes = self.get_changes([], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]) # The field name the FK on the book model points to self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'id') # Now, we test the custom pk field name changes = self.get_changes([], [self.author_custom_pk, self.author_proxy_third, self.book_proxy_fk]) # The field name the FK on the book model points to self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'pk_field') def test_proxy_to_mti_with_fk_to_proxy(self): # First, test the pk table and field name. changes = self.get_changes( [], [self.author_empty, self.author_proxy_third, self.book_proxy_fk], ) self.assertEqual( changes['otherapp'][0].operations[0].fields[2][1].remote_field.model._meta.db_table, 'testapp_author', ) self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'id') # Change AuthorProxy to use MTI. 
changes = self.get_changes( [self.author_empty, self.author_proxy_third, self.book_proxy_fk], [self.author_empty, self.author_proxy_third_notproxy, self.book_proxy_fk], ) # Right number/type of migrations for the AuthorProxy model? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ['DeleteModel', 'CreateModel']) # Right number/type of migrations for the Book model with a FK to # AuthorProxy? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField']) # otherapp should depend on thirdapp. self.assertMigrationDependencies(changes, 'otherapp', 0, [('thirdapp', 'auto_1')]) # Now, test the pk table and field name. self.assertEqual( changes['otherapp'][0].operations[0].field.remote_field.model._meta.db_table, 'thirdapp_authorproxy', ) self.assertEqual(changes['otherapp'][0].operations[0].field.remote_field.field_name, 'author_ptr') def test_proxy_to_mti_with_fk_to_proxy_proxy(self): # First, test the pk table and field name. changes = self.get_changes( [], [self.author_empty, self.author_proxy, self.author_proxy_proxy, self.book_proxy_proxy_fk], ) self.assertEqual( changes['otherapp'][0].operations[0].fields[1][1].remote_field.model._meta.db_table, 'testapp_author', ) self.assertEqual(changes['otherapp'][0].operations[0].fields[1][1].remote_field.field_name, 'id') # Change AuthorProxy to use MTI. FK still points to AAuthorProxyProxy, # a proxy of AuthorProxy. changes = self.get_changes( [self.author_empty, self.author_proxy, self.author_proxy_proxy, self.book_proxy_proxy_fk], [self.author_empty, self.author_proxy_notproxy, self.author_proxy_proxy, self.book_proxy_proxy_fk], ) # Right number/type of migrations for the AuthorProxy model? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['DeleteModel', 'CreateModel']) # Right number/type of migrations for the Book model with a FK to # AAuthorProxyProxy? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField']) # otherapp should depend on testapp. self.assertMigrationDependencies(changes, 'otherapp', 0, [('testapp', 'auto_1')]) # Now, test the pk table and field name. self.assertEqual( changes['otherapp'][0].operations[0].field.remote_field.model._meta.db_table, 'testapp_authorproxy', ) self.assertEqual(changes['otherapp'][0].operations[0].field.remote_field.field_name, 'author_ptr') def test_unmanaged_create(self): """Tests that the autodetector correctly deals with managed models.""" # First, we test adding an unmanaged model changes = self.get_changes([self.author_empty], [self.author_empty, self.author_unmanaged]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="AuthorUnmanaged", options={"managed": False}) def test_unmanaged_to_managed(self): # Now, we test turning an unmanaged model into a managed model changes = self.get_changes( [self.author_empty, self.author_unmanaged], [self.author_empty, self.author_unmanaged_managed] ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="authorunmanaged", options={}) def test_managed_to_unmanaged(self): # Now, we turn managed to unmanaged. 
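        # Only Meta.managed changes here, so a single AlterModelOptions with managed=False is expected; no table is created or dropped.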
changes = self.get_changes( [self.author_empty, self.author_unmanaged_managed], [self.author_empty, self.author_unmanaged] ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="authorunmanaged", options={"managed": False}) def test_unmanaged_custom_pk(self): """ #23415 - The autodetector must correctly deal with custom FK on unmanaged models. """ # First, we test the default pk field name changes = self.get_changes([], [self.author_unmanaged_default_pk, self.book]) # The field name the FK on the book model points to self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'id') # Now, we test the custom pk field name changes = self.get_changes([], [self.author_unmanaged_custom_pk, self.book]) # The field name the FK on the book model points to self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'pk_field') @override_settings(AUTH_USER_MODEL="thirdapp.CustomUser") def test_swappable(self): with isolate_lru_cache(apps.get_swappable_settings_name): changes = self.get_changes([self.custom_user], [self.custom_user, self.author_with_custom_user]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, [("__setting__", "AUTH_USER_MODEL")]) def test_swappable_changed(self): with isolate_lru_cache(apps.get_swappable_settings_name): before = self.make_project_state([self.custom_user, self.author_with_user]) with override_settings(AUTH_USER_MODEL="thirdapp.CustomUser"): after = self.make_project_state([self.custom_user, self.author_with_custom_user]) autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes() # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name="author", name='user') fk_field = changes['testapp'][0].operations[0].field to_model = '%s.%s' % ( fk_field.remote_field.model._meta.app_label, fk_field.remote_field.model._meta.object_name, ) self.assertEqual(to_model, 'thirdapp.CustomUser') def test_add_field_with_default(self): """#22030 - Adding a field with a default should work.""" changes = self.get_changes([self.author_empty], [self.author_name_default]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="name") def test_custom_deconstructible(self): """ Two instances which deconstruct to the same value aren't considered a change. """ changes = self.get_changes([self.author_name_deconstructible_1], [self.author_name_deconstructible_2]) # Right number of migrations? 
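        # Both name fields deconstruct to the same (path, args, kwargs), so the autodetector should report no changes at all.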
self.assertEqual(len(changes), 0) def test_deconstruct_field_kwarg(self): """Field instances are handled correctly by nested deconstruction.""" changes = self.get_changes([self.author_name_deconstructible_3], [self.author_name_deconstructible_4]) self.assertEqual(changes, {}) def test_deconstructible_list(self): """Nested deconstruction descends into lists.""" # When lists contain items that deconstruct to identical values, those lists # should be considered equal for the purpose of detecting state changes # (even if the original items are unequal). changes = self.get_changes( [self.author_name_deconstructible_list_1], [self.author_name_deconstructible_list_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed lists should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_list_1], [self.author_name_deconstructible_list_3] ) self.assertEqual(len(changes), 1) def test_deconstructible_tuple(self): """Nested deconstruction descends into tuples.""" # When tuples contain items that deconstruct to identical values, those tuples # should be considered equal for the purpose of detecting state changes # (even if the original items are unequal). changes = self.get_changes( [self.author_name_deconstructible_tuple_1], [self.author_name_deconstructible_tuple_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed tuples should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_tuple_1], [self.author_name_deconstructible_tuple_3] ) self.assertEqual(len(changes), 1) def test_deconstructible_dict(self): """Nested deconstruction descends into dict values.""" # When dicts contain items whose values deconstruct to identical values, # those dicts should be considered equal for the purpose of detecting # state changes (even if the original values are unequal). changes = self.get_changes( [self.author_name_deconstructible_dict_1], [self.author_name_deconstructible_dict_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed dicts should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_dict_1], [self.author_name_deconstructible_dict_3] ) self.assertEqual(len(changes), 1) def test_nested_deconstructible_objects(self): """ Nested deconstruction is applied recursively to the args/kwargs of deconstructed objects. """ # If the items within a deconstructed object's args/kwargs have the same # deconstructed values - whether or not the items themselves are different # instances - then the object as a whole is regarded as unchanged. 
changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_2] ) self.assertEqual(changes, {}) # Differences that exist solely within the args list of a deconstructed object # should be reported as changes changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_changed_arg] ) self.assertEqual(len(changes), 1) # Additional args should also be reported as a change changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_extra_arg] ) self.assertEqual(len(changes), 1) # Differences that exist solely within the kwargs dict of a deconstructed object # should be reported as changes changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_changed_kwarg] ) self.assertEqual(len(changes), 1) # Additional kwargs should also be reported as a change changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_extra_kwarg] ) self.assertEqual(len(changes), 1) def test_deconstruct_type(self): """ #22951 -- Uninstantiated classes with deconstruct are correctly returned by deep_deconstruct during serialization. """ author = ModelState( "testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField( max_length=200, # IntegerField intentionally not instantiated. default=models.IntegerField, )) ], ) changes = self.get_changes([], [author]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) def test_replace_string_with_foreignkey(self): """ #22300 - Adding an FK in the same "spot" as a deleted CharField should work. """ changes = self.get_changes([self.author_with_publisher_string], [self.author_with_publisher, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "RemoveField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Publisher") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publisher_name") self.assertOperationAttributes(changes, 'testapp', 0, 2, name="publisher") def test_foreign_key_removed_before_target_model(self): """ Removing an FK and the model it targets in the same change must remove the FK field before the model to maintain consistency. """ changes = self.get_changes( [self.author_with_publisher, self.publisher], [self.author_name] ) # removes both the model and FK # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RemoveField", "DeleteModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publisher") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Publisher") @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_many_to_many(self, mocked_ask_method): """#22435 - Adding a ManyToManyField should not prompt for a default.""" changes = self.get_changes([self.author_empty, self.publisher], [self.author_with_m2m, self.publisher]) # Right number/type of migrations? 
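        # A ManyToManyField lives in a separate through table, so no NOT NULL column is added to Author and the mocked questioner above must never be invoked.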
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers") def test_alter_many_to_many(self): changes = self.get_changes( [self.author_with_m2m, self.publisher], [self.author_with_m2m_blank, self.publisher] ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers") def test_create_with_through_model(self): """ Adding a m2m with a through model and the models that use it should be ordered correctly. """ changes = self.get_changes([], [self.author_with_m2m_through, self.publisher, self.contract]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, [ "CreateModel", "CreateModel", "CreateModel", "AddField", "AddField" ]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Contract") self.assertOperationAttributes(changes, 'testapp', 0, 2, name="Publisher") self.assertOperationAttributes(changes, 'testapp', 0, 3, model_name='contract', name='publisher') self.assertOperationAttributes(changes, 'testapp', 0, 4, model_name='author', name='publishers') def test_many_to_many_removed_before_through_model(self): """ Removing a ManyToManyField and the "through" model in the same change must remove the field before the model to maintain consistency. """ changes = self.get_changes( [self.book_with_multiple_authors_through_attribution, self.author_name, self.attribution], [self.book_with_no_author, self.author_name], ) # Remove both the through model and ManyToMany # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["RemoveField", "RemoveField", "RemoveField", "DeleteModel"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="author", model_name='attribution') self.assertOperationAttributes(changes, 'otherapp', 0, 1, name="book", model_name='attribution') self.assertOperationAttributes(changes, 'otherapp', 0, 2, name="authors", model_name='book') self.assertOperationAttributes(changes, 'otherapp', 0, 3, name='Attribution') def test_many_to_many_removed_before_through_model_2(self): """ Removing a model that contains a ManyToManyField and the "through" model in the same change must remove the field before the model to maintain consistency. """ changes = self.get_changes( [self.book_with_multiple_authors_through_attribution, self.author_name, self.attribution], [self.author_name], ) # Remove both the through model and ManyToMany # Right number/type of migrations? 
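        # All three relation fields have to be removed before either model is deleted, otherwise the intermediate state would reference a missing model.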
self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, [ "RemoveField", "RemoveField", "RemoveField", "DeleteModel", "DeleteModel" ]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="author", model_name='attribution') self.assertOperationAttributes(changes, 'otherapp', 0, 1, name="book", model_name='attribution') self.assertOperationAttributes(changes, 'otherapp', 0, 2, name="authors", model_name='book') self.assertOperationAttributes(changes, 'otherapp', 0, 3, name='Attribution') self.assertOperationAttributes(changes, 'otherapp', 0, 4, name='Book') def test_m2m_w_through_multistep_remove(self): """ A model with a m2m field that specifies a "through" model cannot be removed in the same migration as that through model as the schema will pass through an inconsistent state. The autodetector should produce two migrations to avoid this issue. """ changes = self.get_changes([self.author_with_m2m_through, self.publisher, self.contract], [self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, [ "RemoveField", "RemoveField", "RemoveField", "DeleteModel", "DeleteModel" ]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="publishers", model_name='author') self.assertOperationAttributes(changes, "testapp", 0, 1, name="author", model_name='contract') self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher", model_name='contract') self.assertOperationAttributes(changes, "testapp", 0, 3, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 4, name="Contract") def test_concrete_field_changed_to_many_to_many(self): """ #23938 - Tests that changing a concrete field into a ManyToManyField first removes the concrete field and then adds the m2m field. """ changes = self.get_changes([self.author_with_former_m2m], [self.author_with_m2m, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "RemoveField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name='Publisher') self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 2, name="publishers", model_name='author') def test_many_to_many_changed_to_concrete_field(self): """ #23938 - Tests that changing a ManyToManyField into a concrete field first removes the m2m field and then adds the concrete field. """ changes = self.get_changes([self.author_with_m2m, self.publisher], [self.author_with_former_m2m]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "AddField", "DeleteModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 2, name='Publisher') self.assertOperationFieldAttributes(changes, 'testapp', 0, 1, max_length=100) def test_non_circular_foreignkey_dependency_removal(self): """ If two models with a ForeignKey from one to the other are removed at the same time, the autodetector should remove them in the correct order. 
""" changes = self.get_changes([self.author_with_publisher, self.publisher_with_author], []) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "RemoveField", "DeleteModel", "DeleteModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="publisher", model_name='author') self.assertOperationAttributes(changes, "testapp", 0, 1, name="author", model_name='publisher') self.assertOperationAttributes(changes, "testapp", 0, 2, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 3, name="Publisher") def test_alter_model_options(self): """Changing a model's options should make a change.""" changes = self.get_changes([self.author_empty], [self.author_with_options]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, options={ "permissions": [('can_hire', 'Can hire')], "verbose_name": "Authi", }) # Changing them back to empty should also make a change changes = self.get_changes([self.author_with_options], [self.author_empty]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", options={}) def test_alter_model_options_proxy(self): """Changing a proxy model's options should also make a change.""" changes = self.get_changes( [self.author_proxy, self.author_empty], [self.author_proxy_options, self.author_empty] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="authorproxy", options={ "verbose_name": "Super Author" }) def test_set_alter_order_with_respect_to(self): """Tests that setting order_with_respect_to adds a field.""" changes = self.get_changes([self.book, self.author_with_book], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterOrderWithRespectTo"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="author", order_with_respect_to="book") def test_add_alter_order_with_respect_to(self): """ Tests that setting order_with_respect_to when adding the FK too does things in the right order. """ changes = self.get_changes([self.author_name], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AlterOrderWithRespectTo"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name="author", name="book") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="author", order_with_respect_to="book") def test_remove_alter_order_with_respect_to(self): """ Tests that removing order_with_respect_to when removing the FK too does things in the right order. """ changes = self.get_changes([self.book, self.author_with_book_order_wrt], [self.author_name]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterOrderWithRespectTo", "RemoveField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="author", order_with_respect_to=None) self.assertOperationAttributes(changes, 'testapp', 0, 1, model_name="author", name="book") def test_add_model_order_with_respect_to(self): """ Tests that setting order_with_respect_to when adding the whole model does things in the right order. """ changes = self.get_changes([], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "AlterOrderWithRespectTo"]) self.assertOperationAttributes(changes, 'testapp', 0, 1, name="author", order_with_respect_to="book") self.assertNotIn("_order", [name for name, field in changes['testapp'][0].operations[0].fields]) def test_alter_model_managers(self): """ Tests that changing the model managers adds a new operation. """ changes = self.get_changes([self.other_pony], [self.other_pony_food]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["AlterModelManagers"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="pony") self.assertEqual([name for name, mgr in changes['otherapp'][0].operations[0].managers], ['food_qs', 'food_mgr', 'food_mgr_kwargs']) self.assertEqual(changes['otherapp'][0].operations[0].managers[1][1].args, ('a', 'b', 1, 2)) self.assertEqual(changes['otherapp'][0].operations[0].managers[2][1].args, ('x', 'y', 3, 4)) def test_swappable_first_inheritance(self): """Tests that swappable models get their CreateModel first.""" changes = self.get_changes([], [self.custom_user, self.aardvark]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="CustomUser") self.assertOperationAttributes(changes, 'thirdapp', 0, 1, name="Aardvark") @override_settings(AUTH_USER_MODEL="thirdapp.CustomUser") def test_swappable_first_setting(self): """Tests that swappable models get their CreateModel first.""" with isolate_lru_cache(apps.get_swappable_settings_name): changes = self.get_changes([], [self.custom_user_no_inherit, self.aardvark]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="CustomUser") self.assertOperationAttributes(changes, 'thirdapp', 0, 1, name="Aardvark") def test_bases_first(self): """Tests that bases of other models come first.""" changes = self.get_changes([], [self.aardvark_based_on_author, self.author_name]) # Right number/type of migrations? 
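        # Aardvark inherits from Author, so Author's CreateModel must come first even though Aardvark appears first in the input state list.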
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Aardvark") def test_multiple_bases(self): """#23956 - Tests that inheriting models doesn't move *_ptr fields into AddField operations.""" A = ModelState("app", "A", [("a_id", models.AutoField(primary_key=True))]) B = ModelState("app", "B", [("b_id", models.AutoField(primary_key=True))]) C = ModelState("app", "C", [], bases=("app.A", "app.B")) D = ModelState("app", "D", [], bases=("app.A", "app.B")) E = ModelState("app", "E", [], bases=("app.A", "app.B")) changes = self.get_changes([], [A, B, C, D, E]) # Right number/type of migrations? self.assertNumberMigrations(changes, "app", 1) self.assertOperationTypes(changes, "app", 0, [ "CreateModel", "CreateModel", "CreateModel", "CreateModel", "CreateModel" ]) self.assertOperationAttributes(changes, "app", 0, 0, name="A") self.assertOperationAttributes(changes, "app", 0, 1, name="B") self.assertOperationAttributes(changes, "app", 0, 2, name="C") self.assertOperationAttributes(changes, "app", 0, 3, name="D") self.assertOperationAttributes(changes, "app", 0, 4, name="E") def test_proxy_bases_first(self): """Tests that bases of proxies come first.""" changes = self.get_changes([], [self.author_empty, self.author_proxy, self.author_proxy_proxy]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="AuthorProxy") self.assertOperationAttributes(changes, 'testapp', 0, 2, name="AAuthorProxyProxy") def test_pk_fk_included(self): """ Tests that a relation used as the primary key is kept as part of CreateModel. """ changes = self.get_changes([], [self.aardvark_pk_fk_author, self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Aardvark") def test_first_dependency(self): """ Tests that a dependency to an app with no migrations uses __first__. """ # Load graph loader = MigrationLoader(connection) before = self.make_project_state([]) after = self.make_project_state([self.book_migrations_fk]) after.real_apps = ["migrations"] autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes(graph=loader.graph) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book") self.assertMigrationDependencies(changes, 'otherapp', 0, [("migrations", "__first__")]) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_last_dependency(self): """ Tests that a dependency to an app with existing migrations uses the last migration of that app. 
""" # Load graph loader = MigrationLoader(connection) before = self.make_project_state([]) after = self.make_project_state([self.book_migrations_fk]) after.real_apps = ["migrations"] autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes(graph=loader.graph) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book") self.assertMigrationDependencies(changes, 'otherapp', 0, [("migrations", "0002_second")]) def test_alter_fk_before_model_deletion(self): """ Tests that ForeignKeys are altered _before_ the model they used to refer to are deleted. """ changes = self.get_changes( [self.author_name, self.publisher_with_author], [self.aardvark_testapp, self.publisher_with_aardvark_author] ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "AlterField", "DeleteModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Aardvark") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="author") self.assertOperationAttributes(changes, 'testapp', 0, 2, name="Author") def test_fk_dependency_other_app(self): """ #23100 - Tests that ForeignKeys correctly depend on other apps' models. """ changes = self.get_changes([self.author_name, self.book], [self.author_with_book, self.book]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="book") self.assertMigrationDependencies(changes, 'testapp', 0, [("otherapp", "__first__")]) def test_circular_dependency_mixed_addcreate(self): """ #23315 - Tests that the dependency resolver knows to put all CreateModel before AddField and not become unsolvable. """ address = ModelState("a", "Address", [ ("id", models.AutoField(primary_key=True)), ("country", models.ForeignKey("b.DeliveryCountry", models.CASCADE)), ]) person = ModelState("a", "Person", [ ("id", models.AutoField(primary_key=True)), ]) apackage = ModelState("b", "APackage", [ ("id", models.AutoField(primary_key=True)), ("person", models.ForeignKey("a.Person", models.CASCADE)), ]) country = ModelState("b", "DeliveryCountry", [ ("id", models.AutoField(primary_key=True)), ]) changes = self.get_changes([], [address, person, apackage, country]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'a', 0, ["CreateModel", "CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertOperationTypes(changes, 'b', 0, ["CreateModel", "CreateModel"]) @override_settings(AUTH_USER_MODEL="a.Tenant") def test_circular_dependency_swappable(self): """ #23322 - Tests that the dependency resolver knows to explicitly resolve swappable models. """ with isolate_lru_cache(apps.get_swappable_settings_name): tenant = ModelState("a", "Tenant", [ ("id", models.AutoField(primary_key=True)), ("primary_address", models.ForeignKey("b.Address", models.CASCADE))], bases=(AbstractBaseUser, ) ) address = ModelState("b", "Address", [ ("id", models.AutoField(primary_key=True)), ("tenant", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)), ]) changes = self.get_changes([], [address, tenant]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'a', 2) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'a', 0, []) self.assertMigrationDependencies(changes, 'a', 1, [('a', 'auto_1'), ('b', 'auto_1')]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'b', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'b', 0, [('__setting__', 'AUTH_USER_MODEL')]) @override_settings(AUTH_USER_MODEL="b.Tenant") def test_circular_dependency_swappable2(self): """ #23322 - Tests that the dependency resolver knows to explicitly resolve swappable models but with the swappable not being the first migrated model. """ with isolate_lru_cache(apps.get_swappable_settings_name): address = ModelState("a", "Address", [ ("id", models.AutoField(primary_key=True)), ("tenant", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)), ]) tenant = ModelState("b", "Tenant", [ ("id", models.AutoField(primary_key=True)), ("primary_address", models.ForeignKey("a.Address", models.CASCADE))], bases=(AbstractBaseUser, ) ) changes = self.get_changes([], [address, tenant]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'a', 0, []) self.assertMigrationDependencies(changes, 'a', 1, [('__setting__', 'AUTH_USER_MODEL'), ('a', 'auto_1')]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'b', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'b', 0, [('a', 'auto_1')]) @override_settings(AUTH_USER_MODEL="a.Person") def test_circular_dependency_swappable_self(self): """ #23322 - Tests that the dependency resolver knows to explicitly resolve swappable models. """ with isolate_lru_cache(apps.get_swappable_settings_name): person = ModelState("a", "Person", [ ("id", models.AutoField(primary_key=True)), ("parent1", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE, related_name='children')) ]) changes = self.get_changes([], [person]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 1) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'a', 0, []) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_blank_textfield_and_charfield(self, mocked_ask_method): """ #23405 - Adding a NOT NULL and blank `CharField` or `TextField` without default should not prompt for a default. """ changes = self.get_changes([self.author_empty], [self.author_with_biography_blank]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition') def test_add_non_blank_textfield_and_charfield(self, mocked_ask_method): """ #23405 - Adding a NOT NULL and non-blank `CharField` or `TextField` without default should prompt for a default. 
""" changes = self.get_changes([self.author_empty], [self.author_with_biography_non_blank]) self.assertEqual(mocked_ask_method.call_count, 2) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0)
d21abc1851f19dc715d83f878f110d15037b2142f2ec8e6ab7173b8a6c5a74c4
from django.apps.registry import Apps from django.contrib.contenttypes.fields import GenericForeignKey from django.db import models from django.db.migrations.exceptions import InvalidBasesError from django.db.migrations.operations import ( AddField, AlterField, DeleteModel, RemoveField, ) from django.db.migrations.state import ( ModelState, ProjectState, get_related_models_recursive, ) from django.test import SimpleTestCase, override_settings from django.test.utils import isolate_apps from django.utils import six from .models import ( FoodManager, FoodQuerySet, ModelWithCustomBase, NoMigrationFoodManager, UnicodeModel, ) class StateTests(SimpleTestCase): """ Tests state construction, rendering and modification by operations. """ def test_create(self): """ Tests making a ProjectState from an Apps """ new_apps = Apps(["migrations"]) class Author(models.Model): name = models.CharField(max_length=255) bio = models.TextField() age = models.IntegerField(blank=True, null=True) class Meta: app_label = "migrations" apps = new_apps unique_together = ["name", "bio"] index_together = ["bio", "age"] class AuthorProxy(Author): class Meta: app_label = "migrations" apps = new_apps proxy = True ordering = ["name"] class SubAuthor(Author): width = models.FloatField(null=True) class Meta: app_label = "migrations" apps = new_apps class Book(models.Model): title = models.CharField(max_length=1000) author = models.ForeignKey(Author, models.CASCADE) contributors = models.ManyToManyField(Author) class Meta: app_label = "migrations" apps = new_apps verbose_name = "tome" db_table = "test_tome" indexes = [models.Index(fields=['title'])] class Food(models.Model): food_mgr = FoodManager('a', 'b') food_qs = FoodQuerySet.as_manager() food_no_mgr = NoMigrationFoodManager('x', 'y') class Meta: app_label = "migrations" apps = new_apps class FoodNoManagers(models.Model): class Meta: app_label = "migrations" apps = new_apps class FoodNoDefaultManager(models.Model): food_no_mgr = NoMigrationFoodManager('x', 'y') food_mgr = FoodManager('a', 'b') food_qs = FoodQuerySet.as_manager() class Meta: app_label = "migrations" apps = new_apps mgr1 = FoodManager('a', 'b') mgr2 = FoodManager('x', 'y', c=3, d=4) class FoodOrderedManagers(models.Model): # The managers on this model should be ordered by their creation # counter and not by the order in model body food_no_mgr = NoMigrationFoodManager('x', 'y') food_mgr2 = mgr2 food_mgr1 = mgr1 class Meta: app_label = "migrations" apps = new_apps project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] author_proxy_state = project_state.models['migrations', 'authorproxy'] sub_author_state = project_state.models['migrations', 'subauthor'] book_state = project_state.models['migrations', 'book'] food_state = project_state.models['migrations', 'food'] food_no_managers_state = project_state.models['migrations', 'foodnomanagers'] food_no_default_manager_state = project_state.models['migrations', 'foodnodefaultmanager'] food_order_manager_state = project_state.models['migrations', 'foodorderedmanagers'] book_index = models.Index(fields=['title']) book_index.set_name_with_model(Book) self.assertEqual(author_state.app_label, "migrations") self.assertEqual(author_state.name, "Author") self.assertEqual([x for x, y in author_state.fields], ["id", "name", "bio", "age"]) self.assertEqual(author_state.fields[1][1].max_length, 255) self.assertIs(author_state.fields[2][1].null, False) self.assertIs(author_state.fields[3][1].null, True) self.assertEqual( 
author_state.options, {"unique_together": {("name", "bio")}, "index_together": {("bio", "age")}, "indexes": []} ) self.assertEqual(author_state.bases, (models.Model, )) self.assertEqual(book_state.app_label, "migrations") self.assertEqual(book_state.name, "Book") self.assertEqual([x for x, y in book_state.fields], ["id", "title", "author", "contributors"]) self.assertEqual(book_state.fields[1][1].max_length, 1000) self.assertIs(book_state.fields[2][1].null, False) self.assertEqual(book_state.fields[3][1].__class__.__name__, "ManyToManyField") self.assertEqual( book_state.options, {"verbose_name": "tome", "db_table": "test_tome", "indexes": [book_index]}, ) self.assertEqual(book_state.bases, (models.Model, )) self.assertEqual(author_proxy_state.app_label, "migrations") self.assertEqual(author_proxy_state.name, "AuthorProxy") self.assertEqual(author_proxy_state.fields, []) self.assertEqual(author_proxy_state.options, {"proxy": True, "ordering": ["name"], "indexes": []}) self.assertEqual(author_proxy_state.bases, ("migrations.author", )) self.assertEqual(sub_author_state.app_label, "migrations") self.assertEqual(sub_author_state.name, "SubAuthor") self.assertEqual(len(sub_author_state.fields), 2) self.assertEqual(sub_author_state.bases, ("migrations.author", )) # The default manager is used in migrations self.assertEqual([name for name, mgr in food_state.managers], ['food_mgr']) self.assertTrue(all(isinstance(name, six.text_type) for name, mgr in food_state.managers)) self.assertEqual(food_state.managers[0][1].args, ('a', 'b', 1, 2)) # No explicit managers defined. Migrations will fall back to the default self.assertEqual(food_no_managers_state.managers, []) # food_mgr is used in migration but isn't the default mgr, hence add the # default self.assertEqual([name for name, mgr in food_no_default_manager_state.managers], ['food_no_mgr', 'food_mgr']) self.assertTrue(all(isinstance(name, six.text_type) for name, mgr in food_no_default_manager_state.managers)) self.assertEqual(food_no_default_manager_state.managers[0][1].__class__, models.Manager) self.assertIsInstance(food_no_default_manager_state.managers[1][1], FoodManager) self.assertEqual([name for name, mgr in food_order_manager_state.managers], ['food_mgr1', 'food_mgr2']) self.assertTrue(all(isinstance(name, six.text_type) for name, mgr in food_order_manager_state.managers)) self.assertEqual([mgr.args for name, mgr in food_order_manager_state.managers], [('a', 'b', 1, 2), ('x', 'y', 3, 4)]) def test_custom_default_manager_added_to_the_model_state(self): """ When the default manager of the model is a custom manager, it needs to be added to the model state. """ new_apps = Apps(['migrations']) custom_manager = models.Manager() class Author(models.Model): objects = models.TextField() authors = custom_manager class Meta: app_label = 'migrations' apps = new_apps project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.managers, [('authors', custom_manager)]) def test_custom_default_manager_named_objects_with_false_migration_flag(self): """ When a manager is added with a name of 'objects' but it does not have `use_in_migrations = True`, no migration should be added to the model state (#26643). 
""" new_apps = Apps(['migrations']) class Author(models.Model): objects = models.Manager() class Meta: app_label = 'migrations' apps = new_apps project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.managers, []) def test_no_duplicate_managers(self): """ When a manager is added with `use_in_migrations = True` and a parent model had a manager with the same name and `use_in_migrations = True`, the parent's manager shouldn't appear in the model state (#26881). """ new_apps = Apps(['migrations']) class PersonManager(models.Manager): use_in_migrations = True class Person(models.Model): objects = PersonManager() class Meta: abstract = True class BossManager(PersonManager): use_in_migrations = True class Boss(Person): objects = BossManager() class Meta: app_label = 'migrations' apps = new_apps project_state = ProjectState.from_apps(new_apps) boss_state = project_state.models['migrations', 'boss'] self.assertEqual(boss_state.managers, [('objects', Boss.objects)]) def test_custom_default_manager(self): new_apps = Apps(['migrations']) class Author(models.Model): manager1 = models.Manager() manager2 = models.Manager() class Meta: app_label = 'migrations' apps = new_apps default_manager_name = 'manager2' project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.options['default_manager_name'], 'manager2') self.assertEqual(author_state.managers, [('manager2', Author.manager1)]) def test_custom_base_manager(self): new_apps = Apps(['migrations']) class Author(models.Model): manager1 = models.Manager() manager2 = models.Manager() class Meta: app_label = 'migrations' apps = new_apps base_manager_name = 'manager2' class Author2(models.Model): manager1 = models.Manager() manager2 = models.Manager() class Meta: app_label = 'migrations' apps = new_apps base_manager_name = 'manager1' project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.options['base_manager_name'], 'manager2') self.assertEqual(author_state.managers, [ ('manager1', Author.manager1), ('manager2', Author.manager2), ]) author2_state = project_state.models['migrations', 'author2'] self.assertEqual(author2_state.options['base_manager_name'], 'manager1') self.assertEqual(author2_state.managers, [ ('manager1', Author2.manager1), ]) def test_apps_bulk_update(self): """ StateApps.bulk_update() should update apps.ready to False and reset the value afterwards. """ project_state = ProjectState() apps = project_state.apps with apps.bulk_update(): self.assertFalse(apps.ready) self.assertTrue(apps.ready) with self.assertRaises(ValueError): with apps.bulk_update(): self.assertFalse(apps.ready) raise ValueError() self.assertTrue(apps.ready) def test_render(self): """ Tests rendering a ProjectState into an Apps. 
""" project_state = ProjectState() project_state.add_model(ModelState( app_label="migrations", name="Tag", fields=[ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ("hidden", models.BooleanField()), ], )) project_state.add_model(ModelState( app_label="migrations", name="SubTag", fields=[ ('tag_ptr', models.OneToOneField( 'migrations.Tag', models.CASCADE, auto_created=True, primary_key=True, to_field='id', serialize=False, )), ("awesome", models.BooleanField()), ], bases=("migrations.Tag",), )) base_mgr = models.Manager() mgr1 = FoodManager('a', 'b') mgr2 = FoodManager('x', 'y', c=3, d=4) project_state.add_model(ModelState( app_label="migrations", name="Food", fields=[ ("id", models.AutoField(primary_key=True)), ], managers=[ # The ordering we really want is objects, mgr1, mgr2 ('default', base_mgr), ('food_mgr2', mgr2), (b'food_mgr1', mgr1), ] )) new_apps = project_state.apps self.assertEqual(new_apps.get_model("migrations", "Tag")._meta.get_field("name").max_length, 100) self.assertIs(new_apps.get_model("migrations", "Tag")._meta.get_field("hidden").null, False) self.assertEqual(len(new_apps.get_model("migrations", "SubTag")._meta.local_fields), 2) Food = new_apps.get_model("migrations", "Food") self.assertEqual([mgr.name for mgr in Food._meta.managers], ['default', 'food_mgr1', 'food_mgr2']) self.assertTrue(all(isinstance(mgr.name, six.text_type) for mgr in Food._meta.managers)) self.assertEqual([mgr.__class__ for mgr in Food._meta.managers], [models.Manager, FoodManager, FoodManager]) def test_render_model_inheritance(self): class Book(models.Model): title = models.CharField(max_length=1000) class Meta: app_label = "migrations" apps = Apps() class Novel(Book): class Meta: app_label = "migrations" apps = Apps() # First, test rendering individually apps = Apps(["migrations"]) # We shouldn't be able to render yet ms = ModelState.from_model(Novel) with self.assertRaises(InvalidBasesError): ms.render(apps) # Once the parent model is in the app registry, it should be fine ModelState.from_model(Book).render(apps) ModelState.from_model(Novel).render(apps) def test_render_model_with_multiple_inheritance(self): class Foo(models.Model): class Meta: app_label = "migrations" apps = Apps() class Bar(models.Model): class Meta: app_label = "migrations" apps = Apps() class FooBar(Foo, Bar): class Meta: app_label = "migrations" apps = Apps() class AbstractSubFooBar(FooBar): class Meta: abstract = True apps = Apps() class SubFooBar(AbstractSubFooBar): class Meta: app_label = "migrations" apps = Apps() apps = Apps(["migrations"]) # We shouldn't be able to render yet ms = ModelState.from_model(FooBar) with self.assertRaises(InvalidBasesError): ms.render(apps) # Once the parent models are in the app registry, it should be fine ModelState.from_model(Foo).render(apps) self.assertSequenceEqual(ModelState.from_model(Foo).bases, [models.Model]) ModelState.from_model(Bar).render(apps) self.assertSequenceEqual(ModelState.from_model(Bar).bases, [models.Model]) ModelState.from_model(FooBar).render(apps) self.assertSequenceEqual(ModelState.from_model(FooBar).bases, ['migrations.foo', 'migrations.bar']) ModelState.from_model(SubFooBar).render(apps) self.assertSequenceEqual(ModelState.from_model(SubFooBar).bases, ['migrations.foobar']) def test_render_project_dependencies(self): """ Tests that the ProjectState render method correctly renders models to account for inter-model base dependencies. 
""" new_apps = Apps() class A(models.Model): class Meta: app_label = "migrations" apps = new_apps class B(A): class Meta: app_label = "migrations" apps = new_apps class C(B): class Meta: app_label = "migrations" apps = new_apps class D(A): class Meta: app_label = "migrations" apps = new_apps class E(B): class Meta: app_label = "migrations" apps = new_apps proxy = True class F(D): class Meta: app_label = "migrations" apps = new_apps proxy = True # Make a ProjectState and render it project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) project_state.add_model(ModelState.from_model(C)) project_state.add_model(ModelState.from_model(D)) project_state.add_model(ModelState.from_model(E)) project_state.add_model(ModelState.from_model(F)) final_apps = project_state.apps self.assertEqual(len(final_apps.get_models()), 6) # Now make an invalid ProjectState and make sure it fails project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) project_state.add_model(ModelState.from_model(C)) project_state.add_model(ModelState.from_model(F)) with self.assertRaises(InvalidBasesError): project_state.apps def test_render_unique_app_labels(self): """ Tests that the ProjectState render method doesn't raise an ImproperlyConfigured exception about unique labels if two dotted app names have the same last part. """ class A(models.Model): class Meta: app_label = "django.contrib.auth" class B(models.Model): class Meta: app_label = "vendor.auth" # Make a ProjectState and render it project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) self.assertEqual(len(project_state.apps.get_models()), 2) def test_add_relations(self): """ #24573 - Adding relations to existing models should reload the referenced models too. 
""" new_apps = Apps() class A(models.Model): class Meta: app_label = 'something' apps = new_apps class B(A): class Meta: app_label = 'something' apps = new_apps class C(models.Model): class Meta: app_label = 'something' apps = new_apps project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) project_state.add_model(ModelState.from_model(C)) project_state.apps # We need to work with rendered models old_state = project_state.clone() model_a_old = old_state.apps.get_model('something', 'A') model_b_old = old_state.apps.get_model('something', 'B') model_c_old = old_state.apps.get_model('something', 'C') # Check that the relations between the old models are correct self.assertIs(model_a_old._meta.get_field('b').related_model, model_b_old) self.assertIs(model_b_old._meta.get_field('a_ptr').related_model, model_a_old) operation = AddField('c', 'to_a', models.OneToOneField( 'something.A', models.CASCADE, related_name='from_c', )) operation.state_forwards('something', project_state) model_a_new = project_state.apps.get_model('something', 'A') model_b_new = project_state.apps.get_model('something', 'B') model_c_new = project_state.apps.get_model('something', 'C') # Check that all models have changed self.assertIsNot(model_a_old, model_a_new) self.assertIsNot(model_b_old, model_b_new) self.assertIsNot(model_c_old, model_c_new) # Check that the relations between the old models still hold self.assertIs(model_a_old._meta.get_field('b').related_model, model_b_old) self.assertIs(model_b_old._meta.get_field('a_ptr').related_model, model_a_old) # Check that the relations between the new models correct self.assertIs(model_a_new._meta.get_field('b').related_model, model_b_new) self.assertIs(model_b_new._meta.get_field('a_ptr').related_model, model_a_new) self.assertIs(model_a_new._meta.get_field('from_c').related_model, model_c_new) self.assertIs(model_c_new._meta.get_field('to_a').related_model, model_a_new) def test_remove_relations(self): """ #24225 - Tests that relations between models are updated while remaining the relations and references for models of an old state. 
""" new_apps = Apps() class A(models.Model): class Meta: app_label = "something" apps = new_apps class B(models.Model): to_a = models.ForeignKey(A, models.CASCADE) class Meta: app_label = "something" apps = new_apps def get_model_a(state): return [mod for mod in state.apps.get_models() if mod._meta.model_name == 'a'][0] project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) self.assertEqual(len(get_model_a(project_state)._meta.related_objects), 1) old_state = project_state.clone() operation = RemoveField("b", "to_a") operation.state_forwards("something", project_state) # Tests that model from old_state still has the relation model_a_old = get_model_a(old_state) model_a_new = get_model_a(project_state) self.assertIsNot(model_a_old, model_a_new) self.assertEqual(len(model_a_old._meta.related_objects), 1) self.assertEqual(len(model_a_new._meta.related_objects), 0) # Same test for deleted model project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) old_state = project_state.clone() operation = DeleteModel("b") operation.state_forwards("something", project_state) model_a_old = get_model_a(old_state) model_a_new = get_model_a(project_state) self.assertIsNot(model_a_old, model_a_new) self.assertEqual(len(model_a_old._meta.related_objects), 1) self.assertEqual(len(model_a_new._meta.related_objects), 0) def test_self_relation(self): """ #24513 - Modifying an object pointing to itself would cause it to be rendered twice and thus breaking its related M2M through objects. """ class A(models.Model): to_a = models.ManyToManyField('something.A', symmetrical=False) class Meta: app_label = "something" def get_model_a(state): return [mod for mod in state.apps.get_models() if mod._meta.model_name == 'a'][0] project_state = ProjectState() project_state.add_model((ModelState.from_model(A))) self.assertEqual(len(get_model_a(project_state)._meta.related_objects), 1) old_state = project_state.clone() operation = AlterField( model_name="a", name="to_a", field=models.ManyToManyField("something.A", symmetrical=False, blank=True) ) # At this point the model would be rendered twice causing its related # M2M through objects to point to an old copy and thus breaking their # attribute lookup. 
        operation.state_forwards("something", project_state)

        model_a_old = get_model_a(old_state)
        model_a_new = get_model_a(project_state)
        self.assertIsNot(model_a_old, model_a_new)

        # Tests that the old model's _meta is still consistent
        field_to_a_old = model_a_old._meta.get_field("to_a")
        self.assertEqual(field_to_a_old.m2m_field_name(), "from_a")
        self.assertEqual(field_to_a_old.m2m_reverse_field_name(), "to_a")
        self.assertIs(field_to_a_old.related_model, model_a_old)
        self.assertIs(field_to_a_old.remote_field.through._meta.get_field('to_a').related_model, model_a_old)
        self.assertIs(field_to_a_old.remote_field.through._meta.get_field('from_a').related_model, model_a_old)

        # Tests that the new model's _meta is still consistent
        field_to_a_new = model_a_new._meta.get_field("to_a")
        self.assertEqual(field_to_a_new.m2m_field_name(), "from_a")
        self.assertEqual(field_to_a_new.m2m_reverse_field_name(), "to_a")
        self.assertIs(field_to_a_new.related_model, model_a_new)
        self.assertIs(field_to_a_new.remote_field.through._meta.get_field('to_a').related_model, model_a_new)
        self.assertIs(field_to_a_new.remote_field.through._meta.get_field('from_a').related_model, model_a_new)

    def test_equality(self):
        """
        Tests that == and != are implemented correctly.
        """
        # Test two things that should be equal
        project_state = ProjectState()
        project_state.add_model(ModelState(
            "migrations",
            "Tag",
            [
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=100)),
                ("hidden", models.BooleanField()),
            ],
            {},
            None,
        ))
        project_state.apps  # Fill the apps cached property
        other_state = project_state.clone()
        self.assertEqual(project_state, project_state)
        self.assertEqual(project_state, other_state)
        self.assertIs(project_state != project_state, False)
        self.assertIs(project_state != other_state, False)
        self.assertNotEqual(project_state.apps, other_state.apps)

        # Make a very small change (max_len 99) and see if that affects it
        project_state = ProjectState()
        project_state.add_model(ModelState(
            "migrations",
            "Tag",
            [
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=99)),
                ("hidden", models.BooleanField()),
            ],
            {},
            None,
        ))
        self.assertNotEqual(project_state, other_state)
        self.assertIs(project_state == other_state, False)

    def test_dangling_references_throw_error(self):
        new_apps = Apps()

        class Author(models.Model):
            name = models.TextField()

            class Meta:
                app_label = "migrations"
                apps = new_apps

        class Publisher(models.Model):
            name = models.TextField()

            class Meta:
                app_label = "migrations"
                apps = new_apps

        class Book(models.Model):
            author = models.ForeignKey(Author, models.CASCADE)
            publisher = models.ForeignKey(Publisher, models.CASCADE)

            class Meta:
                app_label = "migrations"
                apps = new_apps

        class Magazine(models.Model):
            authors = models.ManyToManyField(Author)

            class Meta:
                app_label = "migrations"
                apps = new_apps

        # Make a valid ProjectState and render it
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(Author))
        project_state.add_model(ModelState.from_model(Publisher))
        project_state.add_model(ModelState.from_model(Book))
        project_state.add_model(ModelState.from_model(Magazine))
        self.assertEqual(len(project_state.apps.get_models()), 4)

        # now make an invalid one with a ForeignKey
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(Book))
        msg = (
            "The field migrations.Book.author was declared with a lazy reference "
            "to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n"
            "The field migrations.Book.publisher was declared with a lazy reference "
            "to 'migrations.publisher', but app 'migrations' doesn't provide model 'publisher'."
        )
        with self.assertRaisesMessage(ValueError, msg):
            project_state.apps

        # And another with ManyToManyField.
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(Magazine))
        msg = (
            "The field migrations.Magazine.authors was declared with a lazy reference "
            "to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n"
            "The field migrations.Magazine_authors.author was declared with a lazy reference "
            "to 'migrations.author', but app 'migrations' doesn't provide model 'author'."
        )
        with self.assertRaisesMessage(ValueError, msg):
            project_state.apps

        # And now with multiple models and multiple fields.
        project_state.add_model(ModelState.from_model(Book))
        msg = (
            "The field migrations.Book.author was declared with a lazy reference "
            "to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n"
            "The field migrations.Book.publisher was declared with a lazy reference "
            "to 'migrations.publisher', but app 'migrations' doesn't provide model 'publisher'.\n"
            "The field migrations.Magazine.authors was declared with a lazy reference "
            "to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n"
            "The field migrations.Magazine_authors.author was declared with a lazy reference "
            "to 'migrations.author', but app 'migrations' doesn't provide model 'author'."
        )
        with self.assertRaisesMessage(ValueError, msg):
            project_state.apps

    def test_real_apps(self):
        """
        Tests that including real apps can resolve dangling FK errors.
        This test relies on the fact that contenttypes is always loaded.
        """
        new_apps = Apps()

        class TestModel(models.Model):
            ct = models.ForeignKey("contenttypes.ContentType", models.CASCADE)

            class Meta:
                app_label = "migrations"
                apps = new_apps

        # If we just stick it into an empty state it should fail
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(TestModel))
        with self.assertRaises(ValueError):
            project_state.apps

        # If we include the real app it should succeed
        project_state = ProjectState(real_apps=["contenttypes"])
        project_state.add_model(ModelState.from_model(TestModel))
        rendered_state = project_state.apps
        self.assertEqual(
            len([x for x in rendered_state.get_models() if x._meta.app_label == "migrations"]),
            1,
        )

    def test_ignore_order_wrt(self):
        """
        Makes sure ProjectState doesn't include OrderWrt fields when
        making from existing models.
""" new_apps = Apps() class Author(models.Model): name = models.TextField() class Meta: app_label = "migrations" apps = new_apps class Book(models.Model): author = models.ForeignKey(Author, models.CASCADE) class Meta: app_label = "migrations" apps = new_apps order_with_respect_to = "author" # Make a valid ProjectState and render it project_state = ProjectState() project_state.add_model(ModelState.from_model(Author)) project_state.add_model(ModelState.from_model(Book)) self.assertEqual( [name for name, field in project_state.models["migrations", "book"].fields], ["id", "author"], ) def test_manager_refer_correct_model_version(self): """ #24147 - Tests that managers refer to the correct version of a historical model """ project_state = ProjectState() project_state.add_model(ModelState( app_label="migrations", name="Tag", fields=[ ("id", models.AutoField(primary_key=True)), ("hidden", models.BooleanField()), ], managers=[ ('food_mgr', FoodManager('a', 'b')), ('food_qs', FoodQuerySet.as_manager()), ] )) old_model = project_state.apps.get_model('migrations', 'tag') new_state = project_state.clone() operation = RemoveField("tag", "hidden") operation.state_forwards("migrations", new_state) new_model = new_state.apps.get_model('migrations', 'tag') self.assertIsNot(old_model, new_model) self.assertIs(old_model, old_model.food_mgr.model) self.assertIs(old_model, old_model.food_qs.model) self.assertIs(new_model, new_model.food_mgr.model) self.assertIs(new_model, new_model.food_qs.model) self.assertIsNot(old_model.food_mgr, new_model.food_mgr) self.assertIsNot(old_model.food_qs, new_model.food_qs) self.assertIsNot(old_model.food_mgr.model, new_model.food_mgr.model) self.assertIsNot(old_model.food_qs.model, new_model.food_qs.model) def test_choices_iterator(self): """ #24483 - ProjectState.from_apps should not destructively consume Field.choices iterators. """ new_apps = Apps(["migrations"]) choices = [('a', 'A'), ('b', 'B')] class Author(models.Model): name = models.CharField(max_length=255) choice = models.CharField(max_length=255, choices=iter(choices)) class Meta: app_label = "migrations" apps = new_apps ProjectState.from_apps(new_apps) choices_field = Author._meta.get_field('choice') self.assertEqual(list(choices_field.choices), choices) class ModelStateTests(SimpleTestCase): def test_custom_model_base(self): state = ModelState.from_model(ModelWithCustomBase) self.assertEqual(state.bases, (models.Model,)) def test_bound_field_sanity_check(self): field = models.CharField(max_length=1) field.model = models.Model with self.assertRaisesMessage(ValueError, 'ModelState.fields cannot be bound to a model - "field" is.'): ModelState('app', 'Model', [('field', field)]) def test_sanity_check_to(self): field = models.ForeignKey(UnicodeModel, models.CASCADE) with self.assertRaisesMessage( ValueError, 'ModelState.fields cannot refer to a model class - "field.to" does. ' 'Use a string reference instead.' ): ModelState('app', 'Model', [('field', field)]) def test_sanity_check_through(self): field = models.ManyToManyField('UnicodeModel') field.remote_field.through = UnicodeModel with self.assertRaisesMessage( ValueError, 'ModelState.fields cannot refer to a model class - "field.through" does. ' 'Use a string reference instead.' ): ModelState('app', 'Model', [('field', field)]) def test_sanity_index_name(self): field = models.IntegerField() options = {'indexes': [models.Index(fields=['field'])]} msg = "Indexes passed to ModelState require a name attribute. <Index: fields='field'> doesn't have one." 
        with self.assertRaisesMessage(ValueError, msg):
            ModelState('app', 'Model', [('field', field)], options=options)

    def test_fields_immutability(self):
        """
        Tests that rendering a model state doesn't alter its internal fields.
        """
        apps = Apps()
        field = models.CharField(max_length=1)
        state = ModelState('app', 'Model', [('name', field)])
        Model = state.render(apps)
        self.assertNotEqual(Model._meta.get_field('name'), field)

    def test_repr(self):
        field = models.CharField(max_length=1)
        state = ModelState('app', 'Model', [('name', field)], bases=['app.A', 'app.B', 'app.C'])
        self.assertEqual(repr(state), "<ModelState: 'app.Model'>")

        project_state = ProjectState()
        project_state.add_model(state)
        with self.assertRaisesMessage(InvalidBasesError, "Cannot resolve bases for [<ModelState: 'app.Model'>]"):
            project_state.apps

    @override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel')
    def test_create_swappable(self):
        """
        Tests making a ProjectState from an Apps with a swappable model
        """
        new_apps = Apps(['migrations'])

        class Author(models.Model):
            name = models.CharField(max_length=255)
            bio = models.TextField()
            age = models.IntegerField(blank=True, null=True)

            class Meta:
                app_label = 'migrations'
                apps = new_apps
                swappable = 'TEST_SWAPPABLE_MODEL'

        author_state = ModelState.from_model(Author)
        self.assertEqual(author_state.app_label, 'migrations')
        self.assertEqual(author_state.name, 'Author')
        self.assertEqual([x for x, y in author_state.fields], ['id', 'name', 'bio', 'age'])
        self.assertEqual(author_state.fields[1][1].max_length, 255)
        self.assertIs(author_state.fields[2][1].null, False)
        self.assertIs(author_state.fields[3][1].null, True)
        self.assertEqual(author_state.options, {'swappable': 'TEST_SWAPPABLE_MODEL', 'indexes': []})
        self.assertEqual(author_state.bases, (models.Model, ))
        self.assertEqual(author_state.managers, [])

    @override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel')
    def test_custom_manager_swappable(self):
        """
        Tests making a ProjectState from unused models with custom managers
        """
        new_apps = Apps(['migrations'])

        class Food(models.Model):
            food_mgr = FoodManager('a', 'b')
            food_qs = FoodQuerySet.as_manager()
            food_no_mgr = NoMigrationFoodManager('x', 'y')

            class Meta:
                app_label = "migrations"
                apps = new_apps
                swappable = 'TEST_SWAPPABLE_MODEL'

        food_state = ModelState.from_model(Food)

        # The default manager is used in migrations
        self.assertEqual([name for name, mgr in food_state.managers], ['food_mgr'])
        self.assertEqual(food_state.managers[0][1].args, ('a', 'b', 1, 2))

    @isolate_apps('migrations', 'django.contrib.contenttypes')
    def test_order_with_respect_to_private_field(self):
        class PrivateFieldModel(models.Model):
            content_type = models.ForeignKey('contenttypes.ContentType', models.CASCADE)
            object_id = models.PositiveIntegerField()
            private = GenericForeignKey()

            class Meta:
                order_with_respect_to = 'private'

        state = ModelState.from_model(PrivateFieldModel)
        self.assertNotIn('order_with_respect_to', state.options)


class RelatedModelsTests(SimpleTestCase):

    def setUp(self):
        self.apps = Apps(['migrations.related_models_app'])

    def create_model(self, name, foreign_keys=[], bases=(), abstract=False, proxy=False):
        test_name = 'related_models_app'
        assert not (abstract and proxy)
        meta_contents = {
            'abstract': abstract,
            'app_label': test_name,
            'apps': self.apps,
            'proxy': proxy,
        }
        meta = type(str("Meta"), tuple(), meta_contents)
        if not bases:
            bases = (models.Model,)
        body = {
            'Meta': meta,
            '__module__': "__fake__",
        }
        fname_base = fname = '%s_%%d' % name.lower()
        for i, fk in enumerate(foreign_keys, 1):
            fname = fname_base % i
            body[fname] = fk
        return type(name, bases, body)

    def assertRelated(self, model, needle):
        self.assertEqual(
            get_related_models_recursive(model),
            {(n._meta.app_label, n._meta.model_name) for n in needle},
        )

    def test_unrelated(self):
        A = self.create_model("A")
        B = self.create_model("B")
        self.assertRelated(A, [])
        self.assertRelated(B, [])

    def test_direct_fk(self):
        A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)])
        B = self.create_model("B")
        self.assertRelated(A, [B])
        self.assertRelated(B, [A])

    def test_direct_hidden_fk(self):
        A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE, related_name='+')])
        B = self.create_model("B")
        self.assertRelated(A, [B])
        self.assertRelated(B, [A])

    def test_fk_through_proxy(self):
        A = self.create_model("A")
        B = self.create_model("B", bases=(A,), proxy=True)
        C = self.create_model("C", bases=(B,), proxy=True)
        D = self.create_model("D", foreign_keys=[models.ForeignKey('C', models.CASCADE)])
        self.assertRelated(A, [B, C, D])
        self.assertRelated(B, [A, C, D])
        self.assertRelated(C, [A, B, D])
        self.assertRelated(D, [A, B, C])

    def test_nested_fk(self):
        A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)])
        B = self.create_model("B", foreign_keys=[models.ForeignKey('C', models.CASCADE)])
        C = self.create_model("C")
        self.assertRelated(A, [B, C])
        self.assertRelated(B, [A, C])
        self.assertRelated(C, [A, B])

    def test_two_sided(self):
        A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)])
        B = self.create_model("B", foreign_keys=[models.ForeignKey('A', models.CASCADE)])
        self.assertRelated(A, [B])
        self.assertRelated(B, [A])

    def test_circle(self):
        A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)])
        B = self.create_model("B", foreign_keys=[models.ForeignKey('C', models.CASCADE)])
        C = self.create_model("C", foreign_keys=[models.ForeignKey('A', models.CASCADE)])
        self.assertRelated(A, [B, C])
        self.assertRelated(B, [A, C])
        self.assertRelated(C, [A, B])

    def test_base(self):
        A = self.create_model("A")
        B = self.create_model("B", bases=(A,))
        self.assertRelated(A, [B])
        self.assertRelated(B, [A])

    def test_nested_base(self):
        A = self.create_model("A")
        B = self.create_model("B", bases=(A,))
        C = self.create_model("C", bases=(B,))
        self.assertRelated(A, [B, C])
        self.assertRelated(B, [A, C])
        self.assertRelated(C, [A, B])

    def test_multiple_bases(self):
        A = self.create_model("A")
        B = self.create_model("B")
        C = self.create_model("C", bases=(A, B,))
        self.assertRelated(A, [B, C])
        self.assertRelated(B, [A, C])
        self.assertRelated(C, [A, B])

    def test_multiple_nested_bases(self):
        A = self.create_model("A")
        B = self.create_model("B")
        C = self.create_model("C", bases=(A, B,))
        D = self.create_model("D")
        E = self.create_model("E", bases=(D,))
        F = self.create_model("F", bases=(C, E,))
        Y = self.create_model("Y")
        Z = self.create_model("Z", bases=(Y,))
        self.assertRelated(A, [B, C, D, E, F])
        self.assertRelated(B, [A, C, D, E, F])
        self.assertRelated(C, [A, B, D, E, F])
        self.assertRelated(D, [A, B, C, E, F])
        self.assertRelated(E, [A, B, C, D, F])
        self.assertRelated(F, [A, B, C, D, E])
        self.assertRelated(Y, [Z])
        self.assertRelated(Z, [Y])

    def test_base_to_base_fk(self):
        A = self.create_model("A", foreign_keys=[models.ForeignKey('Y', models.CASCADE)])
        B = self.create_model("B", bases=(A,))
        Y = self.create_model("Y")
        Z = self.create_model("Z", bases=(Y,))
        self.assertRelated(A, [B, Y, Z])
        self.assertRelated(B, [A, Y, Z])
        self.assertRelated(Y, [A, B, Z])
        self.assertRelated(Z, [A, B, Y])

    def test_base_to_subclass_fk(self):
        A = self.create_model("A", foreign_keys=[models.ForeignKey('Z', models.CASCADE)])
        B = self.create_model("B", bases=(A,))
        Y = self.create_model("Y")
        Z = self.create_model("Z", bases=(Y,))
        self.assertRelated(A, [B, Y, Z])
        self.assertRelated(B, [A, Y, Z])
        self.assertRelated(Y, [A, B, Z])
        self.assertRelated(Z, [A, B, Y])

    def test_direct_m2m(self):
        A = self.create_model("A", foreign_keys=[models.ManyToManyField('B')])
        B = self.create_model("B")
        self.assertRelated(A, [A.a_1.rel.through, B])
        self.assertRelated(B, [A, A.a_1.rel.through])

    def test_direct_m2m_self(self):
        A = self.create_model("A", foreign_keys=[models.ManyToManyField('A')])
        self.assertRelated(A, [A.a_1.rel.through])

    def test_intermediate_m2m_self(self):
        A = self.create_model("A", foreign_keys=[models.ManyToManyField('A', through='T')])
        T = self.create_model("T", foreign_keys=[
            models.ForeignKey('A', models.CASCADE),
            models.ForeignKey('A', models.CASCADE),
        ])
        self.assertRelated(A, [T])
        self.assertRelated(T, [A])

    def test_intermediate_m2m(self):
        A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')])
        B = self.create_model("B")
        T = self.create_model("T", foreign_keys=[
            models.ForeignKey('A', models.CASCADE),
            models.ForeignKey('B', models.CASCADE),
        ])
        self.assertRelated(A, [B, T])
        self.assertRelated(B, [A, T])
        self.assertRelated(T, [A, B])

    def test_intermediate_m2m_extern_fk(self):
        A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')])
        B = self.create_model("B")
        Z = self.create_model("Z")
        T = self.create_model("T", foreign_keys=[
            models.ForeignKey('A', models.CASCADE),
            models.ForeignKey('B', models.CASCADE),
            models.ForeignKey('Z', models.CASCADE),
        ])
        self.assertRelated(A, [B, T, Z])
        self.assertRelated(B, [A, T, Z])
        self.assertRelated(T, [A, B, Z])
        self.assertRelated(Z, [A, B, T])

    def test_intermediate_m2m_base(self):
        A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')])
        B = self.create_model("B")
        S = self.create_model("S")
        T = self.create_model("T", foreign_keys=[
            models.ForeignKey('A', models.CASCADE),
            models.ForeignKey('B', models.CASCADE),
        ], bases=(S,))
        self.assertRelated(A, [B, S, T])
        self.assertRelated(B, [A, S, T])
        self.assertRelated(S, [A, B, T])
        self.assertRelated(T, [A, B, S])

    def test_generic_fk(self):
        A = self.create_model("A", foreign_keys=[
            models.ForeignKey('B', models.CASCADE),
            GenericForeignKey(),
        ])
        B = self.create_model("B", foreign_keys=[
            models.ForeignKey('C', models.CASCADE),
        ])
        self.assertRelated(A, [B])
        self.assertRelated(B, [A])

    def test_abstract_base(self):
        A = self.create_model("A", abstract=True)
        B = self.create_model("B", bases=(A,))
        self.assertRelated(A, [B])
        self.assertRelated(B, [])

    def test_nested_abstract_base(self):
        A = self.create_model("A", abstract=True)
        B = self.create_model("B", bases=(A,), abstract=True)
        C = self.create_model("C", bases=(B,))
        self.assertRelated(A, [B, C])
        self.assertRelated(B, [C])
        self.assertRelated(C, [])

    def test_proxy_base(self):
        A = self.create_model("A")
        B = self.create_model("B", bases=(A,), proxy=True)
        self.assertRelated(A, [B])
        self.assertRelated(B, [])

    def test_nested_proxy_base(self):
        A = self.create_model("A")
        B = self.create_model("B", bases=(A,), proxy=True)
        C = self.create_model("C", bases=(B,), proxy=True)
        self.assertRelated(A, [B, C])
        self.assertRelated(B, [C])
        self.assertRelated(C, [])

    def test_multiple_mixed_bases(self):
        A = self.create_model("A", abstract=True)
        M = self.create_model("M")
        P = self.create_model("P")
        Q = self.create_model("Q", bases=(P,), proxy=True)
        Z = self.create_model("Z", bases=(A, M, Q))
        # M has a pointer O2O field p_ptr to P
        self.assertRelated(A, [M, P, Q, Z])
        self.assertRelated(M, [P, Q, Z])
        self.assertRelated(P, [M, Q, Z])
        self.assertRelated(Q, [M, P, Z])
        self.assertRelated(Z, [M, P, Q])
0378b6d216bcf9ddf7f08b6ff3853b8e1cd09600d71df5ceaf4407126e090ee5
# coding: utf-8
from __future__ import unicode_literals

import re

from django.forms import CharField, Form, Media
from django.http import HttpRequest
from django.middleware.csrf import (
    CsrfViewMiddleware, _compare_salted_tokens as equivalent_tokens, get_token,
)
from django.template import TemplateDoesNotExist, TemplateSyntaxError
from django.template.backends.dummy import TemplateStrings
from django.test import SimpleTestCase


class TemplateStringsTests(SimpleTestCase):

    engine_class = TemplateStrings
    backend_name = 'dummy'
    options = {}

    @classmethod
    def setUpClass(cls):
        super(TemplateStringsTests, cls).setUpClass()
        params = {
            'DIRS': [],
            'APP_DIRS': True,
            'NAME': cls.backend_name,
            'OPTIONS': cls.options,
        }
        cls.engine = cls.engine_class(params)

    def test_from_string(self):
        template = self.engine.from_string("Hello!\n")
        content = template.render()
        self.assertEqual(content, "Hello!\n")

    def test_get_template(self):
        template = self.engine.get_template('template_backends/hello.html')
        content = template.render({'name': 'world'})
        self.assertEqual(content, "Hello world!\n")

    def test_get_template_non_existing(self):
        with self.assertRaises(TemplateDoesNotExist) as e:
            self.engine.get_template('template_backends/non_existing.html')
        self.assertEqual(e.exception.backend, self.engine)

    def test_get_template_syntax_error(self):
        # There's no way to trigger a syntax error with the dummy backend.
        # The test still lives here to factor it between other backends.
        if self.backend_name == 'dummy':
            self.skipTest("test doesn't apply to dummy backend")
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('template_backends/syntax_error.html')

    def test_html_escaping(self):
        template = self.engine.get_template('template_backends/hello.html')
        context = {'name': '<script>alert("XSS!");</script>'}
        content = template.render(context)

        self.assertIn('&lt;script&gt;', content)
        self.assertNotIn('<script>', content)

    def test_django_html_escaping(self):
        if self.backend_name == 'dummy':
            self.skipTest("test doesn't apply to dummy backend")

        class TestForm(Form):
            test_field = CharField()

        media = Media(js=['my-script.js'])
        form = TestForm()
        template = self.engine.get_template('template_backends/django_escaping.html')
        content = template.render({'media': media, 'test_form': form})

        expected = '{}\n\n{}\n\n{}'.format(media, form, form['test_field'])
        self.assertHTMLEqual(content, expected)

    def test_csrf_token(self):
        request = HttpRequest()
        CsrfViewMiddleware().process_view(request, lambda r: None, (), {})

        template = self.engine.get_template('template_backends/csrf.html')
        content = template.render(request=request)

        expected = '<input type="hidden" name="csrfmiddlewaretoken" value="([^"]+)" />'
        match = re.match(expected, content) or re.match(expected.replace('"', "'"), content)
        self.assertTrue(match, "hidden csrftoken field not found in output")
        self.assertTrue(equivalent_tokens(match.group(1), get_token(request)))

    def test_no_directory_traversal(self):
        with self.assertRaises(TemplateDoesNotExist):
            self.engine.get_template('../forbidden/template_backends/hello.html')

    def test_non_ascii_characters(self):
        template = self.engine.get_template('template_backends/hello.html')
        content = template.render({'name': 'Jérôme'})
        self.assertEqual(content, "Hello Jérôme!\n")
5defebaae7746c614ee256da62dc1c51b4e2864efd7df1c1f27dabff17906883
from django.core.exceptions import ImproperlyConfigured
from django.template import engines
from django.test import SimpleTestCase, override_settings


class TemplateStringsTests(SimpleTestCase):

    @override_settings(TEMPLATES=[{
        'BACKEND': 'raise.import.error',
    }])
    def test_backend_import_error(self):
        """
        Failing to import a backend keeps raising the original import error.

        Regression test for #24265.
        """
        with self.assertRaises(ImportError):
            engines.all()
        with self.assertRaises(ImportError):
            engines.all()

    @override_settings(TEMPLATES=[{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # Incorrect: APP_DIRS and loaders are mutually incompatible.
        'APP_DIRS': True,
        'OPTIONS': {'loaders': []},
    }])
    def test_backend_improperly_configured(self):
        """
        Failing to initialize a backend keeps raising the original exception.

        Regression test for #24265.
        """
        with self.assertRaises(ImproperlyConfigured):
            engines.all()
        with self.assertRaises(ImproperlyConfigured):
            engines.all()

    @override_settings(TEMPLATES=[{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
    }, {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
    }])
    def test_backend_names_must_be_unique(self):
        with self.assertRaises(ImproperlyConfigured):
            engines.all()
abc877a45ec2a5011515e4b7dbaea3e680429e2e1aab37144130508f31ca9830
from template_tests.test_response import test_processor_name

from django.template import EngineHandler
from django.template.backends.django import DjangoTemplates
from django.template.library import InvalidTemplateLibrary
from django.test import RequestFactory, override_settings

from .test_dummy import TemplateStringsTests


class DjangoTemplatesTests(TemplateStringsTests):

    engine_class = DjangoTemplates
    backend_name = 'django'

    def test_context_has_priority_over_template_context_processors(self):
        # See ticket #23789.
        engine = DjangoTemplates({
            'DIRS': [],
            'APP_DIRS': False,
            'NAME': 'django',
            'OPTIONS': {
                'context_processors': [test_processor_name],
            },
        })

        template = engine.from_string('{{ processors }}')
        request = RequestFactory().get('/')

        # Check that context processors run
        content = template.render({}, request)
        self.assertEqual(content, 'yes')

        # Check that context overrides context processors
        content = template.render({'processors': 'no'}, request)
        self.assertEqual(content, 'no')

    @override_settings(INSTALLED_APPS=['template_backends.apps.good'])
    def test_templatetag_discovery(self):
        engine = DjangoTemplates({
            'DIRS': [],
            'APP_DIRS': False,
            'NAME': 'django',
            'OPTIONS': {
                'libraries': {
                    'alternate': 'template_backends.apps.good.templatetags.good_tags',
                    'override': 'template_backends.apps.good.templatetags.good_tags',
                },
            },
        })

        # libraries are discovered from installed applications
        self.assertEqual(
            engine.engine.libraries['good_tags'],
            'template_backends.apps.good.templatetags.good_tags',
        )
        self.assertEqual(
            engine.engine.libraries['subpackage.tags'],
            'template_backends.apps.good.templatetags.subpackage.tags',
        )
        # libraries are discovered from django.templatetags
        self.assertEqual(
            engine.engine.libraries['static'],
            'django.templatetags.static',
        )
        # libraries passed in OPTIONS are registered
        self.assertEqual(
            engine.engine.libraries['alternate'],
            'template_backends.apps.good.templatetags.good_tags',
        )
        # libraries passed in OPTIONS take precedence over discovered ones
        self.assertEqual(
            engine.engine.libraries['override'],
            'template_backends.apps.good.templatetags.good_tags',
        )

    @override_settings(INSTALLED_APPS=['template_backends.apps.importerror'])
    def test_templatetag_discovery_import_error(self):
        """
        Import errors in tag modules should be reraised with a helpful message.
""" with self.assertRaisesMessage( InvalidTemplateLibrary, "ImportError raised when trying to load " "'template_backends.apps.importerror.templatetags.broken_tags'" ): DjangoTemplates({ 'DIRS': [], 'APP_DIRS': False, 'NAME': 'django', 'OPTIONS': {}, }) def test_builtins_discovery(self): engine = DjangoTemplates({ 'DIRS': [], 'APP_DIRS': False, 'NAME': 'django', 'OPTIONS': { 'builtins': ['template_backends.apps.good.templatetags.good_tags'], }, }) self.assertEqual( engine.engine.builtins, [ 'django.template.defaulttags', 'django.template.defaultfilters', 'django.template.loader_tags', 'template_backends.apps.good.templatetags.good_tags', ] ) def test_autoescape_off(self): templates = [{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'OPTIONS': {'autoescape': False}, }] engines = EngineHandler(templates=templates) self.assertEqual( engines['django'].from_string('Hello, {{ name }}').render({'name': 'Bob & Jim'}), 'Hello, Bob & Jim' ) def test_autoescape_default(self): templates = [{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', }] engines = EngineHandler(templates=templates) self.assertEqual( engines['django'].from_string('Hello, {{ name }}').render({'name': 'Bob & Jim'}), 'Hello, Bob &amp; Jim' ) default_loaders = [ 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ] @override_settings(DEBUG=False) def test_non_debug_default_template_loaders(self): engine = DjangoTemplates({'DIRS': [], 'APP_DIRS': True, 'NAME': 'django', 'OPTIONS': {}}) self.assertEqual(engine.engine.loaders, [('django.template.loaders.cached.Loader', self.default_loaders)]) @override_settings(DEBUG=True) def test_debug_default_template_loaders(self): engine = DjangoTemplates({'DIRS': [], 'APP_DIRS': True, 'NAME': 'django', 'OPTIONS': {}}) self.assertEqual(engine.engine.loaders, self.default_loaders)