# hash: 9ed2fdfd2cc1753708fac4926f6483428fa279f344bc0fcb03486e34140708bb
import json
import math
import re
from decimal import Decimal
from django.contrib.gis.db.models import functions
from django.contrib.gis.geos import (
GEOSGeometry, LineString, Point, Polygon, fromstr,
)
from django.contrib.gis.measure import Area
from django.db import NotSupportedError, connection
from django.db.models import Sum
from django.test import TestCase, ignore_warnings, skipUnlessDBFeature
from django.utils.deprecation import RemovedInDjango30Warning
from ..utils import FuncTestMixin, mysql, oracle, postgis, spatialite
from .models import City, Country, CountryWebMercator, State, Track
class GISFunctionsTests(FuncTestMixin, TestCase):
"""
Testing functions from django/contrib/gis/db/models/functions.py.
Area/Distance/Length/Perimeter are tested in distapp/tests.
Please keep the tests in alphabetical order by function name.
"""
fixtures = ['initial']
def test_asgeojson(self):
# Only PostGIS and SpatiaLite support GeoJSON.
if not connection.features.has_AsGeoJSON_function:
with self.assertRaises(NotSupportedError):
list(Country.objects.annotate(json=functions.AsGeoJSON('mpoly')))
return
pueblo_json = '{"type":"Point","coordinates":[-104.609252,38.255001]}'
houston_json = (
'{"type":"Point","crs":{"type":"name","properties":'
'{"name":"EPSG:4326"}},"coordinates":[-95.363151,29.763374]}'
)
victoria_json = (
'{"type":"Point","bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],'
'"coordinates":[-123.305196,48.462611]}'
)
chicago_json = (
'{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},'
'"bbox":[-87.65018,41.85039,-87.65018,41.85039],"coordinates":[-87.65018,41.85039]}'
)
# MySQL ignores the crs option.
if mysql:
houston_json = json.loads(houston_json)
del houston_json['crs']
chicago_json = json.loads(chicago_json)
del chicago_json['crs']
# Precision argument should only be an integer
with self.assertRaises(TypeError):
City.objects.annotate(geojson=functions.AsGeoJSON('point', precision='foo'))
# Reference queries and values.
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 0)
# FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Pueblo';
self.assertJSONEqual(
pueblo_json,
City.objects.annotate(geojson=functions.AsGeoJSON('point')).get(name='Pueblo').geojson
)
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city"
# WHERE "geoapp_city"."name" = 'Houston';
# This time we want to include the CRS by using the `crs` keyword.
self.assertJSONEqual(
City.objects.annotate(json=functions.AsGeoJSON('point', crs=True)).get(name='Houston').json,
houston_json,
)
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city"
# WHERE "geoapp_city"."name" = 'Houston';
# This time we include the bounding box by using the `bbox` keyword.
self.assertJSONEqual(
victoria_json,
City.objects.annotate(
geojson=functions.AsGeoJSON('point', bbox=True)
).get(name='Victoria').geojson
)
# SELECT ST_AsGeoJson("geoapp_city"."point", 5, 3) FROM "geoapp_city"
# WHERE "geoapp_city"."name" = 'Chicago';
# Finally, we set every available keyword.
self.assertJSONEqual(
City.objects.annotate(
geojson=functions.AsGeoJSON('point', bbox=True, crs=True, precision=5)
).get(name='Chicago').geojson,
chicago_json,
)
@skipUnlessDBFeature("has_AsGML_function")
def test_asgml(self):
# Should throw a TypeError when trying to obtain GML from a
# non-geometry field.
qs = City.objects.all()
with self.assertRaises(TypeError):
qs.annotate(gml=functions.AsGML('name'))
ptown = City.objects.annotate(gml=functions.AsGML('point', precision=9)).get(name='Pueblo')
if oracle:
# No precision parameter for Oracle :-/
gml_regex = re.compile(
r'^<gml:Point srsName="EPSG:4326" xmlns:gml="http://www.opengis.net/gml">'
r'<gml:coordinates decimal="\." cs="," ts=" ">-104.60925\d+,38.25500\d+ '
r'</gml:coordinates></gml:Point>'
)
else:
gml_regex = re.compile(
r'^<gml:Point srsName="EPSG:4326"><gml:coordinates>'
r'-104\.60925\d+,38\.255001</gml:coordinates></gml:Point>'
)
self.assertTrue(gml_regex.match(ptown.gml))
self.assertIn(
'<gml:pos srsDimension="2">',
City.objects.annotate(gml=functions.AsGML('point', version=3)).get(name='Pueblo').gml
)
@skipUnlessDBFeature("has_AsKML_function")
def test_askml(self):
# Should throw a TypeError when trying to obtain KML from a
# non-geometry field.
with self.assertRaises(TypeError):
City.objects.annotate(kml=functions.AsKML('name'))
# Ensuring the KML is as expected.
ptown = City.objects.annotate(kml=functions.AsKML('point', precision=9)).get(name='Pueblo')
self.assertEqual('<Point><coordinates>-104.609252,38.255001</coordinates></Point>', ptown.kml)
@skipUnlessDBFeature("has_AsSVG_function")
def test_assvg(self):
with self.assertRaises(TypeError):
City.objects.annotate(svg=functions.AsSVG('point', precision='foo'))
# SELECT AsSVG(geoapp_city.point, 0, 8) FROM geoapp_city WHERE name = 'Pueblo';
svg1 = 'cx="-104.609252" cy="-38.255001"'
# With only one point, the relative output is practically the same; it
# merely drops the 'c' prefix from the x,y attribute names.
svg2 = svg1.replace('c', '')
self.assertEqual(svg1, City.objects.annotate(svg=functions.AsSVG('point')).get(name='Pueblo').svg)
self.assertEqual(svg2, City.objects.annotate(svg=functions.AsSVG('point', relative=5)).get(name='Pueblo').svg)
@skipUnlessDBFeature("has_Azimuth_function")
def test_azimuth(self):
# Returns the azimuth in radians.
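# (Background note, not from the original test: ST_Azimuth measures the
# bearing clockwise from north, so the direction from (0, 0) to (1, 1)
# is due northeast, i.e. 45 degrees or pi / 4 radians.)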
azimuth_expr = functions.Azimuth(Point(0, 0, srid=4326), Point(1, 1, srid=4326))
self.assertAlmostEqual(City.objects.annotate(azimuth=azimuth_expr).first().azimuth, math.pi / 4)
# Returns None if the two points are coincident.
azimuth_expr = functions.Azimuth(Point(0, 0, srid=4326), Point(0, 0, srid=4326))
self.assertIsNone(City.objects.annotate(azimuth=azimuth_expr).first().azimuth)
@skipUnlessDBFeature("has_BoundingCircle_function")
def test_bounding_circle(self):
def circle_num_points(num_seg):
# num_seg is the number of segments per quarter circle.
return (4 * num_seg) + 1
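# (Explanatory note: a circle approximated with num_seg segments per
# quarter arc has 4 * num_seg distinct vertices; the polygon ring then
# repeats its starting point to close itself, hence the + 1.)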
expected_areas = (169, 136) if postgis else (171, 126)
qs = Country.objects.annotate(circle=functions.BoundingCircle('mpoly')).order_by('name')
self.assertAlmostEqual(qs[0].circle.area, expected_areas[0], 0)
self.assertAlmostEqual(qs[1].circle.area, expected_areas[1], 0)
if postgis:
# By default num_seg=48.
self.assertEqual(qs[0].circle.num_points, circle_num_points(48))
self.assertEqual(qs[1].circle.num_points, circle_num_points(48))
qs = Country.objects.annotate(circle=functions.BoundingCircle('mpoly', num_seg=12)).order_by('name')
if postgis:
self.assertGreater(qs[0].circle.area, 168.4)
self.assertLess(qs[0].circle.area, 169.5)
self.assertAlmostEqual(qs[1].circle.area, 136, 0)
self.assertEqual(qs[0].circle.num_points, circle_num_points(12))
self.assertEqual(qs[1].circle.num_points, circle_num_points(12))
else:
self.assertAlmostEqual(qs[0].circle.area, expected_areas[0], 0)
self.assertAlmostEqual(qs[1].circle.area, expected_areas[1], 0)
@skipUnlessDBFeature("has_Centroid_function")
def test_centroid(self):
qs = State.objects.exclude(poly__isnull=True).annotate(centroid=functions.Centroid('poly'))
tol = 1.8 if mysql else (0.1 if oracle else 0.00001)
for state in qs:
self.assertTrue(state.poly.centroid.equals_exact(state.centroid, tol))
with self.assertRaisesMessage(TypeError, "'Centroid' takes exactly 1 argument (2 given)"):
State.objects.annotate(centroid=functions.Centroid('poly', 'poly'))
@skipUnlessDBFeature("has_Difference_function")
def test_difference(self):
geom = Point(5, 23, srid=4326)
qs = Country.objects.annotate(diff=functions.Difference('mpoly', geom))
# Oracle does something screwy with the Texas geometry.
if oracle:
qs = qs.exclude(name='Texas')
for c in qs:
self.assertTrue(c.mpoly.difference(geom).equals(c.diff))
@skipUnlessDBFeature("has_Difference_function", "has_Transform_function")
def test_difference_mixed_srid(self):
"""Testing with mixed SRID (Country has default 4326)."""
geom = Point(556597.4, 2632018.6, srid=3857) # Spherical Mercator
qs = Country.objects.annotate(difference=functions.Difference('mpoly', geom))
# Oracle does something screwy with the Texas geometry.
if oracle:
qs = qs.exclude(name='Texas')
for c in qs:
self.assertTrue(c.mpoly.difference(geom).equals(c.difference))
@skipUnlessDBFeature("has_Envelope_function")
def test_envelope(self):
countries = Country.objects.annotate(envelope=functions.Envelope('mpoly'))
for country in countries:
self.assertTrue(country.envelope.equals(country.mpoly.envelope))
@skipUnlessDBFeature("has_ForcePolygonCW_function")
def test_force_polygon_cw(self):
rings = (
((0, 0), (5, 0), (0, 5), (0, 0)),
((1, 1), (1, 3), (3, 1), (1, 1)),
)
rhr_rings = (
((0, 0), (0, 5), (5, 0), (0, 0)),
((1, 1), (3, 1), (1, 3), (1, 1)),
)
State.objects.create(name='Foo', poly=Polygon(*rings))
st = State.objects.annotate(force_polygon_cw=functions.ForcePolygonCW('poly')).get(name='Foo')
self.assertEqual(rhr_rings, st.force_polygon_cw.coords)
@skipUnlessDBFeature("has_ForceRHR_function")
@ignore_warnings(category=RemovedInDjango30Warning)
def test_force_rhr(self):
rings = (
((0, 0), (5, 0), (0, 5), (0, 0)),
((1, 1), (1, 3), (3, 1), (1, 1)),
)
rhr_rings = (
((0, 0), (0, 5), (5, 0), (0, 0)),
((1, 1), (3, 1), (1, 3), (1, 1)),
)
State.objects.create(name='Foo', poly=Polygon(*rings))
st = State.objects.annotate(force_rhr=functions.ForceRHR('poly')).get(name='Foo')
self.assertEqual(rhr_rings, st.force_rhr.coords)
@skipUnlessDBFeature("has_GeoHash_function")
def test_geohash(self):
# Reference query:
# SELECT ST_GeoHash(point) FROM geoapp_city WHERE name='Houston';
# SELECT ST_GeoHash(point, 5) FROM geoapp_city WHERE name='Houston';
ref_hash = '9vk1mfq8jx0c8e0386z6'
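# (Explanatory note: geohashes nest by prefix -- truncating a hash yields
# the hash of a containing cell -- which is why precision=5 below simply
# returns the first five characters of the full-precision hash.)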
h1 = City.objects.annotate(geohash=functions.GeoHash('point')).get(name='Houston')
h2 = City.objects.annotate(geohash=functions.GeoHash('point', precision=5)).get(name='Houston')
self.assertEqual(ref_hash, h1.geohash[:len(ref_hash)])
self.assertEqual(ref_hash[:5], h2.geohash)
@skipUnlessDBFeature("has_Intersection_function")
def test_intersection(self):
geom = Point(5, 23, srid=4326)
qs = Country.objects.annotate(inter=functions.Intersection('mpoly', geom))
for c in qs:
if spatialite or (mysql and not connection.features.supports_empty_geometry_collection) or oracle:
# When the intersection is empty, some databases return None.
expected = None
else:
expected = c.mpoly.intersection(geom)
self.assertEqual(c.inter, expected)
@skipUnlessDBFeature("has_IsValid_function")
def test_isvalid(self):
valid_geom = fromstr('POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
invalid_geom = fromstr('POLYGON((0 0, 0 1, 1 1, 1 0, 1 1, 1 0, 0 0))')
State.objects.create(name='valid', poly=valid_geom)
State.objects.create(name='invalid', poly=invalid_geom)
valid = State.objects.filter(name='valid').annotate(isvalid=functions.IsValid('poly')).first()
invalid = State.objects.filter(name='invalid').annotate(isvalid=functions.IsValid('poly')).first()
self.assertIs(valid.isvalid, True)
self.assertIs(invalid.isvalid, False)
@skipUnlessDBFeature("has_Area_function")
def test_area_with_regular_aggregate(self):
# Create projected country objects, for this test to work on all backends.
for c in Country.objects.all():
CountryWebMercator.objects.create(name=c.name, mpoly=c.mpoly.transform(3857, clone=True))
# Test in projected coordinate system
qs = CountryWebMercator.objects.annotate(area_sum=Sum(functions.Area('mpoly')))
# Some backends (e.g. Oracle) cannot group by multipolygon values, so
# defer such fields in the aggregation query.
for c in qs.defer('mpoly'):
result = c.area_sum
# If the result is a measure object, get value.
if isinstance(result, Area):
result = result.sq_m
self.assertAlmostEqual((result - c.mpoly.area) / c.mpoly.area, 0)
@skipUnlessDBFeature("has_Area_function")
def test_area_lookups(self):
# Create projected countries so the test works on all backends.
CountryWebMercator.objects.bulk_create(
CountryWebMercator(name=c.name, mpoly=c.mpoly.transform(3857, clone=True))
for c in Country.objects.all()
)
qs = CountryWebMercator.objects.annotate(area=functions.Area('mpoly'))
self.assertEqual(qs.get(area__lt=Area(sq_km=500000)), CountryWebMercator.objects.get(name='New Zealand'))
with self.assertRaisesMessage(ValueError, 'AreaField only accepts Area measurement objects.'):
qs.get(area__lt=500000)
@skipUnlessDBFeature("has_LineLocatePoint_function")
def test_line_locate_point(self):
pos_expr = functions.LineLocatePoint(LineString((0, 0), (0, 3), srid=4326), Point(0, 1, srid=4326))
self.assertAlmostEqual(State.objects.annotate(pos=pos_expr).first().pos, 0.3333333)
@skipUnlessDBFeature("has_MakeValid_function")
def test_make_valid(self):
invalid_geom = fromstr('POLYGON((0 0, 0 1, 1 1, 1 0, 1 1, 1 0, 0 0))')
State.objects.create(name='invalid', poly=invalid_geom)
invalid = State.objects.filter(name='invalid').annotate(repaired=functions.MakeValid('poly')).first()
self.assertIs(invalid.repaired.valid, True)
self.assertEqual(invalid.repaired, fromstr('POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))', srid=invalid.poly.srid))
@skipUnlessDBFeature("has_MemSize_function")
def test_memsize(self):
ptown = City.objects.annotate(size=functions.MemSize('point')).get(name='Pueblo')
self.assertTrue(20 <= ptown.size <= 40) # Exact value may depend on PostGIS version
@skipUnlessDBFeature("has_NumGeom_function")
def test_num_geom(self):
# Both 'countries' only have two geometries.
for c in Country.objects.annotate(num_geom=functions.NumGeometries('mpoly')):
self.assertEqual(2, c.num_geom)
qs = City.objects.filter(point__isnull=False).annotate(num_geom=functions.NumGeometries('point'))
for city in qs:
# Oracle and PostGIS return 1 for the number of geometries on
# non-collections, whereas MySQL returns None.
if mysql:
self.assertIsNone(city.num_geom)
else:
self.assertEqual(1, city.num_geom)
@skipUnlessDBFeature("has_NumPoint_function")
def test_num_points(self):
coords = [(-95.363151, 29.763374), (-95.448601, 29.713803)]
Track.objects.create(name='Foo', line=LineString(coords))
qs = Track.objects.annotate(num_points=functions.NumPoints('line'))
self.assertEqual(qs.first().num_points, 2)
mpoly_qs = Country.objects.annotate(num_points=functions.NumPoints('mpoly'))
if not connection.features.supports_num_points_poly:
for c in mpoly_qs:
self.assertIsNone(c.num_points)
return
for c in mpoly_qs:
self.assertEqual(c.mpoly.num_points, c.num_points)
for c in City.objects.annotate(num_points=functions.NumPoints('point')):
self.assertEqual(c.num_points, 1)
@skipUnlessDBFeature("has_PointOnSurface_function")
def test_point_on_surface(self):
# Reference values.
if oracle:
# SELECT SDO_UTIL.TO_WKTGEOMETRY(SDO_GEOM.SDO_POINTONSURFACE(GEOAPP_COUNTRY.MPOLY, 0.05))
# FROM GEOAPP_COUNTRY;
ref = {'New Zealand': fromstr('POINT (174.616364 -36.100861)', srid=4326),
'Texas': fromstr('POINT (-103.002434 36.500397)', srid=4326),
}
else:
# Using GEOSGeometry to compute the reference point on surface values
# -- since PostGIS also uses GEOS these should be the same.
ref = {'New Zealand': Country.objects.get(name='New Zealand').mpoly.point_on_surface,
'Texas': Country.objects.get(name='Texas').mpoly.point_on_surface
}
qs = Country.objects.annotate(point_on_surface=functions.PointOnSurface('mpoly'))
for country in qs:
tol = 0.00001 # SpatiaLite might have WKT-translation-related precision issues
self.assertTrue(ref[country.name].equals_exact(country.point_on_surface, tol))
@skipUnlessDBFeature("has_Reverse_function")
def test_reverse_geom(self):
coords = [(-95.363151, 29.763374), (-95.448601, 29.713803)]
Track.objects.create(name='Foo', line=LineString(coords))
track = Track.objects.annotate(reverse_geom=functions.Reverse('line')).get(name='Foo')
coords.reverse()
self.assertEqual(tuple(coords), track.reverse_geom.coords)
@skipUnlessDBFeature("has_Scale_function")
def test_scale(self):
xfac, yfac = 2, 3
tol = 5 # The low precision tolerance is for SpatiaLite
qs = Country.objects.annotate(scaled=functions.Scale('mpoly', xfac, yfac))
for country in qs:
for p1, p2 in zip(country.mpoly, country.scaled):
for r1, r2 in zip(p1, p2):
for c1, c2 in zip(r1.coords, r2.coords):
self.assertAlmostEqual(c1[0] * xfac, c2[0], tol)
self.assertAlmostEqual(c1[1] * yfac, c2[1], tol)
# Test float/Decimal values
qs = Country.objects.annotate(scaled=functions.Scale('mpoly', 1.5, Decimal('2.5')))
self.assertGreater(qs[0].scaled.area, qs[0].mpoly.area)
@skipUnlessDBFeature("has_SnapToGrid_function")
def test_snap_to_grid(self):
# Let's try and break snap_to_grid() with bad combinations of arguments.
for bad_args in ((), range(3), range(5)):
with self.assertRaises(ValueError):
Country.objects.annotate(snap=functions.SnapToGrid('mpoly', *bad_args))
for bad_args in (('1.0',), (1.0, None), tuple(map(str, range(4)))):
with self.assertRaises(TypeError):
Country.objects.annotate(snap=functions.SnapToGrid('mpoly', *bad_args))
# Boundary for San Marino, courtesy of Bjorn Sandvik of thematicmapping.org
# from the world borders dataset he provides.
wkt = ('MULTIPOLYGON(((12.41580 43.95795,12.45055 43.97972,12.45389 43.98167,'
'12.46250 43.98472,12.47167 43.98694,12.49278 43.98917,'
'12.50555 43.98861,12.51000 43.98694,12.51028 43.98277,'
'12.51167 43.94333,12.51056 43.93916,12.49639 43.92333,'
'12.49500 43.91472,12.48778 43.90583,12.47444 43.89722,'
'12.46472 43.89555,12.45917 43.89611,12.41639 43.90472,'
'12.41222 43.90610,12.40782 43.91366,12.40389 43.92667,'
'12.40500 43.94833,12.40889 43.95499,12.41580 43.95795)))')
Country.objects.create(name='San Marino', mpoly=fromstr(wkt))
# Because floating-point arithmetic isn't exact, we set a tolerance
# to pass into GEOS `equals_exact`.
tol = 0.000000001
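# (Note: unlike equals(), which tests topological equality, equals_exact()
# compares coordinate values pairwise, succeeding when each pair differs
# by no more than the given tolerance.)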
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.1)) FROM "geoapp_country"
# WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr('MULTIPOLYGON(((12.4 44,12.5 44,12.5 43.9,12.4 43.9,12.4 44)))')
self.assertTrue(
ref.equals_exact(
Country.objects.annotate(
snap=functions.SnapToGrid('mpoly', 0.1)
).get(name='San Marino').snap,
tol
)
)
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.05, 0.23)) FROM "geoapp_country"
# WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr('MULTIPOLYGON(((12.4 43.93,12.45 43.93,12.5 43.93,12.45 43.93,12.4 43.93)))')
self.assertTrue(
ref.equals_exact(
Country.objects.annotate(
snap=functions.SnapToGrid('mpoly', 0.05, 0.23)
).get(name='San Marino').snap,
tol
)
)
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.5, 0.17, 0.05, 0.23)) FROM "geoapp_country"
# WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr(
'MULTIPOLYGON(((12.4 43.87,12.45 43.87,12.45 44.1,12.5 44.1,12.5 43.87,12.45 43.87,12.4 43.87)))'
)
self.assertTrue(
ref.equals_exact(
Country.objects.annotate(
snap=functions.SnapToGrid('mpoly', 0.05, 0.23, 0.5, 0.17)
).get(name='San Marino').snap,
tol
)
)
@skipUnlessDBFeature("has_SymDifference_function")
def test_sym_difference(self):
geom = Point(5, 23, srid=4326)
qs = Country.objects.annotate(sym_difference=functions.SymDifference('mpoly', geom))
# Oracle does something screwy with the Texas geometry.
if oracle:
qs = qs.exclude(name='Texas')
for country in qs:
self.assertTrue(country.mpoly.sym_difference(geom).equals(country.sym_difference))
@skipUnlessDBFeature("has_Transform_function")
def test_transform(self):
# Pre-transformed points for Houston and Pueblo.
ptown = fromstr('POINT(992363.390841912 481455.395105533)', srid=2774)
prec = 3 # Precision is low due to version variations in PROJ and GDAL.
# Asserting the result of the transform operation with the values in
# the pre-transformed points.
h = City.objects.annotate(pt=functions.Transform('point', ptown.srid)).get(name='Pueblo')
self.assertEqual(2774, h.pt.srid)
self.assertAlmostEqual(ptown.x, h.pt.x, prec)
self.assertAlmostEqual(ptown.y, h.pt.y, prec)
@skipUnlessDBFeature("has_Translate_function")
def test_translate(self):
xfac, yfac = 5, -23
qs = Country.objects.annotate(translated=functions.Translate('mpoly', xfac, yfac))
for c in qs:
for p1, p2 in zip(c.mpoly, c.translated):
for r1, r2 in zip(p1, p2):
for c1, c2 in zip(r1.coords, r2.coords):
# The low precision is for SpatiaLite
self.assertAlmostEqual(c1[0] + xfac, c2[0], 5)
self.assertAlmostEqual(c1[1] + yfac, c2[1], 5)
# Some combined function tests
@skipUnlessDBFeature(
"has_Difference_function", "has_Intersection_function",
"has_SymDifference_function", "has_Union_function")
def test_diff_intersection_union(self):
geom = Point(5, 23, srid=4326)
qs = Country.objects.all().annotate(
difference=functions.Difference('mpoly', geom),
sym_difference=functions.SymDifference('mpoly', geom),
union=functions.Union('mpoly', geom),
intersection=functions.Intersection('mpoly', geom),
)
if oracle:
# Should be able to execute the queries; however, they won't be the same
# as GEOS (because Oracle doesn't use GEOS internally like PostGIS or
# SpatiaLite).
return
for c in qs:
self.assertTrue(c.mpoly.difference(geom).equals(c.difference))
if not (spatialite or mysql):
self.assertEqual(c.mpoly.intersection(geom), c.intersection)
self.assertTrue(c.mpoly.sym_difference(geom).equals(c.sym_difference))
self.assertTrue(c.mpoly.union(geom).equals(c.union))
@skipUnlessDBFeature("has_Union_function")
def test_union(self):
"""Union with all combinations of geometries/geometry fields."""
geom = Point(-95.363151, 29.763374, srid=4326)
union = City.objects.annotate(union=functions.Union('point', geom)).get(name='Dallas').union
expected = fromstr('MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)', srid=4326)
self.assertTrue(expected.equals(union))
union = City.objects.annotate(union=functions.Union(geom, 'point')).get(name='Dallas').union
self.assertTrue(expected.equals(union))
union = City.objects.annotate(union=functions.Union('point', 'point')).get(name='Dallas').union
expected = GEOSGeometry('POINT(-96.801611 32.782057)', srid=4326)
self.assertTrue(expected.equals(union))
union = City.objects.annotate(union=functions.Union(geom, geom)).get(name='Dallas').union
self.assertTrue(geom.equals(union))
@skipUnlessDBFeature("has_Union_function", "has_Transform_function")
def test_union_mixed_srid(self):
"""The result SRID depends on the order of parameters."""
geom = Point(61.42915, 55.15402, srid=4326)
geom_3857 = geom.transform(3857, clone=True)
tol = 0.001
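# (Assumed behavior, consistent with the assertions below: when SRIDs are
# mixed, the second argument is transformed to the SRID of the first, so
# the union keeps the first argument's SRID.)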
for city in City.objects.annotate(union=functions.Union('point', geom_3857)):
expected = city.point | geom
self.assertTrue(city.union.equals_exact(expected, tol))
self.assertEqual(city.union.srid, 4326)
for city in City.objects.annotate(union=functions.Union(geom_3857, 'point')):
expected = geom_3857 | city.point.transform(3857, clone=True)
self.assertTrue(expected.equals_exact(city.union, tol))
self.assertEqual(city.union.srid, 3857)
def test_argument_validation(self):
with self.assertRaisesMessage(ValueError, 'SRID is required for all geometries.'):
City.objects.annotate(geo=functions.GeoFunc(Point(1, 1)))
msg = 'GeoFunc function requires a GeometryField in position 1, got CharField.'
with self.assertRaisesMessage(TypeError, msg):
City.objects.annotate(geo=functions.GeoFunc('name'))
msg = 'GeoFunc function requires a geometric argument in position 1.'
with self.assertRaisesMessage(TypeError, msg):
City.objects.annotate(union=functions.GeoFunc(1, 'point')).get(name='Dallas')
# hash: 90f3bc11b11eb2cffab7ce7b2c73239c23883932719f0f1d3aecf0addfceb317
from unittest import skipUnless
from django.contrib.gis.db.models import F, GeometryField, Value, functions
from django.contrib.gis.geos import Point, Polygon
from django.db import connection
from django.db.models import Count, Min
from django.test import TestCase, skipUnlessDBFeature
from ..utils import postgis
from .models import City, ManyPointModel, MultiFields
class GeoExpressionsTests(TestCase):
fixtures = ['initial']
def test_geometry_value_annotation(self):
p = Point(1, 1, srid=4326)
point = City.objects.annotate(p=Value(p, GeometryField(srid=4326))).first().p
self.assertEqual(point, p)
@skipUnlessDBFeature('supports_transform')
def test_geometry_value_annotation_different_srid(self):
p = Point(1, 1, srid=32140)
point = City.objects.annotate(p=Value(p, GeometryField(srid=4326))).first().p
self.assertTrue(point.equals_exact(p.transform(4326, clone=True), 10 ** -5))
self.assertEqual(point.srid, 4326)
@skipUnless(postgis, 'Only postgis has geography fields.')
def test_geography_value(self):
p = Polygon(((1, 1), (1, 2), (2, 2), (2, 1), (1, 1)))
area = City.objects.annotate(a=functions.Area(Value(p, GeometryField(srid=4326, geography=True)))).first().a
self.assertAlmostEqual(area.sq_km, 12305.1, 0)
def test_update_from_other_field(self):
p1 = Point(1, 1, srid=4326)
p2 = Point(2, 2, srid=4326)
obj = ManyPointModel.objects.create(
point1=p1,
point2=p2,
point3=p2.transform(3857, clone=True),
)
# Updating a point to a point of the same SRID.
ManyPointModel.objects.filter(pk=obj.pk).update(point2=F('point1'))
obj.refresh_from_db()
self.assertEqual(obj.point2, p1)
# Updating a point to a point with a different SRID.
if connection.features.supports_transform:
ManyPointModel.objects.filter(pk=obj.pk).update(point3=F('point1'))
obj.refresh_from_db()
self.assertTrue(obj.point3.equals_exact(p1.transform(3857, clone=True), 0.1))
def test_multiple_annotation(self):
multi_field = MultiFields.objects.create(
point=Point(1, 1),
city=City.objects.get(name='Houston'),
poly=Polygon(((1, 1), (1, 2), (2, 2), (2, 1), (1, 1))),
)
qs = City.objects.values('name').annotate(
distance=Min(functions.Distance('multifields__point', multi_field.city.point)),
).annotate(count=Count('multifields'))
self.assertTrue(qs.first())
@skipUnlessDBFeature('has_Translate_function')
def test_update_with_expression(self):
city = City.objects.create(point=Point(1, 1, srid=4326))
City.objects.filter(pk=city.pk).update(point=functions.Translate('point', 1, 1))
city.refresh_from_db()
self.assertEqual(city.point, Point(2, 2, srid=4326))
# hash: 594e544346c28c13a938592237ef58d41ab3c49d01f238e39b5d6ce0d0b7a0d3
import tempfile
from io import StringIO
from django.contrib.gis import gdal
from django.contrib.gis.db.models import Extent, MakeLine, Union, functions
from django.contrib.gis.geos import (
GeometryCollection, GEOSGeometry, LinearRing, LineString, MultiLineString,
MultiPoint, MultiPolygon, Point, Polygon, fromstr,
)
from django.core.management import call_command
from django.db import NotSupportedError, connection
from django.test import TestCase, skipUnlessDBFeature
from ..utils import (
mysql, no_oracle, oracle, postgis, skipUnlessGISLookup, spatialite,
)
from .models import (
City, Country, Feature, MinusOneSRID, NonConcreteModel, PennsylvaniaCity,
State, Track,
)
class GeoModelTest(TestCase):
fixtures = ['initial']
def test_fixtures(self):
"Testing geographic model initialization from fixtures."
# Ensuring that data was loaded from initial data fixtures.
self.assertEqual(2, Country.objects.count())
self.assertEqual(8, City.objects.count())
self.assertEqual(2, State.objects.count())
def test_proxy(self):
"Testing Lazy-Geometry support (using the GeometryProxy)."
# Testing on a Point
pnt = Point(0, 0)
nullcity = City(name='NullCity', point=pnt)
nullcity.save()
# Making sure TypeError is thrown when trying to set with an
# incompatible type.
for bad in [5, 2.0, LineString((0, 0), (1, 1))]:
with self.assertRaisesMessage(TypeError, 'Cannot set'):
nullcity.point = bad
# Now setting with a compatible GEOS Geometry, saving, and ensuring
# the save took effect. Note that no SRID is explicitly set.
new = Point(5, 23)
nullcity.point = new
# Ensuring that the SRID is automatically set to that of the
# field after assignment, but before saving.
self.assertEqual(4326, nullcity.point.srid)
nullcity.save()
# Ensuring the point was saved correctly after saving
self.assertEqual(new, City.objects.get(name='NullCity').point)
# Setting the X and Y of the Point
nullcity.point.x = 23
nullcity.point.y = 5
# Checking assignments pre & post-save.
self.assertNotEqual(Point(23, 5, srid=4326), City.objects.get(name='NullCity').point)
nullcity.save()
self.assertEqual(Point(23, 5, srid=4326), City.objects.get(name='NullCity').point)
nullcity.delete()
# Testing on a Polygon
shell = LinearRing((0, 0), (0, 90), (100, 90), (100, 0), (0, 0))
inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40))
# Creating a State object using a built Polygon
ply = Polygon(shell, inner)
nullstate = State(name='NullState', poly=ply)
self.assertEqual(4326, nullstate.poly.srid) # SRID auto-set from None
nullstate.save()
ns = State.objects.get(name='NullState')
self.assertEqual(ply, ns.poly)
# Testing the `ogr` and `srs` lazy-geometry properties.
self.assertIsInstance(ns.poly.ogr, gdal.OGRGeometry)
self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb)
self.assertIsInstance(ns.poly.srs, gdal.SpatialReference)
self.assertEqual('WGS 84', ns.poly.srs.name)
# Changing the interior ring on the poly attribute.
new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30), (30, 30))
ns.poly[1] = new_inner
ply[1] = new_inner
self.assertEqual(4326, ns.poly.srid)
ns.save()
self.assertEqual(ply, State.objects.get(name='NullState').poly)
ns.delete()
@skipUnlessDBFeature("supports_transform")
def test_lookup_insert_transform(self):
"Testing automatic transform for lookups and inserts."
# San Antonio in 'WGS84' (SRID 4326)
sa_4326 = 'POINT (-98.493183 29.424170)'
wgs_pnt = fromstr(sa_4326, srid=4326) # Our reference point in WGS84
# San Antonio in 'WGS 84 / Pseudo-Mercator' (SRID 3857)
other_srid_pnt = wgs_pnt.transform(3857, clone=True)
# Constructing & querying with a point from a different SRID. Oracle
# `SDO_OVERLAPBDYINTERSECT` operates differently from
# `ST_Intersects`, so contains is used instead.
if oracle:
tx = Country.objects.get(mpoly__contains=other_srid_pnt)
else:
tx = Country.objects.get(mpoly__intersects=other_srid_pnt)
self.assertEqual('Texas', tx.name)
# Creating San Antonio. Remember the Alamo.
sa = City.objects.create(name='San Antonio', point=other_srid_pnt)
# Now verifying that San Antonio was transformed correctly
sa = City.objects.get(name='San Antonio')
self.assertAlmostEqual(wgs_pnt.x, sa.point.x, 6)
self.assertAlmostEqual(wgs_pnt.y, sa.point.y, 6)
# If the GeometryField SRID is -1, then we shouldn't perform any
# transformation if the SRID of the input geometry is different.
m1 = MinusOneSRID(geom=Point(17, 23, srid=4326))
m1.save()
self.assertEqual(-1, m1.geom.srid)
def test_createnull(self):
"Testing creating a model instance and the geometry being None"
c = City()
self.assertIsNone(c.point)
def test_geometryfield(self):
"Testing the general GeometryField."
Feature(name='Point', geom=Point(1, 1)).save()
Feature(name='LineString', geom=LineString((0, 0), (1, 1), (5, 5))).save()
Feature(name='Polygon', geom=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0)))).save()
Feature(name='GeometryCollection',
geom=GeometryCollection(Point(2, 2), LineString((0, 0), (2, 2)),
Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))))).save()
f_1 = Feature.objects.get(name='Point')
self.assertIsInstance(f_1.geom, Point)
self.assertEqual((1.0, 1.0), f_1.geom.tuple)
f_2 = Feature.objects.get(name='LineString')
self.assertIsInstance(f_2.geom, LineString)
self.assertEqual(((0.0, 0.0), (1.0, 1.0), (5.0, 5.0)), f_2.geom.tuple)
f_3 = Feature.objects.get(name='Polygon')
self.assertIsInstance(f_3.geom, Polygon)
f_4 = Feature.objects.get(name='GeometryCollection')
self.assertIsInstance(f_4.geom, GeometryCollection)
self.assertEqual(f_3.geom, f_4.geom[2])
# TODO: fix on Oracle: ORA-22901: cannot compare nested table or VARRAY or
# LOB attributes of an object type.
@no_oracle
@skipUnlessDBFeature("supports_transform")
def test_inherited_geofields(self):
"Database functions on inherited Geometry fields."
# Creating a Pennsylvanian city.
PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)')
# All transformation SQL will need to be performed on the
# _parent_ table.
qs = PennsylvaniaCity.objects.annotate(new_point=functions.Transform('point', srid=32128))
self.assertEqual(1, qs.count())
for pc in qs:
self.assertEqual(32128, pc.new_point.srid)
def test_raw_sql_query(self):
"Testing raw SQL query."
cities1 = City.objects.all()
point_select = connection.ops.select % 'point'
cities2 = list(City.objects.raw(
'select id, name, %s as point from geoapp_city' % point_select
))
self.assertEqual(len(cities1), len(cities2))
with self.assertNumQueries(0): # Ensure point isn't deferred.
self.assertIsInstance(cities2[0].point, Point)
def test_dumpdata_loaddata_cycle(self):
"""
Test a dumpdata/loaddata cycle with geographic data.
"""
out = StringIO()
original_data = list(City.objects.all().order_by('name'))
call_command('dumpdata', 'geoapp.City', stdout=out)
result = out.getvalue()
houston = City.objects.get(name='Houston')
self.assertIn('"point": "%s"' % houston.point.ewkt, result)
# Reload now dumped data
with tempfile.NamedTemporaryFile(mode='w', suffix='.json') as tmp:
tmp.write(result)
tmp.seek(0)
call_command('loaddata', tmp.name, verbosity=0)
self.assertEqual(original_data, list(City.objects.all().order_by('name')))
@skipUnlessDBFeature("supports_empty_geometries")
def test_empty_geometries(self):
geometry_classes = [
Point,
LineString,
LinearRing,
Polygon,
MultiPoint,
MultiLineString,
MultiPolygon,
GeometryCollection,
]
for klass in geometry_classes:
g = klass(srid=4326)
feature = Feature.objects.create(name='Empty %s' % klass.__name__, geom=g)
feature.refresh_from_db()
if klass is LinearRing:
# LinearRing isn't representable in WKB, so GEOSGeometry.wkb
# uses LineString instead.
g = LineString(srid=4326)
self.assertEqual(feature.geom, g)
self.assertEqual(feature.geom.srid, g.srid)
class GeoLookupTest(TestCase):
fixtures = ['initial']
def test_disjoint_lookup(self):
"Testing the `disjoint` lookup type."
ptown = City.objects.get(name='Pueblo')
qs1 = City.objects.filter(point__disjoint=ptown.point)
self.assertEqual(7, qs1.count())
if connection.features.supports_real_shape_operations:
qs2 = State.objects.filter(poly__disjoint=ptown.point)
self.assertEqual(1, qs2.count())
self.assertEqual('Kansas', qs2[0].name)
def test_contains_contained_lookups(self):
"Testing the 'contained', 'contains', and 'bbcontains' lookup types."
# Getting Texas, yes we were a country -- once ;)
texas = Country.objects.get(name='Texas')
# Seeing what cities are in Texas; should get Houston and Dallas,
# plus Oklahoma City, because 'contained' only checks the
# _bounding box_ of the geometries.
if connection.features.supports_contained_lookup:
qs = City.objects.filter(point__contained=texas.mpoly)
self.assertEqual(3, qs.count())
cities = ['Houston', 'Dallas', 'Oklahoma City']
for c in qs:
self.assertIn(c.name, cities)
# Pulling out some cities.
houston = City.objects.get(name='Houston')
wellington = City.objects.get(name='Wellington')
pueblo = City.objects.get(name='Pueblo')
okcity = City.objects.get(name='Oklahoma City')
lawrence = City.objects.get(name='Lawrence')
# Now testing contains on the countries using the points for
# Houston and Wellington.
tx = Country.objects.get(mpoly__contains=houston.point) # Query w/GEOSGeometry
nz = Country.objects.get(mpoly__contains=wellington.point.hex) # Query w/EWKBHEX
self.assertEqual('Texas', tx.name)
self.assertEqual('New Zealand', nz.name)
# Testing `contains` on the states using the point for Lawrence.
ks = State.objects.get(poly__contains=lawrence.point)
self.assertEqual('Kansas', ks.name)
# Pueblo and Oklahoma City (even though OK City is within the bounding box of Texas)
# are not contained in Texas or New Zealand.
self.assertEqual(len(Country.objects.filter(mpoly__contains=pueblo.point)), 0) # Query w/GEOSGeometry object
self.assertEqual(len(Country.objects.filter(mpoly__contains=okcity.point.wkt)),
0 if connection.features.supports_real_shape_operations else 1) # Query w/WKT
# OK City is contained w/in bounding box of Texas.
if connection.features.supports_bbcontains_lookup:
qs = Country.objects.filter(mpoly__bbcontains=okcity.point)
self.assertEqual(1, len(qs))
self.assertEqual('Texas', qs[0].name)
@skipUnlessDBFeature("supports_crosses_lookup")
def test_crosses_lookup(self):
Track.objects.create(
name='Line1',
line=LineString([(-95, 29), (-60, 0)])
)
self.assertEqual(
Track.objects.filter(line__crosses=LineString([(-95, 0), (-60, 29)])).count(),
1
)
self.assertEqual(
Track.objects.filter(line__crosses=LineString([(-95, 30), (0, 30)])).count(),
0
)
@skipUnlessDBFeature("supports_isvalid_lookup")
def test_isvalid_lookup(self):
invalid_geom = fromstr('POLYGON((0 0, 0 1, 1 1, 1 0, 1 1, 1 0, 0 0))')
State.objects.create(name='invalid', poly=invalid_geom)
qs = State.objects.all()
if oracle or mysql:
# Kansas has adjacent vertices with distance 6.99244813842e-12
# which is smaller than the default Oracle tolerance.
# It's invalid on MySQL too.
qs = qs.exclude(name='Kansas')
self.assertEqual(State.objects.filter(name='Kansas', poly__isvalid=False).count(), 1)
self.assertEqual(qs.filter(poly__isvalid=False).count(), 1)
self.assertEqual(qs.filter(poly__isvalid=True).count(), qs.count() - 1)
@skipUnlessDBFeature("supports_left_right_lookups")
def test_left_right_lookups(self):
"Testing the 'left' and 'right' lookup types."
# Left: A << B => true if xmax(A) < xmin(B)
# Right: A >> B => true if xmin(A) > xmax(B)
# See: BOX2D_left() and BOX2D_right() in lwgeom_box2dfloat4.c in PostGIS source.
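# (Worked example with approximate data: Colorado's bounding box spans
# roughly x in [-109.06, -102.04], so Dallas (x ~ -96.80) has
# xmin > xmax(CO) and is strictly to the right, while Victoria
# (x ~ -123.31) has xmax < xmin(CO) and is to the left.)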
# Getting the borders for Colorado & Kansas
co_border = State.objects.get(name='Colorado').poly
ks_border = State.objects.get(name='Kansas').poly
# These cities should be strictly to the right of the CO border.
cities = ['Houston', 'Dallas', 'Oklahoma City',
'Lawrence', 'Chicago', 'Wellington']
qs = City.objects.filter(point__right=co_border)
self.assertEqual(6, len(qs))
for c in qs:
self.assertIn(c.name, cities)
# These cities should be strictly to the right of the KS border.
cities = ['Chicago', 'Wellington']
qs = City.objects.filter(point__right=ks_border)
self.assertEqual(2, len(qs))
for c in qs:
self.assertIn(c.name, cities)
# Note: Wellington has an 'X' value of 174, so it will not be considered
# to the left of CO.
vic = City.objects.get(point__left=co_border)
self.assertEqual('Victoria', vic.name)
cities = ['Pueblo', 'Victoria']
qs = City.objects.filter(point__left=ks_border)
self.assertEqual(2, len(qs))
for c in qs:
self.assertIn(c.name, cities)
@skipUnlessGISLookup("strictly_above", "strictly_below")
def test_strictly_above_below_lookups(self):
dallas = City.objects.get(name='Dallas')
self.assertQuerysetEqual(
City.objects.filter(point__strictly_above=dallas.point).order_by('name'),
['Chicago', 'Lawrence', 'Oklahoma City', 'Pueblo', 'Victoria'],
lambda b: b.name
)
self.assertQuerysetEqual(
City.objects.filter(point__strictly_below=dallas.point).order_by('name'),
['Houston', 'Wellington'],
lambda b: b.name
)
def test_equals_lookups(self):
"Testing the 'same_as' and 'equals' lookup types."
pnt = fromstr('POINT (-95.363151 29.763374)', srid=4326)
c1 = City.objects.get(point=pnt)
c2 = City.objects.get(point__same_as=pnt)
c3 = City.objects.get(point__equals=pnt)
for c in [c1, c2, c3]:
self.assertEqual('Houston', c.name)
@skipUnlessDBFeature("supports_null_geometries")
def test_null_geometries(self):
"Testing NULL geometry support, and the `isnull` lookup type."
# Creating a state with a NULL boundary.
State.objects.create(name='Puerto Rico')
# Querying for both NULL and Non-NULL values.
nullqs = State.objects.filter(poly__isnull=True)
validqs = State.objects.filter(poly__isnull=False)
# Puerto Rico should be NULL (it's a commonwealth unincorporated territory)
self.assertEqual(1, len(nullqs))
self.assertEqual('Puerto Rico', nullqs[0].name)
# GeometryField=None is an alias for __isnull=True.
self.assertCountEqual(State.objects.filter(poly=None), nullqs)
self.assertCountEqual(State.objects.exclude(poly=None), validqs)
# The valid states should be Colorado & Kansas
self.assertEqual(2, len(validqs))
state_names = [s.name for s in validqs]
self.assertIn('Colorado', state_names)
self.assertIn('Kansas', state_names)
# Saving another commonwealth w/a NULL geometry.
nmi = State.objects.create(name='Northern Mariana Islands', poly=None)
self.assertIsNone(nmi.poly)
# Assigning a geometry and saving -- then UPDATE back to NULL.
nmi.poly = 'POLYGON((0 0,1 0,1 1,1 0,0 0))'
nmi.save()
State.objects.filter(name='Northern Mariana Islands').update(poly=None)
self.assertIsNone(State.objects.get(name='Northern Mariana Islands').poly)
@skipUnlessDBFeature('supports_null_geometries', 'supports_crosses_lookup', 'supports_relate_lookup')
def test_null_geometries_excluded_in_lookups(self):
"""NULL features are excluded in spatial lookup functions."""
null = State.objects.create(name='NULL', poly=None)
queries = [
('equals', Point(1, 1)),
('disjoint', Point(1, 1)),
('touches', Point(1, 1)),
('crosses', LineString((0, 0), (1, 1), (5, 5))),
('within', Point(1, 1)),
('overlaps', LineString((0, 0), (1, 1), (5, 5))),
('contains', LineString((0, 0), (1, 1), (5, 5))),
('intersects', LineString((0, 0), (1, 1), (5, 5))),
('relate', (Point(1, 1), 'T*T***FF*')),
('same_as', Point(1, 1)),
('exact', Point(1, 1)),
('coveredby', Point(1, 1)),
('covers', Point(1, 1)),
]
for lookup, geom in queries:
with self.subTest(lookup=lookup):
self.assertNotIn(null, State.objects.filter(**{'poly__%s' % lookup: geom}))
@skipUnlessDBFeature("supports_relate_lookup")
def test_relate_lookup(self):
"Testing the 'relate' lookup type."
# To make things more interesting, we will have our Texas reference point in
# different SRIDs.
pnt1 = fromstr('POINT (649287.0363174 4177429.4494686)', srid=2847)
pnt2 = fromstr('POINT(-98.4919715741052 29.4333344025053)', srid=4326)
# Not passing in a geometry as the first param raises a ValueError when
# initializing the QuerySet.
with self.assertRaises(ValueError):
Country.objects.filter(mpoly__relate=(23, 'foo'))
# Making sure the right exception is raised for the given
# bad arguments.
for bad_args, e in [((pnt1, 0), ValueError), ((pnt2, 'T*T***FF*', 0), ValueError)]:
qs = Country.objects.filter(mpoly__relate=bad_args)
with self.assertRaises(e):
qs.count()
# Relate works differently for the different backends.
if postgis or spatialite:
contains_mask = 'T*T***FF*'
within_mask = 'T*F**F***'
intersects_mask = 'T********'
elif oracle:
contains_mask = 'contains'
within_mask = 'inside'
# TODO: This is not quite the same as the PostGIS mask above
intersects_mask = 'overlapbdyintersect'
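# (Explanatory note: the PostGIS/SpatiaLite masks are DE-9IM patterns --
# nine characters giving the Interior/Boundary/Exterior intersection
# matrix row by row, where 'T' requires an intersection, 'F' forbids one,
# and '*' means "don't care" -- while Oracle takes named relationships.)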
# Testing contains relation mask.
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, contains_mask)).name)
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, contains_mask)).name)
# Testing within relation mask.
ks = State.objects.get(name='Kansas')
self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, within_mask)).name)
# Testing intersection relation mask.
if not oracle:
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, intersects_mask)).name)
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, intersects_mask)).name)
self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, intersects_mask)).name)
# With a complex geometry expression
mask = 'anyinteract' if oracle else within_mask
self.assertFalse(City.objects.exclude(point__relate=(functions.Union('point', 'point'), mask)))
def test_gis_lookups_with_complex_expressions(self):
multiple_arg_lookups = {'dwithin', 'relate'} # These lookups are tested elsewhere.
lookups = connection.ops.gis_operators.keys() - multiple_arg_lookups
self.assertTrue(lookups, 'No lookups found')
for lookup in lookups:
with self.subTest(lookup):
City.objects.filter(**{'point__' + lookup: functions.Union('point', 'point')}).exists()
class GeoQuerySetTest(TestCase):
# TODO: GeoQuerySet is removed, organize these test better.
fixtures = ['initial']
@skipUnlessDBFeature("supports_extent_aggr")
def test_extent(self):
"""
Testing the `Extent` aggregate.
"""
# Reference query:
# `SELECT ST_extent(point) FROM geoapp_city WHERE (name='Houston' or name='Dallas');`
# => BOX(-96.8016128540039 29.7633724212646,-95.3631439208984 32.7820587158203)
expected = (-96.8016128540039, 29.7633724212646, -95.3631439208984, 32.782058715820)
qs = City.objects.filter(name__in=('Houston', 'Dallas'))
extent = qs.aggregate(Extent('point'))['point__extent']
for val, exp in zip(extent, expected):
self.assertAlmostEqual(exp, val, 4)
self.assertIsNone(City.objects.filter(name='Smalltown').aggregate(Extent('point'))['point__extent'])
@skipUnlessDBFeature("supports_extent_aggr")
def test_extent_with_limit(self):
"""
Testing if extent supports limit.
"""
extent1 = City.objects.all().aggregate(Extent('point'))['point__extent']
extent2 = City.objects.all()[:3].aggregate(Extent('point'))['point__extent']
self.assertNotEqual(extent1, extent2)
def test_make_line(self):
"""
Testing the `MakeLine` aggregate.
"""
if not connection.features.supports_make_line_aggr:
with self.assertRaises(NotSupportedError):
City.objects.all().aggregate(MakeLine('point'))
return
# MakeLine on an inappropriate field simply returns None.
self.assertIsNone(State.objects.aggregate(MakeLine('poly'))['poly__makeline'])
# Reference query:
# SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city;
ref_line = GEOSGeometry(
'LINESTRING(-95.363151 29.763374,-96.801611 32.782057,'
'-97.521157 34.464642,174.783117 -41.315268,-104.609252 38.255001,'
'-95.23506 38.971823,-87.650175 41.850385,-123.305196 48.462611)',
srid=4326
)
# Check for equality with a tolerance of 10e-5 (i.e. 1e-4), which is
# coarser than the six-decimal precision of ref_line's coordinates.
line = City.objects.aggregate(MakeLine('point'))['point__makeline']
self.assertTrue(
ref_line.equals_exact(line, tolerance=10e-5),
"%s != %s" % (ref_line, line)
)
@skipUnlessDBFeature('supports_union_aggr')
def test_unionagg(self):
"""
Testing the `Union` aggregate.
"""
tx = Country.objects.get(name='Texas').mpoly
# Houston, Dallas -- Ordering may differ depending on backend or GEOS version.
union = GEOSGeometry('MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)')
qs = City.objects.filter(point__within=tx)
with self.assertRaises(ValueError):
qs.aggregate(Union('name'))
# Aggregating the same queryset twice, once with an explicit ordering;
# the ordering should make no difference to an aggregate over a
# spatial column.
u1 = qs.aggregate(Union('point'))['point__union']
u2 = qs.order_by('name').aggregate(Union('point'))['point__union']
self.assertTrue(union.equals(u1))
self.assertTrue(union.equals(u2))
qs = City.objects.filter(name='NotACity')
self.assertIsNone(qs.aggregate(Union('point'))['point__union'])
def test_within_subquery(self):
"""
Using a queryset inside a geo lookup works (via a subquery)
(#14483).
"""
tex_cities = City.objects.filter(
point__within=Country.objects.filter(name='Texas').values('mpoly')).order_by('name')
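# (Explanatory note: passing a one-column queryset as the lookup value
# makes Django compile it into a subquery, so the within test runs
# entirely in the database rather than against a fetched geometry.)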
expected = ['Dallas', 'Houston']
if not connection.features.supports_real_shape_operations:
expected.append('Oklahoma City')
self.assertEqual(
list(tex_cities.values_list('name', flat=True)),
expected
)
def test_non_concrete_field(self):
NonConcreteModel.objects.create(point=Point(0, 0), name='name')
list(NonConcreteModel.objects.all())
def test_values_srid(self):
for c, v in zip(City.objects.all(), City.objects.values()):
self.assertEqual(c.point.srid, v['point'].srid)
# hash: ca2d4a6806eeef2eee14c7d35a8f6ea2a947b8c945cffc5ec1e4e9d21bb90eea
from xml.dom import minidom
from django.conf import settings
from django.contrib.sites.models import Site
from django.test import TestCase, modify_settings, override_settings
from .models import City
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'})
@override_settings(ROOT_URLCONF='gis_tests.geoapp.urls')
class GeoFeedTest(TestCase):
fixtures = ['initial']
def setUp(self):
Site(id=settings.SITE_ID, domain="example.com", name="example.com").save()
def assertChildNodes(self, elem, expected):
"Taken from syndication/tests.py."
actual = {n.nodeName for n in elem.childNodes}
expected = set(expected)
self.assertEqual(actual, expected)
def test_geofeed_rss(self):
"Tests geographic feeds using GeoRSS over RSSv2."
# Uses `GEOSGeometry` in `item_geometry`
doc1 = minidom.parseString(self.client.get('/feeds/rss1/').content)
# Uses a 2-tuple in `item_geometry`
doc2 = minidom.parseString(self.client.get('/feeds/rss2/').content)
feed1, feed2 = doc1.firstChild, doc2.firstChild
# Making sure the box got added to the second GeoRSS feed.
self.assertChildNodes(feed2.getElementsByTagName('channel')[0],
['title', 'link', 'description', 'language',
'lastBuildDate', 'item', 'georss:box', 'atom:link']
)
# Incrementing through the feeds.
for feed in [feed1, feed2]:
# Ensuring the georss namespace was added to the <rss> element.
self.assertEqual(feed.getAttribute('xmlns:georss'), 'http://www.georss.org/georss')
chan = feed.getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
self.assertEqual(len(items), City.objects.count())
# Ensuring the georss element was added to each item in the feed.
for item in items:
self.assertChildNodes(item, ['title', 'link', 'description', 'guid', 'georss:point'])
def test_geofeed_atom(self):
"Testing geographic feeds using GeoRSS over Atom."
doc1 = minidom.parseString(self.client.get('/feeds/atom1/').content)
doc2 = minidom.parseString(self.client.get('/feeds/atom2/').content)
feed1, feed2 = doc1.firstChild, doc2.firstChild
# Making sure the box got added to the second GeoRSS feed.
self.assertChildNodes(feed2, ['title', 'link', 'id', 'updated', 'entry', 'georss:box'])
for feed in [feed1, feed2]:
# Ensuring the georss namespace was added to the <feed> element.
self.assertEqual(feed.getAttribute('xmlns:georss'), 'http://www.georss.org/georss')
entries = feed.getElementsByTagName('entry')
self.assertEqual(len(entries), City.objects.count())
# Ensuring the georss element was added to each entry in the feed.
for entry in entries:
self.assertChildNodes(entry, ['title', 'link', 'id', 'summary', 'georss:point'])
def test_geofeed_w3c(self):
"Testing geographic feeds using W3C Geo."
doc = minidom.parseString(self.client.get('/feeds/w3cgeo1/').content)
feed = doc.firstChild
# Ensuring the geo namespace was added to the <feed> element.
self.assertEqual(feed.getAttribute('xmlns:geo'), 'http://www.w3.org/2003/01/geo/wgs84_pos#')
chan = feed.getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
self.assertEqual(len(items), City.objects.count())
# Ensuring the geo:lat and geo:lon elements were added to each item in the feed.
for item in items:
self.assertChildNodes(item, ['title', 'link', 'description', 'guid', 'geo:lat', 'geo:lon'])
# Boxes and Polygons aren't allowed in W3C Geo feeds.
with self.assertRaises(ValueError): # Box in <channel>
self.client.get('/feeds/w3cgeo2/')
with self.assertRaises(ValueError): # Polygons in <entry>
self.client.get('/feeds/w3cgeo3/')
# hash: 50d3728426c72ef48ecf81626f27ee0d7d318a794006d69c129a70a889f4b9e0
from django.contrib.gis.db import models
from ..utils import gisfield_may_be_null
class NamedModel(models.Model):
name = models.CharField(max_length=30)
class Meta:
abstract = True
def __str__(self):
return self.name
class Country(NamedModel):
mpoly = models.MultiPolygonField() # SRID, by default, is 4326
class CountryWebMercator(NamedModel):
mpoly = models.MultiPolygonField(srid=3857)
class City(NamedModel):
point = models.PointField()
class Meta:
app_label = 'geoapp'
# This is an inherited model from City
class PennsylvaniaCity(City):
county = models.CharField(max_length=30)
founded = models.DateTimeField(null=True)
class Meta:
app_label = 'geoapp'
class State(NamedModel):
poly = models.PolygonField(null=gisfield_may_be_null) # Allowing NULL geometries here.
class Meta:
app_label = 'geoapp'
class Track(NamedModel):
line = models.LineStringField()
class MultiFields(NamedModel):
city = models.ForeignKey(City, models.CASCADE)
point = models.PointField()
poly = models.PolygonField()
class UniqueTogetherModel(models.Model):
city = models.CharField(max_length=30)
point = models.PointField()
class Meta:
unique_together = ('city', 'point')
required_db_features = ['supports_geometry_field_unique_index']
class Truth(models.Model):
val = models.BooleanField(default=False)
class Feature(NamedModel):
geom = models.GeometryField()
class MinusOneSRID(models.Model):
geom = models.PointField(srid=-1) # Minus one SRID.
class NonConcreteField(models.IntegerField):
def db_type(self, connection):
return None
def get_attname_column(self):
attname, column = super().get_attname_column()
return attname, None
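# (Explanatory note: with db_type() returning None and the column name
# stripped to None, Django treats the field as non-concrete -- it maps to
# no database column -- which is what the NonConcreteModel tests rely on.)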
class NonConcreteModel(NamedModel):
non_concrete = NonConcreteField()
point = models.PointField(geography=True)
class ManyPointModel(NamedModel):
point1 = models.PointField()
point2 = models.PointField()
point3 = models.PointField(srid=3857)
# hash: dffd0929f6ba5cea04b96b1426f907d825380284072b21212215c5c897821823
from django.contrib.gis.sitemaps import KMLSitemap, KMZSitemap
from .models import City, Country
sitemaps = {'kml': KMLSitemap([City, Country]),
'kmz': KMZSitemap([City, Country]),
}
# hash: 329ebbb95addfb4bfd048b7e90db3755843bbe2667c48fdc4d66ce1004198bac
from django.contrib.gis import feeds
from .models import City
class TestGeoRSS1(feeds.Feed):
link = '/city/'
title = 'Test GeoDjango Cities'
def items(self):
return City.objects.all()
def item_link(self, item):
return '/city/%s/' % item.pk
def item_geometry(self, item):
return item.point
class TestGeoRSS2(TestGeoRSS1):
def geometry(self, obj):
# This should attach a <georss:box> element for the extent of
# the cities in the database. This tuple came from
# calling `City.objects.aggregate(Extent())` -- we can't do that call
# here because `Extent` is not implemented for MySQL/Oracle.
return (-123.30, -41.32, 174.78, 48.46)
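# (Assumed reading: the 4-tuple follows the Extent output format
# (xmin, ymin, xmax, ymax) in longitude/latitude order.)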
def item_geometry(self, item):
# Returning a simple tuple for the geometry.
return item.point.x, item.point.y
class TestGeoAtom1(TestGeoRSS1):
feed_type = feeds.GeoAtom1Feed
class TestGeoAtom2(TestGeoRSS2):
feed_type = feeds.GeoAtom1Feed
def geometry(self, obj):
# This time we'll use a 2-tuple of coordinates for the box.
return ((-123.30, -41.32), (174.78, 48.46))
class TestW3CGeo1(TestGeoRSS1):
feed_type = feeds.W3CGeoFeed
# The following feeds are invalid, and will raise exceptions.
class TestW3CGeo2(TestGeoRSS2):
feed_type = feeds.W3CGeoFeed
class TestW3CGeo3(TestGeoRSS1):
feed_type = feeds.W3CGeoFeed
def item_geometry(self, item):
from django.contrib.gis.geos import Polygon
return Polygon(((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)))
# The feed dictionary to use for URLs.
feed_dict = {
'rss1': TestGeoRSS1,
'rss2': TestGeoRSS2,
'atom1': TestGeoAtom1,
'atom2': TestGeoAtom2,
'w3cgeo1': TestW3CGeo1,
'w3cgeo2': TestW3CGeo2,
'w3cgeo3': TestW3CGeo3,
}
# hash: a81e2f4c57286155ce875f36a4eaff6149d7f9638c58eb7c7a63221b9819d8a0
import json
from django.contrib.gis.geos import LinearRing, Point, Polygon
from django.core import serializers
from django.test import TestCase
from .models import City, MultiFields, PennsylvaniaCity
class GeoJSONSerializerTests(TestCase):
fixtures = ['initial']
def test_builtin_serializers(self):
"""
'geojson' should be listed in available serializers.
"""
all_formats = set(serializers.get_serializer_formats())
public_formats = set(serializers.get_public_serializer_formats())
self.assertIn('geojson', all_formats)
self.assertIn('geojson', public_formats)
def test_serialization_base(self):
geojson = serializers.serialize('geojson', City.objects.all().order_by('name'))
geodata = json.loads(geojson)
self.assertEqual(len(geodata['features']), len(City.objects.all()))
self.assertEqual(geodata['features'][0]['geometry']['type'], 'Point')
self.assertEqual(geodata['features'][0]['properties']['name'], 'Chicago')
first_city = City.objects.all().order_by('name').first()
self.assertEqual(geodata['features'][0]['properties']['pk'], str(first_city.pk))
def test_geometry_field_option(self):
"""
When a model has several geometry fields, the 'geometry_field' option
can be used to specify the field to use as the 'geometry' key.
"""
MultiFields.objects.create(
city=City.objects.first(), name='Name', point=Point(5, 23),
poly=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))))
geojson = serializers.serialize('geojson', MultiFields.objects.all())
geodata = json.loads(geojson)
self.assertEqual(geodata['features'][0]['geometry']['type'], 'Point')
geojson = serializers.serialize(
'geojson',
MultiFields.objects.all(),
geometry_field='poly'
)
geodata = json.loads(geojson)
self.assertEqual(geodata['features'][0]['geometry']['type'], 'Polygon')
# geometry_field is considered even if not in fields (#26138).
geojson = serializers.serialize(
'geojson',
MultiFields.objects.all(),
geometry_field='poly',
fields=('city',)
)
geodata = json.loads(geojson)
self.assertEqual(geodata['features'][0]['geometry']['type'], 'Polygon')
def test_fields_option(self):
"""
The fields option allows defining a subset of fields to be present in
the 'properties' of the generated output.
"""
PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)')
geojson = serializers.serialize(
'geojson', PennsylvaniaCity.objects.all(), fields=('county', 'point'),
)
geodata = json.loads(geojson)
self.assertIn('county', geodata['features'][0]['properties'])
self.assertNotIn('founded', geodata['features'][0]['properties'])
self.assertNotIn('pk', geodata['features'][0]['properties'])
def test_srid_option(self):
geojson = serializers.serialize('geojson', City.objects.all().order_by('name'), srid=2847)
geodata = json.loads(geojson)
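        # srid=2847 makes the serializer emit reprojected coordinates; comparing
        # them as ints sidesteps small floating-point differences between geo
        # library versions.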
self.assertEqual(
[int(c) for c in geodata['features'][0]['geometry']['coordinates']],
[1564802, 5613214]
)
def test_deserialization_exception(self):
"""
GeoJSON cannot be deserialized.
"""
with self.assertRaises(serializers.base.SerializerDoesNotExist):
serializers.deserialize('geojson', '{}')
|
895c9df0630e5b49970b95a2abcc6c9cfce361f57d59e5b55e560b0941390a20 | from django.conf.urls import url
from django.contrib.gis import views as gis_views
from django.contrib.gis.sitemaps import views as gis_sitemap_views
from django.contrib.sitemaps import views as sitemap_views
from .feeds import feed_dict
from .sitemaps import sitemaps
urlpatterns = [
url(r'^feeds/(?P<url>.*)/$', gis_views.feed, {'feed_dict': feed_dict}),
]
urlpatterns += [
url(r'^sitemaps/(?P<section>\w+)\.xml$', sitemap_views.sitemap, {'sitemaps': sitemaps}),
]
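# The GeoDjango KML sitemap classes reverse these view names when building
# each entry's location, so the `name` arguments below must stay as-is.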
urlpatterns += [
url(r'^sitemaps/kml/(?P<label>\w+)/(?P<model>\w+)/(?P<field_name>\w+)\.kml$',
gis_sitemap_views.kml,
name='django.contrib.gis.sitemaps.views.kml'),
url(r'^sitemaps/kml/(?P<label>\w+)/(?P<model>\w+)/(?P<field_name>\w+)\.kmz$',
gis_sitemap_views.kmz,
name='django.contrib.gis.sitemaps.views.kmz'),
]
|
8180207888a24934b7600023e129e3c6354524a29869bac9d20912003cf3244f | from datetime import datetime
from django.contrib.gis.db.models import Extent
from django.contrib.gis.shortcuts import render_to_kmz
from django.db.models import Count, Min
from django.test import TestCase, skipUnlessDBFeature
from ..utils import no_oracle
from .models import City, PennsylvaniaCity, State, Truth
class GeoRegressionTests(TestCase):
fixtures = ['initial']
def test_update(self):
"Testing QuerySet.update() (#10411)."
pnt = City.objects.get(name='Pueblo').point
bak = pnt.clone()
pnt.y += 0.005
pnt.x += 0.005
City.objects.filter(name='Pueblo').update(point=pnt)
self.assertEqual(pnt, City.objects.get(name='Pueblo').point)
City.objects.filter(name='Pueblo').update(point=bak)
self.assertEqual(bak, City.objects.get(name='Pueblo').point)
def test_kmz(self):
"Testing `render_to_kmz` with non-ASCII data. See #11624."
name = "Åland Islands"
places = [{
'name': name,
'description': name,
'kml': '<Point><coordinates>5.0,23.0</coordinates></Point>'
}]
render_to_kmz('gis/kml/placemarks.kml', {'places': places})
@skipUnlessDBFeature("supports_extent_aggr")
def test_extent(self):
"Testing `extent` on a table with a single point. See #11827."
pnt = City.objects.get(name='Pueblo').point
ref_ext = (pnt.x, pnt.y, pnt.x, pnt.y)
extent = City.objects.filter(name='Pueblo').aggregate(Extent('point'))['point__extent']
for ref_val, val in zip(ref_ext, extent):
self.assertAlmostEqual(ref_val, val, 4)
def test_unicode_date(self):
"Testing dates are converted properly, even on SpatiaLite. See #16408."
founded = datetime(1857, 5, 23)
PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)',
founded=founded)
self.assertEqual(founded, PennsylvaniaCity.objects.datetimes('founded', 'day')[0])
self.assertEqual(founded, PennsylvaniaCity.objects.aggregate(Min('founded'))['founded__min'])
def test_empty_count(self):
"Testing that PostGISAdapter.__eq__ does check empty strings. See #13670."
# contrived example, but need a geo lookup paired with an id__in lookup
pueblo = City.objects.get(name='Pueblo')
state = State.objects.filter(poly__contains=pueblo.point)
cities_within_state = City.objects.filter(id__in=state)
# .count() should not throw TypeError in __eq__
self.assertEqual(cities_within_state.count(), 1)
# TODO: fix on Oracle -- get the following error because the SQL is ordered
# by a geometry object, which Oracle apparently doesn't like:
# ORA-22901: cannot compare nested table or VARRAY or LOB attributes of an object type
@no_oracle
def test_defer_or_only_with_annotate(self):
"Regression for #16409. Make sure defer() and only() work with annotate()"
self.assertIsInstance(list(City.objects.annotate(Count('point')).defer('name')), list)
self.assertIsInstance(list(City.objects.annotate(Count('point')).only('name')), list)
def test_boolean_conversion(self):
"Testing Boolean value conversion with the spatial backend, see #15169."
t1 = Truth.objects.create(val=True)
t2 = Truth.objects.create(val=False)
val1 = Truth.objects.get(pk=t1.pk).val
val2 = Truth.objects.get(pk=t2.pk).val
# verify types -- shouldn't be 0/1
self.assertIsInstance(val1, bool)
self.assertIsInstance(val2, bool)
# verify values
self.assertIs(val1, True)
self.assertIs(val2, False)
|
8d894bf4cc043c9b0c84800a7b6afc106573f29693662c8ede6fa732fc20242d | import zipfile
from io import BytesIO
from xml.dom import minidom
from django.conf import settings
from django.contrib.sites.models import Site
from django.test import TestCase, modify_settings, override_settings
from .models import City, Country
@modify_settings(INSTALLED_APPS={'append': ['django.contrib.sites', 'django.contrib.sitemaps']})
@override_settings(ROOT_URLCONF='gis_tests.geoapp.urls')
class GeoSitemapTest(TestCase):
def setUp(self):
super().setUp()
Site(id=settings.SITE_ID, domain="example.com", name="example.com").save()
def assertChildNodes(self, elem, expected):
"Taken from syndication/tests.py."
actual = {n.nodeName for n in elem.childNodes}
expected = set(expected)
self.assertEqual(actual, expected)
def test_geositemap_kml(self):
"Tests KML/KMZ geographic sitemaps."
for kml_type in ('kml', 'kmz'):
doc = minidom.parseString(self.client.get('/sitemaps/%s.xml' % kml_type).content)
# Ensuring the right sitemaps namespace is present.
urlset = doc.firstChild
self.assertEqual(urlset.getAttribute('xmlns'), 'http://www.sitemaps.org/schemas/sitemap/0.9')
urls = urlset.getElementsByTagName('url')
self.assertEqual(2, len(urls)) # Should only be 2 sitemaps.
for url in urls:
self.assertChildNodes(url, ['loc'])
# Getting the relative URL since we don't have a real site.
kml_url = url.getElementsByTagName('loc')[0].childNodes[0].data.split('http://example.com')[1]
if kml_type == 'kml':
kml_doc = minidom.parseString(self.client.get(kml_url).content)
elif kml_type == 'kmz':
# Have to decompress KMZ before parsing.
buf = BytesIO(self.client.get(kml_url).content)
with zipfile.ZipFile(buf) as zf:
self.assertEqual(1, len(zf.filelist))
self.assertEqual('doc.kml', zf.filelist[0].filename)
kml_doc = minidom.parseString(zf.read('doc.kml'))
# Ensuring the correct number of placemarks are in the KML doc.
if 'city' in kml_url:
model = City
elif 'country' in kml_url:
model = Country
self.assertEqual(model.objects.count(), len(kml_doc.getElementsByTagName('Placemark')))
|
3137b0dde70eac4365601bb4912c6fed23af4d3e674ae3b94706fcb7ab00f27a | """
Tests for geography support in PostGIS
"""
import os
from unittest import skipIf, skipUnless
from django.contrib.gis.db import models
from django.contrib.gis.db.models.functions import Area, Distance
from django.contrib.gis.measure import D
from django.db import NotSupportedError, connection
from django.db.models.functions import Cast
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from ..utils import FuncTestMixin, oracle, postgis, spatialite
from .models import City, County, Zipcode
class GeographyTest(TestCase):
fixtures = ['initial']
def test01_fixture_load(self):
"Ensure geography features loaded properly."
self.assertEqual(8, City.objects.count())
@skipIf(spatialite, "SpatiaLite doesn't support distance lookups with Distance objects.")
@skipUnlessDBFeature("supports_distances_lookups", "supports_distance_geodetic")
def test02_distance_lookup(self):
"Testing distance lookup support on non-point geography fields."
z = Zipcode.objects.get(code='77002')
cities1 = list(City.objects
.filter(point__distance_lte=(z.poly, D(mi=500)))
.order_by('name')
.values_list('name', flat=True))
cities2 = list(City.objects
.filter(point__dwithin=(z.poly, D(mi=500)))
.order_by('name')
.values_list('name', flat=True))
for cities in [cities1, cities2]:
self.assertEqual(['Dallas', 'Houston', 'Oklahoma City'], cities)
@skipUnless(postgis, "This is a PostGIS-specific test")
def test04_invalid_operators_functions(self):
"Ensuring exceptions are raised for operators & functions invalid on geography fields."
        # Only a subset of the geometry functions & operators are available
# to PostGIS geography types. For more information, visit:
# http://postgis.refractions.net/documentation/manual-1.5/ch08.html#PostGIS_GeographyFunctions
z = Zipcode.objects.get(code='77002')
# ST_Within not available.
with self.assertRaises(ValueError):
City.objects.filter(point__within=z.poly).count()
# `@` operator not available.
with self.assertRaises(ValueError):
City.objects.filter(point__contained=z.poly).count()
# Regression test for #14060, `~=` was never really implemented for PostGIS.
htown = City.objects.get(name='Houston')
with self.assertRaises(ValueError):
City.objects.get(point__exact=htown.point)
def test05_geography_layermapping(self):
"Testing LayerMapping support on models with geography fields."
# There is a similar test in `layermap` that uses the same data set,
# but the County model here is a bit different.
from django.contrib.gis.utils import LayerMapping
# Getting the shapefile and mapping dictionary.
shp_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'data'))
co_shp = os.path.join(shp_path, 'counties', 'counties.shp')
co_mapping = {'name': 'Name',
'state': 'State',
'mpoly': 'MULTIPOLYGON',
}
# Reference county names, number of polygons, and state names.
names = ['Bexar', 'Galveston', 'Harris', 'Honolulu', 'Pueblo']
num_polys = [1, 2, 1, 19, 1] # Number of polygons for each.
st_names = ['Texas', 'Texas', 'Texas', 'Hawaii', 'Colorado']
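        # counties.shp is in NAD83 (SRID 4269); LayerMapping transforms each
        # geometry to the geography field's SRID (4326) on save, which the
        # loop below verifies.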
lm = LayerMapping(County, co_shp, co_mapping, source_srs=4269, unique='name')
lm.save(silent=True, strict=True)
for c, name, num_poly, state in zip(County.objects.order_by('name'), names, num_polys, st_names):
self.assertEqual(4326, c.mpoly.srid)
self.assertEqual(num_poly, len(c.mpoly))
self.assertEqual(name, c.name)
self.assertEqual(state, c.state)
class GeographyFunctionTests(FuncTestMixin, TestCase):
fixtures = ['initial']
@skipUnlessDBFeature("supports_extent_aggr")
def test_cast_aggregate(self):
"""
Cast a geography to a geometry field for an aggregate function that
expects a geometry input.
"""
if not connection.ops.geography:
self.skipTest("This test needs geography support")
expected = (-96.8016128540039, 29.7633724212646, -95.3631439208984, 32.782058715820)
res = City.objects.filter(
name__in=('Houston', 'Dallas')
).aggregate(extent=models.Extent(Cast('point', models.PointField())))
for val, exp in zip(res['extent'], expected):
self.assertAlmostEqual(exp, val, 4)
@skipUnlessDBFeature("has_Distance_function", "supports_distance_geodetic")
def test_distance_function(self):
"""
Testing Distance() support on non-point geography fields.
"""
if oracle:
ref_dists = [0, 4899.68, 8081.30, 9115.15]
elif spatialite:
            # SpatiaLite returns a non-zero distance between a polygon and a
            # point covered by that polygon.
ref_dists = [326.61, 4899.68, 8081.30, 9115.15]
else:
ref_dists = [0, 4891.20, 8071.64, 9123.95]
htown = City.objects.get(name='Houston')
qs = Zipcode.objects.annotate(
distance=Distance('poly', htown.point),
distance2=Distance(htown.point, 'poly'),
)
for z, ref in zip(qs, ref_dists):
self.assertAlmostEqual(z.distance.m, ref, 2)
if postgis:
# PostGIS casts geography to geometry when distance2 is calculated.
ref_dists = [0, 4899.68, 8081.30, 9115.15]
for z, ref in zip(qs, ref_dists):
self.assertAlmostEqual(z.distance2.m, ref, 2)
if not spatialite:
# Distance function combined with a lookup.
hzip = Zipcode.objects.get(code='77002')
self.assertEqual(qs.get(distance__lte=0), hzip)
@skipUnlessDBFeature("has_Area_function", "supports_area_geodetic")
def test_geography_area(self):
"""
Testing that Area calculations work on geography columns.
"""
# SELECT ST_Area(poly) FROM geogapp_zipcode WHERE code='77002';
z = Zipcode.objects.annotate(area=Area('poly')).get(code='77002')
# Round to the nearest thousand as possible values (depending on
# the database and geolib) include 5439084, 5439100, 5439101.
rounded_value = z.area.sq_m
rounded_value -= z.area.sq_m % 1000
self.assertEqual(rounded_value, 5439000)
@skipUnlessDBFeature("has_Area_function")
@skipIfDBFeature("supports_area_geodetic")
def test_geodetic_area_raises_if_not_supported(self):
with self.assertRaisesMessage(NotSupportedError, 'Area on geodetic coordinate systems not supported.'):
Zipcode.objects.annotate(area=Area('poly')).get(code='77002')
|
bf12e6b539ca81fccd1af48882c43c6d7f1d70baf25d98a8865fbe9e40e1c95b | from django.contrib.gis.db import models
class NamedModel(models.Model):
name = models.CharField(max_length=30)
class Meta:
abstract = True
def __str__(self):
return self.name
class City(NamedModel):
point = models.PointField(geography=True)
class Meta:
app_label = 'geogapp'
class Zipcode(NamedModel):
code = models.CharField(max_length=10)
poly = models.PolygonField(geography=True)
class County(NamedModel):
state = models.CharField(max_length=20)
mpoly = models.MultiPolygonField(geography=True)
class Meta:
app_label = 'geogapp'
def __str__(self):
return ' County, '.join([self.name, self.state])
|
208f604f819c21b8fe13485f6b60ed1eac9a6ec018abc892a63481bc7e6f99eb | from django.core.management import call_command
from django.db import connection
from django.test import TransactionTestCase
class MigrateTests(TransactionTestCase):
"""
Tests running the migrate command in Geodjango.
"""
available_apps = ["gis_tests.gis_migrations"]
def get_table_description(self, table):
with connection.cursor() as cursor:
return connection.introspection.get_table_description(cursor, table)
def assertTableExists(self, table):
with connection.cursor() as cursor:
self.assertIn(table, connection.introspection.table_names(cursor))
def assertTableNotExists(self, table):
with connection.cursor() as cursor:
self.assertNotIn(table, connection.introspection.table_names(cursor))
def test_migrate_gis(self):
"""
Tests basic usage of the migrate command when a model uses Geodjango
fields (#22001).
        It also exercises a migration error where SpatiaLite is enabled and
        geo tables are renamed, resulting in a unique constraint failure on
        geometry_columns (#23030).
"""
# The right tables exist
self.assertTableExists("gis_migrations_neighborhood")
self.assertTableExists("gis_migrations_household")
self.assertTableExists("gis_migrations_family")
if connection.features.supports_raster:
self.assertTableExists("gis_migrations_heatmap")
# Unmigrate everything
call_command("migrate", "gis_migrations", "zero", verbosity=0)
# All tables are gone
self.assertTableNotExists("gis_migrations_neighborhood")
self.assertTableNotExists("gis_migrations_household")
self.assertTableNotExists("gis_migrations_family")
if connection.features.supports_raster:
self.assertTableNotExists("gis_migrations_heatmap")
# Even geometry columns metadata
try:
GeoColumn = connection.ops.geometry_columns()
except NotImplementedError:
# Not all GIS backends have geometry columns model
pass
else:
qs = GeoColumn.objects.filter(
**{'%s__in' % GeoColumn.table_name_col(): ["gis_neighborhood", "gis_household"]}
)
self.assertEqual(qs.count(), 0)
# Revert the "unmigration"
call_command("migrate", "gis_migrations", verbosity=0)
|
f187f335a5c8735862704a98993555abc06a2e80f499a65d835fd3e91f5ac9b3 | from unittest import skipIf
from django.contrib.gis.db.models import fields
from django.contrib.gis.geos import MultiPolygon, Polygon
from django.core.exceptions import ImproperlyConfigured
from django.db import connection, migrations, models
from django.db.migrations.migration import Migration
from django.db.migrations.state import ProjectState
from django.test import (
TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature,
)
from ..utils import mysql, spatialite
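# Not every spatial backend implements geometry_columns() (it raises
# NotImplementedError where unsupported), so detect support once at import
# time and let the tests branch on HAS_GEOMETRY_COLUMNS.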
try:
GeometryColumns = connection.ops.geometry_columns()
HAS_GEOMETRY_COLUMNS = True
except NotImplementedError:
HAS_GEOMETRY_COLUMNS = False
class OperationTestCase(TransactionTestCase):
available_apps = ['gis_tests.gis_migrations']
def tearDown(self):
# Delete table after testing
if hasattr(self, 'current_state'):
self.apply_operations('gis', self.current_state, [migrations.DeleteModel('Neighborhood')])
super().tearDown()
@property
def has_spatial_indexes(self):
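        # On MySQL, spatial index support depends on the storage engine
        # backing the table, so defer to the introspection layer there; the
        # other backends covered by this suite are assumed to support them.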
if mysql:
with connection.cursor() as cursor:
return connection.introspection.supports_spatial_index(cursor, 'gis_neighborhood')
return True
def get_table_description(self, table):
with connection.cursor() as cursor:
return connection.introspection.get_table_description(cursor, table)
def assertColumnExists(self, table, column):
self.assertIn(column, [c.name for c in self.get_table_description(table)])
def assertColumnNotExists(self, table, column):
self.assertNotIn(column, [c.name for c in self.get_table_description(table)])
def apply_operations(self, app_label, project_state, operations):
migration = Migration('name', app_label)
migration.operations = operations
with connection.schema_editor() as editor:
return migration.apply(project_state, editor)
def set_up_test_model(self, force_raster_creation=False):
test_fields = [
('id', models.AutoField(primary_key=True)),
('name', models.CharField(max_length=100, unique=True)),
('geom', fields.MultiPolygonField(srid=4326))
]
if connection.features.supports_raster or force_raster_creation:
test_fields += [('rast', fields.RasterField(srid=4326, null=True))]
operations = [migrations.CreateModel('Neighborhood', test_fields)]
self.current_state = self.apply_operations('gis', ProjectState(), operations)
def assertGeometryColumnsCount(self, expected_count):
table_name = 'gis_neighborhood'
if connection.features.uppercases_column_names:
table_name = table_name.upper()
self.assertEqual(
GeometryColumns.objects.filter(**{
GeometryColumns.table_name_col(): table_name,
}).count(),
expected_count
)
def assertSpatialIndexExists(self, table, column, raster=False):
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(cursor, table)
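        # PostGIS indexes raster columns with a functional index on
        # ST_ConvexHull(<column>), so look for that expression in the
        # constraint definitions rather than for a plain column entry.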
if raster:
self.assertTrue(any(
'st_convexhull(%s)' % column in c['definition']
for c in constraints.values()
if c['definition'] is not None
))
else:
self.assertIn([column], [c['columns'] for c in constraints.values()])
def alter_gis_model(self, migration_class, model_name, field_name,
blank=False, field_class=None, field_class_kwargs=None):
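        # Build a single migration operation and apply it both to the
        # in-memory project state and, via the schema editor, to the database.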
args = [model_name, field_name]
if field_class:
field_class_kwargs = field_class_kwargs or {'srid': 4326, 'blank': blank}
args.append(field_class(**field_class_kwargs))
operation = migration_class(*args)
old_state = self.current_state.clone()
operation.state_forwards('gis', self.current_state)
with connection.schema_editor() as editor:
operation.database_forwards('gis', editor, old_state, self.current_state)
class OperationTests(OperationTestCase):
def setUp(self):
super().setUp()
self.set_up_test_model()
def test_add_geom_field(self):
"""
Test the AddField operation with a geometry-enabled column.
"""
self.alter_gis_model(migrations.AddField, 'Neighborhood', 'path', False, fields.LineStringField)
self.assertColumnExists('gis_neighborhood', 'path')
# Test GeometryColumns when available
if HAS_GEOMETRY_COLUMNS:
self.assertGeometryColumnsCount(2)
# Test spatial indices when available
if self.has_spatial_indexes:
self.assertSpatialIndexExists('gis_neighborhood', 'path')
@skipUnlessDBFeature('supports_raster')
def test_add_raster_field(self):
"""
Test the AddField operation with a raster-enabled column.
"""
self.alter_gis_model(migrations.AddField, 'Neighborhood', 'heatmap', False, fields.RasterField)
self.assertColumnExists('gis_neighborhood', 'heatmap')
# Test spatial indices when available
if self.has_spatial_indexes:
self.assertSpatialIndexExists('gis_neighborhood', 'heatmap', raster=True)
def test_add_blank_geom_field(self):
"""
Should be able to add a GeometryField with blank=True.
"""
self.alter_gis_model(migrations.AddField, 'Neighborhood', 'path', True, fields.LineStringField)
self.assertColumnExists('gis_neighborhood', 'path')
# Test GeometryColumns when available
if HAS_GEOMETRY_COLUMNS:
self.assertGeometryColumnsCount(2)
# Test spatial indices when available
if self.has_spatial_indexes:
self.assertSpatialIndexExists('gis_neighborhood', 'path')
@skipUnlessDBFeature('supports_raster')
def test_add_blank_raster_field(self):
"""
Should be able to add a RasterField with blank=True.
"""
self.alter_gis_model(migrations.AddField, 'Neighborhood', 'heatmap', True, fields.RasterField)
self.assertColumnExists('gis_neighborhood', 'heatmap')
# Test spatial indices when available
if self.has_spatial_indexes:
self.assertSpatialIndexExists('gis_neighborhood', 'heatmap', raster=True)
def test_remove_geom_field(self):
"""
Test the RemoveField operation with a geometry-enabled column.
"""
self.alter_gis_model(migrations.RemoveField, 'Neighborhood', 'geom')
self.assertColumnNotExists('gis_neighborhood', 'geom')
# Test GeometryColumns when available
if HAS_GEOMETRY_COLUMNS:
self.assertGeometryColumnsCount(0)
@skipUnlessDBFeature('supports_raster')
def test_remove_raster_field(self):
"""
Test the RemoveField operation with a raster-enabled column.
"""
self.alter_gis_model(migrations.RemoveField, 'Neighborhood', 'rast')
self.assertColumnNotExists('gis_neighborhood', 'rast')
def test_create_model_spatial_index(self):
if not self.has_spatial_indexes:
self.skipTest('No support for Spatial indexes')
self.assertSpatialIndexExists('gis_neighborhood', 'geom')
if connection.features.supports_raster:
self.assertSpatialIndexExists('gis_neighborhood', 'rast', raster=True)
@skipUnlessDBFeature("supports_3d_storage")
@skipIf(spatialite, "Django currently doesn't support altering Spatialite geometry fields")
def test_alter_geom_field_dim(self):
Neighborhood = self.current_state.apps.get_model('gis', 'Neighborhood')
p1 = Polygon(((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)))
Neighborhood.objects.create(name='TestDim', geom=MultiPolygon(p1, p1))
# Add 3rd dimension.
self.alter_gis_model(
migrations.AlterField, 'Neighborhood', 'geom', False,
fields.MultiPolygonField, field_class_kwargs={'srid': 4326, 'dim': 3}
)
self.assertTrue(Neighborhood.objects.first().geom.hasz)
# Rewind to 2 dimensions.
self.alter_gis_model(
migrations.AlterField, 'Neighborhood', 'geom', False,
fields.MultiPolygonField, field_class_kwargs={'srid': 4326, 'dim': 2}
)
self.assertFalse(Neighborhood.objects.first().geom.hasz)
@skipIfDBFeature('supports_raster')
class NoRasterSupportTests(OperationTestCase):
def test_create_raster_model_on_db_without_raster_support(self):
msg = 'Raster fields require backends with raster support.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.set_up_test_model(force_raster_creation=True)
def test_add_raster_field_on_db_without_raster_support(self):
msg = 'Raster fields require backends with raster support.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.set_up_test_model()
self.alter_gis_model(
migrations.AddField, 'Neighborhood', 'heatmap',
False, fields.RasterField
)
|
c90458b88e9c2f6d6e22c2211801e39200e491acf9edd6862389cbe73ff9800f | # Copyright (c) 2008-2009 Aryeh Leib Taurog, http://www.aryehleib.com
# All rights reserved.
#
# Modified from original contribution by Aryeh Leib Taurog, which was
# released under the New BSD license.
import unittest
from django.contrib.gis.geos.mutable_list import ListMixin
class UserListA(ListMixin):
_mytype = tuple
def __init__(self, i_list, *args, **kwargs):
self._list = self._mytype(i_list)
super().__init__(*args, **kwargs)
def __len__(self):
return len(self._list)
def __str__(self):
return str(self._list)
def __repr__(self):
return repr(self._list)
def _set_list(self, length, items):
# this would work:
# self._list = self._mytype(items)
        # but then we wouldn't be testing the length parameter
itemList = ['x'] * length
for i, v in enumerate(items):
itemList[i] = v
self._list = self._mytype(itemList)
def _get_single_external(self, index):
return self._list[index]
class UserListB(UserListA):
_mytype = list
def _set_single(self, index, value):
self._list[index] = value
def nextRange(length):
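    # Hand back a fresh range of `length` values on each call; the running
    # start offset is stored on the function object itself so successive
    # slice assignments use distinct, easily recognizable values.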
nextRange.start += 100
return range(nextRange.start, nextRange.start + length)
nextRange.start = 0
class ListMixinTest(unittest.TestCase):
"""
Tests base class ListMixin by comparing a list clone which is
a ListMixin subclass with a real Python list.
"""
limit = 3
listType = UserListA
def lists_of_len(self, length=None):
if length is None:
length = self.limit
pl = list(range(length))
return pl, self.listType(pl)
def limits_plus(self, b):
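        # Every valid index for a list of length `limit`, plus `b`
        # out-of-range values on each side.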
return range(-self.limit - b, self.limit + b)
def step_range(self):
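        # All non-zero slice steps from -(limit + 1) through limit
        # (zero is not a valid slice step).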
return list(range(-1 - self.limit, 0)) + list(range(1, 1 + self.limit))
def test01_getslice(self):
'Slice retrieval'
pl, ul = self.lists_of_len()
for i in self.limits_plus(1):
self.assertEqual(pl[i:], ul[i:], 'slice [%d:]' % (i))
self.assertEqual(pl[:i], ul[:i], 'slice [:%d]' % (i))
for j in self.limits_plus(1):
self.assertEqual(pl[i:j], ul[i:j], 'slice [%d:%d]' % (i, j))
for k in self.step_range():
self.assertEqual(pl[i:j:k], ul[i:j:k], 'slice [%d:%d:%d]' % (i, j, k))
for k in self.step_range():
self.assertEqual(pl[i::k], ul[i::k], 'slice [%d::%d]' % (i, k))
self.assertEqual(pl[:i:k], ul[:i:k], 'slice [:%d:%d]' % (i, k))
for k in self.step_range():
self.assertEqual(pl[::k], ul[::k], 'slice [::%d]' % (k))
def test02_setslice(self):
'Slice assignment'
def setfcn(x, i, j, k, L):
x[i:j:k] = range(L)
pl, ul = self.lists_of_len()
for slen in range(self.limit + 1):
ssl = nextRange(slen)
ul[:] = ssl
pl[:] = ssl
self.assertEqual(pl, ul[:], 'set slice [:]')
for i in self.limits_plus(1):
ssl = nextRange(slen)
ul[i:] = ssl
pl[i:] = ssl
self.assertEqual(pl, ul[:], 'set slice [%d:]' % (i))
ssl = nextRange(slen)
ul[:i] = ssl
pl[:i] = ssl
self.assertEqual(pl, ul[:], 'set slice [:%d]' % (i))
for j in self.limits_plus(1):
ssl = nextRange(slen)
ul[i:j] = ssl
pl[i:j] = ssl
self.assertEqual(pl, ul[:], 'set slice [%d:%d]' % (i, j))
for k in self.step_range():
ssl = nextRange(len(ul[i:j:k]))
ul[i:j:k] = ssl
pl[i:j:k] = ssl
self.assertEqual(pl, ul[:], 'set slice [%d:%d:%d]' % (i, j, k))
sliceLen = len(ul[i:j:k])
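                        # Assigning a sequence of the wrong length to a
                        # stepped slice must raise ValueError (checked for
                        # both too-long and, when possible, too-short inputs).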
with self.assertRaises(ValueError):
setfcn(ul, i, j, k, sliceLen + 1)
if sliceLen > 2:
with self.assertRaises(ValueError):
setfcn(ul, i, j, k, sliceLen - 1)
for k in self.step_range():
ssl = nextRange(len(ul[i::k]))
ul[i::k] = ssl
pl[i::k] = ssl
self.assertEqual(pl, ul[:], 'set slice [%d::%d]' % (i, k))
ssl = nextRange(len(ul[:i:k]))
ul[:i:k] = ssl
pl[:i:k] = ssl
self.assertEqual(pl, ul[:], 'set slice [:%d:%d]' % (i, k))
for k in self.step_range():
ssl = nextRange(len(ul[::k]))
ul[::k] = ssl
pl[::k] = ssl
self.assertEqual(pl, ul[:], 'set slice [::%d]' % (k))
def test03_delslice(self):
'Delete slice'
for Len in range(self.limit):
pl, ul = self.lists_of_len(Len)
del pl[:]
del ul[:]
self.assertEqual(pl[:], ul[:], 'del slice [:]')
for i in range(-Len - 1, Len + 1):
pl, ul = self.lists_of_len(Len)
del pl[i:]
del ul[i:]
self.assertEqual(pl[:], ul[:], 'del slice [%d:]' % (i))
pl, ul = self.lists_of_len(Len)
del pl[:i]
del ul[:i]
self.assertEqual(pl[:], ul[:], 'del slice [:%d]' % (i))
for j in range(-Len - 1, Len + 1):
pl, ul = self.lists_of_len(Len)
del pl[i:j]
del ul[i:j]
self.assertEqual(pl[:], ul[:], 'del slice [%d:%d]' % (i, j))
for k in list(range(-Len - 1, 0)) + list(range(1, Len)):
pl, ul = self.lists_of_len(Len)
del pl[i:j:k]
del ul[i:j:k]
self.assertEqual(pl[:], ul[:], 'del slice [%d:%d:%d]' % (i, j, k))
for k in list(range(-Len - 1, 0)) + list(range(1, Len)):
pl, ul = self.lists_of_len(Len)
del pl[:i:k]
del ul[:i:k]
self.assertEqual(pl[:], ul[:], 'del slice [:%d:%d]' % (i, k))
pl, ul = self.lists_of_len(Len)
del pl[i::k]
del ul[i::k]
self.assertEqual(pl[:], ul[:], 'del slice [%d::%d]' % (i, k))
for k in list(range(-Len - 1, 0)) + list(range(1, Len)):
pl, ul = self.lists_of_len(Len)
del pl[::k]
del ul[::k]
self.assertEqual(pl[:], ul[:], 'del slice [::%d]' % (k))
def test04_get_set_del_single(self):
'Get/set/delete single item'
pl, ul = self.lists_of_len()
for i in self.limits_plus(0):
self.assertEqual(pl[i], ul[i], 'get single item [%d]' % i)
for i in self.limits_plus(0):
pl, ul = self.lists_of_len()
pl[i] = 100
ul[i] = 100
self.assertEqual(pl[:], ul[:], 'set single item [%d]' % i)
for i in self.limits_plus(0):
pl, ul = self.lists_of_len()
del pl[i]
del ul[i]
self.assertEqual(pl[:], ul[:], 'del single item [%d]' % i)
def test05_out_of_range_exceptions(self):
'Out of range exceptions'
def setfcn(x, i):
x[i] = 20
def getfcn(x, i):
return x[i]
def delfcn(x, i):
del x[i]
pl, ul = self.lists_of_len()
for i in (-1 - self.limit, self.limit):
            with self.assertRaises(IndexError):  # set index
                setfcn(ul, i)
            with self.assertRaises(IndexError):  # get index
                getfcn(ul, i)
            with self.assertRaises(IndexError):  # del index
                delfcn(ul, i)
def test06_list_methods(self):
'List methods'
pl, ul = self.lists_of_len()
pl.append(40)
ul.append(40)
self.assertEqual(pl[:], ul[:], 'append')
pl.extend(range(50, 55))
ul.extend(range(50, 55))
self.assertEqual(pl[:], ul[:], 'extend')
pl.reverse()
ul.reverse()
self.assertEqual(pl[:], ul[:], 'reverse')
for i in self.limits_plus(1):
pl, ul = self.lists_of_len()
pl.insert(i, 50)
ul.insert(i, 50)
self.assertEqual(pl[:], ul[:], 'insert at %d' % i)
for i in self.limits_plus(0):
pl, ul = self.lists_of_len()
self.assertEqual(pl.pop(i), ul.pop(i), 'popped value at %d' % i)
self.assertEqual(pl[:], ul[:], 'after pop at %d' % i)
pl, ul = self.lists_of_len()
        self.assertEqual(pl.pop(), ul.pop(), 'popped value')
self.assertEqual(pl[:], ul[:], 'after pop')
pl, ul = self.lists_of_len()
def popfcn(x, i):
x.pop(i)
with self.assertRaises(IndexError):
popfcn(ul, self.limit)
with self.assertRaises(IndexError):
popfcn(ul, -1 - self.limit)
pl, ul = self.lists_of_len()
for val in range(self.limit):
self.assertEqual(pl.index(val), ul.index(val), 'index of %d' % val)
for val in self.limits_plus(2):
self.assertEqual(pl.count(val), ul.count(val), 'count %d' % val)
for val in range(self.limit):
pl, ul = self.lists_of_len()
pl.remove(val)
ul.remove(val)
self.assertEqual(pl[:], ul[:], 'after remove val %d' % val)
def indexfcn(x, v):
return x.index(v)
def removefcn(x, v):
return x.remove(v)
with self.assertRaises(ValueError):
indexfcn(ul, 40)
with self.assertRaises(ValueError):
removefcn(ul, 40)
def test07_allowed_types(self):
'Type-restricted list'
pl, ul = self.lists_of_len()
ul._allowed = int
ul[1] = 50
ul[:2] = [60, 70, 80]
def setfcn(x, i, v):
x[i] = v
with self.assertRaises(TypeError):
setfcn(ul, 2, 'hello')
with self.assertRaises(TypeError):
setfcn(ul, slice(0, 3, 2), ('hello', 'goodbye'))
def test08_min_length(self):
'Length limits'
pl, ul = self.lists_of_len(5)
ul._minlength = 3
def delfcn(x, i):
del x[:i]
def setfcn(x, i):
x[:i] = []
for i in range(len(ul) - ul._minlength + 1, len(ul)):
with self.assertRaises(ValueError):
delfcn(ul, i)
with self.assertRaises(ValueError):
setfcn(ul, i)
del ul[:len(ul) - ul._minlength]
ul._maxlength = 4
for i in range(0, ul._maxlength - len(ul)):
ul.append(i)
with self.assertRaises(ValueError):
ul.append(10)
def test09_iterable_check(self):
'Error on assigning non-iterable to slice'
pl, ul = self.lists_of_len(self.limit + 1)
def setfcn(x, i, v):
x[i] = v
with self.assertRaises(TypeError):
setfcn(ul, slice(0, 3, 2), 2)
def test10_checkindex(self):
'Index check'
pl, ul = self.lists_of_len()
for i in self.limits_plus(0):
if i < 0:
self.assertEqual(ul._checkindex(i), i + self.limit, '_checkindex(neg index)')
else:
self.assertEqual(ul._checkindex(i), i, '_checkindex(pos index)')
for i in (-self.limit - 1, self.limit):
with self.assertRaises(IndexError):
ul._checkindex(i)
def test_11_sorting(self):
'Sorting'
pl, ul = self.lists_of_len()
pl.insert(0, pl.pop())
ul.insert(0, ul.pop())
pl.sort()
ul.sort()
self.assertEqual(pl[:], ul[:], 'sort')
mid = pl[len(pl) // 2]
pl.sort(key=lambda x: (mid - x) ** 2)
ul.sort(key=lambda x: (mid - x) ** 2)
self.assertEqual(pl[:], ul[:], 'sort w/ key')
pl.insert(0, pl.pop())
ul.insert(0, ul.pop())
pl.sort(reverse=True)
ul.sort(reverse=True)
self.assertEqual(pl[:], ul[:], 'sort w/ reverse')
mid = pl[len(pl) // 2]
pl.sort(key=lambda x: (mid - x) ** 2)
ul.sort(key=lambda x: (mid - x) ** 2)
self.assertEqual(pl[:], ul[:], 'sort w/ key')
def test_12_arithmetic(self):
'Arithmetic'
pl, ul = self.lists_of_len()
al = list(range(10, 14))
self.assertEqual(list(pl + al), list(ul + al), 'add')
self.assertEqual(type(ul), type(ul + al), 'type of add result')
self.assertEqual(list(al + pl), list(al + ul), 'radd')
self.assertEqual(type(al), type(al + ul), 'type of radd result')
objid = id(ul)
pl += al
ul += al
self.assertEqual(pl[:], ul[:], 'in-place add')
self.assertEqual(objid, id(ul), 'in-place add id')
for n in (-1, 0, 1, 3):
pl, ul = self.lists_of_len()
self.assertEqual(list(pl * n), list(ul * n), 'mul by %d' % n)
self.assertEqual(type(ul), type(ul * n), 'type of mul by %d result' % n)
self.assertEqual(list(n * pl), list(n * ul), 'rmul by %d' % n)
self.assertEqual(type(ul), type(n * ul), 'type of rmul by %d result' % n)
objid = id(ul)
pl *= n
ul *= n
self.assertEqual(pl[:], ul[:], 'in-place mul by %d' % n)
self.assertEqual(objid, id(ul), 'in-place mul by %d id' % n)
pl, ul = self.lists_of_len()
self.assertEqual(pl, ul, 'cmp for equal')
self.assertNotEqual(ul, pl + [2], 'cmp for not equal')
self.assertGreaterEqual(pl, ul, 'cmp for gte self')
self.assertLessEqual(pl, ul, 'cmp for lte self')
self.assertGreaterEqual(ul, pl, 'cmp for self gte')
self.assertLessEqual(ul, pl, 'cmp for self lte')
self.assertGreater(pl + [5], ul, 'cmp')
self.assertGreaterEqual(pl + [5], ul, 'cmp')
self.assertLess(pl, ul + [2], 'cmp')
self.assertLessEqual(pl, ul + [2], 'cmp')
self.assertGreater(ul + [5], pl, 'cmp')
self.assertGreaterEqual(ul + [5], pl, 'cmp')
self.assertLess(ul, pl + [2], 'cmp')
self.assertLessEqual(ul, pl + [2], 'cmp')
pl[1] = 20
self.assertGreater(pl, ul, 'cmp for gt self')
self.assertLess(ul, pl, 'cmp for self lt')
pl[1] = -20
self.assertLess(pl, ul, 'cmp for lt self')
self.assertGreater(ul, pl, 'cmp for gt self')
class ListMixinTestSingle(ListMixinTest):
listType = UserListB
|
b4ab0417ea67b441bce0070d2cea384f7730ce66ab4ce12fc924595d5a26f137 | # Copyright (c) 2008-2009 Aryeh Leib Taurog, all rights reserved.
# Modified from original contribution by Aryeh Leib Taurog, which was
# released under the New BSD license.
import unittest
from django.contrib.gis.geos import (
LinearRing, LineString, MultiPoint, Point, Polygon, fromstr,
)
def api_get_distance(x):
return x.distance(Point(-200, -200))
def api_get_buffer(x):
return x.buffer(10)
def api_get_geom_typeid(x):
return x.geom_typeid
def api_get_num_coords(x):
return x.num_coords
def api_get_centroid(x):
return x.centroid
def api_get_empty(x):
return x.empty
def api_get_valid(x):
return x.valid
def api_get_simple(x):
return x.simple
def api_get_ring(x):
return x.ring
def api_get_boundary(x):
return x.boundary
def api_get_convex_hull(x):
return x.convex_hull
def api_get_extent(x):
return x.extent
def api_get_area(x):
return x.area
def api_get_length(x):
return x.length
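# Collect every api_get_* helper defined above so each mutation test can
# check a rebuilt geometry against the original across the whole accessor API.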
geos_function_tests = [
val for name, val in vars().items()
if hasattr(val, '__call__') and name.startswith('api_get_')
]
class GEOSMutationTest(unittest.TestCase):
"""
    Tests Pythonic mutability of the Python GEOS geometry wrappers:
    get/set/delitem on a slice, plus normal list methods.
"""
def test00_GEOSIndexException(self):
'Testing Geometry IndexError'
p = Point(1, 2)
for i in range(-2, 2):
p._checkindex(i)
with self.assertRaises(IndexError):
p._checkindex(2)
with self.assertRaises(IndexError):
p._checkindex(-3)
def test01_PointMutations(self):
'Testing Point mutations'
for p in (Point(1, 2, 3), fromstr('POINT (1 2 3)')):
self.assertEqual(p._get_single_external(1), 2.0, 'Point _get_single_external')
# _set_single
p._set_single(0, 100)
self.assertEqual(p.coords, (100.0, 2.0, 3.0), 'Point _set_single')
# _set_list
p._set_list(2, (50, 3141))
self.assertEqual(p.coords, (50.0, 3141.0), 'Point _set_list')
def test02_PointExceptions(self):
'Testing Point exceptions'
with self.assertRaises(TypeError):
Point(range(1))
with self.assertRaises(TypeError):
Point(range(4))
def test03_PointApi(self):
'Testing Point API'
q = Point(4, 5, 3)
for p in (Point(1, 2, 3), fromstr('POINT (1 2 3)')):
p[0:2] = [4, 5]
for f in geos_function_tests:
self.assertEqual(f(q), f(p), 'Point ' + f.__name__)
def test04_LineStringMutations(self):
'Testing LineString mutations'
for ls in (LineString((1, 0), (4, 1), (6, -1)),
fromstr('LINESTRING (1 0,4 1,6 -1)')):
self.assertEqual(ls._get_single_external(1), (4.0, 1.0), 'LineString _get_single_external')
# _set_single
ls._set_single(0, (-50, 25))
self.assertEqual(ls.coords, ((-50.0, 25.0), (4.0, 1.0), (6.0, -1.0)), 'LineString _set_single')
# _set_list
ls._set_list(2, ((-50.0, 25.0), (6.0, -1.0)))
self.assertEqual(ls.coords, ((-50.0, 25.0), (6.0, -1.0)), 'LineString _set_list')
lsa = LineString(ls.coords)
for f in geos_function_tests:
self.assertEqual(f(lsa), f(ls), 'LineString ' + f.__name__)
def test05_Polygon(self):
'Testing Polygon mutations'
for pg in (Polygon(((1, 0), (4, 1), (6, -1), (8, 10), (1, 0)),
((5, 4), (6, 4), (6, 3), (5, 4))),
fromstr('POLYGON ((1 0,4 1,6 -1,8 10,1 0),(5 4,6 4,6 3,5 4))')):
self.assertEqual(pg._get_single_external(0),
LinearRing((1, 0), (4, 1), (6, -1), (8, 10), (1, 0)),
'Polygon _get_single_external(0)')
self.assertEqual(pg._get_single_external(1),
LinearRing((5, 4), (6, 4), (6, 3), (5, 4)),
'Polygon _get_single_external(1)')
# _set_list
pg._set_list(2, (((1, 2), (10, 0), (12, 9), (-1, 15), (1, 2)), ((4, 2), (5, 2), (5, 3), (4, 2))))
self.assertEqual(
pg.coords,
(((1.0, 2.0), (10.0, 0.0), (12.0, 9.0), (-1.0, 15.0), (1.0, 2.0)),
((4.0, 2.0), (5.0, 2.0), (5.0, 3.0), (4.0, 2.0))),
'Polygon _set_list')
lsa = Polygon(*pg.coords)
for f in geos_function_tests:
self.assertEqual(f(lsa), f(pg), 'Polygon ' + f.__name__)
def test06_Collection(self):
'Testing Collection mutations'
points = (
MultiPoint(*map(Point, ((3, 4), (-1, 2), (5, -4), (2, 8)))),
fromstr('MULTIPOINT (3 4,-1 2,5 -4,2 8)'),
)
for mp in points:
self.assertEqual(mp._get_single_external(2), Point(5, -4), 'Collection _get_single_external')
mp._set_list(3, map(Point, ((5, 5), (3, -2), (8, 1))))
self.assertEqual(mp.coords, ((5.0, 5.0), (3.0, -2.0), (8.0, 1.0)), 'Collection _set_list')
lsa = MultiPoint(*map(Point, ((5, 5), (3, -2), (8, 1))))
for f in geos_function_tests:
self.assertEqual(f(lsa), f(mp), 'MultiPoint ' + f.__name__)
|
e682a0bf31d117cc816da2bf96c75da3ffa3af09a5ee0564697d3836fc548f45 | import binascii
from django.contrib.gis.geos import (
GEOSGeometry, Point, Polygon, WKBReader, WKBWriter, WKTReader, WKTWriter,
)
from django.test import SimpleTestCase
class GEOSIOTest(SimpleTestCase):
def test01_wktreader(self):
# Creating a WKTReader instance
wkt_r = WKTReader()
wkt = 'POINT (5 23)'
# read() should return a GEOSGeometry
ref = GEOSGeometry(wkt)
g1 = wkt_r.read(wkt.encode())
g2 = wkt_r.read(wkt)
for geom in (g1, g2):
self.assertEqual(ref, geom)
# Should only accept string objects.
with self.assertRaises(TypeError):
wkt_r.read(1)
with self.assertRaises(TypeError):
wkt_r.read(memoryview(b'foo'))
def test02_wktwriter(self):
# Creating a WKTWriter instance, testing its ptr property.
wkt_w = WKTWriter()
with self.assertRaises(TypeError):
wkt_w.ptr = WKTReader.ptr_type()
ref = GEOSGeometry('POINT (5 23)')
ref_wkt = 'POINT (5.0000000000000000 23.0000000000000000)'
self.assertEqual(ref_wkt, wkt_w.write(ref).decode())
def test_wktwriter_constructor_arguments(self):
wkt_w = WKTWriter(dim=3, trim=True, precision=3)
ref = GEOSGeometry('POINT (5.34562 23 1.5)')
ref_wkt = 'POINT Z (5.35 23 1.5)'
self.assertEqual(ref_wkt, wkt_w.write(ref).decode())
def test03_wkbreader(self):
# Creating a WKBReader instance
wkb_r = WKBReader()
hex = b'000000000140140000000000004037000000000000'
wkb = memoryview(binascii.a2b_hex(hex))
ref = GEOSGeometry(hex)
# read() should return a GEOSGeometry on either a hex string or
# a WKB buffer.
g1 = wkb_r.read(wkb)
g2 = wkb_r.read(hex)
for geom in (g1, g2):
self.assertEqual(ref, geom)
bad_input = (1, 5.23, None, False)
for bad_wkb in bad_input:
with self.assertRaises(TypeError):
wkb_r.read(bad_wkb)
def test04_wkbwriter(self):
wkb_w = WKBWriter()
# Representations of 'POINT (5 23)' in hex -- one normal and
# the other with the byte order changed.
g = GEOSGeometry('POINT (5 23)')
hex1 = b'010100000000000000000014400000000000003740'
wkb1 = memoryview(binascii.a2b_hex(hex1))
hex2 = b'000000000140140000000000004037000000000000'
wkb2 = memoryview(binascii.a2b_hex(hex2))
self.assertEqual(hex1, wkb_w.write_hex(g))
self.assertEqual(wkb1, wkb_w.write(g))
# Ensuring bad byteorders are not accepted.
for bad_byteorder in (-1, 2, 523, 'foo', None):
# Equivalent of `wkb_w.byteorder = bad_byteorder`
with self.assertRaises(ValueError):
wkb_w._set_byteorder(bad_byteorder)
# Setting the byteorder to 0 (for Big Endian)
wkb_w.byteorder = 0
self.assertEqual(hex2, wkb_w.write_hex(g))
self.assertEqual(wkb2, wkb_w.write(g))
# Back to Little Endian
wkb_w.byteorder = 1
# Now, trying out the 3D and SRID flags.
g = GEOSGeometry('POINT (5 23 17)')
g.srid = 4326
hex3d = b'0101000080000000000000144000000000000037400000000000003140'
wkb3d = memoryview(binascii.a2b_hex(hex3d))
hex3d_srid = b'01010000A0E6100000000000000000144000000000000037400000000000003140'
wkb3d_srid = memoryview(binascii.a2b_hex(hex3d_srid))
# Ensuring bad output dimensions are not accepted
for bad_outdim in (-1, 0, 1, 4, 423, 'foo', None):
with self.assertRaisesMessage(ValueError, 'WKB output dimension must be 2 or 3'):
wkb_w.outdim = bad_outdim
# Now setting the output dimensions to be 3
wkb_w.outdim = 3
self.assertEqual(hex3d, wkb_w.write_hex(g))
self.assertEqual(wkb3d, wkb_w.write(g))
# Telling the WKBWriter to include the srid in the representation.
wkb_w.srid = True
self.assertEqual(hex3d_srid, wkb_w.write_hex(g))
self.assertEqual(wkb3d_srid, wkb_w.write(g))
def test_wkt_writer_trim(self):
wkt_w = WKTWriter()
self.assertFalse(wkt_w.trim)
self.assertEqual(wkt_w.write(Point(1, 1)), b'POINT (1.0000000000000000 1.0000000000000000)')
wkt_w.trim = True
self.assertTrue(wkt_w.trim)
self.assertEqual(wkt_w.write(Point(1, 1)), b'POINT (1 1)')
self.assertEqual(wkt_w.write(Point(1.1, 1)), b'POINT (1.1 1)')
self.assertEqual(wkt_w.write(Point(1. / 3, 1)), b'POINT (0.3333333333333333 1)')
wkt_w.trim = False
self.assertFalse(wkt_w.trim)
self.assertEqual(wkt_w.write(Point(1, 1)), b'POINT (1.0000000000000000 1.0000000000000000)')
def test_wkt_writer_precision(self):
wkt_w = WKTWriter()
self.assertIsNone(wkt_w.precision)
self.assertEqual(wkt_w.write(Point(1. / 3, 2. / 3)), b'POINT (0.3333333333333333 0.6666666666666666)')
wkt_w.precision = 1
self.assertEqual(wkt_w.precision, 1)
self.assertEqual(wkt_w.write(Point(1. / 3, 2. / 3)), b'POINT (0.3 0.7)')
wkt_w.precision = 0
self.assertEqual(wkt_w.precision, 0)
self.assertEqual(wkt_w.write(Point(1. / 3, 2. / 3)), b'POINT (0 1)')
wkt_w.precision = None
self.assertIsNone(wkt_w.precision)
self.assertEqual(wkt_w.write(Point(1. / 3, 2. / 3)), b'POINT (0.3333333333333333 0.6666666666666666)')
with self.assertRaisesMessage(AttributeError, 'WKT output rounding precision must be '):
wkt_w.precision = 'potato'
def test_empty_point_wkb(self):
p = Point(srid=4326)
wkb_w = WKBWriter()
wkb_w.srid = False
with self.assertRaisesMessage(ValueError, 'Empty point is not representable in WKB.'):
wkb_w.write(p)
with self.assertRaisesMessage(ValueError, 'Empty point is not representable in WKB.'):
wkb_w.write_hex(p)
wkb_w.srid = True
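        # enumerate() pairs byteorder 0 (big-endian/XDR) with the first
        # expected hex string and byteorder 1 (little-endian/NDR) with the
        # second.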
for byteorder, hex in enumerate([
b'0020000001000010E67FF80000000000007FF8000000000000',
b'0101000020E6100000000000000000F87F000000000000F87F',
]):
wkb_w.byteorder = byteorder
self.assertEqual(wkb_w.write_hex(p), hex)
self.assertEqual(GEOSGeometry(wkb_w.write_hex(p)), p)
self.assertEqual(wkb_w.write(p), memoryview(binascii.a2b_hex(hex)))
self.assertEqual(GEOSGeometry(wkb_w.write(p)), p)
def test_empty_polygon_wkb(self):
p = Polygon(srid=4326)
p_no_srid = Polygon()
wkb_w = WKBWriter()
wkb_w.srid = True
for byteorder, hexes in enumerate([
(b'000000000300000000', b'0020000003000010E600000000'),
(b'010300000000000000', b'0103000020E610000000000000'),
]):
wkb_w.byteorder = byteorder
for srid, hex in enumerate(hexes):
wkb_w.srid = srid
self.assertEqual(wkb_w.write_hex(p), hex)
self.assertEqual(GEOSGeometry(wkb_w.write_hex(p)), p if srid else p_no_srid)
self.assertEqual(wkb_w.write(p), memoryview(binascii.a2b_hex(hex)))
self.assertEqual(GEOSGeometry(wkb_w.write(p)), p if srid else p_no_srid)
|
c85b425678d92fc1a2fc11859bb32bbff5af5e6aeaa7881df0e27f49ef3870ad | from django.contrib.gis.geos import LineString
from django.test import SimpleTestCase
class GEOSCoordSeqTest(SimpleTestCase):
def test_getitem(self):
coord_seq = LineString([(x, x) for x in range(2)]).coord_seq
for i in (0, 1):
with self.subTest(i):
self.assertEqual(coord_seq[i], (i, i))
for i in (-3, 10):
msg = 'invalid GEOS Geometry index: %s' % i
with self.subTest(i):
with self.assertRaisesMessage(IndexError, msg):
coord_seq[i]
|
7f915fde6fba6b755e0b01eadaa92d3bdb3621aa6097467304781b950a755ad4 | import ctypes
import itertools
import json
import pickle
import random
from binascii import a2b_hex
from io import BytesIO
from unittest import mock
from django.contrib.gis import gdal
from django.contrib.gis.geos import (
GeometryCollection, GEOSException, GEOSGeometry, LinearRing, LineString,
MultiLineString, MultiPoint, MultiPolygon, Point, Polygon, fromfile,
fromstr,
)
from django.contrib.gis.geos.libgeos import geos_version_tuple
from django.contrib.gis.shortcuts import numpy
from django.template import Context
from django.template.engine import Engine
from django.test import SimpleTestCase
from ..test_data import TestDataMixin
class GEOSTest(SimpleTestCase, TestDataMixin):
def test_wkt(self):
"Testing WKT output."
for g in self.geometries.wkt_out:
geom = fromstr(g.wkt)
if geom.hasz:
self.assertEqual(g.ewkt, geom.wkt)
def test_hex(self):
"Testing HEX output."
for g in self.geometries.hex_wkt:
geom = fromstr(g.wkt)
self.assertEqual(g.hex, geom.hex.decode())
def test_hexewkb(self):
"Testing (HEX)EWKB output."
# For testing HEX(EWKB).
ogc_hex = b'01010000000000000000000000000000000000F03F'
ogc_hex_3d = b'01010000800000000000000000000000000000F03F0000000000000040'
# `SELECT ST_AsHEXEWKB(ST_GeomFromText('POINT(0 1)', 4326));`
hexewkb_2d = b'0101000020E61000000000000000000000000000000000F03F'
# `SELECT ST_AsHEXEWKB(ST_GeomFromEWKT('SRID=4326;POINT(0 1 2)'));`
hexewkb_3d = b'01010000A0E61000000000000000000000000000000000F03F0000000000000040'
pnt_2d = Point(0, 1, srid=4326)
pnt_3d = Point(0, 1, 2, srid=4326)
# OGC-compliant HEX will not have SRID value.
self.assertEqual(ogc_hex, pnt_2d.hex)
self.assertEqual(ogc_hex_3d, pnt_3d.hex)
        # HEXEWKB should be appropriate for its dimension -- have to use a
        # WKBWriter w/dimension set accordingly, else GEOS will insert
        # garbage into the 3D coordinate if there is none.
self.assertEqual(hexewkb_2d, pnt_2d.hexewkb)
self.assertEqual(hexewkb_3d, pnt_3d.hexewkb)
self.assertIs(GEOSGeometry(hexewkb_3d).hasz, True)
# Same for EWKB.
self.assertEqual(memoryview(a2b_hex(hexewkb_2d)), pnt_2d.ewkb)
self.assertEqual(memoryview(a2b_hex(hexewkb_3d)), pnt_3d.ewkb)
# Redundant sanity check.
self.assertEqual(4326, GEOSGeometry(hexewkb_2d).srid)
def test_kml(self):
"Testing KML output."
for tg in self.geometries.wkt_out:
geom = fromstr(tg.wkt)
kml = getattr(tg, 'kml', False)
if kml:
self.assertEqual(kml, geom.kml)
def test_errors(self):
"Testing the Error handlers."
# string-based
for err in self.geometries.errors:
with self.assertRaises((GEOSException, ValueError)):
fromstr(err.wkt)
# Bad WKB
with self.assertRaises(GEOSException):
GEOSGeometry(memoryview(b'0'))
class NotAGeometry:
pass
# Some other object
with self.assertRaises(TypeError):
GEOSGeometry(NotAGeometry())
# None
with self.assertRaises(TypeError):
GEOSGeometry(None)
def test_wkb(self):
"Testing WKB output."
for g in self.geometries.hex_wkt:
geom = fromstr(g.wkt)
wkb = geom.wkb
self.assertEqual(wkb.hex().upper(), g.hex)
def test_create_hex(self):
"Testing creation from HEX."
for g in self.geometries.hex_wkt:
geom_h = GEOSGeometry(g.hex)
# we need to do this so decimal places get normalized
geom_t = fromstr(g.wkt)
self.assertEqual(geom_t.wkt, geom_h.wkt)
def test_create_wkb(self):
"Testing creation from WKB."
for g in self.geometries.hex_wkt:
wkb = memoryview(bytes.fromhex(g.hex))
geom_h = GEOSGeometry(wkb)
# we need to do this so decimal places get normalized
geom_t = fromstr(g.wkt)
self.assertEqual(geom_t.wkt, geom_h.wkt)
def test_ewkt(self):
"Testing EWKT."
srids = (-1, 32140)
for srid in srids:
for p in self.geometries.polygons:
ewkt = 'SRID=%d;%s' % (srid, p.wkt)
poly = fromstr(ewkt)
self.assertEqual(srid, poly.srid)
self.assertEqual(srid, poly.shell.srid)
self.assertEqual(srid, fromstr(poly.ewkt).srid) # Checking export
def test_json(self):
"Testing GeoJSON input/output (via GDAL)."
for g in self.geometries.json_geoms:
geom = GEOSGeometry(g.wkt)
if not hasattr(g, 'not_equal'):
# Loading jsons to prevent decimal differences
self.assertEqual(json.loads(g.json), json.loads(geom.json))
self.assertEqual(json.loads(g.json), json.loads(geom.geojson))
self.assertEqual(GEOSGeometry(g.wkt, 4326), GEOSGeometry(geom.json))
def test_json_srid(self):
geojson_data = {
"type": "Point",
"coordinates": [2, 49],
"crs": {
"type": "name",
"properties": {
"name": "urn:ogc:def:crs:EPSG::4322"
}
}
}
self.assertEqual(GEOSGeometry(json.dumps(geojson_data)), Point(2, 49, srid=4322))
def test_fromfile(self):
"Testing the fromfile() factory."
ref_pnt = GEOSGeometry('POINT(5 23)')
wkt_f = BytesIO()
wkt_f.write(ref_pnt.wkt.encode())
wkb_f = BytesIO()
wkb_f.write(bytes(ref_pnt.wkb))
# Other tests use `fromfile()` on string filenames so those
# aren't tested here.
for fh in (wkt_f, wkb_f):
fh.seek(0)
pnt = fromfile(fh)
self.assertEqual(ref_pnt, pnt)
def test_eq(self):
"Testing equivalence."
p = fromstr('POINT(5 23)')
self.assertEqual(p, p.wkt)
self.assertNotEqual(p, 'foo')
ls = fromstr('LINESTRING(0 0, 1 1, 5 5)')
self.assertEqual(ls, ls.wkt)
self.assertNotEqual(p, 'bar')
self.assertEqual(p, 'POINT(5.0 23.0)')
        # An error shouldn't be raised on equivalence testing with
        # an invalid type.
for g in (p, ls):
self.assertNotEqual(g, None)
self.assertNotEqual(g, {'foo': 'bar'})
self.assertNotEqual(g, False)
def test_hash(self):
point_1 = Point(5, 23)
point_2 = Point(5, 23, srid=4326)
point_3 = Point(5, 23, srid=32632)
multipoint_1 = MultiPoint(point_1, srid=4326)
multipoint_2 = MultiPoint(point_2)
multipoint_3 = MultiPoint(point_3)
self.assertNotEqual(hash(point_1), hash(point_2))
self.assertNotEqual(hash(point_1), hash(point_3))
self.assertNotEqual(hash(point_2), hash(point_3))
self.assertNotEqual(hash(multipoint_1), hash(multipoint_2))
self.assertEqual(hash(multipoint_2), hash(multipoint_3))
self.assertNotEqual(hash(multipoint_1), hash(point_1))
self.assertNotEqual(hash(multipoint_2), hash(point_2))
self.assertNotEqual(hash(multipoint_3), hash(point_3))
def test_eq_with_srid(self):
"Testing non-equivalence with different srids."
p0 = Point(5, 23)
p1 = Point(5, 23, srid=4326)
p2 = Point(5, 23, srid=32632)
# GEOS
self.assertNotEqual(p0, p1)
self.assertNotEqual(p1, p2)
# EWKT
self.assertNotEqual(p0, p1.ewkt)
self.assertNotEqual(p1, p0.ewkt)
self.assertNotEqual(p1, p2.ewkt)
# Equivalence with matching SRIDs
self.assertEqual(p2, p2)
self.assertEqual(p2, p2.ewkt)
# WKT contains no SRID so will not equal
self.assertNotEqual(p2, p2.wkt)
# SRID of 0
self.assertEqual(p0, 'SRID=0;POINT (5 23)')
self.assertNotEqual(p1, 'SRID=0;POINT (5 23)')
def test_points(self):
"Testing Point objects."
prev = fromstr('POINT(0 0)')
for p in self.geometries.points:
# Creating the point from the WKT
pnt = fromstr(p.wkt)
self.assertEqual(pnt.geom_type, 'Point')
self.assertEqual(pnt.geom_typeid, 0)
self.assertEqual(pnt.dims, 0)
self.assertEqual(p.x, pnt.x)
self.assertEqual(p.y, pnt.y)
self.assertEqual(pnt, fromstr(p.wkt))
self.assertEqual(False, pnt == prev) # Use assertEqual to test __eq__
# Making sure that the point's X, Y components are what we expect
self.assertAlmostEqual(p.x, pnt.tuple[0], 9)
self.assertAlmostEqual(p.y, pnt.tuple[1], 9)
# Testing the third dimension, and getting the tuple arguments
if hasattr(p, 'z'):
self.assertIs(pnt.hasz, True)
self.assertEqual(p.z, pnt.z)
                self.assertAlmostEqual(p.z, pnt.tuple[2], 9)
tup_args = (p.x, p.y, p.z)
set_tup1 = (2.71, 3.14, 5.23)
set_tup2 = (5.23, 2.71, 3.14)
else:
self.assertIs(pnt.hasz, False)
self.assertIsNone(pnt.z)
tup_args = (p.x, p.y)
set_tup1 = (2.71, 3.14)
set_tup2 = (3.14, 2.71)
# Centroid operation on point should be point itself
self.assertEqual(p.centroid, pnt.centroid.tuple)
# Now testing the different constructors
pnt2 = Point(tup_args) # e.g., Point((1, 2))
pnt3 = Point(*tup_args) # e.g., Point(1, 2)
self.assertEqual(pnt, pnt2)
self.assertEqual(pnt, pnt3)
# Now testing setting the x and y
pnt.y = 3.14
pnt.x = 2.71
self.assertEqual(3.14, pnt.y)
self.assertEqual(2.71, pnt.x)
# Setting via the tuple/coords property
pnt.tuple = set_tup1
self.assertEqual(set_tup1, pnt.tuple)
pnt.coords = set_tup2
self.assertEqual(set_tup2, pnt.coords)
prev = pnt # setting the previous geometry
def test_multipoints(self):
"Testing MultiPoint objects."
for mp in self.geometries.multipoints:
mpnt = fromstr(mp.wkt)
self.assertEqual(mpnt.geom_type, 'MultiPoint')
self.assertEqual(mpnt.geom_typeid, 4)
self.assertEqual(mpnt.dims, 0)
self.assertAlmostEqual(mp.centroid[0], mpnt.centroid.tuple[0], 9)
self.assertAlmostEqual(mp.centroid[1], mpnt.centroid.tuple[1], 9)
with self.assertRaises(IndexError):
mpnt.__getitem__(len(mpnt))
self.assertEqual(mp.centroid, mpnt.centroid.tuple)
self.assertEqual(mp.coords, tuple(m.tuple for m in mpnt))
for p in mpnt:
self.assertEqual(p.geom_type, 'Point')
self.assertEqual(p.geom_typeid, 0)
self.assertIs(p.empty, False)
self.assertIs(p.valid, True)
def test_linestring(self):
"Testing LineString objects."
prev = fromstr('POINT(0 0)')
for l in self.geometries.linestrings:
ls = fromstr(l.wkt)
self.assertEqual(ls.geom_type, 'LineString')
self.assertEqual(ls.geom_typeid, 1)
self.assertEqual(ls.dims, 1)
self.assertIs(ls.empty, False)
self.assertIs(ls.ring, False)
if hasattr(l, 'centroid'):
self.assertEqual(l.centroid, ls.centroid.tuple)
if hasattr(l, 'tup'):
self.assertEqual(l.tup, ls.tuple)
self.assertEqual(ls, fromstr(l.wkt))
self.assertEqual(False, ls == prev) # Use assertEqual to test __eq__
with self.assertRaises(IndexError):
ls.__getitem__(len(ls))
prev = ls
# Creating a LineString from a tuple, list, and numpy array
self.assertEqual(ls, LineString(ls.tuple)) # tuple
self.assertEqual(ls, LineString(*ls.tuple)) # as individual arguments
self.assertEqual(ls, LineString([list(tup) for tup in ls.tuple])) # as list
# Point individual arguments
self.assertEqual(ls.wkt, LineString(*tuple(Point(tup) for tup in ls.tuple)).wkt)
if numpy:
self.assertEqual(ls, LineString(numpy.array(ls.tuple))) # as numpy array
with self.assertRaisesMessage(TypeError, 'Each coordinate should be a sequence (list or tuple)'):
LineString((0, 0))
with self.assertRaisesMessage(ValueError, 'LineString requires at least 2 points, got 1.'):
LineString([(0, 0)])
if numpy:
with self.assertRaisesMessage(ValueError, 'LineString requires at least 2 points, got 1.'):
LineString(numpy.array([(0, 0)]))
with mock.patch('django.contrib.gis.geos.linestring.numpy', False):
with self.assertRaisesMessage(TypeError, 'Invalid initialization input for LineStrings.'):
LineString('wrong input')
# Test __iter__().
self.assertEqual(list(LineString((0, 0), (1, 1), (2, 2))), [(0, 0), (1, 1), (2, 2)])
def test_multilinestring(self):
"Testing MultiLineString objects."
prev = fromstr('POINT(0 0)')
for l in self.geometries.multilinestrings:
ml = fromstr(l.wkt)
self.assertEqual(ml.geom_type, 'MultiLineString')
self.assertEqual(ml.geom_typeid, 5)
self.assertEqual(ml.dims, 1)
self.assertAlmostEqual(l.centroid[0], ml.centroid.x, 9)
self.assertAlmostEqual(l.centroid[1], ml.centroid.y, 9)
self.assertEqual(ml, fromstr(l.wkt))
self.assertEqual(False, ml == prev) # Use assertEqual to test __eq__
prev = ml
for ls in ml:
self.assertEqual(ls.geom_type, 'LineString')
self.assertEqual(ls.geom_typeid, 1)
self.assertIs(ls.empty, False)
with self.assertRaises(IndexError):
ml.__getitem__(len(ml))
self.assertEqual(ml.wkt, MultiLineString(*tuple(s.clone() for s in ml)).wkt)
self.assertEqual(ml, MultiLineString(*tuple(LineString(s.tuple) for s in ml)))
def test_linearring(self):
"Testing LinearRing objects."
for rr in self.geometries.linearrings:
lr = fromstr(rr.wkt)
self.assertEqual(lr.geom_type, 'LinearRing')
self.assertEqual(lr.geom_typeid, 2)
self.assertEqual(lr.dims, 1)
self.assertEqual(rr.n_p, len(lr))
self.assertIs(lr.valid, True)
self.assertIs(lr.empty, False)
# Creating a LinearRing from a tuple, list, and numpy array
self.assertEqual(lr, LinearRing(lr.tuple))
self.assertEqual(lr, LinearRing(*lr.tuple))
self.assertEqual(lr, LinearRing([list(tup) for tup in lr.tuple]))
if numpy:
self.assertEqual(lr, LinearRing(numpy.array(lr.tuple)))
with self.assertRaisesMessage(ValueError, 'LinearRing requires at least 4 points, got 3.'):
LinearRing((0, 0), (1, 1), (0, 0))
with self.assertRaisesMessage(ValueError, 'LinearRing requires at least 4 points, got 1.'):
LinearRing([(0, 0)])
if numpy:
with self.assertRaisesMessage(ValueError, 'LinearRing requires at least 4 points, got 1.'):
LinearRing(numpy.array([(0, 0)]))
def test_linearring_json(self):
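        # GeoJSON has no ring type, so a LinearRing serializes as a LineString.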
self.assertJSONEqual(
LinearRing((0, 0), (0, 1), (1, 1), (0, 0)).json,
'{"coordinates": [[0, 0], [0, 1], [1, 1], [0, 0]], "type": "LineString"}',
)
def test_polygons_from_bbox(self):
"Testing `from_bbox` class method."
bbox = (-180, -90, 180, 90)
p = Polygon.from_bbox(bbox)
self.assertEqual(bbox, p.extent)
# Testing numerical precision
x = 3.14159265358979323
bbox = (0, 0, 1, x)
p = Polygon.from_bbox(bbox)
y = p.extent[-1]
self.assertEqual(format(x, '.13f'), format(y, '.13f'))
def test_polygons(self):
"Testing Polygon objects."
prev = fromstr('POINT(0 0)')
for p in self.geometries.polygons:
# Creating the Polygon, testing its properties.
poly = fromstr(p.wkt)
self.assertEqual(poly.geom_type, 'Polygon')
self.assertEqual(poly.geom_typeid, 3)
self.assertEqual(poly.dims, 2)
self.assertIs(poly.empty, False)
self.assertIs(poly.ring, False)
self.assertEqual(p.n_i, poly.num_interior_rings)
self.assertEqual(p.n_i + 1, len(poly)) # Testing __len__
self.assertEqual(p.n_p, poly.num_points)
# Area & Centroid
self.assertAlmostEqual(p.area, poly.area, 9)
self.assertAlmostEqual(p.centroid[0], poly.centroid.tuple[0], 9)
self.assertAlmostEqual(p.centroid[1], poly.centroid.tuple[1], 9)
# Testing the geometry equivalence
self.assertEqual(poly, fromstr(p.wkt))
# Should not be equal to previous geometry
self.assertEqual(False, poly == prev) # Use assertEqual to test __eq__
self.assertNotEqual(poly, prev) # Use assertNotEqual to test __ne__
# Testing the exterior ring
ring = poly.exterior_ring
self.assertEqual(ring.geom_type, 'LinearRing')
self.assertEqual(ring.geom_typeid, 2)
if p.ext_ring_cs:
self.assertEqual(p.ext_ring_cs, ring.tuple)
self.assertEqual(p.ext_ring_cs, poly[0].tuple) # Testing __getitem__
# Testing __getitem__ and __setitem__ on invalid indices
with self.assertRaises(IndexError):
poly.__getitem__(len(poly))
with self.assertRaises(IndexError):
poly.__setitem__(len(poly), False)
with self.assertRaises(IndexError):
poly.__getitem__(-1 * len(poly) - 1)
# Testing __iter__
for r in poly:
self.assertEqual(r.geom_type, 'LinearRing')
self.assertEqual(r.geom_typeid, 2)
# Testing polygon construction.
with self.assertRaises(TypeError):
Polygon(0, [1, 2, 3])
with self.assertRaises(TypeError):
Polygon('foo')
# Polygon(shell, (hole1, ... holeN))
rings = tuple(r for r in poly)
self.assertEqual(poly, Polygon(rings[0], rings[1:]))
# Polygon(shell_tuple, hole_tuple1, ... , hole_tupleN)
ring_tuples = tuple(r.tuple for r in poly)
self.assertEqual(poly, Polygon(*ring_tuples))
# Constructing with tuples of LinearRings.
self.assertEqual(poly.wkt, Polygon(*tuple(r for r in poly)).wkt)
self.assertEqual(poly.wkt, Polygon(*tuple(LinearRing(r.tuple) for r in poly)).wkt)
def test_polygons_templates(self):
# Accessing Polygon attributes in templates should work.
engine = Engine()
template = engine.from_string('{{ polygons.0.wkt }}')
polygons = [fromstr(p.wkt) for p in self.geometries.multipolygons[:2]]
content = template.render(Context({'polygons': polygons}))
self.assertIn('MULTIPOLYGON (((100', content)
def test_polygon_comparison(self):
p1 = Polygon(((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)))
p2 = Polygon(((0, 0), (0, 1), (1, 0), (0, 0)))
self.assertGreater(p1, p2)
self.assertLess(p2, p1)
p3 = Polygon(((0, 0), (0, 1), (1, 1), (2, 0), (0, 0)))
p4 = Polygon(((0, 0), (0, 1), (2, 2), (1, 0), (0, 0)))
self.assertGreater(p4, p3)
self.assertLess(p3, p4)
def test_multipolygons(self):
"Testing MultiPolygon objects."
fromstr('POINT (0 0)')
for mp in self.geometries.multipolygons:
mpoly = fromstr(mp.wkt)
self.assertEqual(mpoly.geom_type, 'MultiPolygon')
self.assertEqual(mpoly.geom_typeid, 6)
self.assertEqual(mpoly.dims, 2)
self.assertEqual(mp.valid, mpoly.valid)
if mp.valid:
self.assertEqual(mp.num_geom, mpoly.num_geom)
self.assertEqual(mp.n_p, mpoly.num_coords)
self.assertEqual(mp.num_geom, len(mpoly))
with self.assertRaises(IndexError):
mpoly.__getitem__(len(mpoly))
for p in mpoly:
self.assertEqual(p.geom_type, 'Polygon')
self.assertEqual(p.geom_typeid, 3)
self.assertIs(p.valid, True)
self.assertEqual(mpoly.wkt, MultiPolygon(*tuple(poly.clone() for poly in mpoly)).wkt)
def test_memory_hijinks(self):
"Testing Geometry __del__() on rings and polygons."
# #### Memory issues with rings and poly
# These tests are needed to ensure sanity with writable geometries.
# Getting a polygon with interior rings, and pulling out the interior rings
poly = fromstr(self.geometries.polygons[1].wkt)
ring1 = poly[0]
ring2 = poly[1]
# These deletes should be 'harmless' since they are done on child geometries
del ring1
del ring2
ring1 = poly[0]
ring2 = poly[1]
# Deleting the polygon
del poly
# Access to these rings is OK since they are clones.
str(ring1)
str(ring2)
def test_coord_seq(self):
"Testing Coordinate Sequence objects."
for p in self.geometries.polygons:
if p.ext_ring_cs:
# Constructing the polygon and getting the coordinate sequence
poly = fromstr(p.wkt)
cs = poly.exterior_ring.coord_seq
self.assertEqual(p.ext_ring_cs, cs.tuple) # done in the Polygon test too.
self.assertEqual(len(p.ext_ring_cs), len(cs)) # Making sure __len__ works
# Checks __getitem__ and __setitem__
for i in range(len(p.ext_ring_cs)):
c1 = p.ext_ring_cs[i] # Expected value
c2 = cs[i] # Value from coordseq
self.assertEqual(c1, c2)
# Constructing the test value to set the coordinate sequence with
if len(c1) == 2:
tset = (5, 23)
else:
tset = (5, 23, 8)
                    cs[i] = tset
                    # Making sure every set point matches what we expect
                    for j in range(len(tset)):
                        self.assertEqual(tset[j], cs[i][j])
def test_relate_pattern(self):
"Testing relate() and relate_pattern()."
g = fromstr('POINT (0 0)')
with self.assertRaises(GEOSException):
g.relate_pattern(0, 'invalid pattern, yo')
for rg in self.geometries.relate_geoms:
a = fromstr(rg.wkt_a)
b = fromstr(rg.wkt_b)
self.assertEqual(rg.result, a.relate_pattern(b, rg.pattern))
self.assertEqual(rg.pattern, a.relate(b))
def test_intersection(self):
"Testing intersects() and intersection()."
for i in range(len(self.geometries.topology_geoms)):
a = fromstr(self.geometries.topology_geoms[i].wkt_a)
b = fromstr(self.geometries.topology_geoms[i].wkt_b)
i1 = fromstr(self.geometries.intersect_geoms[i].wkt)
self.assertIs(a.intersects(b), True)
i2 = a.intersection(b)
self.assertEqual(i1, i2)
self.assertEqual(i1, a & b) # __and__ is intersection operator
a &= b # testing __iand__
self.assertEqual(i1, a)
def test_union(self):
"Testing union()."
for i in range(len(self.geometries.topology_geoms)):
a = fromstr(self.geometries.topology_geoms[i].wkt_a)
b = fromstr(self.geometries.topology_geoms[i].wkt_b)
u1 = fromstr(self.geometries.union_geoms[i].wkt)
u2 = a.union(b)
self.assertEqual(u1, u2)
self.assertEqual(u1, a | b) # __or__ is union operator
a |= b # testing __ior__
self.assertEqual(u1, a)
def test_unary_union(self):
"Testing unary_union."
for i in range(len(self.geometries.topology_geoms)):
a = fromstr(self.geometries.topology_geoms[i].wkt_a)
b = fromstr(self.geometries.topology_geoms[i].wkt_b)
u1 = fromstr(self.geometries.union_geoms[i].wkt)
u2 = GeometryCollection(a, b).unary_union
self.assertTrue(u1.equals(u2))
def test_difference(self):
"Testing difference()."
for i in range(len(self.geometries.topology_geoms)):
a = fromstr(self.geometries.topology_geoms[i].wkt_a)
b = fromstr(self.geometries.topology_geoms[i].wkt_b)
d1 = fromstr(self.geometries.diff_geoms[i].wkt)
d2 = a.difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a - b) # __sub__ is difference operator
a -= b # testing __isub__
self.assertEqual(d1, a)
def test_symdifference(self):
"Testing sym_difference()."
for i in range(len(self.geometries.topology_geoms)):
a = fromstr(self.geometries.topology_geoms[i].wkt_a)
b = fromstr(self.geometries.topology_geoms[i].wkt_b)
d1 = fromstr(self.geometries.sdiff_geoms[i].wkt)
d2 = a.sym_difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a ^ b) # __xor__ is symmetric difference operator
a ^= b # testing __ixor__
self.assertEqual(d1, a)
def test_buffer(self):
bg = self.geometries.buffer_geoms[0]
g = fromstr(bg.wkt)
# Can't use a floating-point for the number of quadsegs.
with self.assertRaises(ctypes.ArgumentError):
g.buffer(bg.width, quadsegs=1.1)
self._test_buffer(self.geometries.buffer_geoms, 'buffer')
def test_buffer_with_style(self):
bg = self.geometries.buffer_with_style_geoms[0]
g = fromstr(bg.wkt)
# Can't use a floating-point for the number of quadsegs.
with self.assertRaises(ctypes.ArgumentError):
g.buffer_with_style(bg.width, quadsegs=1.1)
# Can't use a floating-point for the end cap style.
with self.assertRaises(ctypes.ArgumentError):
g.buffer_with_style(bg.width, end_cap_style=1.2)
        # Can't use an end cap style that is not in the enum.
with self.assertRaises(GEOSException):
g.buffer_with_style(bg.width, end_cap_style=55)
# Can't use a floating-point for the join style.
with self.assertRaises(ctypes.ArgumentError):
g.buffer_with_style(bg.width, join_style=1.3)
# Can't use a join style that is not in the enum.
with self.assertRaises(GEOSException):
g.buffer_with_style(bg.width, join_style=66)
self._test_buffer(
itertools.chain(self.geometries.buffer_geoms, self.geometries.buffer_with_style_geoms),
'buffer_with_style',
)
def _test_buffer(self, geometries, buffer_method_name):
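        """Compare each geometry's computed buffer, ring by ring and point by point, against its expected buffer."""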
for bg in geometries:
g = fromstr(bg.wkt)
# The buffer we expect
exp_buf = fromstr(bg.buffer_wkt)
# Constructing our buffer
buf_kwargs = {
kwarg_name: getattr(bg, kwarg_name)
for kwarg_name in ('width', 'quadsegs', 'end_cap_style', 'join_style', 'mitre_limit')
if hasattr(bg, kwarg_name)
}
buf = getattr(g, buffer_method_name)(**buf_kwargs)
self.assertEqual(exp_buf.num_coords, buf.num_coords)
self.assertEqual(len(exp_buf), len(buf))
            # Now ensuring that each point in the buffer is almost equal
for j in range(len(exp_buf)):
exp_ring = exp_buf[j]
buf_ring = buf[j]
self.assertEqual(len(exp_ring), len(buf_ring))
for k in range(len(exp_ring)):
# Asserting the X, Y of each point are almost equal (due to floating point imprecision)
self.assertAlmostEqual(exp_ring[k][0], buf_ring[k][0], 9)
self.assertAlmostEqual(exp_ring[k][1], buf_ring[k][1], 9)
def test_covers(self):
poly = Polygon(((0, 0), (0, 10), (10, 10), (10, 0), (0, 0)))
self.assertTrue(poly.covers(Point(5, 5)))
self.assertFalse(poly.covers(Point(100, 100)))
def test_closed(self):
ls_closed = LineString((0, 0), (1, 1), (0, 0))
ls_not_closed = LineString((0, 0), (1, 1))
self.assertFalse(ls_not_closed.closed)
self.assertTrue(ls_closed.closed)
if geos_version_tuple() >= (3, 5):
self.assertFalse(MultiLineString(ls_closed, ls_not_closed).closed)
self.assertTrue(MultiLineString(ls_closed, ls_closed).closed)
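        # Simulate GEOS < 3.5, where closed is unsupported on MultiLineString.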
with mock.patch('django.contrib.gis.geos.libgeos.geos_version', lambda: b'3.4.9'):
with self.assertRaisesMessage(GEOSException, "MultiLineString.closed requires GEOS >= 3.5.0."):
MultiLineString().closed
def test_srid(self):
"Testing the SRID property and keyword."
# Testing SRID keyword on Point
pnt = Point(5, 23, srid=4326)
self.assertEqual(4326, pnt.srid)
pnt.srid = 3084
self.assertEqual(3084, pnt.srid)
with self.assertRaises(ctypes.ArgumentError):
pnt.srid = '4326'
# Testing SRID keyword on fromstr(), and on Polygon rings.
poly = fromstr(self.geometries.polygons[1].wkt, srid=4269)
self.assertEqual(4269, poly.srid)
for ring in poly:
self.assertEqual(4269, ring.srid)
poly.srid = 4326
self.assertEqual(4326, poly.shell.srid)
# Testing SRID keyword on GeometryCollection
gc = GeometryCollection(Point(5, 23), LineString((0, 0), (1.5, 1.5), (3, 3)), srid=32021)
self.assertEqual(32021, gc.srid)
for i in range(len(gc)):
self.assertEqual(32021, gc[i].srid)
# GEOS may get the SRID from HEXEWKB
# 'POINT(5 23)' at SRID=4326 in hex form -- obtained from PostGIS
# using `SELECT GeomFromText('POINT (5 23)', 4326);`.
hex = '0101000020E610000000000000000014400000000000003740'
p1 = fromstr(hex)
self.assertEqual(4326, p1.srid)
p2 = fromstr(p1.hex)
self.assertIsNone(p2.srid)
p3 = fromstr(p1.hex, srid=-1) # -1 is intended.
self.assertEqual(-1, p3.srid)
# Testing that geometry SRID could be set to its own value
pnt_wo_srid = Point(1, 1)
pnt_wo_srid.srid = pnt_wo_srid.srid
# Input geometries that have an SRID.
self.assertEqual(GEOSGeometry(pnt.ewkt, srid=pnt.srid).srid, pnt.srid)
self.assertEqual(GEOSGeometry(pnt.ewkb, srid=pnt.srid).srid, pnt.srid)
with self.assertRaisesMessage(ValueError, 'Input geometry already has SRID: %d.' % pnt.srid):
GEOSGeometry(pnt.ewkt, srid=1)
with self.assertRaisesMessage(ValueError, 'Input geometry already has SRID: %d.' % pnt.srid):
GEOSGeometry(pnt.ewkb, srid=1)
def test_custom_srid(self):
"""Test with a null srid and a srid unknown to GDAL."""
for srid in [None, 999999]:
pnt = Point(111200, 220900, srid=srid)
self.assertTrue(pnt.ewkt.startswith(("SRID=%s;" % srid if srid else '') + "POINT (111200"))
self.assertIsInstance(pnt.ogr, gdal.OGRGeometry)
self.assertIsNone(pnt.srs)
# Test conversion from custom to a known srid
c2w = gdal.CoordTransform(
gdal.SpatialReference(
'+proj=mill +lat_0=0 +lon_0=0 +x_0=0 +y_0=0 +R_A +ellps=WGS84 '
'+datum=WGS84 +units=m +no_defs'
),
gdal.SpatialReference(4326))
new_pnt = pnt.transform(c2w, clone=True)
self.assertEqual(new_pnt.srid, 4326)
self.assertAlmostEqual(new_pnt.x, 1, 3)
self.assertAlmostEqual(new_pnt.y, 2, 3)
def test_mutable_geometries(self):
"Testing the mutability of Polygons and Geometry Collections."
# ### Testing the mutability of Polygons ###
for p in self.geometries.polygons:
poly = fromstr(p.wkt)
# Should only be able to use __setitem__ with LinearRing geometries.
with self.assertRaises(TypeError):
poly.__setitem__(0, LineString((1, 1), (2, 2)))
# Constructing the new shell by adding 500 to every point in the old shell.
shell_tup = poly.shell.tuple
new_coords = []
for point in shell_tup:
new_coords.append((point[0] + 500., point[1] + 500.))
new_shell = LinearRing(*tuple(new_coords))
# Assigning polygon's exterior ring w/the new shell
poly.exterior_ring = new_shell
str(new_shell) # new shell is still accessible
self.assertEqual(poly.exterior_ring, new_shell)
self.assertEqual(poly[0], new_shell)
# ### Testing the mutability of Geometry Collections
for tg in self.geometries.multipoints:
mp = fromstr(tg.wkt)
for i in range(len(mp)):
# Creating a random point.
pnt = mp[i]
new = Point(random.randint(21, 100), random.randint(21, 100))
# Testing the assignment
mp[i] = new
str(new) # what was used for the assignment is still accessible
self.assertEqual(mp[i], new)
self.assertEqual(mp[i].wkt, new.wkt)
self.assertNotEqual(pnt, mp[i])
# MultiPolygons involve much more memory management because each
# Polygon w/in the collection has its own rings.
for tg in self.geometries.multipolygons:
mpoly = fromstr(tg.wkt)
for i in range(len(mpoly)):
poly = mpoly[i]
old_poly = mpoly[i]
                # Offsetting each ring in the polygon by 500.
for j in range(len(poly)):
r = poly[j]
for k in range(len(r)):
r[k] = (r[k][0] + 500., r[k][1] + 500.)
poly[j] = r
self.assertNotEqual(mpoly[i], poly)
# Testing the assignment
mpoly[i] = poly
str(poly) # Still accessible
self.assertEqual(mpoly[i], poly)
self.assertNotEqual(mpoly[i], old_poly)
        # Extreme (!!) __setitem__ -- no longer works; the first object would
        # have to detect that __setitem__ is being chained through to the
        # subsequent objects -- maybe mpoly[0, 0, 0] = (3.14, 2.71)?
# mpoly[0][0][0] = (3.14, 2.71)
# self.assertEqual((3.14, 2.71), mpoly[0][0][0])
# Doing it more slowly..
# self.assertEqual((3.14, 2.71), mpoly[0].shell[0])
# del mpoly
def test_point_list_assignment(self):
p = Point(0, 0)
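        # Slice assignment replaces the point's entire coordinate sequence.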
p[:] = (1, 2, 3)
self.assertEqual(p, Point(1, 2, 3))
        p[:] = ()
        self.assertEqual(p, Point())
        p[:] = (1, 2)
        self.assertEqual(p, Point(1, 2))
with self.assertRaises(ValueError):
p[:] = (1,)
with self.assertRaises(ValueError):
p[:] = (1, 2, 3, 4, 5)
def test_linestring_list_assignment(self):
ls = LineString((0, 0), (1, 1))
ls[:] = ()
self.assertEqual(ls, LineString())
ls[:] = ((0, 0), (1, 1), (2, 2))
self.assertEqual(ls, LineString((0, 0), (1, 1), (2, 2)))
with self.assertRaises(ValueError):
ls[:] = (1,)
def test_linearring_list_assignment(self):
ls = LinearRing((0, 0), (0, 1), (1, 1), (0, 0))
ls[:] = ()
self.assertEqual(ls, LinearRing())
ls[:] = ((0, 0), (0, 1), (1, 1), (1, 0), (0, 0))
self.assertEqual(ls, LinearRing((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)))
with self.assertRaises(ValueError):
ls[:] = ((0, 0), (1, 1), (2, 2))
def test_polygon_list_assignment(self):
pol = Polygon()
pol[:] = (((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)),)
self.assertEqual(pol, Polygon(((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)),))
pol[:] = ()
self.assertEqual(pol, Polygon())
def test_geometry_collection_list_assignment(self):
p = Point()
gc = GeometryCollection()
gc[:] = [p]
self.assertEqual(gc, GeometryCollection(p))
gc[:] = ()
self.assertEqual(gc, GeometryCollection())
def test_threed(self):
"Testing three-dimensional geometries."
# Testing a 3D Point
pnt = Point(2, 3, 8)
self.assertEqual((2., 3., 8.), pnt.coords)
with self.assertRaises(TypeError):
pnt.tuple = (1., 2.)
pnt.coords = (1., 2., 3.)
self.assertEqual((1., 2., 3.), pnt.coords)
# Testing a 3D LineString
ls = LineString((2., 3., 8.), (50., 250., -117.))
self.assertEqual(((2., 3., 8.), (50., 250., -117.)), ls.tuple)
with self.assertRaises(TypeError):
ls.__setitem__(0, (1., 2.))
ls[0] = (1., 2., 3.)
self.assertEqual((1., 2., 3.), ls[0])
def test_distance(self):
"Testing the distance() function."
# Distance to self should be 0.
pnt = Point(0, 0)
self.assertEqual(0.0, pnt.distance(Point(0, 0)))
# Distance should be 1
self.assertEqual(1.0, pnt.distance(Point(0, 1)))
# Distance should be ~ sqrt(2)
self.assertAlmostEqual(1.41421356237, pnt.distance(Point(1, 1)), 11)
# Distances are from the closest vertex in each geometry --
# should be 3 (distance from (2, 2) to (5, 2)).
ls1 = LineString((0, 0), (1, 1), (2, 2))
ls2 = LineString((5, 2), (6, 1), (7, 0))
self.assertEqual(3, ls1.distance(ls2))
def test_length(self):
"Testing the length property."
# Points have 0 length.
pnt = Point(0, 0)
self.assertEqual(0.0, pnt.length)
# Should be ~ sqrt(2)
ls = LineString((0, 0), (1, 1))
self.assertAlmostEqual(1.41421356237, ls.length, 11)
# Should be circumference of Polygon
poly = Polygon(LinearRing((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)))
self.assertEqual(4.0, poly.length)
# Should be sum of each element's length in collection.
mpoly = MultiPolygon(poly.clone(), poly)
self.assertEqual(8.0, mpoly.length)
def test_emptyCollections(self):
"Testing empty geometries and collections."
geoms = [
GeometryCollection([]),
fromstr('GEOMETRYCOLLECTION EMPTY'),
GeometryCollection(),
fromstr('POINT EMPTY'),
Point(),
fromstr('LINESTRING EMPTY'),
LineString(),
fromstr('POLYGON EMPTY'),
Polygon(),
fromstr('MULTILINESTRING EMPTY'),
MultiLineString(),
fromstr('MULTIPOLYGON EMPTY'),
MultiPolygon(()),
MultiPolygon(),
]
if numpy:
geoms.append(LineString(numpy.array([])))
for g in geoms:
self.assertIs(g.empty, True)
# Testing len() and num_geom.
if isinstance(g, Polygon):
self.assertEqual(1, len(g)) # Has one empty linear ring
self.assertEqual(1, g.num_geom)
self.assertEqual(0, len(g[0]))
elif isinstance(g, (Point, LineString)):
self.assertEqual(1, g.num_geom)
self.assertEqual(0, len(g))
else:
self.assertEqual(0, g.num_geom)
self.assertEqual(0, len(g))
# Testing __getitem__ (doesn't work on Point or Polygon)
if isinstance(g, Point):
with self.assertRaises(IndexError):
g.x
elif isinstance(g, Polygon):
lr = g.shell
self.assertEqual('LINEARRING EMPTY', lr.wkt)
self.assertEqual(0, len(lr))
self.assertIs(lr.empty, True)
with self.assertRaises(IndexError):
lr.__getitem__(0)
else:
with self.assertRaises(IndexError):
g.__getitem__(0)
def test_collection_dims(self):
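        # dims is the topological dimension of the highest-dimensional member;
        # an empty collection reports -1.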
gc = GeometryCollection([])
self.assertEqual(gc.dims, -1)
gc = GeometryCollection(Point(0, 0))
self.assertEqual(gc.dims, 0)
gc = GeometryCollection(LineString((0, 0), (1, 1)), Point(0, 0))
self.assertEqual(gc.dims, 1)
gc = GeometryCollection(LineString((0, 0), (1, 1)), Polygon(((0, 0), (0, 1), (1, 1), (0, 0))), Point(0, 0))
self.assertEqual(gc.dims, 2)
def test_collections_of_collections(self):
"Testing GeometryCollection handling of other collections."
# Creating a GeometryCollection WKT string composed of other
# collections and polygons.
coll = [mp.wkt for mp in self.geometries.multipolygons if mp.valid]
coll.extend(mls.wkt for mls in self.geometries.multilinestrings)
coll.extend(p.wkt for p in self.geometries.polygons)
coll.extend(mp.wkt for mp in self.geometries.multipoints)
gc_wkt = 'GEOMETRYCOLLECTION(%s)' % ','.join(coll)
# Should construct ok from WKT
gc1 = GEOSGeometry(gc_wkt)
# Should also construct ok from individual geometry arguments.
gc2 = GeometryCollection(*tuple(g for g in gc1))
# And, they should be equal.
self.assertEqual(gc1, gc2)
def test_gdal(self):
"Testing `ogr` and `srs` properties."
g1 = fromstr('POINT(5 23)')
self.assertIsInstance(g1.ogr, gdal.OGRGeometry)
self.assertIsNone(g1.srs)
g1_3d = fromstr('POINT(5 23 8)')
self.assertIsInstance(g1_3d.ogr, gdal.OGRGeometry)
self.assertEqual(g1_3d.ogr.z, 8)
g2 = fromstr('LINESTRING(0 0, 5 5, 23 23)', srid=4326)
self.assertIsInstance(g2.ogr, gdal.OGRGeometry)
self.assertIsInstance(g2.srs, gdal.SpatialReference)
self.assertEqual(g2.hex, g2.ogr.hex)
self.assertEqual('WGS 84', g2.srs.name)
def test_copy(self):
"Testing use with the Python `copy` module."
import copy
poly = GEOSGeometry('POLYGON((0 0, 0 23, 23 23, 23 0, 0 0), (5 5, 5 10, 10 10, 10 5, 5 5))')
cpy1 = copy.copy(poly)
cpy2 = copy.deepcopy(poly)
self.assertNotEqual(poly._ptr, cpy1._ptr)
self.assertNotEqual(poly._ptr, cpy2._ptr)
def test_transform(self):
"Testing `transform` method."
orig = GEOSGeometry('POINT (-104.609 38.255)', 4326)
trans = GEOSGeometry('POINT (992385.4472045 481455.4944650)', 2774)
# Using a srid, a SpatialReference object, and a CoordTransform object
# for transformations.
t1, t2, t3 = orig.clone(), orig.clone(), orig.clone()
t1.transform(trans.srid)
t2.transform(gdal.SpatialReference('EPSG:2774'))
ct = gdal.CoordTransform(gdal.SpatialReference('WGS84'), gdal.SpatialReference(2774))
t3.transform(ct)
# Testing use of the `clone` keyword.
k1 = orig.clone()
k2 = k1.transform(trans.srid, clone=True)
self.assertEqual(k1, orig)
self.assertNotEqual(k1, k2)
prec = 3
for p in (t1, t2, t3, k2):
self.assertAlmostEqual(trans.x, p.x, prec)
self.assertAlmostEqual(trans.y, p.y, prec)
def test_transform_3d(self):
p3d = GEOSGeometry('POINT (5 23 100)', 4326)
p3d.transform(2774)
self.assertEqual(p3d.z, 100)
def test_transform_noop(self):
""" Testing `transform` method (SRID match) """
# transform() should no-op if source & dest SRIDs match,
# regardless of whether GDAL is available.
g = GEOSGeometry('POINT (-104.609 38.255)', 4326)
gt = g.tuple
g.transform(4326)
self.assertEqual(g.tuple, gt)
self.assertEqual(g.srid, 4326)
g = GEOSGeometry('POINT (-104.609 38.255)', 4326)
g1 = g.transform(4326, clone=True)
self.assertEqual(g1.tuple, g.tuple)
self.assertEqual(g1.srid, 4326)
self.assertIsNot(g1, g, "Clone didn't happen")
def test_transform_nosrid(self):
""" Testing `transform` method (no SRID or negative SRID) """
g = GEOSGeometry('POINT (-104.609 38.255)', srid=None)
with self.assertRaises(GEOSException):
g.transform(2774)
g = GEOSGeometry('POINT (-104.609 38.255)', srid=None)
with self.assertRaises(GEOSException):
g.transform(2774, clone=True)
g = GEOSGeometry('POINT (-104.609 38.255)', srid=-1)
with self.assertRaises(GEOSException):
g.transform(2774)
g = GEOSGeometry('POINT (-104.609 38.255)', srid=-1)
with self.assertRaises(GEOSException):
g.transform(2774, clone=True)
def test_extent(self):
"Testing `extent` method."
# The xmin, ymin, xmax, ymax of the MultiPoint should be returned.
mp = MultiPoint(Point(5, 23), Point(0, 0), Point(10, 50))
self.assertEqual((0.0, 0.0, 10.0, 50.0), mp.extent)
pnt = Point(5.23, 17.8)
# Extent of points is just the point itself repeated.
self.assertEqual((5.23, 17.8, 5.23, 17.8), pnt.extent)
# Testing on the 'real world' Polygon.
poly = fromstr(self.geometries.polygons[3].wkt)
ring = poly.shell
x, y = ring.x, ring.y
xmin, ymin = min(x), min(y)
xmax, ymax = max(x), max(y)
self.assertEqual((xmin, ymin, xmax, ymax), poly.extent)
def test_pickle(self):
"Testing pickling and unpickling support."
# Creating a list of test geometries for pickling,
# and setting the SRID on some of them.
def get_geoms(lst, srid=None):
return [GEOSGeometry(tg.wkt, srid) for tg in lst]
tgeoms = get_geoms(self.geometries.points)
tgeoms.extend(get_geoms(self.geometries.multilinestrings, 4326))
tgeoms.extend(get_geoms(self.geometries.polygons, 3084))
tgeoms.extend(get_geoms(self.geometries.multipolygons, 3857))
for geom in tgeoms:
s1 = pickle.dumps(geom)
g1 = pickle.loads(s1)
self.assertEqual(geom, g1)
self.assertEqual(geom.srid, g1.srid)
def test_prepared(self):
"Testing PreparedGeometry support."
# Creating a simple multipolygon and getting a prepared version.
mpoly = GEOSGeometry('MULTIPOLYGON(((0 0,0 5,5 5,5 0,0 0)),((5 5,5 10,10 10,10 5,5 5)))')
prep = mpoly.prepared
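        # A prepared geometry caches internal indexes, so repeated predicate
        # tests against the same geometry are faster.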
# A set of test points.
pnts = [Point(5, 5), Point(7.5, 7.5), Point(2.5, 7.5)]
for pnt in pnts:
# Results should be the same (but faster)
self.assertEqual(mpoly.contains(pnt), prep.contains(pnt))
self.assertEqual(mpoly.intersects(pnt), prep.intersects(pnt))
self.assertEqual(mpoly.covers(pnt), prep.covers(pnt))
self.assertTrue(prep.crosses(fromstr('LINESTRING(1 1, 15 15)')))
self.assertTrue(prep.disjoint(Point(-5, -5)))
poly = Polygon(((-1, -1), (1, 1), (1, 0), (-1, -1)))
self.assertTrue(prep.overlaps(poly))
poly = Polygon(((-5, 0), (-5, 5), (0, 5), (-5, 0)))
self.assertTrue(prep.touches(poly))
poly = Polygon(((-1, -1), (-1, 11), (11, 11), (11, -1), (-1, -1)))
self.assertTrue(prep.within(poly))
# Original geometry deletion should not crash the prepared one (#21662)
del mpoly
self.assertTrue(prep.covers(Point(5, 5)))
def test_line_merge(self):
"Testing line merge support"
        ref_geoms = (
            fromstr('LINESTRING(1 1, 1 1, 3 3)'),
            fromstr('MULTILINESTRING((1 1, 3 3), (3 3, 4 2))'),
        )
        ref_merged = (
            fromstr('LINESTRING(1 1, 3 3)'),
            fromstr('LINESTRING(1 1, 3 3, 4 2)'),
        )
for geom, merged in zip(ref_geoms, ref_merged):
self.assertEqual(merged, geom.merged)
def test_valid_reason(self):
"Testing IsValidReason support"
g = GEOSGeometry("POINT(0 0)")
self.assertTrue(g.valid)
self.assertIsInstance(g.valid_reason, str)
self.assertEqual(g.valid_reason, "Valid Geometry")
g = GEOSGeometry("LINESTRING(0 0, 0 0)")
self.assertFalse(g.valid)
self.assertIsInstance(g.valid_reason, str)
self.assertTrue(g.valid_reason.startswith("Too few points in geometry component"))
def test_linearref(self):
"Testing linear referencing"
ls = fromstr('LINESTRING(0 0, 0 10, 10 10, 10 0)')
mls = fromstr('MULTILINESTRING((0 0, 0 10), (10 0, 10 10))')
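        # project() returns the distance along the line to the point nearest
        # the argument; interpolate() maps a distance back to a point.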
self.assertEqual(ls.project(Point(0, 20)), 10.0)
self.assertEqual(ls.project(Point(7, 6)), 24)
self.assertEqual(ls.project_normalized(Point(0, 20)), 1.0 / 3)
self.assertEqual(ls.interpolate(10), Point(0, 10))
self.assertEqual(ls.interpolate(24), Point(10, 6))
self.assertEqual(ls.interpolate_normalized(1.0 / 3), Point(0, 10))
self.assertEqual(mls.project(Point(0, 20)), 10)
self.assertEqual(mls.project(Point(7, 6)), 16)
self.assertEqual(mls.interpolate(9), Point(0, 9))
self.assertEqual(mls.interpolate(17), Point(10, 7))
def test_deconstructible(self):
"""
Geometry classes should be deconstructible.
"""
point = Point(4.337844, 50.827537, srid=4326)
path, args, kwargs = point.deconstruct()
self.assertEqual(path, 'django.contrib.gis.geos.point.Point')
self.assertEqual(args, (4.337844, 50.827537))
self.assertEqual(kwargs, {'srid': 4326})
ls = LineString(((0, 0), (1, 1)))
path, args, kwargs = ls.deconstruct()
self.assertEqual(path, 'django.contrib.gis.geos.linestring.LineString')
self.assertEqual(args, (((0, 0), (1, 1)),))
self.assertEqual(kwargs, {})
ls2 = LineString([Point(0, 0), Point(1, 1)], srid=4326)
path, args, kwargs = ls2.deconstruct()
self.assertEqual(path, 'django.contrib.gis.geos.linestring.LineString')
self.assertEqual(args, ([Point(0, 0), Point(1, 1)],))
self.assertEqual(kwargs, {'srid': 4326})
ext_coords = ((0, 0), (0, 1), (1, 1), (1, 0), (0, 0))
int_coords = ((0.4, 0.4), (0.4, 0.6), (0.6, 0.6), (0.6, 0.4), (0.4, 0.4))
poly = Polygon(ext_coords, int_coords)
path, args, kwargs = poly.deconstruct()
self.assertEqual(path, 'django.contrib.gis.geos.polygon.Polygon')
self.assertEqual(args, (ext_coords, int_coords))
self.assertEqual(kwargs, {})
lr = LinearRing((0, 0), (0, 1), (1, 1), (0, 0))
path, args, kwargs = lr.deconstruct()
self.assertEqual(path, 'django.contrib.gis.geos.linestring.LinearRing')
self.assertEqual(args, ((0, 0), (0, 1), (1, 1), (0, 0)))
self.assertEqual(kwargs, {})
mp = MultiPoint(Point(0, 0), Point(1, 1))
path, args, kwargs = mp.deconstruct()
self.assertEqual(path, 'django.contrib.gis.geos.collections.MultiPoint')
self.assertEqual(args, (Point(0, 0), Point(1, 1)))
self.assertEqual(kwargs, {})
ls1 = LineString((0, 0), (1, 1))
ls2 = LineString((2, 2), (3, 3))
mls = MultiLineString(ls1, ls2)
path, args, kwargs = mls.deconstruct()
self.assertEqual(path, 'django.contrib.gis.geos.collections.MultiLineString')
self.assertEqual(args, (ls1, ls2))
self.assertEqual(kwargs, {})
p1 = Polygon(((0, 0), (0, 1), (1, 1), (0, 0)))
p2 = Polygon(((1, 1), (1, 2), (2, 2), (1, 1)))
mp = MultiPolygon(p1, p2)
path, args, kwargs = mp.deconstruct()
self.assertEqual(path, 'django.contrib.gis.geos.collections.MultiPolygon')
self.assertEqual(args, (p1, p2))
self.assertEqual(kwargs, {})
poly = Polygon(((0, 0), (0, 1), (1, 1), (0, 0)))
gc = GeometryCollection(Point(0, 0), MultiPoint(Point(0, 0), Point(1, 1)), poly)
path, args, kwargs = gc.deconstruct()
self.assertEqual(path, 'django.contrib.gis.geos.collections.GeometryCollection')
self.assertEqual(args, (Point(0, 0), MultiPoint(Point(0, 0), Point(1, 1)), poly))
self.assertEqual(kwargs, {})
def test_subclassing(self):
"""
GEOSGeometry subclass may itself be subclassed without being forced-cast
to the parent class during `__init__`.
"""
class ExtendedPolygon(Polygon):
def __init__(self, *args, data=0, **kwargs):
super().__init__(*args, **kwargs)
self._data = data
def __str__(self):
return "EXT_POLYGON - data: %d - %s" % (self._data, self.wkt)
ext_poly = ExtendedPolygon(((0, 0), (0, 1), (1, 1), (0, 0)), data=3)
self.assertEqual(type(ext_poly), ExtendedPolygon)
# ExtendedPolygon.__str__ should be called (instead of Polygon.__str__).
self.assertEqual(str(ext_poly), "EXT_POLYGON - data: 3 - POLYGON ((0 0, 0 1, 1 1, 0 0))")
self.assertJSONEqual(
ext_poly.json,
'{"coordinates": [[[0, 0], [0, 1], [1, 1], [0, 0]]], "type": "Polygon"}',
)
def test_geos_version_tuple(self):
versions = (
(b'3.0.0rc4-CAPI-1.3.3', (3, 0, 0)),
(b'3.0.0-CAPI-1.4.1', (3, 0, 0)),
(b'3.4.0dev-CAPI-1.8.0', (3, 4, 0)),
(b'3.4.0dev-CAPI-1.8.0 r0', (3, 4, 0)),
(b'3.6.2-CAPI-1.10.2 4d2925d6', (3, 6, 2)),
)
for version_string, version_tuple in versions:
with self.subTest(version_string=version_string):
with mock.patch('django.contrib.gis.geos.libgeos.geos_version', lambda: version_string):
self.assertEqual(geos_version_tuple(), version_tuple)
def test_from_gml(self):
self.assertEqual(
GEOSGeometry('POINT(0 0)'),
GEOSGeometry.from_gml(
'<gml:Point gml:id="p21" srsName="http://www.opengis.net/def/crs/EPSG/0/4326">'
' <gml:pos srsDimension="2">0 0</gml:pos>'
'</gml:Point>'
),
)
def test_from_ewkt(self):
self.assertEqual(GEOSGeometry.from_ewkt('SRID=1;POINT(1 1)'), Point(1, 1, srid=1))
self.assertEqual(GEOSGeometry.from_ewkt('POINT(1 1)'), Point(1, 1))
def test_from_ewkt_empty_string(self):
msg = 'Expected WKT but got an empty string.'
with self.assertRaisesMessage(ValueError, msg):
GEOSGeometry.from_ewkt('')
with self.assertRaisesMessage(ValueError, msg):
GEOSGeometry.from_ewkt('SRID=1;')
def test_from_ewkt_invalid_srid(self):
msg = 'EWKT has invalid SRID part.'
with self.assertRaisesMessage(ValueError, msg):
GEOSGeometry.from_ewkt('SRUD=1;POINT(1 1)')
with self.assertRaisesMessage(ValueError, msg):
GEOSGeometry.from_ewkt('SRID=WGS84;POINT(1 1)')
def test_fromstr_scientific_wkt(self):
self.assertEqual(GEOSGeometry('POINT(1.0e-1 1.0e+1)'), Point(.1, 10))
def test_normalize(self):
g = MultiPoint(Point(0, 0), Point(2, 2), Point(1, 1))
self.assertIsNone(g.normalize())
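        # normalize() operates in place (returning None), putting the
        # components into a canonical order.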
self.assertTrue(g.equals_exact(MultiPoint(Point(2, 2), Point(1, 1), Point(0, 0))))
def test_empty_point(self):
p = Point(srid=4326)
self.assertEqual(p.ogr.ewkt, p.ewkt)
self.assertEqual(p.transform(2774, clone=True), Point(srid=2774))
p.transform(2774)
self.assertEqual(p, Point(srid=2774))
|
b5d52620bf052fba4c73b75be51111252611b45d0477a19c0216e92c1b066157 | from django.contrib.gis.db.models import Collect, Count, Extent, F, Union
from django.contrib.gis.geos import GEOSGeometry, MultiPoint, Point
from django.db import NotSupportedError, connection
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import override_settings
from django.utils import timezone
from ..utils import no_oracle
from .models import (
Article, Author, Book, City, DirectoryEntry, Event, Location, Parcel,
)
class RelatedGeoModelTest(TestCase):
fixtures = ['initial']
def test02_select_related(self):
"Testing `select_related` on geographic models (see #7126)."
qs1 = City.objects.order_by('id')
qs2 = City.objects.order_by('id').select_related()
qs3 = City.objects.order_by('id').select_related('location')
# Reference data for what's in the fixtures.
cities = (
('Aurora', 'TX', -97.516111, 33.058333),
('Roswell', 'NM', -104.528056, 33.387222),
('Kecksburg', 'PA', -79.460734, 40.18476),
)
for qs in (qs1, qs2, qs3):
for ref, c in zip(cities, qs):
nm, st, lon, lat = ref
self.assertEqual(nm, c.name)
self.assertEqual(st, c.state)
self.assertEqual(Point(lon, lat, srid=c.location.point.srid), c.location.point)
@skipUnlessDBFeature("supports_extent_aggr")
def test_related_extent_aggregate(self):
"Testing the `Extent` aggregate on related geographic models."
        # Aggregate the Extent of the related Location points in one query.
aggs = City.objects.aggregate(Extent('location__point'))
# One for all locations, one that excludes New Mexico (Roswell).
all_extent = (-104.528056, 29.763374, -79.460734, 40.18476)
txpa_extent = (-97.516111, 29.763374, -79.460734, 40.18476)
e1 = City.objects.aggregate(Extent('location__point'))['location__point__extent']
e2 = City.objects.exclude(state='NM').aggregate(Extent('location__point'))['location__point__extent']
e3 = aggs['location__point__extent']
# The tolerance value is to four decimal places because of differences
# between the Oracle and PostGIS spatial backends on the extent calculation.
tol = 4
for ref, e in [(all_extent, e1), (txpa_extent, e2), (all_extent, e3)]:
for ref_val, e_val in zip(ref, e):
self.assertAlmostEqual(ref_val, e_val, tol)
@skipUnlessDBFeature("supports_extent_aggr")
def test_related_extent_annotate(self):
"""
Test annotation with Extent GeoAggregate.
"""
cities = City.objects.annotate(points_extent=Extent('location__point')).order_by('name')
tol = 4
self.assertAlmostEqual(
cities[0].points_extent,
(-97.516111, 33.058333, -97.516111, 33.058333),
tol
)
@skipUnlessDBFeature('supports_union_aggr')
def test_related_union_aggregate(self):
"Testing the `Union` aggregate on related geographic models."
        # Aggregate the Union of the related Location points in one query.
aggs = City.objects.aggregate(Union('location__point'))
        # These are the points that are components of the aggregate geographic
        # union that is returned. Each point corresponds to a City PK.
p1 = Point(-104.528056, 33.387222)
p2 = Point(-97.516111, 33.058333)
p3 = Point(-79.460734, 40.18476)
p4 = Point(-96.801611, 32.782057)
p5 = Point(-95.363151, 29.763374)
        # The second union aggregate is for a union query that limits rows in
        # the WHERE clause (in other words, a `.filter()` precedes the call to
        # `.aggregate(Union())`).
ref_u1 = MultiPoint(p1, p2, p4, p5, p3, srid=4326)
ref_u2 = MultiPoint(p2, p3, srid=4326)
u1 = City.objects.aggregate(Union('location__point'))['location__point__union']
u2 = City.objects.exclude(
name__in=('Roswell', 'Houston', 'Dallas', 'Fort Worth'),
).aggregate(Union('location__point'))['location__point__union']
u3 = aggs['location__point__union']
self.assertEqual(type(u1), MultiPoint)
self.assertEqual(type(u3), MultiPoint)
# Ordering of points in the result of the union is not defined and
# implementation-dependent (DB backend, GEOS version)
self.assertEqual({p.ewkt for p in ref_u1}, {p.ewkt for p in u1})
self.assertEqual({p.ewkt for p in ref_u2}, {p.ewkt for p in u2})
self.assertEqual({p.ewkt for p in ref_u1}, {p.ewkt for p in u3})
def test05_select_related_fk_to_subclass(self):
"Testing that calling select_related on a query over a model with an FK to a model subclass works"
# Regression test for #9752.
list(DirectoryEntry.objects.all().select_related())
def test06_f_expressions(self):
"Testing F() expressions on GeometryFields."
# Constructing a dummy parcel border and getting the City instance for
# assigning the FK.
b1 = GEOSGeometry(
'POLYGON((-97.501205 33.052520,-97.501205 33.052576,'
'-97.501150 33.052576,-97.501150 33.052520,-97.501205 33.052520))',
srid=4326
)
pcity = City.objects.get(name='Aurora')
        # The first Parcel has a center point deliberately set equal to the
        # City's point; its second border is a 100ft buffer around the City
        # rather than a copy of the first border.
c1 = pcity.location.point
c2 = c1.transform(2276, clone=True)
b2 = c2.buffer(100)
Parcel.objects.create(name='P1', city=pcity, center1=c1, center2=c2, border1=b1, border2=b2)
# Now creating a second Parcel where the borders are the same, just
# in different coordinate systems. The center points are also the
# same (but in different coordinate systems), and this time they
# actually correspond to the centroid of the border.
c1 = b1.centroid
c2 = c1.transform(2276, clone=True)
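        # Let the backend transform border2 on insert when it supports it;
        # otherwise pre-transform it client-side with GDAL.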
b2 = b1 if connection.features.supports_transform else b1.transform(2276, clone=True)
Parcel.objects.create(name='P2', city=pcity, center1=c1, center2=c2, border1=b1, border2=b2)
# Should return the second Parcel, which has the center within the
# border.
qs = Parcel.objects.filter(center1__within=F('border1'))
self.assertEqual(1, len(qs))
self.assertEqual('P2', qs[0].name)
# This time center2 is in a different coordinate system and needs to be
# wrapped in transformation SQL.
qs = Parcel.objects.filter(center2__within=F('border1'))
if connection.features.supports_transform:
self.assertEqual('P2', qs.get().name)
else:
msg = "This backend doesn't support the Transform function."
with self.assertRaisesMessage(NotSupportedError, msg):
list(qs)
# Should return the first Parcel, which has the center point equal
# to the point in the City ForeignKey.
qs = Parcel.objects.filter(center1=F('city__location__point'))
self.assertEqual(1, len(qs))
self.assertEqual('P1', qs[0].name)
# This time the city column should be wrapped in transformation SQL.
qs = Parcel.objects.filter(border2__contains=F('city__location__point'))
if connection.features.supports_transform:
self.assertEqual('P1', qs.get().name)
else:
msg = "This backend doesn't support the Transform function."
with self.assertRaisesMessage(NotSupportedError, msg):
list(qs)
def test07_values(self):
"Testing values() and values_list()."
gqs = Location.objects.all()
gvqs = Location.objects.values()
gvlqs = Location.objects.values_list()
# Incrementing through each of the models, dictionaries, and tuples
# returned by each QuerySet.
for m, d, t in zip(gqs, gvqs, gvlqs):
# The values should be Geometry objects and not raw strings returned
# by the spatial database.
self.assertIsInstance(d['point'], GEOSGeometry)
self.assertIsInstance(t[1], GEOSGeometry)
self.assertEqual(m.point, d['point'])
self.assertEqual(m.point, t[1])
@override_settings(USE_TZ=True)
def test_07b_values(self):
"Testing values() and values_list() with aware datetime. See #21565."
Event.objects.create(name="foo", when=timezone.now())
list(Event.objects.values_list('when'))
def test08_defer_only(self):
"Testing defer() and only() on Geographic models."
qs = Location.objects.all()
def_qs = Location.objects.defer('point')
for loc, def_loc in zip(qs, def_qs):
self.assertEqual(loc.point, def_loc.point)
def test09_pk_relations(self):
"Ensuring correct primary key column is selected across relations. See #10757."
# The expected ID values -- notice the last two location IDs
# are out of order. Dallas and Houston have location IDs that differ
# from their PKs -- this is done to ensure that the related location
        # ID column is selected instead of the city's own ID column.
city_ids = (1, 2, 3, 4, 5)
loc_ids = (1, 2, 3, 5, 4)
ids_qs = City.objects.order_by('id').values('id', 'location__id')
for val_dict, c_id, l_id in zip(ids_qs, city_ids, loc_ids):
self.assertEqual(val_dict['id'], c_id)
self.assertEqual(val_dict['location__id'], l_id)
# TODO: fix on Oracle -- qs2 returns an empty result for an unknown reason
@no_oracle
def test10_combine(self):
"Testing the combination of two QuerySets (#10807)."
buf1 = City.objects.get(name='Aurora').location.point.buffer(0.1)
buf2 = City.objects.get(name='Kecksburg').location.point.buffer(0.1)
qs1 = City.objects.filter(location__point__within=buf1)
qs2 = City.objects.filter(location__point__within=buf2)
combined = qs1 | qs2
names = [c.name for c in combined]
self.assertEqual(2, len(names))
self.assertIn('Aurora', names)
self.assertIn('Kecksburg', names)
# TODO: fix on Oracle -- get the following error because the SQL is ordered
# by a geometry object, which Oracle apparently doesn't like:
# ORA-22901: cannot compare nested table or VARRAY or LOB attributes of an object type
@no_oracle
def test12a_count(self):
"Testing `Count` aggregate on geo-fields."
        # The city of Fort Worth uses the same location as Dallas.
dallas = City.objects.get(name='Dallas')
# Count annotation should be 2 for the Dallas location now.
loc = Location.objects.annotate(num_cities=Count('city')).get(id=dallas.location.id)
self.assertEqual(2, loc.num_cities)
def test12b_count(self):
"Testing `Count` aggregate on non geo-fields."
# Should only be one author (Trevor Paglen) returned by this query, and
# the annotation should have 3 for the number of books, see #11087.
# Also testing with a values(), see #11489.
qs = Author.objects.annotate(num_books=Count('books')).filter(num_books__gt=1)
vqs = Author.objects.values('name').annotate(num_books=Count('books')).filter(num_books__gt=1)
self.assertEqual(1, len(qs))
self.assertEqual(3, qs[0].num_books)
self.assertEqual(1, len(vqs))
self.assertEqual(3, vqs[0]['num_books'])
# TODO: fix on Oracle -- get the following error because the SQL is ordered
# by a geometry object, which Oracle apparently doesn't like:
# ORA-22901: cannot compare nested table or VARRAY or LOB attributes of an object type
@no_oracle
def test13c_count(self):
"Testing `Count` aggregate with `.values()`. See #15305."
qs = Location.objects.filter(id=5).annotate(num_cities=Count('city')).values('id', 'point', 'num_cities')
self.assertEqual(1, len(qs))
self.assertEqual(2, qs[0]['num_cities'])
self.assertIsInstance(qs[0]['point'], GEOSGeometry)
# TODO: The phantom model does appear on Oracle.
@no_oracle
def test13_select_related_null_fk(self):
"Testing `select_related` on a nullable ForeignKey."
Book.objects.create(title='Without Author')
b = Book.objects.select_related('author').get(title='Without Author')
# Should be `None`, and not a 'dummy' model.
self.assertIsNone(b.author)
@skipUnlessDBFeature("supports_collect_aggr")
def test_collect(self):
"""
Testing the `Collect` aggregate.
"""
# Reference query:
# SELECT AsText(ST_Collect("relatedapp_location"."point")) FROM "relatedapp_city" LEFT OUTER JOIN
# "relatedapp_location" ON ("relatedapp_city"."location_id" = "relatedapp_location"."id")
# WHERE "relatedapp_city"."state" = 'TX';
ref_geom = GEOSGeometry(
'MULTIPOINT(-97.516111 33.058333,-96.801611 32.782057,'
'-95.363151 29.763374,-96.801611 32.782057)'
)
coll = City.objects.filter(state='TX').aggregate(Collect('location__point'))['location__point__collect']
        # Even though Dallas and Ft. Worth share the same point, Collect
        # doesn't consolidate -- that's why there are 4 points in the MultiPoint.
self.assertEqual(4, len(coll))
self.assertTrue(ref_geom.equals(coll))
def test15_invalid_select_related(self):
"Testing doing select_related on the related name manager of a unique FK. See #13934."
qs = Article.objects.select_related('author__article')
        # This triggers a TypeError when `get_default_columns` has no
        # `local_only` keyword. The TypeError would be swallowed if the
        # QuerySet were actually evaluated, since list generation swallows
        # TypeError in CPython, so inspect the generated SQL instead.
str(qs.query)
def test16_annotated_date_queryset(self):
"Ensure annotated date querysets work if spatial backend is used. See #14648."
        birth_years = [
            dt.year
            for dt in Author.objects.annotate(num_books=Count('books')).dates('dob', 'year')
        ]
birth_years.sort()
self.assertEqual([1950, 1974], birth_years)
# TODO: Related tests for KML, GML, and distance lookups.
|
eca7e0fe046364a72eed6ceafade608995e68814db2c285eb8dc4e30f235ff45 | from django.contrib.gis.db import models
class SimpleModel(models.Model):
class Meta:
abstract = True
class Location(SimpleModel):
point = models.PointField()
def __str__(self):
return self.point.wkt
class City(SimpleModel):
name = models.CharField(max_length=50)
state = models.CharField(max_length=2)
location = models.ForeignKey(Location, models.CASCADE)
def __str__(self):
return self.name
class AugmentedLocation(Location):
extra_text = models.TextField(blank=True)
class DirectoryEntry(SimpleModel):
listing_text = models.CharField(max_length=50)
location = models.ForeignKey(AugmentedLocation, models.CASCADE)
class Parcel(SimpleModel):
name = models.CharField(max_length=30)
city = models.ForeignKey(City, models.CASCADE)
center1 = models.PointField()
# Throwing a curveball w/`db_column` here.
center2 = models.PointField(srid=2276, db_column='mycenter')
border1 = models.PolygonField()
border2 = models.PolygonField(srid=2276)
def __str__(self):
return self.name
class Author(SimpleModel):
name = models.CharField(max_length=100)
dob = models.DateField()
class Article(SimpleModel):
title = models.CharField(max_length=100)
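    # Deliberately a unique ForeignKey (rather than a OneToOneField), so tests
    # can exercise select_related across a reverse unique relation (#13934).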
author = models.ForeignKey(Author, models.CASCADE, unique=True)
class Book(SimpleModel):
title = models.CharField(max_length=100)
author = models.ForeignKey(Author, models.SET_NULL, related_name='books', null=True)
class Event(SimpleModel):
name = models.CharField(max_length=100)
when = models.DateTimeField()
|
98d0ed3a9231034beb9af118257c5d3fbc3a985ce84305fafb7e41d343b75934 | import os
import struct
import tempfile
from django.contrib.gis.gdal import GDAL_VERSION, GDALRaster
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.raster.band import GDALBand
from django.contrib.gis.shortcuts import numpy
from django.test import SimpleTestCase
from ..data.rasters.textrasters import JSON_RASTER
class GDALRasterTests(SimpleTestCase):
"""
Test a GDALRaster instance created from a file (GeoTiff).
"""
def setUp(self):
self.rs_path = os.path.join(os.path.dirname(__file__), '../data/rasters/raster.tif')
self.rs = GDALRaster(self.rs_path)
def test_rs_name_repr(self):
self.assertEqual(self.rs_path, self.rs.name)
self.assertRegex(repr(self.rs), r"<Raster object at 0x\w+>")
def test_rs_driver(self):
self.assertEqual(self.rs.driver.name, 'GTiff')
def test_rs_size(self):
self.assertEqual(self.rs.width, 163)
self.assertEqual(self.rs.height, 174)
def test_rs_srs(self):
self.assertEqual(self.rs.srs.srid, 3086)
self.assertEqual(self.rs.srs.units, (1.0, 'metre'))
def test_rs_srid(self):
rast = GDALRaster({
'width': 16,
'height': 16,
'srid': 4326,
})
self.assertEqual(rast.srid, 4326)
rast.srid = 3086
self.assertEqual(rast.srid, 3086)
def test_geotransform_and_friends(self):
# Assert correct values for file based raster
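        # GDAL geotransform layout: (origin x, pixel width, x-skew, origin y,
        # y-skew, pixel height); pixel height is negative for north-up rasters.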
self.assertEqual(
self.rs.geotransform,
[511700.4680706557, 100.0, 0.0, 435103.3771231986, 0.0, -100.0]
)
self.assertEqual(self.rs.origin, [511700.4680706557, 435103.3771231986])
self.assertEqual(self.rs.origin.x, 511700.4680706557)
self.assertEqual(self.rs.origin.y, 435103.3771231986)
self.assertEqual(self.rs.scale, [100.0, -100.0])
self.assertEqual(self.rs.scale.x, 100.0)
self.assertEqual(self.rs.scale.y, -100.0)
self.assertEqual(self.rs.skew, [0, 0])
self.assertEqual(self.rs.skew.x, 0)
self.assertEqual(self.rs.skew.y, 0)
# Create in-memory rasters and change gtvalues
rsmem = GDALRaster(JSON_RASTER)
# geotransform accepts both floats and ints
rsmem.geotransform = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]
self.assertEqual(rsmem.geotransform, [0.0, 1.0, 2.0, 3.0, 4.0, 5.0])
rsmem.geotransform = range(6)
self.assertEqual(rsmem.geotransform, [float(x) for x in range(6)])
self.assertEqual(rsmem.origin, [0, 3])
self.assertEqual(rsmem.origin.x, 0)
self.assertEqual(rsmem.origin.y, 3)
self.assertEqual(rsmem.scale, [1, 5])
self.assertEqual(rsmem.scale.x, 1)
self.assertEqual(rsmem.scale.y, 5)
self.assertEqual(rsmem.skew, [2, 4])
self.assertEqual(rsmem.skew.x, 2)
self.assertEqual(rsmem.skew.y, 4)
self.assertEqual(rsmem.width, 5)
self.assertEqual(rsmem.height, 5)
def test_geotransform_bad_inputs(self):
rsmem = GDALRaster(JSON_RASTER)
error_geotransforms = [
[1, 2],
[1, 2, 3, 4, 5, 'foo'],
[1, 2, 3, 4, 5, 6, 'foo'],
]
msg = 'Geotransform must consist of 6 numeric values.'
for geotransform in error_geotransforms:
with self.subTest(i=geotransform), self.assertRaisesMessage(ValueError, msg):
rsmem.geotransform = geotransform
def test_rs_extent(self):
self.assertEqual(
self.rs.extent,
(511700.4680706557, 417703.3771231986, 528000.4680706557, 435103.3771231986)
)
def test_rs_bands(self):
self.assertEqual(len(self.rs.bands), 1)
self.assertIsInstance(self.rs.bands[0], GDALBand)
def test_memory_based_raster_creation(self):
# Create uint8 raster with full pixel data range (0-255)
rast = GDALRaster({
'datatype': 1,
'width': 16,
'height': 16,
'srid': 4326,
'bands': [{
'data': range(256),
'nodata_value': 255,
}],
})
# Get array from raster
result = rast.bands[0].data()
if numpy:
result = result.flatten().tolist()
# Assert data is same as original input
self.assertEqual(result, list(range(256)))
def test_file_based_raster_creation(self):
# Prepare tempfile
rstfile = tempfile.NamedTemporaryFile(suffix='.tif')
# Create file-based raster from scratch
GDALRaster({
'datatype': self.rs.bands[0].datatype(),
'driver': 'tif',
'name': rstfile.name,
'width': 163,
'height': 174,
'nr_of_bands': 1,
'srid': self.rs.srs.wkt,
'origin': (self.rs.origin.x, self.rs.origin.y),
'scale': (self.rs.scale.x, self.rs.scale.y),
'skew': (self.rs.skew.x, self.rs.skew.y),
'bands': [{
'data': self.rs.bands[0].data(),
'nodata_value': self.rs.bands[0].nodata_value,
}],
})
# Reload newly created raster from file
restored_raster = GDALRaster(rstfile.name)
self.assertEqual(restored_raster.srs.wkt, self.rs.srs.wkt)
self.assertEqual(restored_raster.geotransform, self.rs.geotransform)
if numpy:
numpy.testing.assert_equal(
restored_raster.bands[0].data(),
self.rs.bands[0].data()
)
else:
self.assertEqual(restored_raster.bands[0].data(), self.rs.bands[0].data())
def test_vsi_raster_creation(self):
# Open a raster as a file object.
with open(self.rs_path, 'rb') as dat:
# Instantiate a raster from the file binary buffer.
vsimem = GDALRaster(dat.read())
# The data of the in-memory file is equal to the source file.
result = vsimem.bands[0].data()
target = self.rs.bands[0].data()
if numpy:
result = result.flatten().tolist()
target = target.flatten().tolist()
self.assertEqual(result, target)
def test_vsi_raster_deletion(self):
path = '/vsimem/raster.tif'
# Create a vsi-based raster from scratch.
vsimem = GDALRaster({
'name': path,
'driver': 'tif',
'width': 4,
'height': 4,
'srid': 4326,
'bands': [{
'data': range(16),
}],
})
# The virtual file exists.
rst = GDALRaster(path)
self.assertEqual(rst.width, 4)
# Delete GDALRaster.
del vsimem
del rst
# The virtual file has been removed.
msg = 'Could not open the datasource at "/vsimem/raster.tif"'
with self.assertRaisesMessage(GDALException, msg):
GDALRaster(path)
def test_vsi_invalid_buffer_error(self):
msg = 'Failed creating VSI raster from the input buffer.'
with self.assertRaisesMessage(GDALException, msg):
GDALRaster(b'not-a-raster-buffer')
def test_vsi_buffer_property(self):
# Create a vsi-based raster from scratch.
rast = GDALRaster({
'name': '/vsimem/raster.tif',
'driver': 'tif',
'width': 4,
'height': 4,
'srid': 4326,
'bands': [{
'data': range(16),
}],
})
# Do a round trip from raster to buffer to raster.
result = GDALRaster(rast.vsi_buffer).bands[0].data()
if numpy:
result = result.flatten().tolist()
        # The band data survives the buffer round trip unchanged.
self.assertEqual(result, list(range(16)))
# The vsi buffer is None for rasters that are not vsi based.
self.assertIsNone(self.rs.vsi_buffer)
def test_offset_size_and_shape_on_raster_creation(self):
rast = GDALRaster({
'datatype': 1,
'width': 4,
'height': 4,
'srid': 4326,
'bands': [{
'data': (1,),
'offset': (1, 1),
'size': (2, 2),
'shape': (1, 1),
'nodata_value': 2,
}],
})
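        # The single input value, with shape (1, 1), is replicated across the
        # 2x2 window at offset (1, 1); all other pixels keep the nodata value.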
# Get array from raster.
result = rast.bands[0].data()
if numpy:
result = result.flatten().tolist()
# Band data is equal to nodata value except on input block of ones.
self.assertEqual(
result,
[2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 1, 2, 2, 2, 2, 2]
)
def test_set_nodata_value_on_raster_creation(self):
# Create raster filled with nodata values.
rast = GDALRaster({
'datatype': 1,
'width': 2,
'height': 2,
'srid': 4326,
'bands': [{'nodata_value': 23}],
})
# Get array from raster.
result = rast.bands[0].data()
if numpy:
result = result.flatten().tolist()
# All band data is equal to nodata value.
self.assertEqual(result, [23] * 4)
def test_set_nodata_none_on_raster_creation(self):
if GDAL_VERSION < (2, 1):
self.skipTest("GDAL >= 2.1 is required for this test.")
# Create raster without data and without nodata value.
rast = GDALRaster({
'datatype': 1,
'width': 2,
'height': 2,
'srid': 4326,
'bands': [{'nodata_value': None}],
})
# Get array from raster.
result = rast.bands[0].data()
if numpy:
result = result.flatten().tolist()
        # Band data defaults to zero because no nodata value has been specified.
self.assertEqual(result, [0] * 4)
def test_raster_metadata_property(self):
data = self.rs.metadata
self.assertEqual(data['DEFAULT'], {'AREA_OR_POINT': 'Area'})
self.assertEqual(data['IMAGE_STRUCTURE'], {'INTERLEAVE': 'BAND'})
# Create file-based raster from scratch
source = GDALRaster({
'datatype': 1,
'width': 2,
'height': 2,
'srid': 4326,
'bands': [{'data': range(4), 'nodata_value': 99}],
})
# Set metadata on raster and on a band.
metadata = {
'DEFAULT': {'OWNER': 'Django', 'VERSION': '1.0', 'AREA_OR_POINT': 'Point'},
}
source.metadata = metadata
source.bands[0].metadata = metadata
self.assertEqual(source.metadata['DEFAULT'], metadata['DEFAULT'])
self.assertEqual(source.bands[0].metadata['DEFAULT'], metadata['DEFAULT'])
# Update metadata on raster.
metadata = {
'DEFAULT': {'VERSION': '2.0'},
}
source.metadata = metadata
self.assertEqual(source.metadata['DEFAULT']['VERSION'], '2.0')
# Remove metadata on raster.
metadata = {
'DEFAULT': {'OWNER': None},
}
source.metadata = metadata
self.assertNotIn('OWNER', source.metadata['DEFAULT'])
def test_raster_info_accessor(self):
if GDAL_VERSION < (2, 1):
msg = 'GDAL ≥ 2.1 is required for using the info property.'
with self.assertRaisesMessage(ValueError, msg):
self.rs.info
return
gdalinfo = """
Driver: GTiff/GeoTIFF
Files: {0}
Size is 163, 174
Coordinate System is:
PROJCS["NAD83 / Florida GDL Albers",
GEOGCS["NAD83",
DATUM["North_American_Datum_1983",
SPHEROID["GRS 1980",6378137,298.257222101,
AUTHORITY["EPSG","7019"]],
TOWGS84[0,0,0,0,0,0,0],
AUTHORITY["EPSG","6269"]],
PRIMEM["Greenwich",0,
AUTHORITY["EPSG","8901"]],
UNIT["degree",0.0174532925199433,
AUTHORITY["EPSG","9122"]],
AUTHORITY["EPSG","4269"]],
PROJECTION["Albers_Conic_Equal_Area"],
PARAMETER["standard_parallel_1",24],
PARAMETER["standard_parallel_2",31.5],
PARAMETER["latitude_of_center",24],
PARAMETER["longitude_of_center",-84],
PARAMETER["false_easting",400000],
PARAMETER["false_northing",0],
UNIT["metre",1,
AUTHORITY["EPSG","9001"]],
AXIS["X",EAST],
AXIS["Y",NORTH],
AUTHORITY["EPSG","3086"]]
Origin = (511700.468070655711927,435103.377123198588379)
Pixel Size = (100.000000000000000,-100.000000000000000)
Metadata:
AREA_OR_POINT=Area
Image Structure Metadata:
INTERLEAVE=BAND
Corner Coordinates:
Upper Left ( 511700.468, 435103.377) ( 82d51'46.16"W, 27d55' 1.53"N)
Lower Left ( 511700.468, 417703.377) ( 82d51'52.04"W, 27d45'37.50"N)
Upper Right ( 528000.468, 435103.377) ( 82d41'48.81"W, 27d54'56.30"N)
Lower Right ( 528000.468, 417703.377) ( 82d41'55.54"W, 27d45'32.28"N)
Center ( 519850.468, 426403.377) ( 82d46'50.64"W, 27d50'16.99"N)
Band 1 Block=163x50 Type=Byte, ColorInterp=Gray
NoData Value=15
""".format(self.rs_path)
# Data
info_dyn = [line.strip() for line in self.rs.info.split('\n') if line.strip() != '']
info_ref = [line.strip() for line in gdalinfo.split('\n') if line.strip() != '']
self.assertEqual(info_dyn, info_ref)
def test_compressed_file_based_raster_creation(self):
rstfile = tempfile.NamedTemporaryFile(suffix='.tif')
# Make a compressed copy of an existing raster.
compressed = self.rs.warp({'papsz_options': {'compress': 'packbits'}, 'name': rstfile.name})
# Check physically if compression worked.
self.assertLess(os.path.getsize(compressed.name), os.path.getsize(self.rs.name))
# Create file-based raster with options from scratch.
compressed = GDALRaster({
'datatype': 1,
'driver': 'tif',
'name': rstfile.name,
'width': 40,
'height': 40,
'srid': 3086,
'origin': (500000, 400000),
'scale': (100, -100),
'skew': (0, 0),
'bands': [{
                'data': range(40 ** 2),
'nodata_value': 255,
}],
'papsz_options': {
'compress': 'packbits',
'pixeltype': 'signedbyte',
'blockxsize': 23,
'blockysize': 23,
}
})
# Check if options used on creation are stored in metadata.
# Reopening the raster ensures that all metadata has been written
# to the file.
compressed = GDALRaster(compressed.name)
self.assertEqual(compressed.metadata['IMAGE_STRUCTURE']['COMPRESSION'], 'PACKBITS',)
self.assertEqual(compressed.bands[0].metadata['IMAGE_STRUCTURE']['PIXELTYPE'], 'SIGNEDBYTE')
if GDAL_VERSION >= (2, 1):
self.assertIn('Block=40x23', compressed.info)
def test_raster_warp(self):
# Create in memory raster
source = GDALRaster({
'datatype': 1,
'driver': 'MEM',
'name': 'sourceraster',
'width': 4,
'height': 4,
'nr_of_bands': 1,
'srid': 3086,
'origin': (500000, 400000),
'scale': (100, -100),
'skew': (0, 0),
'bands': [{
'data': range(16),
'nodata_value': 255,
}],
})
# Test altering the scale, width, and height of a raster
data = {
'scale': [200, -200],
'width': 2,
'height': 2,
}
target = source.warp(data)
self.assertEqual(target.width, data['width'])
self.assertEqual(target.height, data['height'])
self.assertEqual(target.scale, data['scale'])
self.assertEqual(target.bands[0].datatype(), source.bands[0].datatype())
self.assertEqual(target.name, 'sourceraster_copy.MEM')
result = target.bands[0].data()
if numpy:
result = result.flatten().tolist()
self.assertEqual(result, [5, 7, 13, 15])
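        # Editorial note: GDALRaster.warp() defaults to nearest-neighbour
        # resampling, so each 200m target pixel resolves to the source pixel
        # nearest its center -- indices (1, 1), (1, 3), (3, 1), (3, 3) of the
        # 4x4 grid, i.e. values 5, 7, 13, and 15.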
# Test altering the name and datatype (to float)
data = {
'name': '/path/to/targetraster.tif',
'datatype': 6,
}
target = source.warp(data)
self.assertEqual(target.bands[0].datatype(), 6)
self.assertEqual(target.name, '/path/to/targetraster.tif')
self.assertEqual(target.driver.name, 'MEM')
result = target.bands[0].data()
if numpy:
result = result.flatten().tolist()
self.assertEqual(
result,
[0.0, 1.0, 2.0, 3.0,
4.0, 5.0, 6.0, 7.0,
8.0, 9.0, 10.0, 11.0,
12.0, 13.0, 14.0, 15.0]
)
def test_raster_warp_nodata_zone(self):
# Create in memory raster.
source = GDALRaster({
'datatype': 1,
'driver': 'MEM',
'width': 4,
'height': 4,
'srid': 3086,
'origin': (500000, 400000),
'scale': (100, -100),
'skew': (0, 0),
'bands': [{
'data': range(16),
'nodata_value': 23,
}],
})
# Warp raster onto a location that does not cover any pixels of the original.
result = source.warp({'origin': (200000, 200000)}).bands[0].data()
if numpy:
result = result.flatten().tolist()
# The result is an empty raster filled with the correct nodata value.
self.assertEqual(result, [23] * 16)
def test_raster_transform(self):
# Prepare tempfile and nodata value
rstfile = tempfile.NamedTemporaryFile(suffix='.tif')
ndv = 99
# Create in file based raster
source = GDALRaster({
'datatype': 1,
'driver': 'tif',
'name': rstfile.name,
'width': 5,
'height': 5,
'nr_of_bands': 1,
'srid': 4326,
'origin': (-5, 5),
'scale': (2, -2),
'skew': (0, 0),
'bands': [{
'data': range(25),
'nodata_value': ndv,
}],
})
        # Transform raster into srid 3086.
target = source.transform(3086)
# Reload data from disk
target = GDALRaster(target.name)
self.assertEqual(target.srs.srid, 3086)
self.assertEqual(target.width, 7)
self.assertEqual(target.height, 7)
self.assertEqual(target.bands[0].datatype(), source.bands[0].datatype())
self.assertAlmostEqual(target.origin[0], 9124842.791079799)
self.assertAlmostEqual(target.origin[1], 1589911.6476407414)
self.assertAlmostEqual(target.scale[0], 223824.82664250192)
self.assertAlmostEqual(target.scale[1], -223824.82664250192)
self.assertEqual(target.skew, [0, 0])
result = target.bands[0].data()
if numpy:
result = result.flatten().tolist()
        # The reprojection of a raster that spans a large area
        # skews the data matrix and may introduce nodata values.
self.assertEqual(
result,
[
ndv, ndv, ndv, ndv, 4, ndv, ndv,
ndv, ndv, 2, 3, 9, ndv, ndv,
ndv, 1, 2, 8, 13, 19, ndv,
0, 6, 6, 12, 18, 18, 24,
ndv, 10, 11, 16, 22, 23, ndv,
ndv, ndv, 15, 21, 22, ndv, ndv,
ndv, ndv, 20, ndv, ndv, ndv, ndv,
]
)
class GDALBandTests(SimpleTestCase):
def setUp(self):
self.rs_path = os.path.join(os.path.dirname(__file__), '../data/rasters/raster.tif')
rs = GDALRaster(self.rs_path)
self.band = rs.bands[0]
def test_band_data(self):
pam_file = self.rs_path + '.aux.xml'
self.assertEqual(self.band.width, 163)
self.assertEqual(self.band.height, 174)
self.assertEqual(self.band.description, '')
self.assertEqual(self.band.datatype(), 1)
self.assertEqual(self.band.datatype(as_string=True), 'GDT_Byte')
self.assertEqual(self.band.color_interp(), 1)
self.assertEqual(self.band.color_interp(as_string=True), 'GCI_GrayIndex')
self.assertEqual(self.band.nodata_value, 15)
if numpy:
data = self.band.data()
assert_array = numpy.loadtxt(
os.path.join(os.path.dirname(__file__), '../data/rasters/raster.numpy.txt')
)
numpy.testing.assert_equal(data, assert_array)
self.assertEqual(data.shape, (self.band.height, self.band.width))
try:
smin, smax, smean, sstd = self.band.statistics(approximate=True)
self.assertEqual(smin, 0)
self.assertEqual(smax, 9)
self.assertAlmostEqual(smean, 2.842331288343558)
self.assertAlmostEqual(sstd, 2.3965567248965356)
smin, smax, smean, sstd = self.band.statistics(approximate=False, refresh=True)
self.assertEqual(smin, 0)
self.assertEqual(smax, 9)
self.assertAlmostEqual(smean, 2.828326634228898)
self.assertAlmostEqual(sstd, 2.4260526986669095)
self.assertEqual(self.band.min, 0)
self.assertEqual(self.band.max, 9)
self.assertAlmostEqual(self.band.mean, 2.828326634228898)
self.assertAlmostEqual(self.band.std, 2.4260526986669095)
# Statistics are persisted into PAM file on band close
self.band = None
self.assertTrue(os.path.isfile(pam_file))
finally:
# Close band and remove file if created
self.band = None
if os.path.isfile(pam_file):
os.remove(pam_file)
def test_read_mode_error(self):
# Open raster in read mode
rs = GDALRaster(self.rs_path, write=False)
band = rs.bands[0]
        # Setting attributes on a read-only raster raises an exception in the _flush method
with self.assertRaises(GDALException):
setattr(band, 'nodata_value', 10)
def test_band_data_setters(self):
# Create in-memory raster and get band
rsmem = GDALRaster({
'datatype': 1,
'driver': 'MEM',
'name': 'mem_rst',
'width': 10,
'height': 10,
'nr_of_bands': 1,
'srid': 4326,
})
bandmem = rsmem.bands[0]
# Set nodata value
bandmem.nodata_value = 99
self.assertEqual(bandmem.nodata_value, 99)
# Set data for entire dataset
bandmem.data(range(100))
if numpy:
numpy.testing.assert_equal(bandmem.data(), numpy.arange(100).reshape(10, 10))
else:
self.assertEqual(bandmem.data(), list(range(100)))
# Prepare data for setting values in subsequent tests
block = list(range(100, 104))
packed_block = struct.pack('<' + 'B B B B', *block)
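        # Editorial note: '<' + 'B B B B' packs the four values as raw
        # unsigned bytes, matching the band's GDT_Byte datatype, so the
        # packed block is byte-for-byte what a 2x2 write expects.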
# Set data from list
bandmem.data(block, (1, 1), (2, 2))
result = bandmem.data(offset=(1, 1), size=(2, 2))
if numpy:
numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2))
else:
self.assertEqual(result, block)
# Set data from packed block
bandmem.data(packed_block, (1, 1), (2, 2))
result = bandmem.data(offset=(1, 1), size=(2, 2))
if numpy:
numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2))
else:
self.assertEqual(result, block)
# Set data from bytes
bandmem.data(bytes(packed_block), (1, 1), (2, 2))
result = bandmem.data(offset=(1, 1), size=(2, 2))
if numpy:
numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2))
else:
self.assertEqual(result, block)
# Set data from bytearray
bandmem.data(bytearray(packed_block), (1, 1), (2, 2))
result = bandmem.data(offset=(1, 1), size=(2, 2))
if numpy:
numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2))
else:
self.assertEqual(result, block)
# Set data from memoryview
bandmem.data(memoryview(packed_block), (1, 1), (2, 2))
result = bandmem.data(offset=(1, 1), size=(2, 2))
if numpy:
numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2))
else:
self.assertEqual(result, block)
# Set data from numpy array
if numpy:
bandmem.data(numpy.array(block, dtype='int8').reshape(2, 2), (1, 1), (2, 2))
numpy.testing.assert_equal(
bandmem.data(offset=(1, 1), size=(2, 2)),
numpy.array(block).reshape(2, 2)
)
# Test json input data
rsmemjson = GDALRaster(JSON_RASTER)
bandmemjson = rsmemjson.bands[0]
if numpy:
numpy.testing.assert_equal(
bandmemjson.data(),
numpy.array(range(25)).reshape(5, 5)
)
else:
self.assertEqual(bandmemjson.data(), list(range(25)))
def test_band_statistics_automatic_refresh(self):
rsmem = GDALRaster({
'srid': 4326,
'width': 2,
'height': 2,
'bands': [{'data': [0] * 4, 'nodata_value': 99}],
})
band = rsmem.bands[0]
# Populate statistics cache
self.assertEqual(band.statistics(), (0, 0, 0, 0))
# Change data
band.data([1, 1, 0, 0])
# Statistics are properly updated
self.assertEqual(band.statistics(), (0.0, 1.0, 0.5, 0.5))
# Change nodata_value
band.nodata_value = 0
# Statistics are properly updated
self.assertEqual(band.statistics(), (1.0, 1.0, 1.0, 0.0))
def test_band_statistics_empty_band(self):
rsmem = GDALRaster({
'srid': 4326,
'width': 1,
'height': 1,
'bands': [{'data': [0], 'nodata_value': 0}],
})
self.assertEqual(rsmem.bands[0].statistics(), (None, None, None, None))
def test_band_delete_nodata(self):
rsmem = GDALRaster({
'srid': 4326,
'width': 1,
'height': 1,
'bands': [{'data': [0], 'nodata_value': 1}],
})
if GDAL_VERSION < (2, 1):
msg = 'GDAL >= 2.1 required to delete nodata values.'
with self.assertRaisesMessage(ValueError, msg):
rsmem.bands[0].nodata_value = None
else:
rsmem.bands[0].nodata_value = None
self.assertIsNone(rsmem.bands[0].nodata_value)
def test_band_data_replication(self):
band = GDALRaster({
'srid': 4326,
'width': 3,
'height': 3,
'bands': [{'data': range(10, 19), 'nodata_value': 0}],
}).bands[0]
# Variations for input (data, shape, expected result).
combos = (
([1], (1, 1), [1] * 9),
(range(3), (1, 3), [0, 0, 0, 1, 1, 1, 2, 2, 2]),
(range(3), (3, 1), [0, 1, 2, 0, 1, 2, 0, 1, 2]),
)
for combo in combos:
band.data(combo[0], shape=combo[1])
if numpy:
numpy.testing.assert_equal(band.data(), numpy.array(combo[2]).reshape(3, 3))
else:
self.assertEqual(band.data(), list(combo[2]))
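    # Editorial sketch (not part of the original suite): the replication rule
    # above tiles a (width, height)-shaped input block across the full band.
    # Assuming numpy, the (1, 3) case is equivalent to:
    #
    #     import numpy
    #     column = numpy.array(range(3)).reshape(3, 1)
    #     numpy.tile(column, (1, 3)).flatten().tolist()
    #     # -> [0, 0, 0, 1, 1, 1, 2, 2, 2]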
|
f23ccd79f8a3160184ea5a9940bd86ccaf155e9d4131580700fd0f179dd73752 | import os
import re
from django.contrib.gis.gdal import (
GDAL_VERSION, DataSource, Envelope, GDALException, OGRGeometry,
)
from django.contrib.gis.gdal.field import OFTInteger, OFTReal, OFTString
from django.test import SimpleTestCase
from ..test_data import TEST_DATA, TestDS, get_ds_file
wgs_84_wkt = (
'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_1984",'
'6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",'
'0.017453292519943295]]'
)
# Using a regex because of small differences depending on GDAL versions.
wgs_84_wkt_regex = r'^GEOGCS\["(GCS_)?WGS[ _](19)?84".*$'
# List of acceptable data sources.
ds_list = (
TestDS(
'test_point', nfeat=5, nfld=3, geom='POINT', gtype=1, driver='ESRI Shapefile',
fields={'dbl': OFTReal, 'int': OFTInteger, 'str': OFTString},
extent=(-1.35011, 0.166623, -0.524093, 0.824508), # Got extent from QGIS
srs_wkt=wgs_84_wkt,
field_values={
'dbl': [float(i) for i in range(1, 6)],
'int': list(range(1, 6)),
'str': [str(i) for i in range(1, 6)],
},
fids=range(5)
),
TestDS(
'test_vrt', ext='vrt', nfeat=3, nfld=3, geom='POINT', gtype='Point25D',
driver='OGR_VRT' if GDAL_VERSION >= (2, 0) else 'VRT',
fields={
'POINT_X': OFTString,
'POINT_Y': OFTString,
'NUM': OFTString,
        },  # VRT uses CSV, in which all field types are OFTString.
extent=(1.0, 2.0, 100.0, 523.5), # Min/Max from CSV
field_values={
'POINT_X': ['1.0', '5.0', '100.0'],
'POINT_Y': ['2.0', '23.0', '523.5'],
'NUM': ['5', '17', '23'],
},
fids=range(1, 4)
),
TestDS(
'test_poly', nfeat=3, nfld=3, geom='POLYGON', gtype=3,
driver='ESRI Shapefile',
fields={'float': OFTReal, 'int': OFTInteger, 'str': OFTString},
extent=(-1.01513, -0.558245, 0.161876, 0.839637), # Got extent from QGIS
srs_wkt=wgs_84_wkt,
)
)
bad_ds = (TestDS('foo'),)
class DataSourceTest(SimpleTestCase):
def test01_valid_shp(self):
"Testing valid SHP Data Source files."
for source in ds_list:
# Loading up the data source
ds = DataSource(source.ds)
# Making sure the layer count is what's expected (only 1 layer in a SHP file)
self.assertEqual(1, len(ds))
# Making sure GetName works
self.assertEqual(source.ds, ds.name)
# Making sure the driver name matches up
self.assertEqual(source.driver, str(ds.driver))
# Making sure indexing works
msg = 'Index out of range when accessing layers in a datasource: %s.'
with self.assertRaisesMessage(IndexError, msg % len(ds)):
ds.__getitem__(len(ds))
with self.assertRaisesMessage(IndexError, 'Invalid OGR layer name given: invalid.'):
ds.__getitem__('invalid')
def test02_invalid_shp(self):
"Testing invalid SHP files for the Data Source."
for source in bad_ds:
with self.assertRaises(GDALException):
DataSource(source.ds)
def test03a_layers(self):
"Testing Data Source Layers."
for source in ds_list:
ds = DataSource(source.ds)
# Incrementing through each layer, this tests DataSource.__iter__
for layer in ds:
# Making sure we get the number of features we expect
self.assertEqual(len(layer), source.nfeat)
# Making sure we get the number of fields we expect
self.assertEqual(source.nfld, layer.num_fields)
self.assertEqual(source.nfld, len(layer.fields))
# Testing the layer's extent (an Envelope), and its properties
self.assertIsInstance(layer.extent, Envelope)
self.assertAlmostEqual(source.extent[0], layer.extent.min_x, 5)
self.assertAlmostEqual(source.extent[1], layer.extent.min_y, 5)
self.assertAlmostEqual(source.extent[2], layer.extent.max_x, 5)
self.assertAlmostEqual(source.extent[3], layer.extent.max_y, 5)
# Now checking the field names.
flds = layer.fields
for f in flds:
self.assertIn(f, source.fields)
# Negative FIDs are not allowed.
with self.assertRaisesMessage(IndexError, 'Negative indices are not allowed on OGR Layers.'):
layer.__getitem__(-1)
with self.assertRaisesMessage(IndexError, 'Invalid feature id: 50000.'):
layer.__getitem__(50000)
if hasattr(source, 'field_values'):
# Testing `Layer.get_fields` (which uses Layer.__iter__)
for fld_name, fld_value in source.field_values.items():
self.assertEqual(fld_value, layer.get_fields(fld_name))
# Testing `Layer.__getitem__`.
for i, fid in enumerate(source.fids):
feat = layer[fid]
self.assertEqual(fid, feat.fid)
# Maybe this should be in the test below, but we might as well test
# the feature values here while in this loop.
for fld_name, fld_value in source.field_values.items():
self.assertEqual(fld_value[i], feat.get(fld_name))
msg = 'Index out of range when accessing field in a feature: %s.'
with self.assertRaisesMessage(IndexError, msg % len(feat)):
feat.__getitem__(len(feat))
with self.assertRaisesMessage(IndexError, 'Invalid OFT field name given: invalid.'):
feat.__getitem__('invalid')
def test03b_layer_slice(self):
"Test indexing and slicing on Layers."
# Using the first data-source because the same slice
# can be used for both the layer and the control values.
source = ds_list[0]
ds = DataSource(source.ds)
sl = slice(1, 3)
feats = ds[0][sl]
for fld_name in ds[0].fields:
test_vals = [feat.get(fld_name) for feat in feats]
control_vals = source.field_values[fld_name][sl]
self.assertEqual(control_vals, test_vals)
def test03c_layer_references(self):
"""
Ensure OGR objects keep references to the objects they belong to.
"""
source = ds_list[0]
# See ticket #9448.
def get_layer():
# This DataSource object is not accessible outside this
# scope. However, a reference should still be kept alive
# on the `Layer` returned.
ds = DataSource(source.ds)
return ds[0]
# Making sure we can call OGR routines on the Layer returned.
lyr = get_layer()
self.assertEqual(source.nfeat, len(lyr))
self.assertEqual(source.gtype, lyr.geom_type.num)
# Same issue for Feature/Field objects, see #18640
self.assertEqual(str(lyr[0]['str']), "1")
def test04_features(self):
"Testing Data Source Features."
for source in ds_list:
ds = DataSource(source.ds)
# Incrementing through each layer
for layer in ds:
# Incrementing through each feature in the layer
for feat in layer:
# Making sure the number of fields, and the geometry type
# are what's expected.
self.assertEqual(source.nfld, len(list(feat)))
self.assertEqual(source.gtype, feat.geom_type)
# Making sure the fields match to an appropriate OFT type.
for k, v in source.fields.items():
# Making sure we get the proper OGR Field instance, using
# a string value index for the feature.
self.assertIsInstance(feat[k], v)
self.assertIsInstance(feat.fields[0], str)
# Testing Feature.__iter__
for fld in feat:
self.assertIn(fld.name, source.fields)
def test05_geometries(self):
"Testing Geometries from Data Source Features."
for source in ds_list:
ds = DataSource(source.ds)
# Incrementing through each layer and feature.
for layer in ds:
for feat in layer:
g = feat.geom
# Making sure we get the right Geometry name & type
self.assertEqual(source.geom, g.geom_name)
self.assertEqual(source.gtype, g.geom_type)
# Making sure the SpatialReference is as expected.
if hasattr(source, 'srs_wkt'):
self.assertIsNotNone(re.match(wgs_84_wkt_regex, g.srs.wkt))
def test06_spatial_filter(self):
"Testing the Layer.spatial_filter property."
ds = DataSource(get_ds_file('cities', 'shp'))
lyr = ds[0]
# When not set, it should be None.
self.assertIsNone(lyr.spatial_filter)
        # Must be set to an OGRGeometry or a 4-tuple.
with self.assertRaises(TypeError):
lyr._set_spatial_filter('foo')
        # Extents must be 4-tuples; a sequence of the wrong length is
        # rejected.
        with self.assertRaises(ValueError):
            lyr._set_spatial_filter(list(range(5)))
        # Setting the spatial filter with a tuple/list with the extent of
        # a buffer centered around Pueblo.
        filter_extent = (-105.609252, 37.255001, -103.609252, 39.255001)
        lyr.spatial_filter = filter_extent
self.assertEqual(OGRGeometry.from_bbox(filter_extent), lyr.spatial_filter)
feats = [feat for feat in lyr]
self.assertEqual(1, len(feats))
self.assertEqual('Pueblo', feats[0].get('Name'))
# Setting the spatial filter with an OGRGeometry for buffer centering
# around Houston.
filter_geom = OGRGeometry(
'POLYGON((-96.363151 28.763374,-94.363151 28.763374,'
'-94.363151 30.763374,-96.363151 30.763374,-96.363151 28.763374))'
)
lyr.spatial_filter = filter_geom
self.assertEqual(filter_geom, lyr.spatial_filter)
feats = [feat for feat in lyr]
self.assertEqual(1, len(feats))
self.assertEqual('Houston', feats[0].get('Name'))
# Clearing the spatial filter by setting it to None. Now
# should indicate that there are 3 features in the Layer.
lyr.spatial_filter = None
self.assertEqual(3, len(lyr))
def test07_integer_overflow(self):
"Testing that OFTReal fields, treated as OFTInteger, do not overflow."
# Using *.dbf from Census 2010 TIGER Shapefile for Texas,
# which has land area ('ALAND10') stored in a Real field
# with no precision.
ds = DataSource(os.path.join(TEST_DATA, 'texas.dbf'))
feat = ds[0][0]
# Reference value obtained using `ogrinfo`.
self.assertEqual(676586997978, feat.get('ALAND10'))
|
49c9a92118c056b62452b5825b37e818580fa82e7682564c67341455d71ffadd | import json
import pickle
from django.contrib.gis.gdal import (
CoordTransform, GDALException, OGRGeometry, OGRGeomType, SpatialReference,
)
from django.template import Context
from django.template.engine import Engine
from django.test import SimpleTestCase
from ..test_data import TestDataMixin
class OGRGeomTest(SimpleTestCase, TestDataMixin):
"This tests the OGR Geometry."
def test_geomtype(self):
"Testing OGRGeomType object."
# OGRGeomType should initialize on all these inputs.
OGRGeomType(1)
OGRGeomType(7)
OGRGeomType('point')
OGRGeomType('GeometrycollectioN')
OGRGeomType('LINearrING')
OGRGeomType('Unknown')
        # Should raise GDALException on these invalid inputs.
with self.assertRaises(GDALException):
OGRGeomType(23)
with self.assertRaises(GDALException):
OGRGeomType('fooD')
with self.assertRaises(GDALException):
OGRGeomType(9)
# Equivalence can take strings, ints, and other OGRGeomTypes
self.assertEqual(OGRGeomType(1), OGRGeomType(1))
self.assertEqual(OGRGeomType(7), 'GeometryCollection')
self.assertEqual(OGRGeomType('point'), 'POINT')
self.assertNotEqual(OGRGeomType('point'), 2)
self.assertEqual(OGRGeomType('unknown'), 0)
self.assertEqual(OGRGeomType(6), 'MULtiPolyGON')
self.assertEqual(OGRGeomType(1), OGRGeomType('point'))
self.assertNotEqual(OGRGeomType('POINT'), OGRGeomType(6))
# Testing the Django field name equivalent property.
self.assertEqual('PointField', OGRGeomType('Point').django)
self.assertEqual('GeometryField', OGRGeomType('Geometry').django)
self.assertEqual('GeometryField', OGRGeomType('Unknown').django)
self.assertIsNone(OGRGeomType('none').django)
# 'Geometry' initialization implies an unknown geometry type.
gt = OGRGeomType('Geometry')
self.assertEqual(0, gt.num)
self.assertEqual('Unknown', gt.name)
def test_geomtype_25d(self):
"Testing OGRGeomType object with 25D types."
wkb25bit = OGRGeomType.wkb25bit
self.assertEqual(OGRGeomType(wkb25bit + 1), 'Point25D')
self.assertEqual(OGRGeomType('MultiLineString25D'), (5 + wkb25bit))
self.assertEqual('GeometryCollectionField', OGRGeomType('GeometryCollection25D').django)
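    # Editorial note: OGR flags 2.5D (Z-aware) geometry types by setting the
    # high bit of the 32-bit WKB type code; OGRGeomType.wkb25bit holds that
    # flag, so Point25D == wkb25bit + 1 as exercised above.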
def test_wkt(self):
"Testing WKT output."
for g in self.geometries.wkt_out:
geom = OGRGeometry(g.wkt)
self.assertEqual(g.wkt, geom.wkt)
def test_ewkt(self):
"Testing EWKT input/output."
for ewkt_val in ('POINT (1 2 3)', 'LINEARRING (0 0,1 1,2 1,0 0)'):
            # First test ewkt output when no SRID is in the EWKT.
            self.assertEqual(ewkt_val, OGRGeometry(ewkt_val).ewkt)
            # Now test consumption with an SRID specified.
ewkt_val = 'SRID=4326;%s' % ewkt_val
geom = OGRGeometry(ewkt_val)
self.assertEqual(ewkt_val, geom.ewkt)
self.assertEqual(4326, geom.srs.srid)
def test_gml(self):
"Testing GML output."
for g in self.geometries.wkt_out:
geom = OGRGeometry(g.wkt)
exp_gml = g.gml
self.assertEqual(exp_gml, geom.gml)
def test_hex(self):
"Testing HEX input/output."
for g in self.geometries.hex_wkt:
geom1 = OGRGeometry(g.wkt)
self.assertEqual(g.hex.encode(), geom1.hex)
# Constructing w/HEX
geom2 = OGRGeometry(g.hex)
self.assertEqual(geom1, geom2)
def test_wkb(self):
"Testing WKB input/output."
for g in self.geometries.hex_wkt:
geom1 = OGRGeometry(g.wkt)
wkb = geom1.wkb
self.assertEqual(wkb.hex().upper(), g.hex)
# Constructing w/WKB.
geom2 = OGRGeometry(wkb)
self.assertEqual(geom1, geom2)
def test_json(self):
"Testing GeoJSON input/output."
for g in self.geometries.json_geoms:
geom = OGRGeometry(g.wkt)
if not hasattr(g, 'not_equal'):
                # Compare parsed JSON to avoid decimal formatting differences.
self.assertEqual(json.loads(g.json), json.loads(geom.json))
self.assertEqual(json.loads(g.json), json.loads(geom.geojson))
self.assertEqual(OGRGeometry(g.wkt), OGRGeometry(geom.json))
# Test input with some garbage content (but valid json) (#15529)
geom = OGRGeometry('{"type": "Point", "coordinates": [ 100.0, 0.0 ], "other": "<test>"}')
self.assertIsInstance(geom, OGRGeometry)
def test_points(self):
"Testing Point objects."
OGRGeometry('POINT(0 0)')
for p in self.geometries.points:
if not hasattr(p, 'z'): # No 3D
pnt = OGRGeometry(p.wkt)
self.assertEqual(1, pnt.geom_type)
self.assertEqual('POINT', pnt.geom_name)
self.assertEqual(p.x, pnt.x)
self.assertEqual(p.y, pnt.y)
self.assertEqual((p.x, p.y), pnt.tuple)
def test_multipoints(self):
"Testing MultiPoint objects."
for mp in self.geometries.multipoints:
mgeom1 = OGRGeometry(mp.wkt) # First one from WKT
self.assertEqual(4, mgeom1.geom_type)
self.assertEqual('MULTIPOINT', mgeom1.geom_name)
mgeom2 = OGRGeometry('MULTIPOINT') # Creating empty multipoint
mgeom3 = OGRGeometry('MULTIPOINT')
for g in mgeom1:
mgeom2.add(g) # adding each point from the multipoints
mgeom3.add(g.wkt) # should take WKT as well
self.assertEqual(mgeom1, mgeom2) # they should equal
self.assertEqual(mgeom1, mgeom3)
self.assertEqual(mp.coords, mgeom2.coords)
self.assertEqual(mp.n_p, mgeom2.point_count)
def test_linestring(self):
"Testing LineString objects."
prev = OGRGeometry('POINT(0 0)')
for ls in self.geometries.linestrings:
linestr = OGRGeometry(ls.wkt)
self.assertEqual(2, linestr.geom_type)
self.assertEqual('LINESTRING', linestr.geom_name)
self.assertEqual(ls.n_p, linestr.point_count)
self.assertEqual(ls.coords, linestr.tuple)
self.assertEqual(linestr, OGRGeometry(ls.wkt))
self.assertNotEqual(linestr, prev)
msg = 'Index out of range when accessing points of a line string: %s.'
with self.assertRaisesMessage(IndexError, msg % len(linestr)):
linestr.__getitem__(len(linestr))
prev = linestr
# Testing the x, y properties.
x = [tmpx for tmpx, tmpy in ls.coords]
y = [tmpy for tmpx, tmpy in ls.coords]
self.assertEqual(x, linestr.x)
self.assertEqual(y, linestr.y)
def test_multilinestring(self):
"Testing MultiLineString objects."
prev = OGRGeometry('POINT(0 0)')
for mls in self.geometries.multilinestrings:
mlinestr = OGRGeometry(mls.wkt)
self.assertEqual(5, mlinestr.geom_type)
self.assertEqual('MULTILINESTRING', mlinestr.geom_name)
self.assertEqual(mls.n_p, mlinestr.point_count)
self.assertEqual(mls.coords, mlinestr.tuple)
self.assertEqual(mlinestr, OGRGeometry(mls.wkt))
self.assertNotEqual(mlinestr, prev)
prev = mlinestr
for ls in mlinestr:
self.assertEqual(2, ls.geom_type)
self.assertEqual('LINESTRING', ls.geom_name)
msg = 'Index out of range when accessing geometry in a collection: %s.'
with self.assertRaisesMessage(IndexError, msg % len(mlinestr)):
mlinestr.__getitem__(len(mlinestr))
def test_linearring(self):
"Testing LinearRing objects."
prev = OGRGeometry('POINT(0 0)')
for rr in self.geometries.linearrings:
lr = OGRGeometry(rr.wkt)
# self.assertEqual(101, lr.geom_type.num)
self.assertEqual('LINEARRING', lr.geom_name)
self.assertEqual(rr.n_p, len(lr))
self.assertEqual(lr, OGRGeometry(rr.wkt))
self.assertNotEqual(lr, prev)
prev = lr
def test_polygons(self):
"Testing Polygon objects."
# Testing `from_bbox` class method
bbox = (-180, -90, 180, 90)
p = OGRGeometry.from_bbox(bbox)
self.assertEqual(bbox, p.extent)
prev = OGRGeometry('POINT(0 0)')
for p in self.geometries.polygons:
poly = OGRGeometry(p.wkt)
self.assertEqual(3, poly.geom_type)
self.assertEqual('POLYGON', poly.geom_name)
self.assertEqual(p.n_p, poly.point_count)
self.assertEqual(p.n_i + 1, len(poly))
msg = 'Index out of range when accessing rings of a polygon: %s.'
with self.assertRaisesMessage(IndexError, msg % len(poly)):
poly.__getitem__(len(poly))
# Testing area & centroid.
self.assertAlmostEqual(p.area, poly.area, 9)
x, y = poly.centroid.tuple
self.assertAlmostEqual(p.centroid[0], x, 9)
self.assertAlmostEqual(p.centroid[1], y, 9)
# Testing equivalence
self.assertEqual(poly, OGRGeometry(p.wkt))
self.assertNotEqual(poly, prev)
if p.ext_ring_cs:
ring = poly[0]
self.assertEqual(p.ext_ring_cs, ring.tuple)
self.assertEqual(p.ext_ring_cs, poly[0].tuple)
self.assertEqual(len(p.ext_ring_cs), ring.point_count)
for r in poly:
self.assertEqual('LINEARRING', r.geom_name)
def test_polygons_templates(self):
# Accessing Polygon attributes in templates should work.
engine = Engine()
template = engine.from_string('{{ polygons.0.wkt }}')
polygons = [OGRGeometry(p.wkt) for p in self.geometries.multipolygons[:2]]
content = template.render(Context({'polygons': polygons}))
self.assertIn('MULTIPOLYGON (((100', content)
def test_closepolygons(self):
"Testing closing Polygon objects."
        # Neither ring in this geometry is closed.
poly = OGRGeometry('POLYGON((0 0, 5 0, 5 5, 0 5), (1 1, 2 1, 2 2, 2 1))')
self.assertEqual(8, poly.point_count)
with self.assertRaises(GDALException):
poly.centroid
poly.close_rings()
self.assertEqual(10, poly.point_count) # Two closing points should've been added
self.assertEqual(OGRGeometry('POINT(2.5 2.5)'), poly.centroid)
def test_multipolygons(self):
"Testing MultiPolygon objects."
OGRGeometry('POINT(0 0)')
for mp in self.geometries.multipolygons:
mpoly = OGRGeometry(mp.wkt)
self.assertEqual(6, mpoly.geom_type)
self.assertEqual('MULTIPOLYGON', mpoly.geom_name)
if mp.valid:
self.assertEqual(mp.n_p, mpoly.point_count)
self.assertEqual(mp.num_geom, len(mpoly))
msg = 'Index out of range when accessing geometry in a collection: %s.'
with self.assertRaisesMessage(IndexError, msg % len(mpoly)):
mpoly.__getitem__(len(mpoly))
for p in mpoly:
self.assertEqual('POLYGON', p.geom_name)
self.assertEqual(3, p.geom_type)
self.assertEqual(mpoly.wkt, OGRGeometry(mp.wkt).wkt)
def test_srs(self):
"Testing OGR Geometries with Spatial Reference objects."
for mp in self.geometries.multipolygons:
# Creating a geometry w/spatial reference
sr = SpatialReference('WGS84')
mpoly = OGRGeometry(mp.wkt, sr)
self.assertEqual(sr.wkt, mpoly.srs.wkt)
# Ensuring that SRS is propagated to clones.
klone = mpoly.clone()
self.assertEqual(sr.wkt, klone.srs.wkt)
            # Ensuring that all child geometries (polygons and their rings)
            # return the assigned spatial reference as well.
for poly in mpoly:
self.assertEqual(sr.wkt, poly.srs.wkt)
for ring in poly:
self.assertEqual(sr.wkt, ring.srs.wkt)
            # Ensuring the SRS propagates in topological ops.
a = OGRGeometry(self.geometries.topology_geoms[0].wkt_a, sr)
b = OGRGeometry(self.geometries.topology_geoms[0].wkt_b, sr)
diff = a.difference(b)
union = a.union(b)
self.assertEqual(sr.wkt, diff.srs.wkt)
self.assertEqual(sr.srid, union.srs.srid)
# Instantiating w/an integer SRID
mpoly = OGRGeometry(mp.wkt, 4326)
self.assertEqual(4326, mpoly.srid)
mpoly.srs = SpatialReference(4269)
self.assertEqual(4269, mpoly.srid)
self.assertEqual('NAD83', mpoly.srs.name)
# Incrementing through the multipolygon after the spatial reference
# has been re-assigned.
for poly in mpoly:
self.assertEqual(mpoly.srs.wkt, poly.srs.wkt)
poly.srs = 32140
for ring in poly:
# Changing each ring in the polygon
self.assertEqual(32140, ring.srs.srid)
self.assertEqual('NAD83 / Texas South Central', ring.srs.name)
ring.srs = str(SpatialReference(4326)) # back to WGS84
self.assertEqual(4326, ring.srs.srid)
# Using the `srid` property.
ring.srid = 4322
self.assertEqual('WGS 72', ring.srs.name)
self.assertEqual(4322, ring.srid)
# srs/srid may be assigned their own values, even when srs is None.
mpoly = OGRGeometry(mp.wkt, srs=None)
mpoly.srs = mpoly.srs
mpoly.srid = mpoly.srid
def test_srs_transform(self):
"Testing transform()."
orig = OGRGeometry('POINT (-104.609 38.255)', 4326)
trans = OGRGeometry('POINT (992385.4472045 481455.4944650)', 2774)
        # Using an srid, a SpatialReference object, and a CoordTransform object
        # for transformations.
t1, t2, t3 = orig.clone(), orig.clone(), orig.clone()
t1.transform(trans.srid)
t2.transform(SpatialReference('EPSG:2774'))
ct = CoordTransform(SpatialReference('WGS84'), SpatialReference(2774))
t3.transform(ct)
# Testing use of the `clone` keyword.
k1 = orig.clone()
k2 = k1.transform(trans.srid, clone=True)
self.assertEqual(k1, orig)
self.assertNotEqual(k1, k2)
prec = 3
for p in (t1, t2, t3, k2):
self.assertAlmostEqual(trans.x, p.x, prec)
self.assertAlmostEqual(trans.y, p.y, prec)
def test_transform_dim(self):
"Testing coordinate dimension is the same on transformed geometries."
ls_orig = OGRGeometry('LINESTRING(-104.609 38.255)', 4326)
ls_trans = OGRGeometry('LINESTRING(992385.4472045 481455.4944650)', 2774)
prec = 3
ls_orig.transform(ls_trans.srs)
# Making sure the coordinate dimension is still 2D.
self.assertEqual(2, ls_orig.coord_dim)
self.assertAlmostEqual(ls_trans.x[0], ls_orig.x[0], prec)
self.assertAlmostEqual(ls_trans.y[0], ls_orig.y[0], prec)
def test_difference(self):
"Testing difference()."
for i in range(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
d1 = OGRGeometry(self.geometries.diff_geoms[i].wkt)
d2 = a.difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a - b) # __sub__ is difference operator
a -= b # testing __isub__
self.assertEqual(d1, a)
def test_intersection(self):
"Testing intersects() and intersection()."
for i in range(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
i1 = OGRGeometry(self.geometries.intersect_geoms[i].wkt)
self.assertTrue(a.intersects(b))
i2 = a.intersection(b)
self.assertEqual(i1, i2)
self.assertEqual(i1, a & b) # __and__ is intersection operator
a &= b # testing __iand__
self.assertEqual(i1, a)
def test_symdifference(self):
"Testing sym_difference()."
for i in range(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
d1 = OGRGeometry(self.geometries.sdiff_geoms[i].wkt)
d2 = a.sym_difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a ^ b) # __xor__ is symmetric difference operator
a ^= b # testing __ixor__
self.assertEqual(d1, a)
def test_union(self):
"Testing union()."
for i in range(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
u1 = OGRGeometry(self.geometries.union_geoms[i].wkt)
u2 = a.union(b)
self.assertEqual(u1, u2)
self.assertEqual(u1, a | b) # __or__ is union operator
a |= b # testing __ior__
self.assertEqual(u1, a)
def test_add(self):
"Testing GeometryCollection.add()."
# Can't insert a Point into a MultiPolygon.
mp = OGRGeometry('MultiPolygon')
pnt = OGRGeometry('POINT(5 23)')
with self.assertRaises(GDALException):
mp.add(pnt)
        # GeometryCollection.add may take an OGRGeometry (if it's another
        # collection of the same type, all child geometries are added
        # individually) or WKT.
for mp in self.geometries.multipolygons:
mpoly = OGRGeometry(mp.wkt)
mp1 = OGRGeometry('MultiPolygon')
mp2 = OGRGeometry('MultiPolygon')
mp3 = OGRGeometry('MultiPolygon')
for poly in mpoly:
mp1.add(poly) # Adding a geometry at a time
mp2.add(poly.wkt) # Adding WKT
mp3.add(mpoly) # Adding a MultiPolygon's entire contents at once.
for tmp in (mp1, mp2, mp3):
self.assertEqual(mpoly, tmp)
def test_extent(self):
"Testing `extent` property."
# The xmin, ymin, xmax, ymax of the MultiPoint should be returned.
mp = OGRGeometry('MULTIPOINT(5 23, 0 0, 10 50)')
self.assertEqual((0.0, 0.0, 10.0, 50.0), mp.extent)
# Testing on the 'real world' Polygon.
poly = OGRGeometry(self.geometries.polygons[3].wkt)
ring = poly.shell
x, y = ring.x, ring.y
xmin, ymin = min(x), min(y)
xmax, ymax = max(x), max(y)
self.assertEqual((xmin, ymin, xmax, ymax), poly.extent)
def test_25D(self):
"Testing 2.5D geometries."
pnt_25d = OGRGeometry('POINT(1 2 3)')
self.assertEqual('Point25D', pnt_25d.geom_type.name)
self.assertEqual(3.0, pnt_25d.z)
self.assertEqual(3, pnt_25d.coord_dim)
ls_25d = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)')
self.assertEqual('LineString25D', ls_25d.geom_type.name)
self.assertEqual([1.0, 2.0, 3.0], ls_25d.z)
self.assertEqual(3, ls_25d.coord_dim)
def test_pickle(self):
"Testing pickle support."
g1 = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)', 'WGS84')
g2 = pickle.loads(pickle.dumps(g1))
self.assertEqual(g1, g2)
self.assertEqual(4326, g2.srs.srid)
self.assertEqual(g1.srs.wkt, g2.srs.wkt)
def test_ogrgeometry_transform_workaround(self):
"Testing coordinate dimensions on geometries after transformation."
# A bug in GDAL versions prior to 1.7 changes the coordinate
# dimension of a geometry after it has been transformed.
# This test ensures that the bug workarounds employed within
# `OGRGeometry.transform` indeed work.
wkt_2d = "MULTILINESTRING ((0 0,1 1,2 2))"
wkt_3d = "MULTILINESTRING ((0 0 0,1 1 1,2 2 2))"
srid = 4326
# For both the 2D and 3D MultiLineString, ensure _both_ the dimension
# of the collection and the component LineString have the expected
# coordinate dimension after transform.
geom = OGRGeometry(wkt_2d, srid)
geom.transform(srid)
self.assertEqual(2, geom.coord_dim)
self.assertEqual(2, geom[0].coord_dim)
self.assertEqual(wkt_2d, geom.wkt)
geom = OGRGeometry(wkt_3d, srid)
geom.transform(srid)
self.assertEqual(3, geom.coord_dim)
self.assertEqual(3, geom[0].coord_dim)
self.assertEqual(wkt_3d, geom.wkt)
    # Testing binary predicates; assertIs() is used to check that a bool is returned.
def test_equivalence_regression(self):
"Testing equivalence methods with non-OGRGeometry instances."
self.assertIsNotNone(OGRGeometry('POINT(0 0)'))
self.assertNotEqual(OGRGeometry('LINESTRING(0 0, 1 1)'), 3)
def test_contains(self):
self.assertIs(OGRGeometry('POINT(0 0)').contains(OGRGeometry('POINT(0 0)')), True)
self.assertIs(OGRGeometry('POINT(0 0)').contains(OGRGeometry('POINT(0 1)')), False)
def test_crosses(self):
self.assertIs(OGRGeometry('LINESTRING(0 0, 1 1)').crosses(OGRGeometry('LINESTRING(0 1, 1 0)')), True)
self.assertIs(OGRGeometry('LINESTRING(0 0, 0 1)').crosses(OGRGeometry('LINESTRING(1 0, 1 1)')), False)
def test_disjoint(self):
self.assertIs(OGRGeometry('LINESTRING(0 0, 1 1)').disjoint(OGRGeometry('LINESTRING(0 1, 1 0)')), False)
self.assertIs(OGRGeometry('LINESTRING(0 0, 0 1)').disjoint(OGRGeometry('LINESTRING(1 0, 1 1)')), True)
def test_equals(self):
        self.assertIs(OGRGeometry('POINT(0 0)').equals(OGRGeometry('POINT(0 0)')), True)
        self.assertIs(OGRGeometry('POINT(0 0)').equals(OGRGeometry('POINT(0 1)')), False)
def test_intersects(self):
self.assertIs(OGRGeometry('LINESTRING(0 0, 1 1)').intersects(OGRGeometry('LINESTRING(0 1, 1 0)')), True)
self.assertIs(OGRGeometry('LINESTRING(0 0, 0 1)').intersects(OGRGeometry('LINESTRING(1 0, 1 1)')), False)
def test_overlaps(self):
self.assertIs(
OGRGeometry('POLYGON ((0 0, 0 2, 2 2, 2 0, 0 0))').overlaps(
OGRGeometry('POLYGON ((1 1, 1 5, 5 5, 5 1, 1 1))')
), True
)
self.assertIs(OGRGeometry('POINT(0 0)').overlaps(OGRGeometry('POINT(0 1)')), False)
def test_touches(self):
self.assertIs(
OGRGeometry('POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))').touches(OGRGeometry('LINESTRING(0 2, 2 0)')), True
)
self.assertIs(OGRGeometry('POINT(0 0)').touches(OGRGeometry('POINT(0 1)')), False)
def test_within(self):
self.assertIs(
OGRGeometry('POINT(0.5 0.5)').within(OGRGeometry('POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))')), True
)
self.assertIs(OGRGeometry('POINT(0 0)').within(OGRGeometry('POINT(0 1)')), False)
def test_from_gml(self):
self.assertEqual(
OGRGeometry('POINT(0 0)'),
OGRGeometry.from_gml(
'<gml:Point gml:id="p21" srsName="http://www.opengis.net/def/crs/EPSG/0/4326">'
' <gml:pos srsDimension="2">0 0</gml:pos>'
'</gml:Point>'
),
)
def test_empty(self):
self.assertIs(OGRGeometry('POINT (0 0)').empty, False)
self.assertIs(OGRGeometry('POINT EMPTY').empty, True)
def test_empty_point_to_geos(self):
p = OGRGeometry('POINT EMPTY', srs=4326)
self.assertEqual(p.geos.ewkt, p.ewkt)
|
61518a35f5ab8ba0d3f8f9a0d5b03939e1e71e55de49e574c4b359bbe994a9f0 | import unittest
from django.contrib.gis.gdal import Envelope, GDALException
class TestPoint:
def __init__(self, x, y):
self.x = x
self.y = y
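# TestPoint is a minimal duck-typed point: Envelope.expand_to_include() only
# requires x and y attributes, so any such object is accepted (see
# test08_expand_to_include_point below).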
class EnvelopeTest(unittest.TestCase):
def setUp(self):
self.e = Envelope(0, 0, 5, 5)
def test01_init(self):
"Testing Envelope initialization."
e1 = Envelope((0, 0, 5, 5))
Envelope(0, 0, 5, 5)
        Envelope(0, '0', '5', 5)  # Numeric strings should be accepted (thanks to ww).
Envelope(e1._envelope)
with self.assertRaises(GDALException):
Envelope((5, 5, 0, 0))
with self.assertRaises(GDALException):
Envelope(5, 5, 0, 0)
with self.assertRaises(GDALException):
Envelope((0, 0, 5, 5, 3))
with self.assertRaises(GDALException):
Envelope(())
with self.assertRaises(ValueError):
Envelope(0, 'a', 5, 5)
with self.assertRaises(TypeError):
Envelope('foo')
with self.assertRaises(GDALException):
Envelope((1, 1, 0, 0))
# Shouldn't raise an exception for min_x == max_x or min_y == max_y
Envelope(0, 0, 0, 0)
def test02_properties(self):
"Testing Envelope properties."
e = Envelope(0, 0, 2, 3)
self.assertEqual(0, e.min_x)
self.assertEqual(0, e.min_y)
self.assertEqual(2, e.max_x)
self.assertEqual(3, e.max_y)
self.assertEqual((0, 0), e.ll)
self.assertEqual((2, 3), e.ur)
self.assertEqual((0, 0, 2, 3), e.tuple)
self.assertEqual('POLYGON((0.0 0.0,0.0 3.0,2.0 3.0,2.0 0.0,0.0 0.0))', e.wkt)
self.assertEqual('(0.0, 0.0, 2.0, 3.0)', str(e))
def test03_equivalence(self):
"Testing Envelope equivalence."
e1 = Envelope(0.523, 0.217, 253.23, 523.69)
e2 = Envelope((0.523, 0.217, 253.23, 523.69))
self.assertEqual(e1, e2)
self.assertEqual((0.523, 0.217, 253.23, 523.69), e1)
def test04_expand_to_include_pt_2_params(self):
"Testing Envelope expand_to_include -- point as two parameters."
self.e.expand_to_include(2, 6)
self.assertEqual((0, 0, 5, 6), self.e)
self.e.expand_to_include(-1, -1)
self.assertEqual((-1, -1, 5, 6), self.e)
def test05_expand_to_include_pt_2_tuple(self):
"Testing Envelope expand_to_include -- point as a single 2-tuple parameter."
self.e.expand_to_include((10, 10))
self.assertEqual((0, 0, 10, 10), self.e)
self.e.expand_to_include((-10, -10))
self.assertEqual((-10, -10, 10, 10), self.e)
def test06_expand_to_include_extent_4_params(self):
"Testing Envelope expand_to_include -- extent as 4 parameters."
self.e.expand_to_include(-1, 1, 3, 7)
self.assertEqual((-1, 0, 5, 7), self.e)
def test06_expand_to_include_extent_4_tuple(self):
"Testing Envelope expand_to_include -- extent as a single 4-tuple parameter."
self.e.expand_to_include((-1, 1, 3, 7))
self.assertEqual((-1, 0, 5, 7), self.e)
def test07_expand_to_include_envelope(self):
"Testing Envelope expand_to_include with Envelope as parameter."
self.e.expand_to_include(Envelope(-1, 1, 3, 7))
self.assertEqual((-1, 0, 5, 7), self.e)
def test08_expand_to_include_point(self):
"Testing Envelope expand_to_include with Point as parameter."
self.e.expand_to_include(TestPoint(-1, 1))
self.assertEqual((-1, 0, 5, 5), self.e)
self.e.expand_to_include(TestPoint(10, 10))
self.assertEqual((-1, 0, 10, 10), self.e)
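    # Editorial summary: as exercised above, expand_to_include() accepts a
    # point as two scalars or a 2-tuple, an extent as four scalars or a
    # 4-tuple, another Envelope, or any object exposing x/y attributes.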
|
cedeea94d5aa962684e63cff45f7f8d262d6876cb39e9a9a5edeec85735bbf88 | import unittest
from django.contrib.gis.gdal import (
CoordTransform, GDALException, SpatialReference, SRSException,
)
class TestSRS:
def __init__(self, wkt, **kwargs):
self.wkt = wkt
for key, value in kwargs.items():
setattr(self, key, value)
WGS84_proj = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs '
# Some Spatial Reference examples
srlist = (
TestSRS(
'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,'
'AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],'
'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",'
'0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]',
epsg=4326, projected=False, geographic=True, local=False,
lin_name='unknown', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199,
auth={'GEOGCS': ('EPSG', '4326'), 'spheroid': ('EPSG', '7030')},
attr=(('DATUM', 'WGS_1984'), (('SPHEROID', 1), '6378137'), ('primem|authority', 'EPSG'),),
),
TestSRS(
'PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",'
'SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],'
'AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
'AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],'
'PARAMETER["standard_parallel_1",30.28333333333333],'
'PARAMETER["standard_parallel_2",28.38333333333333],'
'PARAMETER["latitude_of_origin",27.83333333333333],'
'PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],'
'PARAMETER["false_northing",4000000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],'
'AUTHORITY["EPSG","32140"]]',
epsg=32140, projected=True, geographic=False, local=False,
lin_name='metre', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199,
auth={'PROJCS': ('EPSG', '32140'), 'spheroid': ('EPSG', '7019'), 'unit': ('EPSG', '9001')},
attr=(
('DATUM', 'North_American_Datum_1983'),
(('SPHEROID', 2), '298.257222101'),
('PROJECTION', 'Lambert_Conformal_Conic_2SP'),
),
),
TestSRS(
'PROJCS["NAD_1983_StatePlane_Texas_South_Central_FIPS_4204_Feet",'
'GEOGCS["GCS_North_American_1983",DATUM["North_American_Datum_1983",'
'SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],'
'UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],'
'PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",13123333.33333333],'
'PARAMETER["Central_Meridian",-99.0],PARAMETER["Standard_Parallel_1",28.38333333333333],'
'PARAMETER["Standard_Parallel_2",30.28333333333334],PARAMETER["Latitude_Of_Origin",27.83333333333333],'
'UNIT["Foot_US",0.3048006096012192]]',
epsg=None, projected=True, geographic=False, local=False,
lin_name='Foot_US', ang_name='Degree', lin_units=0.3048006096012192, ang_units=0.0174532925199,
auth={'PROJCS': (None, None)},
attr=(('PROJCS|GeOgCs|spheroid', 'GRS_1980'), (('projcs', 9), 'UNIT'), (('projcs', 11), None),),
),
# This is really ESRI format, not WKT -- but the import should work the same
TestSRS(
'LOCAL_CS["Non-Earth (Meter)",LOCAL_DATUM["Local Datum",0],UNIT["Meter",1.0],AXIS["X",EAST],AXIS["Y",NORTH]]',
esri=True, epsg=None, projected=False, geographic=False, local=True,
lin_name='Meter', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199,
attr=(('LOCAL_DATUM', 'Local Datum'), ('unit', 'Meter')),
),
)
# Well-Known Names
well_known = (
TestSRS(
'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,'
'AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],'
'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,'
'AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]',
wk='WGS84', name='WGS 84',
attrs=(('GEOGCS|AUTHORITY', 1, '4326'), ('SPHEROID', 'WGS 84')),
),
TestSRS(
'GEOGCS["WGS 72",DATUM["WGS_1972",SPHEROID["WGS 72",6378135,298.26,'
'AUTHORITY["EPSG","7043"]],AUTHORITY["EPSG","6322"]],'
'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
'AUTHORITY["EPSG","4322"]]',
wk='WGS72', name='WGS 72',
attrs=(('GEOGCS|AUTHORITY', 1, '4322'), ('SPHEROID', 'WGS 72')),
),
TestSRS(
'GEOGCS["NAD27",DATUM["North_American_Datum_1927",'
'SPHEROID["Clarke 1866",6378206.4,294.9786982138982,'
'AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],'
'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
'AUTHORITY["EPSG","4267"]]',
wk='NAD27', name='NAD27',
attrs=(('GEOGCS|AUTHORITY', 1, '4267'), ('SPHEROID', 'Clarke 1866'))
),
TestSRS(
'GEOGCS["NAD83",DATUM["North_American_Datum_1983",'
'SPHEROID["GRS 1980",6378137,298.257222101,'
'AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],'
'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
'AUTHORITY["EPSG","4269"]]',
wk='NAD83', name='NAD83',
attrs=(('GEOGCS|AUTHORITY', 1, '4269'), ('SPHEROID', 'GRS 1980')),
),
TestSRS(
'PROJCS["NZGD49 / Karamea Circuit",GEOGCS["NZGD49",'
'DATUM["New_Zealand_Geodetic_Datum_1949",'
'SPHEROID["International 1924",6378388,297,'
'AUTHORITY["EPSG","7022"]],'
'TOWGS84[59.47,-5.04,187.44,0.47,-0.1,1.024,-4.5993],'
'AUTHORITY["EPSG","6272"]],PRIMEM["Greenwich",0,'
'AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,'
'AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4272"]],'
'PROJECTION["Transverse_Mercator"],'
'PARAMETER["latitude_of_origin",-41.28991152777778],'
'PARAMETER["central_meridian",172.1090281944444],'
'PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],'
'PARAMETER["false_northing",700000],'
'UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","27216"]]',
wk='EPSG:27216', name='NZGD49 / Karamea Circuit',
attrs=(('PROJECTION', 'Transverse_Mercator'), ('SPHEROID', 'International 1924')),
),
)
bad_srlist = (
'Foobar',
'OOJCS["NAD83 / Texas South Central",GEOGCS["NAD83",'
'DATUM["North_American_Datum_1983",'
'SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],'
'AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
'AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],'
'PARAMETER["standard_parallel_1",30.28333333333333],'
'PARAMETER["standard_parallel_2",28.38333333333333],'
'PARAMETER["latitude_of_origin",27.83333333333333],'
'PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],'
'PARAMETER["false_northing",4000000],UNIT["metre",1,'
'AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32140"]]',
)
class SpatialRefTest(unittest.TestCase):
def test01_wkt(self):
"Testing initialization on valid OGC WKT."
for s in srlist:
SpatialReference(s.wkt)
def test02_bad_wkt(self):
"Testing initialization on invalid WKT."
for bad in bad_srlist:
try:
srs = SpatialReference(bad)
srs.validate()
except (SRSException, GDALException):
pass
else:
                self.fail('Should not have initialized on bad WKT "%s"!' % bad)
def test03_get_wkt(self):
"Testing getting the WKT."
for s in srlist:
srs = SpatialReference(s.wkt)
self.assertEqual(s.wkt, srs.wkt)
def test04_proj(self):
"Test PROJ.4 import and export."
proj_parts = [
'+proj=longlat', '+ellps=WGS84', '+towgs84=0,0,0,0,0,0,0', '+datum=WGS84', '+no_defs'
]
srs1 = SpatialReference(srlist[0].wkt)
srs2 = SpatialReference(WGS84_proj)
self.assertTrue(all(part in proj_parts for part in srs1.proj.split()))
self.assertTrue(all(part in proj_parts for part in srs2.proj.split()))
def test05_epsg(self):
"Test EPSG import."
for s in srlist:
if s.epsg:
srs1 = SpatialReference(s.wkt)
srs2 = SpatialReference(s.epsg)
srs3 = SpatialReference(str(s.epsg))
srs4 = SpatialReference('EPSG:%d' % s.epsg)
for srs in (srs1, srs2, srs3, srs4):
for attr, expected in s.attr:
self.assertEqual(expected, srs[attr])
def test07_boolean_props(self):
"Testing the boolean properties."
for s in srlist:
srs = SpatialReference(s.wkt)
self.assertEqual(s.projected, srs.projected)
self.assertEqual(s.geographic, srs.geographic)
def test08_angular_linear(self):
"Testing the linear and angular units routines."
for s in srlist:
srs = SpatialReference(s.wkt)
self.assertEqual(s.ang_name, srs.angular_name)
self.assertEqual(s.lin_name, srs.linear_name)
self.assertAlmostEqual(s.ang_units, srs.angular_units, 9)
self.assertAlmostEqual(s.lin_units, srs.linear_units, 9)
def test09_authority(self):
"Testing the authority name & code routines."
for s in srlist:
if hasattr(s, 'auth'):
srs = SpatialReference(s.wkt)
for target, tup in s.auth.items():
self.assertEqual(tup[0], srs.auth_name(target))
self.assertEqual(tup[1], srs.auth_code(target))
def test10_attributes(self):
"Testing the attribute retrieval routines."
for s in srlist:
srs = SpatialReference(s.wkt)
for tup in s.attr:
att = tup[0] # Attribute to test
exp = tup[1] # Expected result
self.assertEqual(exp, srs[att])
def test11_wellknown(self):
"Testing Well Known Names of Spatial References."
for s in well_known:
srs = SpatialReference(s.wk)
self.assertEqual(s.name, srs.name)
for tup in s.attrs:
if len(tup) == 2:
key = tup[0]
exp = tup[1]
elif len(tup) == 3:
key = tup[:2]
exp = tup[2]
self.assertEqual(srs[key], exp)
def test12_coordtransform(self):
"Testing initialization of a CoordTransform."
target = SpatialReference('WGS84')
CoordTransform(SpatialReference(srlist[0].wkt), target)
def test13_attr_value(self):
"Testing the attr_value() method."
s1 = SpatialReference('WGS84')
with self.assertRaises(TypeError):
s1.__getitem__(0)
with self.assertRaises(TypeError):
s1.__getitem__(('GEOGCS', 'foo'))
self.assertEqual('WGS 84', s1['GEOGCS'])
self.assertEqual('WGS_1984', s1['DATUM'])
self.assertEqual('EPSG', s1['AUTHORITY'])
self.assertEqual(4326, int(s1['AUTHORITY', 1]))
self.assertIsNone(s1['FOOBAR'])
def test_unicode(self):
wkt = (
'PROJCS["DHDN / Soldner 39 Langschoß",'
'GEOGCS["DHDN",DATUM["Deutsches_Hauptdreiecksnetz",'
'SPHEROID["Bessel 1841",6377397.155,299.1528128,AUTHORITY["EPSG","7004"]],AUTHORITY["EPSG","6314"]],'
'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
'AUTHORITY["EPSG","4314"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],'
'PROJECTION["Cassini_Soldner"],PARAMETER["latitude_of_origin",50.66738711],'
'PARAMETER["central_meridian",6.28935703],PARAMETER["false_easting",0],'
'PARAMETER["false_northing",0],AUTHORITY["mj10777.de","187939"],AXIS["x",NORTH],AXIS["y",EAST]]'
)
srs = SpatialReference(wkt)
srs_list = [srs, srs.clone()]
srs.import_wkt(wkt)
for srs in srs_list:
self.assertEqual(srs.name, 'DHDN / Soldner 39 Langschoß')
self.assertEqual(srs.wkt, wkt)
self.assertIn('Langschoß', srs.pretty_wkt)
self.assertIn('Langschoß', srs.xml)
|
43ddea600f8ca9cb8b6c84f94fcb8aebed4d2242086f84d27c79add7be1ea9cf | import unittest
from unittest import mock
from django.contrib.gis.gdal import Driver, GDALException
valid_drivers = (
# vector
'ESRI Shapefile', 'MapInfo File', 'TIGER', 'S57', 'DGN', 'Memory', 'CSV',
'GML', 'KML',
# raster
'GTiff', 'JPEG', 'MEM', 'PNG',
)
invalid_drivers = ('Foo baz', 'clucka', 'ESRI Shp', 'ESRI rast')
aliases = {
'eSrI': 'ESRI Shapefile',
'TigER/linE': 'TIGER',
'SHAPE': 'ESRI Shapefile',
'sHp': 'ESRI Shapefile',
'tiFf': 'GTiff',
'tIf': 'GTiff',
'jPEg': 'JPEG',
'jpG': 'JPEG',
}
class DriverTest(unittest.TestCase):
def test01_valid_driver(self):
"Testing valid GDAL/OGR Data Source Drivers."
for d in valid_drivers:
dr = Driver(d)
self.assertEqual(d, str(dr))
def test02_invalid_driver(self):
"Testing invalid GDAL/OGR Data Source Drivers."
for i in invalid_drivers:
with self.assertRaises(GDALException):
Driver(i)
def test03_aliases(self):
"Testing driver aliases."
for alias, full_name in aliases.items():
dr = Driver(alias)
self.assertEqual(full_name, str(dr))
@mock.patch('django.contrib.gis.gdal.driver.vcapi.get_driver_count')
@mock.patch('django.contrib.gis.gdal.driver.rcapi.get_driver_count')
@mock.patch('django.contrib.gis.gdal.driver.vcapi.register_all')
@mock.patch('django.contrib.gis.gdal.driver.rcapi.register_all')
def test_registered(self, rreg, vreg, rcount, vcount):
"""
Prototypes are registered only if their respective driver counts are
zero.
"""
def check(rcount_val, vcount_val):
vreg.reset_mock()
rreg.reset_mock()
rcount.return_value = rcount_val
vcount.return_value = vcount_val
Driver.ensure_registered()
if rcount_val:
self.assertFalse(rreg.called)
else:
rreg.assert_called_once_with()
if vcount_val:
self.assertFalse(vreg.called)
else:
vreg.assert_called_once_with()
check(0, 0)
check(120, 0)
check(0, 120)
check(120, 120)
|
09c1a4db8cd94f806dd20c567c391c8538e03a609e4d45baf3cbb9fa7731200e | from django.contrib.gis import admin
from django.contrib.gis.geos import Point
from django.test import TestCase, override_settings
from .admin import UnmodifiableAdmin
from .models import City, site
@override_settings(ROOT_URLCONF='django.contrib.gis.tests.geoadmin.urls')
class GeoAdminTest(TestCase):
def test_ensure_geographic_media(self):
geoadmin = site._registry[City]
admin_js = geoadmin.media.render_js()
self.assertTrue(any(geoadmin.openlayers_url in js for js in admin_js))
def test_olmap_OSM_rendering(self):
delete_all_btn = """<a href="javascript:geodjango_point.clearFeatures()">Delete all Features</a>"""
original_geoadmin = site._registry[City]
params = original_geoadmin.get_map_widget(City._meta.get_field('point')).params
result = original_geoadmin.get_map_widget(City._meta.get_field('point'))(
).render('point', Point(-79.460734, 40.18476), params)
self.assertIn(
"""geodjango_point.layers.base = new OpenLayers.Layer.OSM("OpenStreetMap (Mapnik)");""",
result)
self.assertIn(delete_all_btn, result)
site.unregister(City)
site.register(City, UnmodifiableAdmin)
try:
geoadmin = site._registry[City]
params = geoadmin.get_map_widget(City._meta.get_field('point')).params
result = geoadmin.get_map_widget(City._meta.get_field('point'))(
).render('point', Point(-79.460734, 40.18476), params)
self.assertNotIn(delete_all_btn, result)
finally:
site.unregister(City)
site.register(City, original_geoadmin.__class__)
def test_olmap_WMS_rendering(self):
geoadmin = admin.GeoModelAdmin(City, site)
result = geoadmin.get_map_widget(City._meta.get_field('point'))(
).render('point', Point(-79.460734, 40.18476))
self.assertIn(
"""geodjango_point.layers.base = new OpenLayers.Layer.WMS("OpenLayers WMS", """
""""http://vmap0.tiles.osgeo.org/wms/vmap0", {layers: 'basic', format: 'image/jpeg'});""",
result)
def test_olwidget_has_changed(self):
"""
Changes are accurately noticed by OpenLayersWidget.
"""
geoadmin = site._registry[City]
form = geoadmin.get_changelist_form(None)()
has_changed = form.fields['point'].has_changed
initial = Point(13.4197458572965953, 52.5194108501149799, srid=4326)
data_same = "SRID=3857;POINT(1493879.2754093995 6894592.019687599)"
data_almost_same = "SRID=3857;POINT(1493879.2754093990 6894592.019687590)"
data_changed = "SRID=3857;POINT(1493884.0527237 6894593.8111804)"
self.assertTrue(has_changed(None, data_changed))
self.assertTrue(has_changed(initial, ""))
self.assertFalse(has_changed(None, ""))
self.assertFalse(has_changed(initial, data_same))
self.assertFalse(has_changed(initial, data_almost_same))
self.assertTrue(has_changed(initial, data_changed))
def test_olwidget_empty_string(self):
geoadmin = site._registry[City]
form = geoadmin.get_changelist_form(None)({'point': ''})
with self.assertRaisesMessage(AssertionError, 'no logs'):
with self.assertLogs('django.contrib.gis', 'ERROR'):
output = str(form['point'])
self.assertInHTML(
'<textarea id="id_point" class="vWKTField required" cols="150"'
' rows="10" name="point"></textarea>',
output
)
def test_olwidget_invalid_string(self):
geoadmin = site._registry[City]
form = geoadmin.get_changelist_form(None)({'point': 'INVALID()'})
with self.assertLogs('django.contrib.gis', 'ERROR') as cm:
output = str(form['point'])
self.assertInHTML(
'<textarea id="id_point" class="vWKTField required" cols="150"'
' rows="10" name="point"></textarea>',
output
)
self.assertEqual(len(cm.records), 1)
self.assertEqual(
cm.records[0].getMessage(),
"Error creating geometry from value 'INVALID()' (String input "
"unrecognized as WKT EWKT, and HEXEWKB.)"
)
|
c8b0524473ee30f6a1c18b5f32d959ead6a354d2cddf44d03c49eb2d1ecd73ab | from django.contrib.gis.db import models
from ..admin import admin
class City(models.Model):
name = models.CharField(max_length=30)
point = models.PointField()
class Meta:
app_label = 'geoadmin'
def __str__(self):
return self.name
site = admin.AdminSite(name='admin_gis')
site.register(City, admin.OSMGeoAdmin)
|
86321e50238fdb5021d4414a8c64b070f8b0f32dd9c57203618b5e0f1a26b038 | from django.contrib.gis import admin
class UnmodifiableAdmin(admin.OSMGeoAdmin):
modifiable = False
|
fb2537106a4b446ddda12fe69b37e14dfdb0580e5031d02e360ba145bd295e75 | from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
|
a94dfd798ce3e025819deb068623e8ce8218623d2be14b9b0009c8d3afb74a1c | """
Text-based test rasters
"""
JSON_RASTER = """{
"srid": 4326,
"origin": [0, 0],
"scale": [-1, 1],
"skew": [0, 0],
"width": 5,
"height": 5,
"nr_of_bands": 1,
"bands": [{"data": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
15, 16, 17, 18, 19, 20, 21, 22, 23, 24]}]
}
"""
|
980dba6945a4c4016a79d95eb756ff6ecef0e614f840e7597b74dd304459c33a | from django.contrib.gis.db import models
from django.db import connection, migrations
ops = [
migrations.CreateModel(
name='Neighborhood',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100, unique=True)),
('geom', models.MultiPolygonField(srid=4326)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Household',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('neighborhood', models.ForeignKey(
'gis_migrations.Neighborhood',
models.SET_NULL,
to_field='id',
null=True,
)),
('address', models.CharField(max_length=100)),
('zip_code', models.IntegerField(null=True, blank=True)),
('geom', models.PointField(srid=4326, geography=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Family',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100, unique=True)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='household',
name='family',
field=models.ForeignKey('gis_migrations.Family', models.SET_NULL, blank=True, null=True),
preserve_default=True,
)
]
if connection.features.supports_raster:
ops += [
migrations.CreateModel(
name='Heatmap',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100, unique=True)),
('rast', models.fields.RasterField(srid=4326)),
],
options={
},
bases=(models.Model,),
),
]
class Migration(migrations.Migration):
"""
Used for gis-specific migration tests.
"""
operations = ops
|
7345280adf477f30739bf3e933c591c46c61e8464b4f165a226de02238922072 | from django.core.management.base import BaseCommand
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("--list", action="store_true", dest="list", help="Print all options")
def handle(self, *args, **options):
pass
|
3f920835d9a60d7f7f570f2d69147921dee187fd975f0de516376c6139c8afe8 | from django.db import models
class Article(models.Model):
headline = models.CharField(max_length=100, default='Default headline')
pub_date = models.DateTimeField()
def __str__(self):
return self.headline
class Meta:
app_label = 'fixtures_model_package'
ordering = ('-pub_date', 'headline')
|
ee1c5a624395a8072277153562a73231e399ffb768a4280f4d632953a9597b24 | from .article import (
Article, ArticleIdea, ArticleTag, ArticleTranslation, NewsArticle,
)
from .customers import Address, Contact, Customer
from .empty_join import SlugPage
from .person import Country, Friendship, Group, Membership, Person
__all__ = [
'Address', 'Article', 'ArticleIdea', 'ArticleTag', 'ArticleTranslation',
'Contact', 'Country', 'Customer', 'Friendship', 'Group', 'Membership',
'NewsArticle', 'Person', 'SlugPage',
]
|
51769de3dbbd27dbd3d7a3af076c94901d048c6801be4e689e7b7aa4ef680a74 | from django.db import models
from django.db.models.fields.related import (
ForeignObjectRel, ReverseManyToOneDescriptor,
)
from django.db.models.lookups import StartsWith
from django.db.models.query_utils import PathInfo
class CustomForeignObjectRel(ForeignObjectRel):
"""
Define some extra Field methods so this Rel acts more like a Field, which
lets us use ReverseManyToOneDescriptor in both directions.
"""
@property
def foreign_related_fields(self):
return tuple(lhs_field for lhs_field, rhs_field in self.field.related_fields)
def get_attname(self):
return self.name
class StartsWithRelation(models.ForeignObject):
"""
A ForeignObject that uses StartsWith operator in its joins instead of
the default equality operator. This is logically a many-to-many relation
and creates a ReverseManyToOneDescriptor in both directions.
"""
auto_created = False
many_to_many = False
many_to_one = True
one_to_many = False
one_to_one = False
rel_class = CustomForeignObjectRel
def __init__(self, *args, **kwargs):
kwargs['on_delete'] = models.DO_NOTHING
super().__init__(*args, **kwargs)
@property
def field(self):
"""
Makes ReverseManyToOneDescriptor work in both directions.
"""
return self.remote_field
def get_extra_restriction(self, where_class, alias, related_alias):
to_field = self.remote_field.model._meta.get_field(self.to_fields[0])
from_field = self.model._meta.get_field(self.from_fields[0])
return StartsWith(to_field.get_col(alias), from_field.get_col(related_alias))
def get_joining_columns(self, reverse_join=False):
return ()
def get_path_info(self, filtered_relation=None):
to_opts = self.remote_field.model._meta
from_opts = self.model._meta
return [PathInfo(
from_opts=from_opts,
to_opts=to_opts,
target_fields=(to_opts.pk,),
join_field=self,
m2m=False,
direct=False,
filtered_relation=filtered_relation,
)]
def get_reverse_path_info(self, filtered_relation=None):
to_opts = self.model._meta
from_opts = self.remote_field.model._meta
return [PathInfo(
from_opts=from_opts,
to_opts=to_opts,
target_fields=(to_opts.pk,),
join_field=self.remote_field,
m2m=False,
direct=False,
filtered_relation=filtered_relation,
)]
def contribute_to_class(self, cls, name, private_only=False):
super().contribute_to_class(cls, name, private_only)
setattr(cls, self.name, ReverseManyToOneDescriptor(self))
class BrokenContainsRelation(StartsWithRelation):
"""
    This relation is designed to yield no join conditions and to raise an
    exception in ``Join.as_sql()``.
"""
def get_extra_restriction(self, where_class, alias, related_alias):
return None
class SlugPage(models.Model):
slug = models.CharField(max_length=20, unique=True)
descendants = StartsWithRelation(
'self',
from_fields=['slug'],
to_fields=['slug'],
related_name='ascendants',
)
containers = BrokenContainsRelation(
'self',
from_fields=['slug'],
to_fields=['slug'],
)
class Meta:
ordering = ['slug']
def __str__(self):
return 'SlugPage %s' % self.slug
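# A hedged usage sketch (assumes pages with slugs 'a' and 'a/b' exist): the
# StartsWith restriction means a page's descendants are all pages whose slug
# starts with its own slug, the page itself included.
#
#   root = SlugPage.objects.get(slug='a')
#   assert [p.slug for p in root.descendants.all()] == ['a', 'a/b']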
|
766489150687564309b018875a4e9b9161d5971a6554f80670bf1d5c54f0a3e1 | import datetime
from django.db import models
class Country(models.Model):
# Table Column Fields
name = models.CharField(max_length=50)
def __str__(self):
return self.name
class Person(models.Model):
# Table Column Fields
name = models.CharField(max_length=128)
person_country_id = models.IntegerField()
# Relation Fields
person_country = models.ForeignObject(
Country,
from_fields=['person_country_id'],
to_fields=['id'],
on_delete=models.CASCADE,
)
friends = models.ManyToManyField('self', through='Friendship', symmetrical=False)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class Group(models.Model):
# Table Column Fields
name = models.CharField(max_length=128)
group_country = models.ForeignKey(Country, models.CASCADE)
members = models.ManyToManyField(Person, related_name='groups', through='Membership')
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class Membership(models.Model):
# Table Column Fields
membership_country = models.ForeignKey(Country, models.CASCADE)
date_joined = models.DateTimeField(default=datetime.datetime.now)
invite_reason = models.CharField(max_length=64, null=True)
person_id = models.IntegerField()
group_id = models.IntegerField(blank=True, null=True)
# Relation Fields
person = models.ForeignObject(
Person,
from_fields=['person_id', 'membership_country'],
to_fields=['id', 'person_country_id'],
on_delete=models.CASCADE,
)
group = models.ForeignObject(
Group,
from_fields=['group_id', 'membership_country'],
to_fields=['id', 'group_country'],
on_delete=models.CASCADE,
)
class Meta:
ordering = ('date_joined', 'invite_reason')
def __str__(self):
group_name = self.group.name if self.group_id else 'NULL'
return "%s is a member of %s" % (self.person.name, group_name)
class Friendship(models.Model):
# Table Column Fields
from_friend_country = models.ForeignKey(Country, models.CASCADE, related_name="from_friend_country")
from_friend_id = models.IntegerField()
to_friend_country_id = models.IntegerField()
to_friend_id = models.IntegerField()
# Relation Fields
from_friend = models.ForeignObject(
Person,
on_delete=models.CASCADE,
from_fields=['from_friend_country', 'from_friend_id'],
to_fields=['person_country_id', 'id'],
related_name='from_friend',
)
to_friend_country = models.ForeignObject(
Country,
from_fields=['to_friend_country_id'],
to_fields=['id'],
related_name='to_friend_country',
on_delete=models.CASCADE,
)
to_friend = models.ForeignObject(
Person,
from_fields=['to_friend_country_id', 'to_friend_id'],
to_fields=['person_country_id', 'id'],
related_name='to_friend',
on_delete=models.CASCADE,
)
|
217d6f34b6bee194953e91c440d8f51e514530aace9aeb47de5deaea9aba3779 | from django.db import models
from django.db.models.fields.related import ForeignObject
class Address(models.Model):
company = models.CharField(max_length=1)
customer_id = models.IntegerField()
class Meta:
unique_together = [
('company', 'customer_id'),
]
class Customer(models.Model):
company = models.CharField(max_length=1)
customer_id = models.IntegerField()
address = ForeignObject(
Address, models.CASCADE, null=True,
# order mismatches the Contact ForeignObject.
from_fields=['company', 'customer_id'],
to_fields=['company', 'customer_id'],
)
class Meta:
unique_together = [
('company', 'customer_id'),
]
class Contact(models.Model):
company_code = models.CharField(max_length=1)
customer_code = models.IntegerField()
customer = ForeignObject(
Customer, models.CASCADE, related_name='contacts',
to_fields=['customer_id', 'company'],
from_fields=['customer_code', 'company_code'],
)
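# A hedged note: Contact.customer lists from_fields/to_fields in the opposite
# order from Customer.address, but the pairing is positional, so customer_code
# still matches customer_id and company_code still matches company in the
# generated join:
#
#   Contact.objects.filter(customer__address__isnull=True)  # joins correctly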
|
36c878bd621715958ac25c36f38da3032cb9c65b0dd2bffd3317ccb285f81700 | from django.db import models
from django.db.models.fields.related import ForwardManyToOneDescriptor
from django.utils.translation import get_language
class ArticleTranslationDescriptor(ForwardManyToOneDescriptor):
"""
    Assigning an article translation should not set any local fields.
"""
def __set__(self, instance, value):
if instance is None:
raise AttributeError("%s must be accessed via instance" % self.field.name)
self.field.set_cached_value(instance, value)
if value is not None and not self.field.remote_field.multiple:
self.field.remote_field.set_cached_value(value, instance)
class ColConstraint:
# Anything with as_sql() method works in get_extra_restriction().
def __init__(self, alias, col, value):
self.alias, self.col, self.value = alias, col, value
def as_sql(self, compiler, connection):
qn = compiler.quote_name_unless_alias
return '%s.%s = %%s' % (qn(self.alias), qn(self.col)), [self.value]
class ActiveTranslationField(models.ForeignObject):
"""
This field will allow querying and fetching the currently active translation
for Article from ArticleTranslation.
"""
requires_unique_target = False
def get_extra_restriction(self, where_class, alias, related_alias):
return ColConstraint(alias, 'lang', get_language())
def get_extra_descriptor_filter(self, instance):
return {'lang': get_language()}
def contribute_to_class(self, cls, name):
super().contribute_to_class(cls, name)
setattr(cls, self.name, ArticleTranslationDescriptor(self))
class ActiveTranslationFieldWithQ(ActiveTranslationField):
def get_extra_descriptor_filter(self, instance):
return models.Q(lang=get_language())
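# A hedged sketch of the behavior (assumes a Dutch translation row exists):
#
#   from django.utils import translation
#
#   with translation.override('nl'):
#       article.active_translation.title       # row with lang == 'nl'
#       Article.objects.filter(active_translation__title='X')
#       # the join carries the extra restriction articletranslation.lang = 'nl'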
class Article(models.Model):
active_translation = ActiveTranslationField(
'ArticleTranslation',
from_fields=['id'],
to_fields=['article'],
related_name='+',
on_delete=models.CASCADE,
null=True,
)
active_translation_q = ActiveTranslationFieldWithQ(
'ArticleTranslation',
from_fields=['id'],
to_fields=['article'],
related_name='+',
on_delete=models.CASCADE,
null=True,
)
pub_date = models.DateField()
def __str__(self):
try:
return self.active_translation.title
except ArticleTranslation.DoesNotExist:
return '[No translation found]'
class NewsArticle(Article):
pass
class ArticleTranslation(models.Model):
article = models.ForeignKey(Article, models.CASCADE)
lang = models.CharField(max_length=2)
title = models.CharField(max_length=100)
body = models.TextField()
abstract = models.TextField(null=True)
class Meta:
unique_together = ('article', 'lang')
ordering = ('active_translation__title',)
class ArticleTag(models.Model):
article = models.ForeignKey(
Article,
models.CASCADE,
related_name='tags',
related_query_name='tag',
)
name = models.CharField(max_length=255)
class ArticleIdea(models.Model):
articles = models.ManyToManyField(
Article,
related_name='ideas',
related_query_name='idea_things',
)
name = models.CharField(max_length=255)
|
c233f0778b522b481733c8ba4988f827b119e456d9814e2dc31b101eef9a80c7 | from django.db import models
class Author(models.Model):
first_name = models.CharField(max_length=128)
last_name = models.CharField(max_length=128)
class Editor(models.Model):
name = models.CharField(max_length=128)
bestselling_author = models.ForeignKey(Author, models.CASCADE)
class Book(models.Model):
title = models.CharField(max_length=128)
authors = models.ManyToManyField(Author)
editor = models.ForeignKey(Editor, models.CASCADE, related_name="edited_books")
class Meta:
default_related_name = "books"
class Store(models.Model):
name = models.CharField(max_length=128)
address = models.CharField(max_length=128)
class Meta:
abstract = True
default_related_name = "%(app_label)s_%(model_name)ss"
class BookStore(Store):
available_books = models.ManyToManyField(Book)
class EditorStore(Store):
editor = models.ForeignKey(Editor, models.CASCADE)
available_books = models.ManyToManyField(Book)
class Meta:
default_related_name = "editor_stores"
|
fc0082ee0b89c36658cffe59d022f5ef27440ce48335652a592a0a2f3dfbca0e | from django.db import models
# Since the test database doesn't have tablespaces, it's impossible for Django
# to create the tables for models where db_tablespace is set. To avoid this
# problem, we mark the models as unmanaged, and temporarily revert them to
# managed during each test. We also set them to use the same tables as the
# "reference" models to avoid errors when other tests run 'migrate'
# (proxy_models_inheritance does).
class ScientistRef(models.Model):
name = models.CharField(max_length=50)
class ArticleRef(models.Model):
title = models.CharField(max_length=50, unique=True)
code = models.CharField(max_length=50, unique=True)
authors = models.ManyToManyField(ScientistRef, related_name='articles_written_set')
reviewers = models.ManyToManyField(ScientistRef, related_name='articles_reviewed_set')
class Scientist(models.Model):
name = models.CharField(max_length=50)
class Meta:
db_table = 'model_options_scientistref'
db_tablespace = 'tbl_tbsp'
managed = False
class Article(models.Model):
title = models.CharField(max_length=50, unique=True)
code = models.CharField(max_length=50, unique=True, db_tablespace='idx_tbsp')
authors = models.ManyToManyField(Scientist, related_name='articles_written_set')
reviewers = models.ManyToManyField(Scientist, related_name='articles_reviewed_set', db_tablespace='idx_tbsp')
class Meta:
db_table = 'model_options_articleref'
db_tablespace = 'tbl_tbsp'
managed = False
# Also set the tables for automatically created models
Authors = Article._meta.get_field('authors').remote_field.through
Authors._meta.db_table = 'model_options_articleref_authors'
Reviewers = Article._meta.get_field('reviewers').remote_field.through
Reviewers._meta.db_table = 'model_options_articleref_reviewers'
|
4bb5f6af279b8152a2d8f4c50f74be32643bf348c3af8717dd173e98ba7992ea | from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sites', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CustomArticle',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=50)),
('places_this_article_should_appear', models.ForeignKey('sites.Site', models.CASCADE)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='ExclusiveArticle',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=50)),
('site', models.ForeignKey('sites.Site', models.CASCADE)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='SyndicatedArticle',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=50)),
('sites', models.ManyToManyField('sites.Site')),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
]
|
84e81d8780ba36cccc5f2dcf5153aee7e5315ed199c9b6d6e789fbc73ed02aa5 | # This package is used to test the --exclude option of
# the makemessages and compilemessages management commands.
# The locale directory for this app is generated automatically
# by the test cases.
from django.utils.translation import gettext as _
# Translators: This comment should be extracted
dummy1 = _("This is a translatable string.")
# This comment should not be extracted
dummy2 = _("This is another translatable string.")
|
a6d7440fdc00ff5a54d1f4fb5380b401974cd3687da636df55fb8b869f8e736f | #!/usr/bin/env python
"""
Helper script to update sampleproject's translation catalogs.
When an i18n-related bug has been identified, this helps capture the issue
using catalogs created by the management commands.
Example:
The string "Two %% Three %%%" renders differently using trans and blocktrans.
This issue is difficult to debug; it could be a problem with extraction,
interpolation, or both.
How this script helps:
* Add {% trans "Two %% Three %%%" %} and blocktrans equivalent to templates.
* Run this script.
* Test extraction - verify the new msgid in sampleproject's django.po.
* Add a translation to sampleproject's django.po.
* Run this script.
* Test interpolation - verify templatetag rendering, test each in a template
that is rendered using an activated language from sampleproject's locale.
* Tests should fail, issue captured.
* Fix issue.
* Run this script.
* Tests all pass.
"""
import os
import re
import sys
proj_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.abspath(os.path.join(proj_dir, '..', '..', '..')))
def update_translation_catalogs():
"""Run makemessages and compilemessages in sampleproject."""
from django.core.management import call_command
prev_cwd = os.getcwd()
os.chdir(proj_dir)
call_command('makemessages')
call_command('compilemessages')
# keep the diff friendly - remove 'POT-Creation-Date'
pofile = os.path.join(proj_dir, 'locale', 'fr', 'LC_MESSAGES', 'django.po')
with open(pofile) as f:
content = f.read()
content = re.sub(r'^"POT-Creation-Date.+$\s', '', content, flags=re.MULTILINE)
with open(pofile, 'w') as f:
f.write(content)
os.chdir(prev_cwd)
if __name__ == "__main__":
update_translation_catalogs()
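# Example invocation (a sketch): the script chdirs into the project directory
# itself, so it can be run from anywhere Django is importable:
#
#   $ python update_catalogs.py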
|
80b8ea9e5f4dd44baf6f68b4cb51c0bff9c5b00da37bf3f9aa297d17bea5f71d | #!/usr/bin/env python
import os
import sys
sys.path.append(os.path.abspath(os.path.join('..', '..', '..')))
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sampleproject.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
c7e2a4505513577ea87cf6ce5a7d627d027b08d3b2fb76f41ba86f21bc2616e9 | from django.utils.translation import gettext as _, ngettext
# Translators: This comment should be extracted
dummy1 = _("This is a translatable string.")
# This comment should not be extracted
dummy2 = _("This is another translatable string.")
# This file has a literal with plural forms. When processed first, makemessages
# shouldn't create a .po file with duplicate `Plural-Forms` headers
number = 3
dummy3 = ngettext("%(number)s Foo", "%(number)s Foos", number) % {'number': number}
dummy4 = _('Size')
# This string is intentionally duplicated in test.html
dummy5 = _('This literal should be included.')
|
f3f438f643e88bb78c7ec9639c9864e5a6a18ce8d872f3cbd8a624c78f5bd93a | import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpResponsePermanentRedirect
from django.middleware.locale import LocaleMiddleware
from django.template import Context, Template
from django.test import SimpleTestCase, override_settings
from django.test.client import RequestFactory
from django.test.utils import override_script_prefix
from django.urls import clear_url_caches, reverse, translate_url
from django.utils import translation
class PermanentRedirectLocaleMiddleWare(LocaleMiddleware):
response_redirect_class = HttpResponsePermanentRedirect
@override_settings(
USE_I18N=True,
LOCALE_PATHS=[
os.path.join(os.path.dirname(__file__), 'locale'),
],
LANGUAGE_CODE='en-us',
LANGUAGES=[
('nl', 'Dutch'),
('en', 'English'),
('pt-br', 'Brazilian Portuguese'),
],
MIDDLEWARE=[
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
],
ROOT_URLCONF='i18n.patterns.urls.default',
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(os.path.dirname(__file__), 'templates')],
'OPTIONS': {
'context_processors': [
'django.template.context_processors.i18n',
],
},
}],
)
class URLTestCaseBase(SimpleTestCase):
"""
TestCase base-class for the URL tests.
"""
def setUp(self):
        # Make sure the cache is empty before running the tests.
clear_url_caches()
def tearDown(self):
        # Leave an empty cache for other test cases.
clear_url_caches()
class URLPrefixTests(URLTestCaseBase):
"""
    Tests whether `i18n_patterns` adds the language prefix correctly.
"""
def test_not_prefixed(self):
with translation.override('en'):
self.assertEqual(reverse('not-prefixed'), '/not-prefixed/')
self.assertEqual(reverse('not-prefixed-included-url'), '/not-prefixed-include/foo/')
with translation.override('nl'):
self.assertEqual(reverse('not-prefixed'), '/not-prefixed/')
self.assertEqual(reverse('not-prefixed-included-url'), '/not-prefixed-include/foo/')
def test_prefixed(self):
with translation.override('en'):
self.assertEqual(reverse('prefixed'), '/en/prefixed/')
with translation.override('nl'):
self.assertEqual(reverse('prefixed'), '/nl/prefixed/')
with translation.override(None):
self.assertEqual(reverse('prefixed'), '/%s/prefixed/' % settings.LANGUAGE_CODE)
@override_settings(ROOT_URLCONF='i18n.patterns.urls.wrong')
def test_invalid_prefix_use(self):
msg = 'Using i18n_patterns in an included URLconf is not allowed.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
reverse('account:register')
@override_settings(ROOT_URLCONF='i18n.patterns.urls.disabled')
class URLDisabledTests(URLTestCaseBase):
@override_settings(USE_I18N=False)
def test_prefixed_i18n_disabled(self):
with translation.override('en'):
self.assertEqual(reverse('prefixed'), '/prefixed/')
with translation.override('nl'):
self.assertEqual(reverse('prefixed'), '/prefixed/')
class RequestURLConfTests(SimpleTestCase):
@override_settings(ROOT_URLCONF='i18n.patterns.urls.path_unused')
def test_request_urlconf_considered(self):
request = RequestFactory().get('/nl/')
request.urlconf = 'i18n.patterns.urls.default'
middleware = LocaleMiddleware()
with translation.override('nl'):
middleware.process_request(request)
self.assertEqual(request.LANGUAGE_CODE, 'nl')
@override_settings(ROOT_URLCONF='i18n.patterns.urls.path_unused')
class PathUnusedTests(URLTestCaseBase):
"""
    If no i18n_patterns are used in the root URLconf, then no language
    activation happens based on the URL prefix.
"""
def test_no_lang_activate(self):
response = self.client.get('/nl/foo/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['content-language'], 'en')
self.assertEqual(response.context['LANGUAGE_CODE'], 'en')
class URLTranslationTests(URLTestCaseBase):
"""
    Tests whether the pattern strings are translated correctly (both inside
    `i18n_patterns` and in normal, non-prefixed urlpatterns).
"""
def test_no_prefix_translated(self):
with translation.override('en'):
self.assertEqual(reverse('no-prefix-translated'), '/translated/')
self.assertEqual(reverse('no-prefix-translated-slug', kwargs={'slug': 'yeah'}), '/translated/yeah/')
with translation.override('nl'):
self.assertEqual(reverse('no-prefix-translated'), '/vertaald/')
self.assertEqual(reverse('no-prefix-translated-slug', kwargs={'slug': 'yeah'}), '/vertaald/yeah/')
with translation.override('pt-br'):
self.assertEqual(reverse('no-prefix-translated'), '/traduzidos/')
self.assertEqual(reverse('no-prefix-translated-slug', kwargs={'slug': 'yeah'}), '/traduzidos/yeah/')
def test_users_url(self):
with translation.override('en'):
self.assertEqual(reverse('users'), '/en/users/')
with translation.override('nl'):
self.assertEqual(reverse('users'), '/nl/gebruikers/')
self.assertEqual(reverse('prefixed_xml'), '/nl/prefixed.xml')
with translation.override('pt-br'):
self.assertEqual(reverse('users'), '/pt-br/usuarios/')
def test_translate_url_utility(self):
with translation.override('en'):
self.assertEqual(translate_url('/en/nonexistent/', 'nl'), '/en/nonexistent/')
self.assertEqual(translate_url('/en/users/', 'nl'), '/nl/gebruikers/')
# Namespaced URL
self.assertEqual(translate_url('/en/account/register/', 'nl'), '/nl/profiel/registreren/')
# path() URL pattern
self.assertEqual(translate_url('/en/account/register-as-path/', 'nl'), '/nl/profiel/registreren-als-pad/')
self.assertEqual(translation.get_language(), 'en')
with translation.override('nl'):
self.assertEqual(translate_url('/nl/gebruikers/', 'en'), '/en/users/')
self.assertEqual(translation.get_language(), 'nl')
class URLNamespaceTests(URLTestCaseBase):
"""
    Tests whether translations still work within URL namespaces.
"""
def test_account_register(self):
with translation.override('en'):
self.assertEqual(reverse('account:register'), '/en/account/register/')
self.assertEqual(reverse('account:register-as-path'), '/en/account/register-as-path/')
with translation.override('nl'):
self.assertEqual(reverse('account:register'), '/nl/profiel/registreren/')
self.assertEqual(reverse('account:register-as-path'), '/nl/profiel/registreren-als-pad/')
class URLRedirectTests(URLTestCaseBase):
"""
    Tests that the user is redirected to the right URL when there is no
    language prefix in the request URL.
"""
def test_no_prefix_response(self):
response = self.client.get('/not-prefixed/')
self.assertEqual(response.status_code, 200)
def test_en_redirect(self):
response = self.client.get('/account/register/', HTTP_ACCEPT_LANGUAGE='en')
self.assertRedirects(response, '/en/account/register/')
response = self.client.get(response['location'])
self.assertEqual(response.status_code, 200)
def test_en_redirect_wrong_url(self):
response = self.client.get('/profiel/registreren/', HTTP_ACCEPT_LANGUAGE='en')
self.assertEqual(response.status_code, 404)
def test_nl_redirect(self):
response = self.client.get('/profiel/registreren/', HTTP_ACCEPT_LANGUAGE='nl')
self.assertRedirects(response, '/nl/profiel/registreren/')
response = self.client.get(response['location'])
self.assertEqual(response.status_code, 200)
def test_nl_redirect_wrong_url(self):
response = self.client.get('/account/register/', HTTP_ACCEPT_LANGUAGE='nl')
self.assertEqual(response.status_code, 404)
def test_pt_br_redirect(self):
response = self.client.get('/conta/registre-se/', HTTP_ACCEPT_LANGUAGE='pt-br')
self.assertRedirects(response, '/pt-br/conta/registre-se/')
response = self.client.get(response['location'])
self.assertEqual(response.status_code, 200)
def test_pl_pl_redirect(self):
# language from outside of the supported LANGUAGES list
response = self.client.get('/account/register/', HTTP_ACCEPT_LANGUAGE='pl-pl')
self.assertRedirects(response, '/en/account/register/')
response = self.client.get(response['location'])
self.assertEqual(response.status_code, 200)
@override_settings(
MIDDLEWARE=[
'i18n.patterns.tests.PermanentRedirectLocaleMiddleWare',
'django.middleware.common.CommonMiddleware',
],
)
def test_custom_redirect_class(self):
response = self.client.get('/account/register/', HTTP_ACCEPT_LANGUAGE='en')
self.assertRedirects(response, '/en/account/register/', 301)
class URLVaryAcceptLanguageTests(URLTestCaseBase):
"""
'Accept-Language' is not added to the Vary header when using prefixed URLs.
"""
def test_no_prefix_response(self):
response = self.client.get('/not-prefixed/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.get('Vary'), 'Accept-Language')
def test_en_redirect(self):
response = self.client.get('/account/register/', HTTP_ACCEPT_LANGUAGE='en')
self.assertRedirects(response, '/en/account/register/')
self.assertFalse(response.get('Vary'))
response = self.client.get(response['location'])
self.assertEqual(response.status_code, 200)
self.assertFalse(response.get('Vary'))
class URLRedirectWithoutTrailingSlashTests(URLTestCaseBase):
"""
Tests the redirect when the requested URL doesn't end with a slash
(`settings.APPEND_SLASH=True`).
"""
def test_not_prefixed_redirect(self):
response = self.client.get('/not-prefixed', HTTP_ACCEPT_LANGUAGE='en')
self.assertRedirects(response, '/not-prefixed/', 301)
def test_en_redirect(self):
response = self.client.get('/account/register', HTTP_ACCEPT_LANGUAGE='en', follow=True)
# We only want one redirect, bypassing CommonMiddleware
self.assertEqual(response.redirect_chain, [('/en/account/register/', 302)])
self.assertRedirects(response, '/en/account/register/', 302)
response = self.client.get('/prefixed.xml', HTTP_ACCEPT_LANGUAGE='en', follow=True)
self.assertRedirects(response, '/en/prefixed.xml', 302)
class URLRedirectWithoutTrailingSlashSettingTests(URLTestCaseBase):
"""
Tests the redirect when the requested URL doesn't end with a slash
(`settings.APPEND_SLASH=False`).
"""
@override_settings(APPEND_SLASH=False)
def test_not_prefixed_redirect(self):
response = self.client.get('/not-prefixed', HTTP_ACCEPT_LANGUAGE='en')
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=False)
def test_en_redirect(self):
response = self.client.get('/account/register-without-slash', HTTP_ACCEPT_LANGUAGE='en')
self.assertRedirects(response, '/en/account/register-without-slash', 302)
response = self.client.get(response['location'])
self.assertEqual(response.status_code, 200)
class URLResponseTests(URLTestCaseBase):
"""Tests if the response has the correct language code."""
def test_not_prefixed_with_prefix(self):
response = self.client.get('/en/not-prefixed/')
self.assertEqual(response.status_code, 404)
def test_en_url(self):
response = self.client.get('/en/account/register/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['content-language'], 'en')
self.assertEqual(response.context['LANGUAGE_CODE'], 'en')
def test_nl_url(self):
response = self.client.get('/nl/profiel/registreren/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['content-language'], 'nl')
self.assertEqual(response.context['LANGUAGE_CODE'], 'nl')
def test_wrong_en_prefix(self):
response = self.client.get('/en/profiel/registreren/')
self.assertEqual(response.status_code, 404)
def test_wrong_nl_prefix(self):
response = self.client.get('/nl/account/register/')
self.assertEqual(response.status_code, 404)
def test_pt_br_url(self):
response = self.client.get('/pt-br/conta/registre-se/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['content-language'], 'pt-br')
self.assertEqual(response.context['LANGUAGE_CODE'], 'pt-br')
def test_en_path(self):
response = self.client.get('/en/account/register-as-path/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['content-language'], 'en')
self.assertEqual(response.context['LANGUAGE_CODE'], 'en')
def test_nl_path(self):
response = self.client.get('/nl/profiel/registreren-als-pad/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response['content-language'], 'nl')
self.assertEqual(response.context['LANGUAGE_CODE'], 'nl')
class URLRedirectWithScriptAliasTests(URLTestCaseBase):
"""
#21579 - LocaleMiddleware should respect the script prefix.
"""
def test_language_prefix_with_script_prefix(self):
prefix = '/script_prefix'
with override_script_prefix(prefix):
response = self.client.get('/prefixed/', HTTP_ACCEPT_LANGUAGE='en', SCRIPT_NAME=prefix)
self.assertRedirects(response, '%s/en/prefixed/' % prefix, target_status_code=404)
class URLTagTests(URLTestCaseBase):
"""
Test if the language tag works.
"""
def test_strings_only(self):
t = Template("""{% load i18n %}
{% language 'nl' %}{% url 'no-prefix-translated' %}{% endlanguage %}
{% language 'pt-br' %}{% url 'no-prefix-translated' %}{% endlanguage %}""")
self.assertEqual(t.render(Context({})).strip().split(),
['/vertaald/', '/traduzidos/'])
def test_context(self):
ctx = Context({'lang1': 'nl', 'lang2': 'pt-br'})
tpl = Template("""{% load i18n %}
{% language lang1 %}{% url 'no-prefix-translated' %}{% endlanguage %}
{% language lang2 %}{% url 'no-prefix-translated' %}{% endlanguage %}""")
self.assertEqual(tpl.render(ctx).strip().split(),
['/vertaald/', '/traduzidos/'])
def test_args(self):
tpl = Template("""{% load i18n %}
{% language 'nl' %}{% url 'no-prefix-translated-slug' 'apo' %}{% endlanguage %}
{% language 'pt-br' %}{% url 'no-prefix-translated-slug' 'apo' %}{% endlanguage %}""")
self.assertEqual(tpl.render(Context({})).strip().split(),
['/vertaald/apo/', '/traduzidos/apo/'])
def test_kwargs(self):
tpl = Template("""{% load i18n %}
{% language 'nl' %}{% url 'no-prefix-translated-slug' slug='apo' %}{% endlanguage %}
{% language 'pt-br' %}{% url 'no-prefix-translated-slug' slug='apo' %}{% endlanguage %}""")
self.assertEqual(tpl.render(Context({})).strip().split(),
['/vertaald/apo/', '/traduzidos/apo/'])
|
991755ea6f56c9c0dbead0e1f70c7646a2ea80bff8503a7db9f239c1d1643f8a | # Sample project used by test_extraction.CustomLayoutExtractionTests
from django.utils.translation import gettext as _
string = _("This is a project-level string")
|
bddd4f426f64995d6e6f84c80fa37727423489e7f2c71eb867be29997b7d8231 | import os
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase, override_settings
from django.utils import translation
@override_settings(
USE_I18N=True,
LOCALE_PATHS=[
os.path.join(os.path.dirname(__file__), 'locale'),
],
LANGUAGE_CODE='en',
LANGUAGES=[
('en', 'English'),
('fr', 'French'),
],
)
class ContentTypeTests(TestCase):
def test_verbose_name(self):
company_type = ContentType.objects.get(app_label='i18n', model='company')
with translation.override('en'):
self.assertEqual(str(company_type), 'Company')
with translation.override('fr'):
self.assertEqual(str(company_type), 'Société')
|
ac6a5625dc9831aee0e890914a912e7f47ce6a290045601f128145f65edc99a4 | from django.conf.urls import url
from django.views.generic import TemplateView
view = TemplateView.as_view(template_name='dummy.html')
urlpatterns = [
url(r'^foo/$', view, name='not-prefixed-included-url'),
]
|
38ac260e15f92de1ff4b814515588ee71f395ba31e14786e7f1c24c1eaf799c6 | from django.conf.urls import include, url
from django.conf.urls.i18n import i18n_patterns
from django.utils.translation import gettext_lazy as _
urlpatterns = i18n_patterns(
url(_(r'^account/'), include('i18n.patterns.urls.wrong_namespace', namespace='account')),
)
|
cfba403a2cf19f986232d8f8304a581b911157f3e7eb39b678359c60340c96a1 | from django.conf.urls import include, url
from django.conf.urls.i18n import i18n_patterns
from django.utils.translation import gettext_lazy as _
from django.views.generic import TemplateView
view = TemplateView.as_view(template_name='dummy.html')
urlpatterns = [
url(r'^not-prefixed/$', view, name='not-prefixed'),
url(r'^not-prefixed-include/', include('i18n.patterns.urls.included')),
url(_(r'^translated/$'), view, name='no-prefix-translated'),
url(_(r'^translated/(?P<slug>[\w-]+)/$'), view, name='no-prefix-translated-slug'),
]
urlpatterns += i18n_patterns(
url(r'^prefixed/$', view, name='prefixed'),
url(r'^prefixed\.xml$', view, name='prefixed_xml'),
url(_(r'^users/$'), view, name='users'),
url(_(r'^account/'), include('i18n.patterns.urls.namespace', namespace='account')),
)
|
0592747dad4e472de9fa09bfe4aac4323060ae0721d14a7bebc61574a5781c88 | from django.conf.urls import url
from django.views.generic import TemplateView
view = TemplateView.as_view(template_name='dummy.html')
urlpatterns = [
url(r'^nl/foo/', view, name='not-translated'),
]
|
c5aaa7dc6eb5639db851e251fca3522a3490b32c7d5ddb05577262627243ddee | from django.conf.urls import url
from django.conf.urls.i18n import i18n_patterns
from django.utils.translation import gettext_lazy as _
from django.views.generic import TemplateView
view = TemplateView.as_view(template_name='dummy.html')
app_name = 'account'
urlpatterns = i18n_patterns(
url(_(r'^register/$'), view, name='register'),
)
|
b97e3d6f9fecd2dd4bb7a2d8a9814910c65b680051bf20db5a348631c87f16ac | from django.conf.urls import url
from django.urls import path
from django.utils.translation import gettext_lazy as _
from django.views.generic import TemplateView
view = TemplateView.as_view(template_name='dummy.html')
app_name = 'account'
urlpatterns = [
url(_(r'^register/$'), view, name='register'),
url(_(r'^register-without-slash$'), view, name='register-without-slash'),
path(_('register-as-path/'), view, name='register-as-path'),
]
|
d6d6fb3329436823fe54e212448a95b58acf1fcc0a2d383edde6335eeb15fa70 | from django.conf.urls import url
from django.conf.urls.i18n import i18n_patterns
from django.views.generic import TemplateView
view = TemplateView.as_view(template_name='dummy.html')
urlpatterns = i18n_patterns(
url(r'^prefixed/$', view, name='prefixed'),
)
|
6ff5f8e304b458873b332bf93e372473700367f2197d24f325b5a505784d7435 | # A user-defined format
CUSTOM_DAY_FORMAT = 'd/m/Y CUSTOM'
|
f2cd74a1430fb8d8ff18fd786d45bce0c8e0f56873ad8197b872a29d78d9f29f | from django.utils.translation import gettext as _
string = _("This app has a locale directory")
|
b2e23f2843079446be1e06df0b6d278b24e7bce3628164478272878769497a7a | from django.utils.translation import gettext as _
string = _("This app has no locale directory")
|
b2e99fd7ae01768de0c2e601027732dad93c9b100ab95885662e47a6d99eb053 | from django.db import migrations, models
class Migration(migrations.Migration):
operations = [
migrations.CreateModel(
"Signal",
[
("id", models.AutoField(primary_key=True)),
],
),
]
|
4fe518dbeaa6ffc56f2fa7189c294c5e8343476c22efd79837c5560f11f3bd5b | """
An interpreted text role to link docs to CVE issues. To use: :cve:`XXXXX`
"""
from docutils import nodes, utils
from docutils.parsers.rst import roles
def cve_role(name, rawtext, text, lineno, inliner, options=None, content=None):
if options is None:
options = {}
url_pattern = inliner.document.settings.env.app.config.cve_url
if url_pattern is None:
msg = inliner.reporter.warning("cve not configured: please configure cve_url in conf.py")
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
url = url_pattern % text
roles.set_classes(options)
node = nodes.reference(rawtext, utils.unescape('CVE-%s' % text), refuri=url, **options)
return [node], []
def setup(app):
app.add_config_value('cve_url', None, 'env')
app.add_role('cve', cve_role)
return {'parallel_read_safe': True}
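# Example configuration (a sketch; the exact URL pattern is an assumption):
# in docs/conf.py set
#
#   cve_url = 'https://nvd.nist.gov/vuln/detail/CVE-%s'
#
# after which :cve:`2019-19844` renders as a link titled "CVE-2019-19844".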
|
43494feb1dea648cdd5fcc028e815934eb9f57059cff00477dd2b216cfd4db05 | """
An interpreted text role to link docs to Trac tickets.
To use: :ticket:`XXXXX`
Based on code from psycopg2 by Daniele Varrazzo.
"""
from docutils import nodes, utils
from docutils.parsers.rst import roles
def ticket_role(name, rawtext, text, lineno, inliner, options=None, content=None):
if options is None:
options = {}
try:
num = int(text.replace('#', ''))
except ValueError:
msg = inliner.reporter.error(
"ticket number must be... a number, got '%s'" % text)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
url_pattern = inliner.document.settings.env.app.config.ticket_url
if url_pattern is None:
msg = inliner.reporter.warning(
"ticket not configured: please configure ticket_url in conf.py")
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
url = url_pattern % num
roles.set_classes(options)
node = nodes.reference(rawtext, '#' + utils.unescape(text), refuri=url, **options)
return [node], []
def setup(app):
app.add_config_value('ticket_url', None, 'env')
app.add_role('ticket', ticket_role)
return {'parallel_read_safe': True}
|
d6bf6762fd2423982d611c5b8e580cd1578bd8409c360b89e2b1a1b0c48a391f | """
Sphinx plugins for Django documentation.
"""
import json
import os
import re
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from docutils.statemachine import ViewList
from sphinx import addnodes
from sphinx.builders.html import StandaloneHTMLBuilder
from sphinx.directives import CodeBlock
from sphinx.domains.std import Cmdoption
from sphinx.util.console import bold
from sphinx.util.nodes import set_source_info
from sphinx.writers.html import HTMLTranslator
# RE for option descriptions without a '--' prefix
simple_option_desc_re = re.compile(
r'([-_a-zA-Z0-9]+)(\s*.*?)(?=,\s+(?:/|-|--)|$)')
def setup(app):
app.add_crossref_type(
directivename="setting",
rolename="setting",
indextemplate="pair: %s; setting",
)
app.add_crossref_type(
directivename="templatetag",
rolename="ttag",
indextemplate="pair: %s; template tag"
)
app.add_crossref_type(
directivename="templatefilter",
rolename="tfilter",
indextemplate="pair: %s; template filter"
)
app.add_crossref_type(
directivename="fieldlookup",
rolename="lookup",
indextemplate="pair: %s; field lookup type",
)
app.add_description_unit(
directivename="django-admin",
rolename="djadmin",
indextemplate="pair: %s; django-admin command",
parse_node=parse_django_admin_node,
)
app.add_directive('django-admin-option', Cmdoption)
app.add_config_value('django_next_version', '0.0', True)
app.add_directive('versionadded', VersionDirective)
app.add_directive('versionchanged', VersionDirective)
app.add_builder(DjangoStandaloneHTMLBuilder)
# register the snippet directive
app.add_directive('snippet', SnippetWithFilename)
# register a node for snippet directive so that the xml parser
# knows how to handle the enter/exit parsing event
app.add_node(snippet_with_filename,
html=(visit_snippet, depart_snippet_literal),
latex=(visit_snippet_latex, depart_snippet_latex),
man=(visit_snippet_literal, depart_snippet_literal),
text=(visit_snippet_literal, depart_snippet_literal),
texinfo=(visit_snippet_literal, depart_snippet_literal))
app.set_translator('djangohtml', DjangoHTMLTranslator)
app.set_translator('json', DjangoHTMLTranslator)
app.add_node(
ConsoleNode,
html=(visit_console_html, None),
latex=(visit_console_dummy, depart_console_dummy),
man=(visit_console_dummy, depart_console_dummy),
text=(visit_console_dummy, depart_console_dummy),
texinfo=(visit_console_dummy, depart_console_dummy),
)
app.add_directive('console', ConsoleDirective)
app.connect('html-page-context', html_page_context_hook)
return {'parallel_read_safe': True}
class snippet_with_filename(nodes.literal_block):
"""
Subclass the literal_block to override the visit/depart event handlers
"""
pass
def visit_snippet_literal(self, node):
"""
default literal block handler
"""
self.visit_literal_block(node)
def depart_snippet_literal(self, node):
"""
default literal block handler
"""
self.depart_literal_block(node)
def visit_snippet(self, node):
"""
HTML document generator visit handler
"""
lang = self.highlightlang
linenos = node.rawsource.count('\n') >= self.highlightlinenothreshold - 1
fname = node['filename']
highlight_args = node.get('highlight_args', {})
if 'language' in node:
# code-block directives
lang = node['language']
highlight_args['force'] = True
if 'linenos' in node:
linenos = node['linenos']
def warner(msg):
self.builder.warn(msg, (self.builder.current_docname, node.line))
highlighted = self.highlighter.highlight_block(node.rawsource, lang,
warn=warner,
linenos=linenos,
**highlight_args)
starttag = self.starttag(node, 'div', suffix='',
CLASS='highlight-%s snippet' % lang)
self.body.append(starttag)
    self.body.append('<div class="snippet-filename">%s</div>\n' % (fname,))
self.body.append(highlighted)
self.body.append('</div>\n')
raise nodes.SkipNode
def visit_snippet_latex(self, node):
"""
Latex document generator visit handler
"""
code = node.rawsource.rstrip('\n')
lang = self.hlsettingstack[-1][0]
linenos = code.count('\n') >= self.hlsettingstack[-1][1] - 1
fname = node['filename']
highlight_args = node.get('highlight_args', {})
if 'language' in node:
# code-block directives
lang = node['language']
highlight_args['force'] = True
if 'linenos' in node:
linenos = node['linenos']
def warner(msg):
self.builder.warn(msg, (self.curfilestack[-1], node.line))
hlcode = self.highlighter.highlight_block(code, lang, warn=warner,
linenos=linenos,
**highlight_args)
self.body.append(
'\n{\\colorbox[rgb]{0.9,0.9,0.9}'
'{\\makebox[\\textwidth][l]'
'{\\small\\texttt{%s}}}}\n' % (
# Some filenames have '_', which is special in latex.
fname.replace('_', r'\_'),
)
)
if self.table:
hlcode = hlcode.replace('\\begin{Verbatim}',
'\\begin{OriginalVerbatim}')
self.table.has_problematic = True
self.table.has_verbatim = True
hlcode = hlcode.rstrip()[:-14] # strip \end{Verbatim}
hlcode = hlcode.rstrip() + '\n'
self.body.append('\n' + hlcode + '\\end{%sVerbatim}\n' %
(self.table and 'Original' or ''))
# Prevent rawsource from appearing in output a second time.
raise nodes.SkipNode
def depart_snippet_latex(self, node):
"""
Latex document generator depart handler.
"""
pass
class SnippetWithFilename(Directive):
"""
    The 'snippet' directive, which allows adding an optional filename to a
    code snippet in the document. This is modeled after CodeBlock.
"""
has_content = True
optional_arguments = 1
option_spec = {'filename': directives.unchanged_required}
def run(self):
code = '\n'.join(self.content)
literal = snippet_with_filename(code, code)
if self.arguments:
literal['language'] = self.arguments[0]
literal['filename'] = self.options['filename']
set_source_info(self, literal)
return [literal]
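# Example reST usage (a sketch):
#
#   .. snippet:: python
#       :filename: myapp/models.py
#
#       from django.db import models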
class VersionDirective(Directive):
has_content = True
required_arguments = 1
optional_arguments = 1
final_argument_whitespace = True
option_spec = {}
def run(self):
if len(self.arguments) > 1:
msg = """Only one argument accepted for directive '{directive_name}::'.
Comments should be provided as content,
not as an extra argument.""".format(directive_name=self.name)
raise self.error(msg)
env = self.state.document.settings.env
ret = []
node = addnodes.versionmodified()
ret.append(node)
if self.arguments[0] == env.config.django_next_version:
node['version'] = "Development version"
else:
node['version'] = self.arguments[0]
node['type'] = self.name
if self.content:
self.state.nested_parse(self.content, self.content_offset, node)
env.note_versionchange(node['type'], node['version'], node, self.lineno)
return ret
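# Example reST usage (a sketch):
#
#   .. versionadded:: 2.1
#
#       The ``extra`` argument was added.
#
# When the version argument equals django_next_version from conf.py, the
# callout reads "Development version" instead of the number.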
class DjangoHTMLTranslator(HTMLTranslator):
"""
Django-specific reST to HTML tweaks.
"""
# Don't use border=1, which docutils does by default.
def visit_table(self, node):
self.context.append(self.compact_p)
self.compact_p = True
self._table_row_index = 0 # Needed by Sphinx
self.body.append(self.starttag(node, 'table', CLASS='docutils'))
def depart_table(self, node):
self.compact_p = self.context.pop()
self.body.append('</table>\n')
def visit_desc_parameterlist(self, node):
self.body.append('(') # by default sphinx puts <big> around the "("
self.first_param = 1
self.optional_param_level = 0
self.param_separator = node.child_text_separator
self.required_params_left = sum(isinstance(c, addnodes.desc_parameter) for c in node.children)
def depart_desc_parameterlist(self, node):
self.body.append(')')
#
# Turn the "new in version" stuff (versionadded/versionchanged) into a
# better callout -- the Sphinx default is just a little span,
    # which is a bit less obvious than I'd like.
#
# FIXME: these messages are all hardcoded in English. We need to change
# that to accommodate other language docs, but I can't work out how to make
# that work.
#
version_text = {
'versionchanged': 'Changed in Django %s',
'versionadded': 'New in Django %s',
}
def visit_versionmodified(self, node):
self.body.append(
self.starttag(node, 'div', CLASS=node['type'])
)
version_text = self.version_text.get(node['type'])
if version_text:
title = "%s%s" % (
version_text % node['version'],
":" if node else "."
)
self.body.append('<span class="title">%s</span> ' % title)
def depart_versionmodified(self, node):
self.body.append("</div>\n")
# Give each section a unique ID -- nice for custom CSS hooks
def visit_section(self, node):
old_ids = node.get('ids', [])
node['ids'] = ['s-' + i for i in old_ids]
node['ids'].extend(old_ids)
super().visit_section(node)
node['ids'] = old_ids
def parse_django_admin_node(env, sig, signode):
command = sig.split(' ')[0]
env.ref_context['std:program'] = command
title = "django-admin %s" % sig
signode += addnodes.desc_name(title, title)
return command
class DjangoStandaloneHTMLBuilder(StandaloneHTMLBuilder):
"""
Subclass to add some extra things we need.
"""
name = 'djangohtml'
def finish(self):
super().finish()
self.info(bold("writing templatebuiltins.js..."))
xrefs = self.env.domaindata["std"]["objects"]
templatebuiltins = {
"ttags": [
n for ((t, n), (k, a)) in xrefs.items()
if t == "templatetag" and k == "ref/templates/builtins"
],
"tfilters": [
n for ((t, n), (k, a)) in xrefs.items()
if t == "templatefilter" and k == "ref/templates/builtins"
],
}
outfilename = os.path.join(self.outdir, "templatebuiltins.js")
with open(outfilename, 'w') as fp:
fp.write('var django_template_builtins = ')
json.dump(templatebuiltins, fp)
fp.write(';\n')
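# The emitted file looks roughly like this (a sketch):
#
#   var django_template_builtins = {"ttags": ["if", ...], "tfilters": ["date", ...]};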
class ConsoleNode(nodes.literal_block):
"""
Custom node to override the visit/depart event handlers at registration
time. Wrap a literal_block object and defer to it.
"""
def __init__(self, litblk_obj):
self.wrapped = litblk_obj
def __getattr__(self, attr):
if attr == 'wrapped':
            return self.__dict__['wrapped']
return getattr(self.wrapped, attr)
def visit_console_dummy(self, node):
"""Defer to the corresponding parent's handler."""
self.visit_literal_block(node)
def depart_console_dummy(self, node):
"""Defer to the corresponding parent's handler."""
self.depart_literal_block(node)
def visit_console_html(self, node):
"""Generate HTML for the console directive."""
if self.builder.name in ('djangohtml', 'json') and node['win_console_text']:
        # Put a mark on the document object signaling that the directive has
        # been used on it.
self.document._console_directive_used_flag = True
uid = node['uid']
self.body.append('''\
<div class="console-block" id="console-block-%(id)s">
<input class="c-tab-unix" id="c-tab-%(id)s-unix" type="radio" name="console-%(id)s" checked>
<label for="c-tab-%(id)s-unix" title="Linux/macOS">/</label>
<input class="c-tab-win" id="c-tab-%(id)s-win" type="radio" name="console-%(id)s">
<label for="c-tab-%(id)s-win" title="Windows"></label>
<section class="c-content-unix" id="c-content-%(id)s-unix">\n''' % {'id': uid})
try:
self.visit_literal_block(node)
except nodes.SkipNode:
pass
self.body.append('</section>\n')
self.body.append('<section class="c-content-win" id="c-content-%(id)s-win">\n' % {'id': uid})
win_text = node['win_console_text']
highlight_args = {'force': True}
if 'linenos' in node:
linenos = node['linenos']
else:
linenos = win_text.count('\n') >= self.highlightlinenothreshold - 1
def warner(msg):
self.builder.warn(msg, (self.builder.current_docname, node.line))
highlighted = self.highlighter.highlight_block(
win_text, 'doscon', warn=warner, linenos=linenos, **highlight_args
)
self.body.append(highlighted)
self.body.append('</section>\n')
self.body.append('</div>\n')
raise nodes.SkipNode
else:
self.visit_literal_block(node)
class ConsoleDirective(CodeBlock):
"""
A reStructuredText directive which renders a two-tab code block in which
the second tab shows a Windows command line equivalent of the usual
Unix-oriented examples.
"""
required_arguments = 0
# The 'doscon' Pygments formatter needs a prompt like this. '>' alone
# won't do it because then it simply paints the whole command line as a
# grey comment with no highlighting at all.
WIN_PROMPT = r'...\> '
def run(self):
def args_to_win(cmdline):
changed = False
out = []
for token in cmdline.split():
if token[:2] == './':
token = token[2:]
changed = True
elif token[:2] == '~/':
token = '%HOMEPATH%\\' + token[2:]
changed = True
elif token == 'make':
token = 'make.bat'
changed = True
if '://' not in token and 'git' not in cmdline:
out.append(token.replace('/', '\\'))
changed = True
else:
out.append(token)
if changed:
return ' '.join(out)
return cmdline
def cmdline_to_win(line):
if line.startswith('# '):
return 'REM ' + args_to_win(line[2:])
if line.startswith('$ # '):
return 'REM ' + args_to_win(line[4:])
if line.startswith('$ ./manage.py'):
return 'manage.py ' + args_to_win(line[13:])
if line.startswith('$ manage.py'):
return 'manage.py ' + args_to_win(line[11:])
if line.startswith('$ ./runtests.py'):
return 'runtests.py ' + args_to_win(line[15:])
if line.startswith('$ ./'):
return args_to_win(line[4:])
if line.startswith('$ python'):
return 'py ' + args_to_win(line[8:])
if line.startswith('$ '):
return args_to_win(line[2:])
return None
def code_block_to_win(content):
bchanged = False
lines = []
for line in content:
modline = cmdline_to_win(line)
if modline is None:
lines.append(line)
else:
lines.append(self.WIN_PROMPT + modline)
bchanged = True
if bchanged:
return ViewList(lines)
return None
env = self.state.document.settings.env
self.arguments = ['console']
lit_blk_obj = super().run()[0]
        # Only do extra work when the djangohtml HTML Sphinx builder is in
        # use; invoke the default behavior otherwise.
if env.app.builder.name not in ('djangohtml', 'json'):
return [lit_blk_obj]
lit_blk_obj['uid'] = '%s' % env.new_serialno('console')
# Only add the tabbed UI if there is actually a Windows-specific
# version of the CLI example.
win_content = code_block_to_win(self.content)
if win_content is None:
lit_blk_obj['win_console_text'] = None
else:
self.content = win_content
lit_blk_obj['win_console_text'] = super().run()[0].rawsource
# Replace the literal_node object returned by Sphinx's CodeBlock with
# the ConsoleNode wrapper.
return [ConsoleNode(lit_blk_obj)]
def html_page_context_hook(app, pagename, templatename, context, doctree):
# Put a bool on the context used to render the template. It's used to
# control inclusion of console-tabs.css and activation of the JavaScript.
# This way they're included only in HTML files rendered from reST files
# where the ConsoleDirective is used.
context['include_console_assets'] = getattr(doctree, '_console_directive_used_flag', False)
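# A minimal sketch of how these pieces might be wired into Sphinx; hedged:
# the extension's real setup() function is defined elsewhere in this file,
# and _example_setup is a hypothetical name used only for illustration.
def _example_setup(app):
    app.add_directive('console', ConsoleDirective)
    app.connect('html-page-context', html_page_context_hook)
    return {'parallel_read_safe': True}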
import os
import sys
from distutils.sysconfig import get_python_lib
from setuptools import find_packages, setup
CURRENT_PYTHON = sys.version_info[:2]
REQUIRED_PYTHON = (3, 6)
# This check and everything above must remain compatible with Python 2.7.
if CURRENT_PYTHON < REQUIRED_PYTHON:
sys.stderr.write("""
==========================
Unsupported Python version
==========================
This version of Django requires Python {}.{}, but you're trying to
install it on Python {}.{}.
This may be because you are using a version of pip that doesn't
understand the python_requires classifier. Make sure you
have pip >= 9.0 and setuptools >= 24.2, then try again:
$ python -m pip install --upgrade pip setuptools
$ python -m pip install django
This will install the latest version of Django which works on your
version of Python. If you can't upgrade your pip (or Python), request
an older version of Django:
$ python -m pip install "django<2"
""".format(*(REQUIRED_PYTHON + CURRENT_PYTHON)))
sys.exit(1)
# Warn if we are installing over top of an existing installation. This can
# cause issues where files that were deleted from a more recent Django are
# still present in site-packages. See #18115.
overlay_warning = False
if "install" in sys.argv:
lib_paths = [get_python_lib()]
if lib_paths[0].startswith("/usr/lib/"):
# We also have to try an explicit prefix of /usr/local in order to catch
# Debian's custom user site-packages directory.
lib_paths.append(get_python_lib(prefix="/usr/local"))
for lib_path in lib_paths:
existing_path = os.path.abspath(os.path.join(lib_path, "django"))
if os.path.exists(existing_path):
# We note the need for the warning here, but present it after the
# command is run, so it's more likely to be seen.
overlay_warning = True
break
EXCLUDE_FROM_PACKAGES = [
    'django.conf.project_template',
    'django.conf.app_template',
    'django.bin',
]
# Dynamically calculate the version based on django.VERSION.
version = __import__('django').get_version()
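# For example (illustrative): with django.VERSION == (3, 0, 0, 'alpha', 0),
# get_version() yields something like '3.0.dev20190101000000' on a git
# checkout, or plain '3.0' when no git metadata is available.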
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='Django',
version=version,
python_requires='>={}.{}'.format(*REQUIRED_PYTHON),
url='https://www.djangoproject.com/',
author='Django Software Foundation',
author_email='[email protected]',
description=('A high-level Python Web framework that encourages '
'rapid development and clean, pragmatic design.'),
long_description=read('README.rst'),
license='BSD',
packages=find_packages(exclude=EXCLUDE_FROM_PACKAGES),
include_package_data=True,
scripts=['django/bin/django-admin.py'],
entry_points={'console_scripts': [
'django-admin = django.core.management:execute_from_command_line',
]},
install_requires=['pytz', 'sqlparse'],
extras_require={
"bcrypt": ["bcrypt"],
"argon2": ["argon2-cffi >= 16.1.0"],
},
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
],
project_urls={
'Documentation': 'https://docs.djangoproject.com/',
'Funding': 'https://www.djangoproject.com/fundraising/',
'Source': 'https://github.com/django/django',
'Tracker': 'https://code.djangoproject.com/',
},
)
if overlay_warning:
sys.stderr.write("""
========
WARNING!
========
You have just installed Django over top of an existing
installation, without removing it first. Because of this,
your install may now include extraneous files from a
previous version that have since been removed from
Django. This is known to cause a variety of problems. You
should manually remove the
%(existing_path)s
directory and re-install Django.
""" % {"existing_path": existing_path})
#!/usr/bin/env python
#
# This Python file contains utility scripts to manage Django translations.
# It has to be run inside the django git root directory.
#
# The following commands are available:
#
# * update_catalogs: check for new strings in core and contrib catalogs, and
#   output how many strings are new/changed.
#
# * lang_stats: output statistics for each catalog/language combination
#
# * fetch: fetch translations from transifex.com
#
# Each command supports the --languages and --resources options to limit its
# operation to the specified languages or resources. For example, to get stats
# for Spanish in contrib.admin, run:
#
# $ python scripts/manage_translations.py lang_stats --language=es --resources=admin
import os
from argparse import ArgumentParser
from subprocess import PIPE, Popen, call
import django
from django.conf import settings
from django.core.management import call_command
HAVE_JS = ['admin']
def _get_locale_dirs(resources, include_core=True):
"""
Return a tuple (contrib name, absolute path) for all locale directories,
optionally including the django core catalog.
If the resources list is not None, keep only directories whose names appear in it.
"""
contrib_dir = os.path.join(os.getcwd(), 'django', 'contrib')
dirs = []
# Collect all locale directories
for contrib_name in os.listdir(contrib_dir):
path = os.path.join(contrib_dir, contrib_name, 'locale')
if os.path.isdir(path):
dirs.append((contrib_name, path))
if contrib_name in HAVE_JS:
dirs.append(("%s-js" % contrib_name, path))
if include_core:
dirs.insert(0, ('core', os.path.join(os.getcwd(), 'django', 'conf', 'locale')))
# Filter by resources, if any
if resources is not None:
res_names = [d[0] for d in dirs]
dirs = [ld for ld in dirs if ld[0] in resources]
if len(resources) > len(dirs):
print("You have specified some unknown resources. "
"Available resource names are: %s" % (', '.join(res_names),))
exit(1)
return dirs
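# Illustrative return value (paths are hypothetical):
#   [('core', '/src/django/django/conf/locale'),
#    ('admin', '/src/django/django/contrib/admin/locale'),
#    ('admin-js', '/src/django/django/contrib/admin/locale'),
#    ...]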
def _tx_resource_for_name(name):
""" Return the Transifex resource name """
if name == 'core':
return "django.core"
else:
return "django.contrib-%s" % name
def _check_diff(cat_name, base_path):
"""
Output the approximate number of changed/added strings in the en catalog.
"""
po_path = '%(path)s/en/LC_MESSAGES/django%(ext)s.po' % {
'path': base_path, 'ext': 'js' if cat_name.endswith('-js') else ''}
p = Popen("git diff -U0 %s | egrep '^[-+]msgid' | wc -l" % po_path,
stdout=PIPE, stderr=PIPE, shell=True)
output, errors = p.communicate()
num_changes = int(output.strip())
print("%d changed/added messages in '%s' catalog." % (num_changes, cat_name))
def update_catalogs(resources=None, languages=None):
"""
Update the en/LC_MESSAGES/django.po (main and contrib) files with
new/updated translatable strings.
"""
settings.configure()
django.setup()
if resources is not None:
print("`update_catalogs` will always process all resources.")
contrib_dirs = _get_locale_dirs(None, include_core=False)
os.chdir(os.path.join(os.getcwd(), 'django'))
print("Updating en catalogs for Django and contrib apps...")
call_command('makemessages', locale=['en'])
print("Updating en JS catalogs for Django and contrib apps...")
call_command('makemessages', locale=['en'], domain='djangojs')
# Output changed stats
_check_diff('core', os.path.join(os.getcwd(), 'conf', 'locale'))
for name, dir_ in contrib_dirs:
_check_diff(name, dir_)
def lang_stats(resources=None, languages=None):
"""
Output language statistics of committed translation files for each
Django catalog.
If resources is provided, it should be a list of translation resources to
limit the output (e.g. ['core', 'gis']).
"""
locale_dirs = _get_locale_dirs(resources)
for name, dir_ in locale_dirs:
print("\nShowing translations stats for '%s':" % name)
langs = sorted(d for d in os.listdir(dir_) if not d.startswith('_'))
for lang in langs:
if languages and lang not in languages:
continue
# TODO: merge first with the latest en catalog
po_path = '{path}/{lang}/LC_MESSAGES/django{ext}.po'.format(
path=dir_, lang=lang, ext='js' if name.endswith('-js') else ''
)
p = Popen(
['msgfmt', '-vc', '-o', '/dev/null', po_path],
stdout=PIPE, stderr=PIPE,
env={'LANG': 'C'}
)
output, errors = p.communicate()
if p.returncode == 0:
# msgfmt output stats on stderr
print("%s: %s" % (lang, errors.decode().strip()))
else:
print("Errors happened when checking %s translation for %s:\n%s" % (
lang, name, errors.decode()))
def fetch(resources=None, languages=None):
"""
Fetch translations from Transifex, wrap long lines, generate mo files.
"""
locale_dirs = _get_locale_dirs(resources)
errors = []
for name, dir_ in locale_dirs:
# Transifex pull
if languages is None:
call('tx pull -r %(res)s -a -f --minimum-perc=5' % {'res': _tx_resource_for_name(name)}, shell=True)
target_langs = sorted(d for d in os.listdir(dir_) if not d.startswith('_') and d != 'en')
else:
for lang in languages:
call('tx pull -r %(res)s -f -l %(lang)s' % {
'res': _tx_resource_for_name(name), 'lang': lang}, shell=True)
target_langs = languages
# msgcat to wrap lines and msgfmt for compilation of .mo file
for lang in target_langs:
po_path = '%(path)s/%(lang)s/LC_MESSAGES/django%(ext)s.po' % {
'path': dir_, 'lang': lang, 'ext': 'js' if name.endswith('-js') else ''}
if not os.path.exists(po_path):
print("No %(lang)s translation for resource %(name)s" % {
'lang': lang, 'name': name})
continue
call('msgcat --no-location -o %s %s' % (po_path, po_path), shell=True)
res = call('msgfmt -c -o %s.mo %s' % (po_path[:-3], po_path), shell=True)
if res != 0:
errors.append((name, lang))
if errors:
print("\nWARNING: Errors have occurred in following cases:")
for resource, lang in errors:
print("\tResource %s for language %s" % (resource, lang))
exit(1)
if __name__ == "__main__":
RUNABLE_SCRIPTS = ('update_catalogs', 'lang_stats', 'fetch')
parser = ArgumentParser()
parser.add_argument('cmd', nargs=1, choices=RUNABLE_SCRIPTS)
parser.add_argument("-r", "--resources", action='append', help="limit operation to the specified resources")
parser.add_argument("-l", "--languages", action='append', help="limit operation to the specified languages")
options = parser.parse_args()
eval(options.cmd[0])(options.resources, options.languages)
"""
This module collects helper functions and classes that "span" multiple levels
of MVC. In other words, these functions/classes introduce controlled coupling
for convenience's sake.
"""
from django.http import (
Http404, HttpResponse, HttpResponsePermanentRedirect, HttpResponseRedirect,
)
from django.template import loader
from django.urls import NoReverseMatch, reverse
from django.utils.functional import Promise
def render(request, template_name, context=None, content_type=None, status=None, using=None):
"""
Return an HttpResponse whose content is filled with the result of calling
django.template.loader.render_to_string() with the passed arguments.
"""
content = loader.render_to_string(template_name, context, request, using=using)
return HttpResponse(content, content_type, status)
def redirect(to, *args, permanent=False, **kwargs):
"""
Return an HttpResponseRedirect to the appropriate URL for the arguments
passed.
The arguments could be:
* A model: the model's `get_absolute_url()` function will be called.
* A view name, possibly with arguments: `urls.reverse()` will be used
to reverse-resolve the name.
* A URL, which will be used as-is for the redirect location.
Issues a temporary redirect by default; pass permanent=True to issue a
permanent redirect.
"""
redirect_class = HttpResponsePermanentRedirect if permanent else HttpResponseRedirect
return redirect_class(resolve_url(to, *args, **kwargs))
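# Illustrative calls (the view name and model instance are hypothetical):
#   redirect('/some/url/')                  # URL used as-is
#   redirect('article-detail', pk=42)       # reversed via urls.reverse()
#   redirect(article)                       # uses article.get_absolute_url()
#   redirect('/some/url/', permanent=True)  # 301 instead of 302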
def _get_queryset(klass):
"""
Return a QuerySet or a Manager.
Duck typing in action: any class with a `get()` method (for
get_object_or_404) or a `filter()` method (for get_list_or_404) might do
the job.
"""
# If it is a model class or anything else with ._default_manager
if hasattr(klass, '_default_manager'):
return klass._default_manager.all()
return klass
def get_object_or_404(klass, *args, **kwargs):
"""
Use get() to return an object, or raise an Http404 exception if the object
does not exist.
klass may be a Model, Manager, or QuerySet object. All other passed
arguments and keyword arguments are used in the get() query.
Like with QuerySet.get(), MultipleObjectsReturned is raised if more than
one object is found.
"""
queryset = _get_queryset(klass)
if not hasattr(queryset, 'get'):
klass__name = klass.__name__ if isinstance(klass, type) else klass.__class__.__name__
raise ValueError(
"First argument to get_object_or_404() must be a Model, Manager, "
"or QuerySet, not '%s'." % klass__name
)
try:
return queryset.get(*args, **kwargs)
except queryset.model.DoesNotExist:
raise Http404('No %s matches the given query.' % queryset.model._meta.object_name)
def get_list_or_404(klass, *args, **kwargs):
"""
Use filter() to return a list of objects, or raise an Http404 exception if
the list is empty.
klass may be a Model, Manager, or QuerySet object. All other passed
arguments and keyword arguments are used in the filter() query.
"""
queryset = _get_queryset(klass)
if not hasattr(queryset, 'filter'):
klass__name = klass.__name__ if isinstance(klass, type) else klass.__class__.__name__
raise ValueError(
"First argument to get_list_or_404() must be a Model, Manager, or "
"QuerySet, not '%s'." % klass__name
)
obj_list = list(queryset.filter(*args, **kwargs))
if not obj_list:
raise Http404('No %s matches the given query.' % queryset.model._meta.object_name)
return obj_list
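# A hedged usage sketch (Article and the template paths are hypothetical):
#
#     def article_detail(request, pk):
#         article = get_object_or_404(Article, pk=pk)
#         return render(request, 'articles/detail.html', {'article': article})
#
#     def article_archive(request):
#         articles = get_list_or_404(Article, published=True)
#         return render(request, 'articles/archive.html', {'articles': articles})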
def resolve_url(to, *args, **kwargs):
"""
Return a URL appropriate for the arguments passed.
The arguments could be:
* A model: the model's `get_absolute_url()` function will be called.
* A view name, possibly with arguments: `urls.reverse()` will be used
to reverse-resolve the name.
* A URL, which will be returned as-is.
"""
# If it's a model, use get_absolute_url()
if hasattr(to, 'get_absolute_url'):
return to.get_absolute_url()
if isinstance(to, Promise):
# Expand the lazy instance, as it can cause issues when it is passed
# further to some Python functions like urlparse.
to = str(to)
if isinstance(to, str):
# Handle relative URLs
if to.startswith(('./', '../')):
return to
# Next try a reverse URL resolution.
try:
return reverse(to, args=args, kwargs=kwargs)
except NoReverseMatch:
# If this is a callable, re-raise.
if callable(to):
raise
# If this doesn't "feel" like a URL, re-raise.
if '/' not in to and '.' not in to:
raise
# Finally, fall back and assume it's a URL
return to
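# Illustrative behavior (the named URL and its result are hypothetical):
#   resolve_url('./relative/')            -> './relative/'  (returned as-is)
#   resolve_url('article-detail', pk=42)  -> '/articles/42/' via reverse()
#   resolve_url('/plain/url/')            -> '/plain/url/'  (fallback)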
from django.utils.version import get_version
VERSION = (3, 0, 0, 'alpha', 0)
__version__ = get_version(VERSION)
def setup(set_prefix=True):
"""
Configure the settings (this happens as a side effect of accessing the
first setting), configure logging and populate the app registry.
Set the thread-local urlresolvers script prefix if `set_prefix` is True.
"""
from django.apps import apps
from django.conf import settings
from django.urls import set_script_prefix
from django.utils.log import configure_logging
configure_logging(settings.LOGGING_CONFIG, settings.LOGGING)
if set_prefix:
set_script_prefix(
'/' if settings.FORCE_SCRIPT_NAME is None else settings.FORCE_SCRIPT_NAME
)
apps.populate(settings.INSTALLED_APPS)
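# Typical standalone-script usage (the settings module name is hypothetical):
#
#     import os
#     import django
#     os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myproject.settings')
#     django.setup()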
#!/usr/bin/env python
import argparse
import atexit
import copy
import os
import shutil
import socket
import subprocess
import sys
import tempfile
import warnings
try:
import django
except ImportError as e:
raise RuntimeError(
'Django module not found, reference tests/README.rst for instructions.'
) from e
else:
from django.apps import apps
from django.conf import settings
from django.db import connection, connections
from django.test import TestCase, TransactionTestCase
from django.test.runner import default_test_processes
from django.test.selenium import SeleniumTestCaseBase
from django.test.utils import get_runner
from django.utils.deprecation import (
RemovedInDjango31Warning, RemovedInDjango40Warning,
)
from django.utils.log import DEFAULT_LOGGING
from django.utils.version import PY37
try:
import MySQLdb
except ImportError:
pass
else:
# Ignore informational warnings from QuerySet.explain().
warnings.filterwarnings('ignore', r'\(1003, *', category=MySQLdb.Warning)
# Make deprecation warnings errors to ensure no usage of deprecated features.
warnings.simplefilter("error", RemovedInDjango40Warning)
warnings.simplefilter('error', RemovedInDjango31Warning)
# Make runtime warnings errors to ensure no usage of error-prone patterns.
warnings.simplefilter("error", RuntimeWarning)
# Ignore known warnings in test dependencies.
warnings.filterwarnings("ignore", "'U' mode is deprecated", DeprecationWarning, module='docutils.io')
RUNTESTS_DIR = os.path.abspath(os.path.dirname(__file__))
TEMPLATE_DIR = os.path.join(RUNTESTS_DIR, 'templates')
# Create a specific subdirectory for the duration of the test suite.
TMPDIR = tempfile.mkdtemp(prefix='django_')
# Set the TMPDIR environment variable in addition to tempfile.tempdir
# so that child processes inherit it.
tempfile.tempdir = os.environ['TMPDIR'] = TMPDIR
# Remove the temporary TMPDIR at interpreter exit.
atexit.register(shutil.rmtree, TMPDIR)
SUBDIRS_TO_SKIP = [
'data',
'import_error_package',
'test_runner_apps',
]
ALWAYS_INSTALLED_APPS = [
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.admin.apps.SimpleAdminConfig',
'django.contrib.staticfiles',
]
ALWAYS_MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
]
# Need to add the associated contrib app to INSTALLED_APPS in some cases to
# avoid "RuntimeError: Model class X doesn't declare an explicit app_label
# and isn't in an application in INSTALLED_APPS."
CONTRIB_TESTS_TO_APPS = {
'flatpages_tests': 'django.contrib.flatpages',
'redirects_tests': 'django.contrib.redirects',
}
def get_test_modules():
modules = []
discovery_paths = [(None, RUNTESTS_DIR)]
if connection.features.gis_enabled:
# GIS tests are in nested apps
discovery_paths.append(('gis_tests', os.path.join(RUNTESTS_DIR, 'gis_tests')))
else:
SUBDIRS_TO_SKIP.append('gis_tests')
for modpath, dirpath in discovery_paths:
for f in os.scandir(dirpath):
if ('.' not in f.name and
os.path.basename(f.name) not in SUBDIRS_TO_SKIP and
not f.is_file() and
os.path.exists(os.path.join(f.path, '__init__.py'))):
modules.append((modpath, f.name))
return modules
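# Illustrative result (module names are examples only):
#   [(None, 'admin_views'), (None, 'auth_tests'), ('gis_tests', 'geoapp'), ...]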
def get_installed():
return [app_config.name for app_config in apps.get_app_configs()]
def setup(verbosity, test_labels, parallel, start_at, start_after):
# Reduce the given test labels to just the app module path.
test_labels_set = set()
for label in test_labels:
bits = label.split('.')[:1]
test_labels_set.add('.'.join(bits))
if verbosity >= 1:
msg = "Testing against Django installed in '%s'" % os.path.dirname(django.__file__)
max_parallel = default_test_processes() if parallel == 0 else parallel
if max_parallel > 1:
msg += " with up to %d processes" % max_parallel
print(msg)
# Force declaring available_apps in TransactionTestCase for faster tests.
def no_available_apps(self):
raise Exception("Please define available_apps in TransactionTestCase "
"and its subclasses.")
TransactionTestCase.available_apps = property(no_available_apps)
TestCase.available_apps = None
state = {
'INSTALLED_APPS': settings.INSTALLED_APPS,
'ROOT_URLCONF': getattr(settings, "ROOT_URLCONF", ""),
'TEMPLATES': settings.TEMPLATES,
'LANGUAGE_CODE': settings.LANGUAGE_CODE,
'STATIC_URL': settings.STATIC_URL,
'STATIC_ROOT': settings.STATIC_ROOT,
'MIDDLEWARE': settings.MIDDLEWARE,
}
# Redirect some settings for the duration of these tests.
settings.INSTALLED_APPS = ALWAYS_INSTALLED_APPS
settings.ROOT_URLCONF = 'urls'
settings.STATIC_URL = '/static/'
settings.STATIC_ROOT = os.path.join(TMPDIR, 'static')
settings.TEMPLATES = [{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [TEMPLATE_DIR],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
}]
settings.LANGUAGE_CODE = 'en'
settings.SITE_ID = 1
settings.MIDDLEWARE = ALWAYS_MIDDLEWARE
settings.MIGRATION_MODULES = {
# This lets us skip creating migrations for the test models as many of
# them depend on one of the following contrib applications.
'auth': None,
'contenttypes': None,
'sessions': None,
}
log_config = copy.deepcopy(DEFAULT_LOGGING)
# Filter out non-error logging so we don't have to capture it in lots of
# tests.
log_config['loggers']['django']['level'] = 'ERROR'
settings.LOGGING = log_config
settings.SILENCED_SYSTEM_CHECKS = [
'fields.W342', # ForeignKey(unique=True) -> OneToOneField
]
# Load all the ALWAYS_INSTALLED_APPS.
django.setup()
# It would be nice to put this validation earlier but it must come after
# django.setup() so that connection.features.gis_enabled can be accessed
# without raising AppRegistryNotReady when running gis_tests in isolation
# on some backends (e.g. PostGIS).
if 'gis_tests' in test_labels_set and not connection.features.gis_enabled:
print('Aborting: A GIS database backend is required to run gis_tests.')
sys.exit(1)
def _module_match_label(module_label, label):
# Exact or ancestor match.
return module_label == label or module_label.startswith(label + '.')
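# For example (illustrative): _module_match_label('gis_tests.geoapp',
# 'gis_tests') is True, while _module_match_label('gis_tests2', 'gis_tests')
# is False.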
# Load all the test model apps.
test_modules = get_test_modules()
found_start = not (start_at or start_after)
installed_app_names = set(get_installed())
for modpath, module_name in test_modules:
if modpath:
module_label = modpath + '.' + module_name
else:
module_label = module_name
if not found_start:
if start_at and _module_match_label(module_label, start_at):
found_start = True
elif start_after and _module_match_label(module_label, start_after):
found_start = True
continue
else:
continue
# If the module (or an ancestor) was named on the command line, or no
# modules were named (i.e., run all), import this module and add it to
# INSTALLED_APPS.
module_found_in_labels = not test_labels or any(
_module_match_label(module_label, label) for label in test_labels_set
)
if module_name in CONTRIB_TESTS_TO_APPS and module_found_in_labels:
settings.INSTALLED_APPS.append(CONTRIB_TESTS_TO_APPS[module_name])
if module_found_in_labels and module_label not in installed_app_names:
if verbosity >= 2:
print("Importing application %s" % module_name)
settings.INSTALLED_APPS.append(module_label)
# Add contrib.gis to INSTALLED_APPS if needed (rather than requiring
# @override_settings(INSTALLED_APPS=...) on all test cases).
gis = 'django.contrib.gis'
if connection.features.gis_enabled and gis not in settings.INSTALLED_APPS:
if verbosity >= 2:
print("Importing application %s" % gis)
settings.INSTALLED_APPS.append(gis)
apps.set_installed_apps(settings.INSTALLED_APPS)
return state
def teardown(state):
# Restore the old settings.
for key, value in state.items():
setattr(settings, key, value)
# Discard the multiprocessing.util finalizer that tries to remove a
# temporary directory that's already removed by this script's
# atexit.register(shutil.rmtree, TMPDIR) handler. Prevents
# FileNotFoundError at the end of a test run (#27890).
from multiprocessing.util import _finalizer_registry
_finalizer_registry.pop((-100, 0), None)
def actual_test_processes(parallel):
if parallel == 0:
# This doesn't work before django.setup() on some databases.
if all(conn.features.can_clone_databases for conn in connections.all()):
return default_test_processes()
else:
return 1
else:
return parallel
class ActionSelenium(argparse.Action):
"""
Validate the comma-separated list of requested browsers.
"""
def __call__(self, parser, namespace, values, option_string=None):
browsers = values.split(',')
for browser in browsers:
try:
SeleniumTestCaseBase.import_webdriver(browser)
except ImportError:
raise argparse.ArgumentError(self, "Selenium browser specification '%s' is not valid." % browser)
setattr(namespace, self.dest, browsers)
def django_tests(verbosity, interactive, failfast, keepdb, reverse,
test_labels, debug_sql, parallel, tags, exclude_tags,
test_name_patterns, start_at, start_after):
state = setup(verbosity, test_labels, parallel, start_at, start_after)
extra_tests = []
# Run the test suite, including the extra validation tests.
if not hasattr(settings, 'TEST_RUNNER'):
settings.TEST_RUNNER = 'django.test.runner.DiscoverRunner'
TestRunner = get_runner(settings)
test_runner = TestRunner(
verbosity=verbosity,
interactive=interactive,
failfast=failfast,
keepdb=keepdb,
reverse=reverse,
debug_sql=debug_sql,
parallel=actual_test_processes(parallel),
tags=tags,
exclude_tags=exclude_tags,
test_name_patterns=test_name_patterns,
)
failures = test_runner.run_tests(
test_labels or get_installed(),
extra_tests=extra_tests,
)
teardown(state)
return failures
def get_subprocess_args(options):
subprocess_args = [
sys.executable, __file__, '--settings=%s' % options.settings
]
if options.failfast:
subprocess_args.append('--failfast')
if options.verbosity:
subprocess_args.append('--verbosity=%s' % options.verbosity)
if not options.interactive:
subprocess_args.append('--noinput')
if options.tags:
subprocess_args.append('--tag=%s' % options.tags)
if options.exclude_tags:
subprocess_args.append('--exclude_tag=%s' % options.exclude_tags)
return subprocess_args
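# Illustrative result (the interpreter and checkout paths vary):
#   ['/usr/bin/python3', '.../tests/runtests.py', '--settings=test_sqlite',
#    '--verbosity=1']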
def bisect_tests(bisection_label, options, test_labels, parallel, start_at, start_after):
state = setup(options.verbosity, test_labels, parallel, start_at, start_after)
test_labels = test_labels or get_installed()
print('***** Bisecting test suite: %s' % ' '.join(test_labels))
# Make sure the bisection point isn't in the test list
# Also remove tests that need to be run in specific combinations
for label in [bisection_label, 'model_inheritance_same_model_name']:
try:
test_labels.remove(label)
except ValueError:
pass
subprocess_args = get_subprocess_args(options)
iteration = 1
while len(test_labels) > 1:
midpoint = len(test_labels) // 2
test_labels_a = test_labels[:midpoint] + [bisection_label]
test_labels_b = test_labels[midpoint:] + [bisection_label]
print('***** Pass %da: Running the first half of the test suite' % iteration)
print('***** Test labels: %s' % ' '.join(test_labels_a))
failures_a = subprocess.call(subprocess_args + test_labels_a)
print('***** Pass %db: Running the second half of the test suite' % iteration)
print('***** Test labels: %s' % ' '.join(test_labels_b))
print('')
failures_b = subprocess.call(subprocess_args + test_labels_b)
if failures_a and not failures_b:
print("***** Problem found in first half. Bisecting again...")
iteration += 1
test_labels = test_labels_a[:-1]
elif failures_b and not failures_a:
print("***** Problem found in second half. Bisecting again...")
iteration += 1
test_labels = test_labels_b[:-1]
elif failures_a and failures_b:
print("***** Multiple sources of failure found")
break
else:
print("***** No source of failure found... try pair execution (--pair)")
break
if len(test_labels) == 1:
print("***** Source of error: %s" % test_labels[0])
teardown(state)
def paired_tests(paired_test, options, test_labels, parallel, start_at, start_after):
state = setup(options.verbosity, test_labels, parallel, start_at, start_after)
test_labels = test_labels or get_installed()
print('***** Trying paired execution')
# Make sure the constant member of the pair isn't in the test list
# Also remove tests that need to be run in specific combinations
for label in [paired_test, 'model_inheritance_same_model_name']:
try:
test_labels.remove(label)
except ValueError:
pass
subprocess_args = get_subprocess_args(options)
for i, label in enumerate(test_labels):
print('***** %d of %d: Check test pairing with %s' % (
i + 1, len(test_labels), label))
failures = subprocess.call(subprocess_args + [label, paired_test])
if failures:
print('***** Found problem pair with %s' % label)
return
print('***** No problem pair found')
teardown(state)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Run the Django test suite.")
parser.add_argument(
'modules', nargs='*', metavar='module',
help='Optional path(s) to test modules; e.g. "i18n" or '
'"i18n.tests.TranslationTests.test_lazy_objects".',
)
parser.add_argument(
'-v', '--verbosity', default=1, type=int, choices=[0, 1, 2, 3],
help='Verbosity level; 0=minimal output, 1=normal output, 2=all output',
)
parser.add_argument(
'--noinput', action='store_false', dest='interactive',
help='Tells Django to NOT prompt the user for input of any kind.',
)
parser.add_argument(
'--failfast', action='store_true',
help='Tells Django to stop running the test suite after first failed test.',
)
parser.add_argument(
'--keepdb', action='store_true',
help='Tells Django to preserve the test database between runs.',
)
parser.add_argument(
'--settings',
help='Python path to settings module, e.g. "myproject.settings". If '
'this isn\'t provided, either the DJANGO_SETTINGS_MODULE '
'environment variable or "test_sqlite" will be used.',
)
parser.add_argument(
'--bisect',
help='Bisect the test suite to discover a test that causes a test '
'failure when combined with the named test.',
)
parser.add_argument(
'--pair',
help='Run the test suite in pairs with the named test to find problem pairs.',
)
parser.add_argument(
'--reverse', action='store_true',
help='Sort test suites and test cases in opposite order to debug '
'test side effects not apparent with normal execution lineup.',
)
parser.add_argument(
'--selenium', action=ActionSelenium, metavar='BROWSERS',
help='A comma-separated list of browsers to run the Selenium tests against.',
)
parser.add_argument(
'--headless', action='store_true',
help='Run selenium tests in headless mode, if the browser supports the option.',
)
parser.add_argument(
'--selenium-hub',
help='A URL for a selenium hub instance to use in combination with --selenium.',
)
parser.add_argument(
'--external-host', default=socket.gethostname(),
help='The external host that can be reached by the selenium hub instance when running Selenium '
'tests via Selenium Hub.',
)
parser.add_argument(
'--debug-sql', action='store_true',
help='Turn on the SQL query logger within tests.',
)
parser.add_argument(
'--parallel', nargs='?', default=0, type=int,
const=default_test_processes(), metavar='N',
help='Run tests using up to N parallel processes.',
)
parser.add_argument(
'--tag', dest='tags', action='append',
help='Run only tests with the specified tags. Can be used multiple times.',
)
parser.add_argument(
'--exclude-tag', dest='exclude_tags', action='append',
help='Do not run tests with the specified tag. Can be used multiple times.',
)
parser.add_argument(
'--start-after', dest='start_after',
help='Run tests starting after the specified top-level module.',
)
parser.add_argument(
'--start-at', dest='start_at',
help='Run tests starting at the specified top-level module.',
)
if PY37:
parser.add_argument(
'-k', dest='test_name_patterns', action='append',
help=(
'Only run test methods and classes matching test name pattern. '
'Same as unittest -k option. Can be used multiple times.'
),
)
options = parser.parse_args()
using_selenium_hub = options.selenium and options.selenium_hub
if options.selenium_hub and not options.selenium:
parser.error('--selenium-hub and --external-host require --selenium to be used.')
if using_selenium_hub and not options.external_host:
parser.error('--selenium-hub and --external-host must be used together.')
# Allow including a trailing slash on app_labels for tab completion convenience
options.modules = [os.path.normpath(labels) for labels in options.modules]
mutually_exclusive_options = [options.start_at, options.start_after, options.modules]
enabled_module_options = [bool(option) for option in mutually_exclusive_options].count(True)
if enabled_module_options > 1:
print('Aborting: --start-at, --start-after, and test labels are mutually exclusive.')
sys.exit(1)
for opt_name in ['start_at', 'start_after']:
opt_val = getattr(options, opt_name)
if opt_val:
if '.' in opt_val:
print('Aborting: --%s must be a top-level module.' % opt_name.replace('_', '-'))
sys.exit(1)
setattr(options, opt_name, os.path.normpath(opt_val))
if options.settings:
os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
else:
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'test_sqlite')
options.settings = os.environ['DJANGO_SETTINGS_MODULE']
if options.selenium:
if not options.tags:
options.tags = ['selenium']
elif 'selenium' not in options.tags:
options.tags.append('selenium')
if options.selenium_hub:
SeleniumTestCaseBase.selenium_hub = options.selenium_hub
SeleniumTestCaseBase.external_host = options.external_host
SeleniumTestCaseBase.headless = options.headless
SeleniumTestCaseBase.browsers = options.selenium
if options.bisect:
bisect_tests(
options.bisect, options, options.modules, options.parallel,
options.start_at, options.start_after,
)
elif options.pair:
paired_tests(
options.pair, options, options.modules, options.parallel,
options.start_at, options.start_after,
)
else:
failures = django_tests(
options.verbosity, options.interactive, options.failfast,
options.keepdb, options.reverse, options.modules,
options.debug_sql, options.parallel, options.tags,
options.exclude_tags,
getattr(options, 'test_name_patterns', None),
options.start_at, options.start_after,
)
if failures:
sys.exit(1)
# Django documentation build configuration file, created by
# sphinx-quickstart on Thu Mar 27 09:06:53 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't picklable (module imports are okay, they're removed automatically).
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
from os.path import abspath, dirname, join
# Workaround for sphinx-build recursion limit overflow:
# pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL)
# RuntimeError: maximum recursion depth exceeded while pickling an object
#
# Python's default allowed recursion depth is 1000 but this isn't enough for
# building docs/ref/settings.txt sometimes.
# https://groups.google.com/d/topic/sphinx-dev/MtRf64eGtv4/discussion
sys.setrecursionlimit(2000)
# Make sure we get the version of this copy of Django
sys.path.insert(1, dirname(dirname(abspath(__file__))))
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(abspath(join(dirname(__file__), "_ext")))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.6.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
"djangodocs",
'sphinx.ext.extlinks',
"sphinx.ext.intersphinx",
"sphinx.ext.viewcode",
]
# Spelling check needs an additional module that is not installed by default.
# Add it only if spelling check is requested so docs can be generated without it.
if 'spelling' in sys.argv:
extensions.append("sphinxcontrib.spelling")
# Spelling language.
spelling_lang = 'en_US'
# Location of word list.
spelling_word_list_filename = 'spelling_wordlist'
# Add any paths that contain templates here, relative to this directory.
# templates_path = []
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'contents'
# General substitutions.
project = 'Django'
copyright = 'Django Software Foundation and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '3.0'
# The full version, including alpha/beta/rc tags.
try:
from django import VERSION, get_version
except ImportError:
release = version
else:
def django_release():
pep440ver = get_version()
if VERSION[3:5] == ('alpha', 0) and 'dev' not in pep440ver:
return pep440ver + '.dev'
return pep440ver
release = django_release()
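# For example (illustrative): with VERSION == (3, 0, 0, 'alpha', 0) and a
# get_version() of plain '3.0' (no git metadata), release becomes '3.0.dev'.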
# The "development version" of Django
django_next_version = '3.0'
extlinks = {
'commit': ('https://github.com/django/django/commit/%s', ''),
'cve': ('https://nvd.nist.gov/view/vuln/detail?vulnId=%s', 'CVE-'),
# A file or directory. GitHub redirects from blob to tree if needed.
'source': ('https://github.com/django/django/blob/master/%s', ''),
'ticket': ('https://code.djangoproject.com/ticket/%s', '#'),
}
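# Illustrative usage in the docs source: :ticket:`12345` renders as a link
# titled "#12345" pointing at https://code.djangoproject.com/ticket/12345.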
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# Location for .po/.mo translation files used when language is set
locale_dirs = ['locale/']
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', '_theme']
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'trac'
# Links to Python's docs should reference the most recent version of the 3.x
# branch, which is located at this URL.
intersphinx_mapping = {
'python': ('https://docs.python.org/3/', None),
'sphinx': ('http://www.sphinx-doc.org/en/master/', None),
'psycopg2': ('http://initd.org/psycopg/docs/', None),
}
# Python's docs don't change every week.
intersphinx_cache_limit = 90 # days
# The 'versionadded' and 'versionchanged' directives are overridden.
suppress_warnings = ['app.add_directive']
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "djangodocs"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_theme"]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# Content template for the index page.
# html_index = ''
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Djangodoc'
modindex_common_prefix = ["django."]
# Appended to every page
rst_epilog = """
.. |django-users| replace:: :ref:`django-users <django-users-mailing-list>`
.. |django-core-mentorship| replace:: :ref:`django-core-mentorship <django-core-mentorship-mailing-list>`
.. |django-developers| replace:: :ref:`django-developers <django-developers-mailing-list>`
.. |django-announce| replace:: :ref:`django-announce <django-announce-mailing-list>`
.. |django-updates| replace:: :ref:`django-updates <django-updates-mailing-list>`
"""
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
'preamble': (
'\\DeclareUnicodeCharacter{2264}{\\ensuremath{\\le}}'
'\\DeclareUnicodeCharacter{2265}{\\ensuremath{\\ge}}'
'\\DeclareUnicodeCharacter{2665}{[unicode-heart]}'
'\\DeclareUnicodeCharacter{2713}{[unicode-checkmark]}'
),
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
# latex_documents = []
latex_documents = [
('contents', 'django.tex', 'Django Documentation',
'Django Software Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(
'ref/django-admin',
'django-admin',
'Utility script for the Django Web framework',
['Django Software Foundation'],
1
)]
# -- Options for Texinfo output ------------------------------------------------
# List of tuples (startdocname, targetname, title, author, dir_entry,
# description, category, toctree_only)
texinfo_documents = [(
master_doc, "django", "", "", "Django",
"Documentation of the Django framework", "Web development", False
)]
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = 'Django Software Foundation'
epub_publisher = 'Django Software Foundation'
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
# epub_basename = 'Django'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
epub_theme = 'djangodocs-epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
# epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''
# The unique identifier of the text. This can be an ISBN number
# or the project homepage.
# epub_identifier = ''
# A unique identification for the text.
# epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
epub_cover = ('', 'epub-cover.html')
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
# epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_post_files = []
# A list of files that should not be packed into the epub file.
# epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
# epub_tocdepth = 3
# Allow duplicate toc entries.
# epub_tocdup = True
# Choose between 'default' and 'includehidden'.
# epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
# epub_fix_images = False
# Scale large images.
# epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# epub_show_urls = 'inline'
# If false, no index is generated.
# epub_use_index = True
import threading
import weakref
from django.utils.inspect import func_accepts_kwargs
def _make_id(target):
if hasattr(target, '__func__'):
return (id(target.__self__), id(target.__func__))
return id(target)
NONE_ID = _make_id(None)
# A marker for caching
NO_RECEIVERS = object()
class Signal:
"""
Base class for all signals
Internal attributes:
receivers
{ receiverkey (id) : weakref(receiver) }
"""
def __init__(self, providing_args=None, use_caching=False):
"""
Create a new signal.
providing_args
A list of the arguments this signal can pass along in a send() call.
"""
self.receivers = []
if providing_args is None:
providing_args = []
self.providing_args = set(providing_args)
self.lock = threading.Lock()
self.use_caching = use_caching
# For convenience we create empty caches even if they are not used.
# A note about caching: if use_caching is defined, then for each
# distinct sender we cache the receivers that sender has in
# 'sender_receivers_cache'. The cache is cleaned when .connect() or
# .disconnect() is called and populated on send().
self.sender_receivers_cache = weakref.WeakKeyDictionary() if use_caching else {}
self._dead_receivers = False
def connect(self, receiver, sender=None, weak=True, dispatch_uid=None):
"""
Connect receiver to sender for signal.
Arguments:
receiver
A function or an instance method which is to receive signals.
Receivers must be hashable objects.
If weak is True, then receiver must be weakly referenceable.
Receivers must be able to accept keyword arguments.
If a receiver is connected with a dispatch_uid argument, it
will not be added if another receiver was already connected
with that dispatch_uid.
sender
The sender to which the receiver should respond. Must either be
a Python object, or None to receive events from any sender.
weak
Whether to use weak references to the receiver. By default, the
module will attempt to use weak references to the receiver
objects. If this parameter is false, then strong references will
be used.
dispatch_uid
An identifier used to uniquely identify a particular instance of
a receiver. This will usually be a string, though it may be
anything hashable.
"""
from django.conf import settings
# If DEBUG is on, check that we got a good receiver
if settings.configured and settings.DEBUG:
assert callable(receiver), "Signal receivers must be callable."
# Check for **kwargs
if not func_accepts_kwargs(receiver):
raise ValueError("Signal receivers must accept keyword arguments (**kwargs).")
if dispatch_uid:
lookup_key = (dispatch_uid, _make_id(sender))
else:
lookup_key = (_make_id(receiver), _make_id(sender))
if weak:
ref = weakref.ref
receiver_object = receiver
# Check for bound methods
if hasattr(receiver, '__self__') and hasattr(receiver, '__func__'):
ref = weakref.WeakMethod
receiver_object = receiver.__self__
receiver = ref(receiver)
weakref.finalize(receiver_object, self._remove_receiver)
with self.lock:
self._clear_dead_receivers()
if not any(r_key == lookup_key for r_key, _ in self.receivers):
self.receivers.append((lookup_key, receiver))
self.sender_receivers_cache.clear()
def disconnect(self, receiver=None, sender=None, dispatch_uid=None):
"""
Disconnect receiver from sender for signal.
If weak references are used, disconnect need not be called. The receiver
will be removed from dispatch automatically.
Arguments:
receiver
The registered receiver to disconnect. May be None if
dispatch_uid is specified.
sender
The registered sender to disconnect.
dispatch_uid
The unique identifier of the receiver to disconnect.
"""
if dispatch_uid:
lookup_key = (dispatch_uid, _make_id(sender))
else:
lookup_key = (_make_id(receiver), _make_id(sender))
disconnected = False
with self.lock:
self._clear_dead_receivers()
for index in range(len(self.receivers)):
(r_key, _) = self.receivers[index]
if r_key == lookup_key:
disconnected = True
del self.receivers[index]
break
self.sender_receivers_cache.clear()
return disconnected
def has_listeners(self, sender=None):
return bool(self._live_receivers(sender))
def send(self, sender, **named):
"""
Send signal from sender to all connected receivers.
If any receiver raises an error, the error propagates back through send,
terminating the dispatch loop. So it's possible that not all receivers
will be called if an error is raised.
Arguments:
sender
The sender of the signal. Either a specific object or None.
named
Named arguments which will be passed to receivers.
Return a list of tuple pairs [(receiver, response), ... ].
"""
if not self.receivers or self.sender_receivers_cache.get(sender) is NO_RECEIVERS:
return []
return [
(receiver, receiver(signal=self, sender=sender, **named))
for receiver in self._live_receivers(sender)
]
def send_robust(self, sender, **named):
"""
Send signal from sender to all connected receivers catching errors.
Arguments:
sender
The sender of the signal. Can be any Python object (normally one
registered with a connect if you actually want something to
occur).
named
Named arguments which will be passed to receivers. These
arguments must be a subset of the argument names defined in
providing_args.
Return a list of tuple pairs [(receiver, response), ... ].
If any receiver raises an error (specifically any subclass of
Exception), return the error instance as the result for that receiver.
"""
if not self.receivers or self.sender_receivers_cache.get(sender) is NO_RECEIVERS:
return []
# Call each receiver with whatever arguments it can accept.
# Return a list of tuple pairs [(receiver, response), ... ].
responses = []
for receiver in self._live_receivers(sender):
try:
response = receiver(signal=self, sender=sender, **named)
except Exception as err:
responses.append((receiver, err))
else:
responses.append((receiver, response))
return responses
def _clear_dead_receivers(self):
# Note: caller is assumed to hold self.lock.
if self._dead_receivers:
self._dead_receivers = False
self.receivers = [
r for r in self.receivers
if not (isinstance(r[1], weakref.ReferenceType) and r[1]() is None)
]
def _live_receivers(self, sender):
"""
Filter sequence of receivers to get resolved, live receivers.
This checks for weak references and resolves them, then returns only
live receivers.
"""
receivers = None
if self.use_caching and not self._dead_receivers:
receivers = self.sender_receivers_cache.get(sender)
# We could end up here with NO_RECEIVERS even if we do check this case in
# .send() prior to calling _live_receivers() due to concurrent .send() call.
if receivers is NO_RECEIVERS:
return []
if receivers is None:
with self.lock:
self._clear_dead_receivers()
senderkey = _make_id(sender)
receivers = []
for (receiverkey, r_senderkey), receiver in self.receivers:
if r_senderkey == NONE_ID or r_senderkey == senderkey:
receivers.append(receiver)
if self.use_caching:
if not receivers:
self.sender_receivers_cache[sender] = NO_RECEIVERS
else:
# Note, we must cache the weakref versions.
self.sender_receivers_cache[sender] = receivers
non_weak_receivers = []
for receiver in receivers:
if isinstance(receiver, weakref.ReferenceType):
# Dereference the weak reference.
receiver = receiver()
if receiver is not None:
non_weak_receivers.append(receiver)
else:
non_weak_receivers.append(receiver)
return non_weak_receivers
def _remove_receiver(self, receiver=None):
# Mark that the self.receivers list has dead weakrefs. If so, we will
# clean those up in connect, disconnect and _live_receivers while
# holding self.lock. Note that doing the cleanup here isn't a good
# idea, _remove_receiver() will be called as side effect of garbage
# collection, and so the call can happen while we are already holding
# self.lock.
self._dead_receivers = True
def receiver(signal, **kwargs):
"""
A decorator for connecting receivers to signals. Used by passing in the
signal (or list of signals) and keyword arguments to connect::
@receiver(post_save, sender=MyModel)
def signal_receiver(sender, **kwargs):
...
@receiver([post_save, post_delete], sender=MyModel)
def signals_receiver(sender, **kwargs):
...
"""
def _decorator(func):
if isinstance(signal, (list, tuple)):
for s in signal:
s.connect(func, **kwargs)
else:
signal.connect(func, **kwargs)
return func
return _decorator
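# A self-contained sketch of the connect/send round trip; the names below are
# illustrative only and not part of this module's API.
def _example_round_trip():
    demo = Signal(providing_args=['payload'])

    def handler(sender, **kwargs):
        # Receivers must accept **kwargs; return whatever send() should collect.
        return kwargs.get('payload')

    demo.connect(handler)
    # Returns [(handler, 'hello')].
    return demo.send(sender=None, payload='hello')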
import sys
import unittest
from contextlib import contextmanager
from django.test import LiveServerTestCase, tag
from django.utils.decorators import classproperty
from django.utils.module_loading import import_string
from django.utils.text import capfirst
class SeleniumTestCaseBase(type(LiveServerTestCase)):
# List of browsers to dynamically create test classes for.
browsers = []
# A selenium hub URL to test against.
selenium_hub = None
# The external host Selenium Hub can reach.
external_host = None
# Sentinel value to differentiate browser-specific instances.
browser = None
# Run browsers in headless mode.
headless = False
def __new__(cls, name, bases, attrs):
"""
Dynamically create new classes and add them to the test module when
multiple browser specs are provided (e.g. --selenium=firefox,chrome).
"""
test_class = super().__new__(cls, name, bases, attrs)
# If the test class is either browser-specific or a test base, return it.
if test_class.browser or not any(name.startswith('test') and callable(value) for name, value in attrs.items()):
return test_class
elif test_class.browsers:
# Reuse the created test class to make it browser-specific.
# We can't rename it to include the browser name or create a
# subclass like we do with the remaining browsers as it would
# either duplicate tests or prevent pickling of its instances.
first_browser = test_class.browsers[0]
test_class.browser = first_browser
# Listen on an external interface if using a selenium hub.
host = test_class.host if not test_class.selenium_hub else '0.0.0.0'
test_class.host = host
test_class.external_host = cls.external_host
# Create subclasses for each of the remaining browsers and expose
# them through the test's module namespace.
module = sys.modules[test_class.__module__]
for browser in test_class.browsers[1:]:
browser_test_class = cls.__new__(
cls,
"%s%s" % (capfirst(browser), name),
(test_class,),
{
'browser': browser,
'host': host,
'external_host': cls.external_host,
'__module__': test_class.__module__,
}
)
setattr(module, browser_test_class.__name__, browser_test_class)
return test_class
# If no browsers were specified, skip this class (it'll still be discovered).
return unittest.skip('No browsers specified.')(test_class)
@classmethod
def import_webdriver(cls, browser):
return import_string("selenium.webdriver.%s.webdriver.WebDriver" % browser)
@classmethod
def import_options(cls, browser):
return import_string('selenium.webdriver.%s.options.Options' % browser)
@classmethod
def get_capability(cls, browser):
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
return getattr(DesiredCapabilities, browser.upper())
def create_options(self):
options = self.import_options(self.browser)()
if self.headless:
try:
options.headless = True
except AttributeError:
pass # Only Chrome and Firefox support the headless mode.
return options
def create_webdriver(self):
if self.selenium_hub:
from selenium import webdriver
return webdriver.Remote(
command_executor=self.selenium_hub,
desired_capabilities=self.get_capability(self.browser),
)
return self.import_webdriver(self.browser)(options=self.create_options())
@tag('selenium')
class SeleniumTestCase(LiveServerTestCase, metaclass=SeleniumTestCaseBase):
implicit_wait = 10
external_host = None
@classproperty
def live_server_url(cls):
return 'http://%s:%s' % (cls.external_host or cls.host, cls.server_thread.port)
@classproperty
def allowed_host(cls):
return cls.external_host or cls.host
@classmethod
def setUpClass(cls):
cls.selenium = cls.create_webdriver()
cls.selenium.implicitly_wait(cls.implicit_wait)
super().setUpClass()
@classmethod
def _tearDownClassInternal(cls):
# quit() the WebDriver before attempting to terminate and join the
# single-threaded LiveServerThread to avoid a dead lock if the browser
# kept a connection alive.
if hasattr(cls, 'selenium'):
cls.selenium.quit()
super()._tearDownClassInternal()
@contextmanager
def disable_implicit_wait(self):
"""Disable the default implicit wait."""
self.selenium.implicitly_wait(0)
try:
yield
finally:
self.selenium.implicitly_wait(self.implicit_wait)
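# Editor's sketch (illustrative, not part of Django): a minimal subclass, built
# inside a function so that importing this module doesn't register a real test.
# Assumes the selenium package and a matching browser driver; the test runner
# normally fills in `browsers` (e.g. via --selenium=chrome).
def _demo_selenium_subclass():
    class ExampleSeleniumTests(SeleniumTestCase):
        browsers = ['chrome']
        def test_title(self):
            self.selenium.get(self.live_server_url + '/')
            self.assertIn('Django', self.selenium.title)
    return ExampleSeleniumTests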
|
1d25e9ad981e5ba887fa39cd81e96056a49bdebd9a5cf25f08ccedbc2228616f | import ctypes
import itertools
import logging
import multiprocessing
import os
import pickle
import textwrap
import unittest
from importlib import import_module
from io import StringIO
from django.core.management import call_command
from django.db import connections
from django.test import SimpleTestCase, TestCase
from django.test.utils import (
setup_databases as _setup_databases, setup_test_environment,
teardown_databases as _teardown_databases, teardown_test_environment,
)
from django.utils.datastructures import OrderedSet
from django.utils.version import PY37
try:
import tblib.pickling_support
except ImportError:
tblib = None
class DebugSQLTextTestResult(unittest.TextTestResult):
def __init__(self, stream, descriptions, verbosity):
self.logger = logging.getLogger('django.db.backends')
self.logger.setLevel(logging.DEBUG)
super().__init__(stream, descriptions, verbosity)
def startTest(self, test):
self.debug_sql_stream = StringIO()
self.handler = logging.StreamHandler(self.debug_sql_stream)
self.logger.addHandler(self.handler)
super().startTest(test)
def stopTest(self, test):
super().stopTest(test)
self.logger.removeHandler(self.handler)
if self.showAll:
self.debug_sql_stream.seek(0)
self.stream.write(self.debug_sql_stream.read())
self.stream.writeln(self.separator2)
def addError(self, test, err):
super().addError(test, err)
self.debug_sql_stream.seek(0)
self.errors[-1] = self.errors[-1] + (self.debug_sql_stream.read(),)
def addFailure(self, test, err):
super().addFailure(test, err)
self.debug_sql_stream.seek(0)
self.failures[-1] = self.failures[-1] + (self.debug_sql_stream.read(),)
def addSubTest(self, test, subtest, err):
super().addSubTest(test, subtest, err)
if err is not None:
self.debug_sql_stream.seek(0)
errors = self.failures if issubclass(err[0], test.failureException) else self.errors
errors[-1] = errors[-1] + (self.debug_sql_stream.read(),)
def printErrorList(self, flavour, errors):
for test, err, sql_debug in errors:
self.stream.writeln(self.separator1)
self.stream.writeln("%s: %s" % (flavour, self.getDescription(test)))
self.stream.writeln(self.separator2)
self.stream.writeln(err)
self.stream.writeln(self.separator2)
self.stream.writeln(sql_debug)
class RemoteTestResult:
"""
Record information about which tests have succeeded and which have failed.
The sole purpose of this class is to record events in the child processes
so they can be replayed in the master process. As a consequence it doesn't
inherit unittest.TestResult and doesn't attempt to implement all its API.
The implementation matches the unpythonic coding style of unittest2.
"""
def __init__(self):
if tblib is not None:
tblib.pickling_support.install()
self.events = []
self.failfast = False
self.shouldStop = False
self.testsRun = 0
@property
def test_index(self):
return self.testsRun - 1
def _confirm_picklable(self, obj):
"""
Confirm that obj can be pickled and unpickled as multiprocessing will
need to pickle the exception in the child process and unpickle it in
the parent process. Let the exception rise, if not.
"""
pickle.loads(pickle.dumps(obj))
def _print_unpicklable_subtest(self, test, subtest, pickle_exc):
print("""
Subtest failed:
test: {}
subtest: {}
Unfortunately, the subtest that failed cannot be pickled, so the parallel
test runner cannot handle it cleanly. Here is the pickling error:
> {}
You should re-run this test with --parallel=1 to reproduce the failure
with a cleaner failure message.
""".format(test, subtest, pickle_exc))
def check_picklable(self, test, err):
# Ensure that sys.exc_info() tuples are picklable. This displays a
# clear multiprocessing.pool.RemoteTraceback generated in the child
# process instead of a multiprocessing.pool.MaybeEncodingError, making
# the root cause easier to figure out for users who aren't familiar
# with the multiprocessing module. Since we're in a forked process,
# our best chance to communicate with them is to print to stdout.
try:
self._confirm_picklable(err)
except Exception as exc:
original_exc_txt = repr(err[1])
original_exc_txt = textwrap.fill(original_exc_txt, 75, initial_indent=' ', subsequent_indent=' ')
pickle_exc_txt = repr(exc)
pickle_exc_txt = textwrap.fill(pickle_exc_txt, 75, initial_indent=' ', subsequent_indent=' ')
if tblib is None:
print("""
{} failed:
{}
Unfortunately, tracebacks cannot be pickled, making it impossible for the
parallel test runner to handle this exception cleanly.
In order to see the traceback, you should install tblib:
python -m pip install tblib
""".format(test, original_exc_txt))
else:
print("""
{} failed:
{}
Unfortunately, the exception it raised cannot be pickled, making it impossible
for the parallel test runner to handle it cleanly.
Here's the error encountered while trying to pickle the exception:
{}
You should re-run this test with the --parallel=1 option to reproduce the
failure and get a correct traceback.
""".format(test, original_exc_txt, pickle_exc_txt))
raise
def check_subtest_picklable(self, test, subtest):
try:
self._confirm_picklable(subtest)
except Exception as exc:
self._print_unpicklable_subtest(test, subtest, exc)
raise
def stop_if_failfast(self):
if self.failfast:
self.stop()
def stop(self):
self.shouldStop = True
def startTestRun(self):
self.events.append(('startTestRun',))
def stopTestRun(self):
self.events.append(('stopTestRun',))
def startTest(self, test):
self.testsRun += 1
self.events.append(('startTest', self.test_index))
def stopTest(self, test):
self.events.append(('stopTest', self.test_index))
def addError(self, test, err):
self.check_picklable(test, err)
self.events.append(('addError', self.test_index, err))
self.stop_if_failfast()
def addFailure(self, test, err):
self.check_picklable(test, err)
self.events.append(('addFailure', self.test_index, err))
self.stop_if_failfast()
def addSubTest(self, test, subtest, err):
# Follow Python 3.5's implementation of unittest.TestResult.addSubTest()
# by not doing anything when a subtest is successful.
if err is not None:
# Call check_picklable() before check_subtest_picklable() since
# check_picklable() performs the tblib check.
self.check_picklable(test, err)
self.check_subtest_picklable(test, subtest)
self.events.append(('addSubTest', self.test_index, subtest, err))
self.stop_if_failfast()
def addSuccess(self, test):
self.events.append(('addSuccess', self.test_index))
def addSkip(self, test, reason):
self.events.append(('addSkip', self.test_index, reason))
def addExpectedFailure(self, test, err):
# If tblib isn't installed, pickling the traceback will always fail.
# However we don't want tblib to be required for running the tests
# when they pass or fail as expected. Drop the traceback when an
# expected failure occurs.
if tblib is None:
err = err[0], err[1], None
self.check_picklable(test, err)
self.events.append(('addExpectedFailure', self.test_index, err))
def addUnexpectedSuccess(self, test):
self.events.append(('addUnexpectedSuccess', self.test_index))
self.stop_if_failfast()
class RemoteTestRunner:
"""
Run tests and record everything but don't display anything.
The implementation matches the unpythonic coding style of unittest2.
"""
resultclass = RemoteTestResult
def __init__(self, failfast=False, resultclass=None):
self.failfast = failfast
if resultclass is not None:
self.resultclass = resultclass
def run(self, test):
result = self.resultclass()
unittest.registerResult(result)
result.failfast = self.failfast
test(result)
return result
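# Editor's sketch (illustrative, not part of Django): RemoteTestResult records
# picklable events instead of printing, so a worker's run can be replayed in
# the parent process.
def _demo_remote_runner():
    class Passing(unittest.TestCase):
        def test_ok(self):
            pass
    result = RemoteTestRunner().run(unittest.TestSuite([Passing('test_ok')]))
    assert ('addSuccess', 0) in result.events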
def default_test_processes():
"""Default number of test processes when using the --parallel option."""
# The current implementation of the parallel test runner requires
# multiprocessing to start subprocesses with fork().
if multiprocessing.get_start_method() != 'fork':
return 1
try:
return int(os.environ['DJANGO_TEST_PROCESSES'])
except KeyError:
return multiprocessing.cpu_count()
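# Editor's sketch (illustrative, not part of Django): pinning the worker count
# through the environment variable read above.
def _demo_default_test_processes():
    os.environ['DJANGO_TEST_PROCESSES'] = '4'
    try:
        # Returns 1 when the multiprocessing start method isn't fork().
        assert default_test_processes() in (1, 4)
    finally:
        del os.environ['DJANGO_TEST_PROCESSES']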
_worker_id = 0
def _init_worker(counter):
"""
Switch to databases dedicated to this worker.
This helper lives at module-level because of the multiprocessing module's
requirements.
"""
global _worker_id
with counter.get_lock():
counter.value += 1
_worker_id = counter.value
for alias in connections:
connection = connections[alias]
settings_dict = connection.creation.get_test_db_clone_settings(str(_worker_id))
# connection.settings_dict must be updated in place for changes to be
# reflected in django.db.connections. If the following line assigned
# connection.settings_dict = settings_dict, new threads would connect
# to the default database instead of the appropriate clone.
connection.settings_dict.update(settings_dict)
connection.close()
def _run_subsuite(args):
"""
Run a suite of tests with a RemoteTestRunner and return a RemoteTestResult.
This helper lives at module-level and its arguments are wrapped in a tuple
because of the multiprocessing module's requirements.
"""
runner_class, subsuite_index, subsuite, failfast = args
runner = runner_class(failfast=failfast)
result = runner.run(subsuite)
return subsuite_index, result.events
class ParallelTestSuite(unittest.TestSuite):
"""
Run a series of tests in parallel in several processes.
While the unittest module's documentation implies that orchestrating the
execution of tests is the responsibility of the test runner, in practice,
it appears that TestRunner classes are more concerned with formatting and
displaying test results.
Since there are fewer use cases for customizing TestSuite than TestRunner,
implementing parallelization at the level of the TestSuite improves
interoperability with existing custom test runners. A single instance of a
test runner can still collect results from all tests without being aware
that they have been run in parallel.
"""
# In case someone wants to modify these in a subclass.
init_worker = _init_worker
run_subsuite = _run_subsuite
runner_class = RemoteTestRunner
def __init__(self, suite, processes, failfast=False):
self.subsuites = partition_suite_by_case(suite)
self.processes = processes
self.failfast = failfast
super().__init__()
def run(self, result):
"""
Distribute test cases across workers.
Return an identifier of each test case with its result in order to use
imap_unordered to show results as soon as they're available.
To minimize pickling errors when getting results from workers:
- pass back numeric indexes in self.subsuites instead of tests
- make tracebacks picklable with tblib, if available
Even with tblib, errors may still occur for dynamically created
exception classes which cannot be unpickled.
"""
counter = multiprocessing.Value(ctypes.c_int, 0)
pool = multiprocessing.Pool(
processes=self.processes,
initializer=self.init_worker.__func__,
initargs=[counter],
)
args = [
(self.runner_class, index, subsuite, self.failfast)
for index, subsuite in enumerate(self.subsuites)
]
test_results = pool.imap_unordered(self.run_subsuite.__func__, args)
while True:
if result.shouldStop:
pool.terminate()
break
try:
subsuite_index, events = test_results.next(timeout=0.1)
except multiprocessing.TimeoutError:
continue
except StopIteration:
pool.close()
break
tests = list(self.subsuites[subsuite_index])
for event in events:
event_name = event[0]
handler = getattr(result, event_name, None)
if handler is None:
continue
test = tests[event[1]]
args = event[2:]
handler(test, *args)
pool.join()
return result
def __iter__(self):
return iter(self.subsuites)
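# Editor's sketch (illustrative, not part of Django): the suite is partitioned
# per TestCase class, so two classes yield two subsuites (and hence at most two
# useful worker processes).
def _demo_parallel_suite():
    class FirstTests(unittest.TestCase):
        def test_ok(self):
            pass
    class SecondTests(unittest.TestCase):
        def test_ok(self):
            pass
    loader = unittest.defaultTestLoader
    suite = unittest.TestSuite(
        loader.loadTestsFromTestCase(cls) for cls in (FirstTests, SecondTests)
    )
    assert len(ParallelTestSuite(suite, processes=2).subsuites) == 2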
class DiscoverRunner:
"""A Django test runner that uses unittest2 test discovery."""
test_suite = unittest.TestSuite
parallel_test_suite = ParallelTestSuite
test_runner = unittest.TextTestRunner
test_loader = unittest.defaultTestLoader
reorder_by = (TestCase, SimpleTestCase)
def __init__(self, pattern=None, top_level=None, verbosity=1,
interactive=True, failfast=False, keepdb=False,
reverse=False, debug_mode=False, debug_sql=False, parallel=0,
tags=None, exclude_tags=None, test_name_patterns=None, **kwargs):
self.pattern = pattern
self.top_level = top_level
self.verbosity = verbosity
self.interactive = interactive
self.failfast = failfast
self.keepdb = keepdb
self.reverse = reverse
self.debug_mode = debug_mode
self.debug_sql = debug_sql
self.parallel = parallel
self.tags = set(tags or [])
self.exclude_tags = set(exclude_tags or [])
self.test_name_patterns = None
if test_name_patterns:
# unittest does not export the _convert_select_pattern function
# that converts command-line arguments to patterns.
self.test_name_patterns = {
pattern if '*' in pattern else '*%s*' % pattern
for pattern in test_name_patterns
}
@classmethod
def add_arguments(cls, parser):
parser.add_argument(
'-t', '--top-level-directory', dest='top_level',
help='Top level of project for unittest discovery.',
)
parser.add_argument(
'-p', '--pattern', default="test*.py",
help='The test matching pattern. Defaults to test*.py.',
)
parser.add_argument(
'--keepdb', action='store_true',
help='Preserves the test DB between runs.'
)
parser.add_argument(
'-r', '--reverse', action='store_true',
help='Reverses test cases order.',
)
parser.add_argument(
'--debug-mode', action='store_true',
help='Sets settings.DEBUG to True.',
)
parser.add_argument(
'-d', '--debug-sql', action='store_true',
help='Prints logged SQL queries on failure.',
)
parser.add_argument(
'--parallel', nargs='?', default=1, type=int,
const=default_test_processes(), metavar='N',
help='Run tests using up to N parallel processes.',
)
parser.add_argument(
'--tag', action='append', dest='tags',
help='Run only tests with the specified tag. Can be used multiple times.',
)
parser.add_argument(
'--exclude-tag', action='append', dest='exclude_tags',
help='Do not run tests with the specified tag. Can be used multiple times.',
)
if PY37:
parser.add_argument(
'-k', action='append', dest='test_name_patterns',
help=(
'Only run test methods and classes that match the pattern '
'or substring. Can be used multiple times. Same as '
'unittest -k option.'
),
)
def setup_test_environment(self, **kwargs):
setup_test_environment(debug=self.debug_mode)
unittest.installHandler()
def build_suite(self, test_labels=None, extra_tests=None, **kwargs):
suite = self.test_suite()
test_labels = test_labels or ['.']
extra_tests = extra_tests or []
self.test_loader.testNamePatterns = self.test_name_patterns
discover_kwargs = {}
if self.pattern is not None:
discover_kwargs['pattern'] = self.pattern
if self.top_level is not None:
discover_kwargs['top_level_dir'] = self.top_level
for label in test_labels:
kwargs = discover_kwargs.copy()
tests = None
label_as_path = os.path.abspath(label)
            # If a module, or "module.ClassName[.method_name]", just run those.
if not os.path.exists(label_as_path):
tests = self.test_loader.loadTestsFromName(label)
elif os.path.isdir(label_as_path) and not self.top_level:
# Try to be a bit smarter than unittest about finding the
# default top-level for a given directory path, to avoid
# breaking relative imports. (Unittest's default is to set
# top-level equal to the path, which means relative imports
# will result in "Attempted relative import in non-package.").
# We'd be happy to skip this and require dotted module paths
# (which don't cause this problem) instead of file paths (which
# do), but in the case of a directory in the cwd, which would
# be equally valid if considered as a top-level module or as a
# directory path, unittest unfortunately prefers the latter.
top_level = label_as_path
while True:
init_py = os.path.join(top_level, '__init__.py')
if os.path.exists(init_py):
try_next = os.path.dirname(top_level)
if try_next == top_level:
                            # __init__.py all the way down? Give up.
break
top_level = try_next
continue
break
kwargs['top_level_dir'] = top_level
if not (tests and tests.countTestCases()) and is_discoverable(label):
# Try discovery if path is a package or directory
tests = self.test_loader.discover(start_dir=label, **kwargs)
# Make unittest forget the top-level dir it calculated from this
# run, to support running tests from two different top-levels.
self.test_loader._top_level_dir = None
suite.addTests(tests)
for test in extra_tests:
suite.addTest(test)
if self.tags or self.exclude_tags:
if self.verbosity >= 2:
if self.tags:
print('Including test tag(s): %s.' % ', '.join(sorted(self.tags)))
if self.exclude_tags:
print('Excluding test tag(s): %s.' % ', '.join(sorted(self.exclude_tags)))
suite = filter_tests_by_tags(suite, self.tags, self.exclude_tags)
suite = reorder_suite(suite, self.reorder_by, self.reverse)
if self.parallel > 1:
parallel_suite = self.parallel_test_suite(suite, self.parallel, self.failfast)
# Since tests are distributed across processes on a per-TestCase
# basis, there's no need for more processes than TestCases.
parallel_units = len(parallel_suite.subsuites)
self.parallel = min(self.parallel, parallel_units)
# If there's only one TestCase, parallelization isn't needed.
if self.parallel > 1:
suite = parallel_suite
return suite
def setup_databases(self, **kwargs):
return _setup_databases(
self.verbosity, self.interactive, self.keepdb, self.debug_sql,
self.parallel, **kwargs
)
def get_resultclass(self):
return DebugSQLTextTestResult if self.debug_sql else None
def get_test_runner_kwargs(self):
return {
'failfast': self.failfast,
'resultclass': self.get_resultclass(),
'verbosity': self.verbosity,
}
def run_checks(self):
# Checks are run after database creation since some checks require
# database access.
call_command('check', verbosity=self.verbosity)
def run_suite(self, suite, **kwargs):
kwargs = self.get_test_runner_kwargs()
runner = self.test_runner(**kwargs)
return runner.run(suite)
def teardown_databases(self, old_config, **kwargs):
"""Destroy all the non-mirror databases."""
_teardown_databases(
old_config,
verbosity=self.verbosity,
parallel=self.parallel,
keepdb=self.keepdb,
)
def teardown_test_environment(self, **kwargs):
unittest.removeHandler()
teardown_test_environment()
def suite_result(self, suite, result, **kwargs):
return len(result.failures) + len(result.errors)
def _get_databases(self, suite):
databases = set()
for test in suite:
if isinstance(test, unittest.TestCase):
test_databases = getattr(test, 'databases', None)
if test_databases == '__all__':
return set(connections)
if test_databases:
databases.update(test_databases)
else:
databases.update(self._get_databases(test))
return databases
def get_databases(self, suite):
databases = self._get_databases(suite)
if self.verbosity >= 2:
unused_databases = [alias for alias in connections if alias not in databases]
if unused_databases:
print('Skipping setup of unused database(s): %s.' % ', '.join(sorted(unused_databases)))
return databases
def run_tests(self, test_labels, extra_tests=None, **kwargs):
"""
Run the unit tests for all the test labels in the provided list.
Test labels should be dotted Python paths to test modules, test
classes, or test methods.
A list of 'extra' tests may also be provided; these tests
will be added to the test suite.
Return the number of tests that failed.
"""
self.setup_test_environment()
suite = self.build_suite(test_labels, extra_tests)
databases = self.get_databases(suite)
old_config = self.setup_databases(aliases=databases)
run_failed = False
try:
self.run_checks()
result = self.run_suite(suite)
except Exception:
run_failed = True
raise
finally:
try:
self.teardown_databases(old_config)
self.teardown_test_environment()
except Exception:
# Silence teardown exceptions if an exception was raised during
# runs to avoid shadowing it.
if not run_failed:
raise
return self.suite_result(suite, result)
def is_discoverable(label):
"""
Check if a test label points to a Python package or file directory.
Relative labels like "." and ".." are seen as directories.
"""
try:
mod = import_module(label)
except (ImportError, TypeError):
pass
else:
return hasattr(mod, '__path__')
return os.path.isdir(os.path.abspath(label))
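# Editor's sketch (illustrative, not part of Django): packages and directories
# are discoverable, plain modules are not.
def _demo_is_discoverable():
    assert is_discoverable('unittest')           # a package (has __path__)
    assert not is_discoverable('unittest.mock')  # a module, not a package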
def reorder_suite(suite, classes, reverse=False):
"""
Reorder a test suite by test type.
    `classes` is a sequence of types.
All tests of type classes[0] are placed first, then tests of type
classes[1], etc. Tests with no match in classes are placed last.
If `reverse` is True, sort tests within classes in opposite order but
don't reverse test classes.
"""
class_count = len(classes)
suite_class = type(suite)
bins = [OrderedSet() for i in range(class_count + 1)]
partition_suite_by_type(suite, classes, bins, reverse=reverse)
reordered_suite = suite_class()
for i in range(class_count + 1):
reordered_suite.addTests(bins[i])
return reordered_suite
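# Editor's sketch (illustrative, not part of Django): tests whose class appears
# in `classes` are moved ahead of unmatched tests.
def _demo_reorder_suite():
    class A(unittest.TestCase):
        def test_a(self):
            pass
    class B(unittest.TestCase):
        def test_b(self):
            pass
    suite = unittest.TestSuite([A('test_a'), B('test_b')])
    reordered = reorder_suite(suite, classes=(B,))
    assert [type(t).__name__ for t in reordered] == ['B', 'A']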
def partition_suite_by_type(suite, classes, bins, reverse=False):
"""
Partition a test suite by test type. Also prevent duplicated tests.
classes is a sequence of types
bins is a sequence of TestSuites, one more than classes
reverse changes the ordering of tests within bins
Tests of type classes[i] are added to bins[i],
    tests with no match found in classes are placed in bins[-1].
"""
suite_class = type(suite)
if reverse:
suite = reversed(tuple(suite))
for test in suite:
if isinstance(test, suite_class):
partition_suite_by_type(test, classes, bins, reverse=reverse)
else:
for i in range(len(classes)):
if isinstance(test, classes[i]):
bins[i].add(test)
break
else:
bins[-1].add(test)
def partition_suite_by_case(suite):
"""Partition a test suite by test case, preserving the order of tests."""
groups = []
suite_class = type(suite)
for test_type, test_group in itertools.groupby(suite, type):
if issubclass(test_type, unittest.TestCase):
groups.append(suite_class(test_group))
else:
for item in test_group:
groups.extend(partition_suite_by_case(item))
return groups
def filter_tests_by_tags(suite, tags, exclude_tags):
suite_class = type(suite)
filtered_suite = suite_class()
for test in suite:
if isinstance(test, suite_class):
filtered_suite.addTests(filter_tests_by_tags(test, tags, exclude_tags))
else:
test_tags = set(getattr(test, 'tags', set()))
test_fn_name = getattr(test, '_testMethodName', str(test))
test_fn = getattr(test, test_fn_name, test)
test_fn_tags = set(getattr(test_fn, 'tags', set()))
all_tags = test_tags.union(test_fn_tags)
matched_tags = all_tags.intersection(tags)
if (matched_tags or not tags) and not all_tags.intersection(exclude_tags):
filtered_suite.addTest(test)
return filtered_suite
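# Editor's sketch (illustrative, not part of Django): tags set directly on the
# test methods, as the django.test.tag decorator would do.
def _demo_filter_tests_by_tags():
    class Tagged(unittest.TestCase):
        def test_fast(self):
            pass
        test_fast.tags = {'fast'}
        def test_slow(self):
            pass
        test_slow.tags = {'slow'}
    suite = unittest.TestSuite([Tagged('test_fast'), Tagged('test_slow')])
    kept = filter_tests_by_tags(suite, tags={'fast'}, exclude_tags=set())
    assert kept.countTestCases() == 1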
|
5b7aebd46a5a411129cba239e3ca87ea343c3ae9e7d4d9c0002a8c257791ee1f | import json
import mimetypes
import os
import re
import sys
from copy import copy
from functools import partial
from http import HTTPStatus
from importlib import import_module
from io import BytesIO
from urllib.parse import unquote_to_bytes, urljoin, urlparse, urlsplit
from django.conf import settings
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import WSGIRequest
from django.core.serializers.json import DjangoJSONEncoder
from django.core.signals import (
got_request_exception, request_finished, request_started,
)
from django.db import close_old_connections
from django.http import HttpRequest, QueryDict, SimpleCookie
from django.test import signals
from django.test.utils import ContextList
from django.urls import resolve
from django.utils.encoding import force_bytes
from django.utils.functional import SimpleLazyObject
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
__all__ = ('Client', 'RedirectCycleError', 'RequestFactory', 'encode_file', 'encode_multipart')
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
CONTENT_TYPE_RE = re.compile(r'.*; charset=([\w\d-]+);?')
# Structured suffix spec: https://tools.ietf.org/html/rfc6838#section-4.2.8
JSON_CONTENT_TYPE_RE = re.compile(r'^application\/(.+\+)?json')
class RedirectCycleError(Exception):
"""The test client has been asked to follow a redirect loop."""
def __init__(self, message, last_response):
super().__init__(message)
self.last_response = last_response
self.redirect_chain = last_response.redirect_chain
class FakePayload:
"""
    A wrapper around BytesIO that restricts what can be read, since data from
    the network can't be seeked and can't be read beyond its declared content
    length. This makes sure that views can't do anything under the test client
that wouldn't work in real life.
"""
def __init__(self, content=None):
self.__content = BytesIO()
self.__len = 0
self.read_started = False
if content is not None:
self.write(content)
def __len__(self):
return self.__len
def read(self, num_bytes=None):
if not self.read_started:
self.__content.seek(0)
self.read_started = True
if num_bytes is None:
num_bytes = self.__len or 0
assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
content = self.__content.read(num_bytes)
self.__len -= num_bytes
return content
def write(self, content):
if self.read_started:
raise ValueError("Unable to write a payload after it's been read")
content = force_bytes(content)
self.__content.write(content)
self.__len += len(content)
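# Editor's sketch (illustrative, not part of Django): FakePayload enforces the
# declared length and forbids writes once reading has started.
def _demo_fake_payload():
    payload = FakePayload(b'abcdef')
    assert payload.read(4) == b'abcd'
    assert payload.read() == b'ef'  # Reads the remaining bytes.
    try:
        payload.write(b'more')
    except ValueError:
        pass  # Writing after a read is rejected.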
def closing_iterator_wrapper(iterable, close):
try:
yield from iterable
finally:
request_finished.disconnect(close_old_connections)
close() # will fire request_finished
request_finished.connect(close_old_connections)
def conditional_content_removal(request, response):
"""
Simulate the behavior of most Web servers by removing the content of
responses for HEAD requests, 1xx, 204, and 304 responses. Ensure
compliance with RFC 7230, section 3.3.3.
"""
if 100 <= response.status_code < 200 or response.status_code in (204, 304):
if response.streaming:
response.streaming_content = []
else:
response.content = b''
if request.method == 'HEAD':
if response.streaming:
response.streaming_content = []
else:
response.content = b''
return response
class ClientHandler(BaseHandler):
"""
    An HTTP handler that can be used for testing purposes. Use the WSGI
interface to compose requests, but return the raw HttpResponse object with
the originating WSGIRequest attached to its ``wsgi_request`` attribute.
"""
def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
self.enforce_csrf_checks = enforce_csrf_checks
super().__init__(*args, **kwargs)
def __call__(self, environ):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._middleware_chain is None:
self.load_middleware()
request_started.disconnect(close_old_connections)
request_started.send(sender=self.__class__, environ=environ)
request_started.connect(close_old_connections)
request = WSGIRequest(environ)
        # A sneaky little hack so that we can easily get around
# CsrfViewMiddleware. This makes life easier, and is probably
# required for backwards compatibility with external tests against
# admin views.
request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
# Request goes through middleware.
response = self.get_response(request)
# Simulate behaviors of most Web servers.
conditional_content_removal(request, response)
# Attach the originating request to the response so that it could be
# later retrieved.
response.wsgi_request = request
# Emulate a WSGI server by calling the close method on completion.
if response.streaming:
response.streaming_content = closing_iterator_wrapper(
response.streaming_content, response.close)
else:
request_finished.disconnect(close_old_connections)
response.close() # will fire request_finished
request_finished.connect(close_old_connections)
return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
"""
Store templates and contexts that are rendered.
The context is copied so that it is an accurate representation at the time
of rendering.
"""
store.setdefault('templates', []).append(template)
if 'context' not in store:
store['context'] = ContextList()
store['context'].append(copy(context))
def encode_multipart(boundary, data):
"""
Encode multipart POST data from a dictionary of form values.
The key will be used as the form data name; the value will be transmitted
as content. If the value is a file, the contents of the file will be sent
as an application/octet-stream; otherwise, str(value) will be sent.
"""
lines = []
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
# Not by any means perfect, but good enough for our purposes.
def is_file(thing):
return hasattr(thing, "read") and callable(thing.read)
# Each bit of the multipart form data could be either a form value or a
# file, or a *list* of form values and/or files. Remember that HTTP field
# names can be duplicated!
for (key, value) in data.items():
if value is None:
raise TypeError(
'Cannot encode None as POST data. Did you mean to pass an '
'empty string or omit the value?'
)
elif is_file(value):
lines.extend(encode_file(boundary, key, value))
elif not isinstance(value, str) and is_iterable(value):
for item in value:
if is_file(item):
lines.extend(encode_file(boundary, key, item))
else:
lines.extend(to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
item
])
else:
lines.extend(to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
value
])
lines.extend([
to_bytes('--%s--' % boundary),
b'',
])
return b'\r\n'.join(lines)
def encode_file(boundary, key, file):
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
# file.name might not be a string. For example, it's an int for
# tempfile.TemporaryFile().
file_has_string_name = hasattr(file, 'name') and isinstance(file.name, str)
filename = os.path.basename(file.name) if file_has_string_name else ''
if hasattr(file, 'content_type'):
content_type = file.content_type
elif filename:
content_type = mimetypes.guess_type(filename)[0]
else:
content_type = None
if content_type is None:
content_type = 'application/octet-stream'
filename = filename or key
return [
to_bytes('--%s' % boundary),
to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"'
% (key, filename)),
to_bytes('Content-Type: %s' % content_type),
b'',
to_bytes(file.read())
]
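# Editor's sketch (illustrative, not part of Django; requires configured
# settings for DEFAULT_CHARSET): the raw multipart body the test client posts.
def _demo_encode_multipart():
    body = encode_multipart(BOUNDARY, {'name': 'value'})
    assert b'Content-Disposition: form-data; name="name"' in body
    assert body.endswith(('--%s--\r\n' % BOUNDARY).encode())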
class RequestFactory:
"""
Class that lets you create mock Request objects for use in testing.
Usage:
rf = RequestFactory()
get_request = rf.get('/hello/')
post_request = rf.post('/submit/', {'foo': 'bar'})
Once you have a request object you can pass it to any view function,
just as if that view had been hooked up using a URLconf.
"""
def __init__(self, *, json_encoder=DjangoJSONEncoder, **defaults):
self.json_encoder = json_encoder
self.defaults = defaults
self.cookies = SimpleCookie()
self.errors = BytesIO()
def _base_environ(self, **request):
"""
The base environment for a request.
"""
# This is a minimal valid WSGI environ dictionary, plus:
# - HTTP_COOKIE: for cookie support,
# - REMOTE_ADDR: often useful, see #8551.
# See https://www.python.org/dev/peps/pep-3333/#environ-variables
return {
'HTTP_COOKIE': '; '.join(sorted(
'%s=%s' % (morsel.key, morsel.coded_value)
for morsel in self.cookies.values()
)),
'PATH_INFO': '/',
'REMOTE_ADDR': '127.0.0.1',
'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'SERVER_NAME': 'testserver',
'SERVER_PORT': '80',
'SERVER_PROTOCOL': 'HTTP/1.1',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': FakePayload(b''),
'wsgi.errors': self.errors,
'wsgi.multiprocess': True,
'wsgi.multithread': False,
'wsgi.run_once': False,
**self.defaults,
**request,
}
def request(self, **request):
"Construct a generic request object."
return WSGIRequest(self._base_environ(**request))
def _encode_data(self, data, content_type):
if content_type is MULTIPART_CONTENT:
return encode_multipart(BOUNDARY, data)
else:
# Encode the content so that the byte representation is correct.
match = CONTENT_TYPE_RE.match(content_type)
if match:
charset = match.group(1)
else:
charset = settings.DEFAULT_CHARSET
return force_bytes(data, encoding=charset)
def _encode_json(self, data, content_type):
"""
Return encoded JSON if data is a dict, list, or tuple and content_type
is application/json.
"""
should_encode = JSON_CONTENT_TYPE_RE.match(content_type) and isinstance(data, (dict, list, tuple))
return json.dumps(data, cls=self.json_encoder) if should_encode else data
def _get_path(self, parsed):
path = parsed.path
# If there are parameters, add them
if parsed.params:
path += ";" + parsed.params
path = unquote_to_bytes(path)
# Replace the behavior where non-ASCII values in the WSGI environ are
# arbitrarily decoded with ISO-8859-1.
# Refs comment in `get_bytes_from_wsgi()`.
return path.decode('iso-8859-1')
def get(self, path, data=None, secure=False, **extra):
"""Construct a GET request."""
data = {} if data is None else data
return self.generic('GET', path, secure=secure, **{
'QUERY_STRING': urlencode(data, doseq=True),
**extra,
})
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
secure=False, **extra):
"""Construct a POST request."""
data = self._encode_json({} if data is None else data, content_type)
post_data = self._encode_data(data, content_type)
return self.generic('POST', path, post_data, content_type,
secure=secure, **extra)
def head(self, path, data=None, secure=False, **extra):
"""Construct a HEAD request."""
data = {} if data is None else data
return self.generic('HEAD', path, secure=secure, **{
'QUERY_STRING': urlencode(data, doseq=True),
**extra,
})
def trace(self, path, secure=False, **extra):
"""Construct a TRACE request."""
return self.generic('TRACE', path, secure=secure, **extra)
def options(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct an OPTIONS request."
return self.generic('OPTIONS', path, data, content_type,
secure=secure, **extra)
def put(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"""Construct a PUT request."""
data = self._encode_json(data, content_type)
return self.generic('PUT', path, data, content_type,
secure=secure, **extra)
def patch(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"""Construct a PATCH request."""
data = self._encode_json(data, content_type)
return self.generic('PATCH', path, data, content_type,
secure=secure, **extra)
def delete(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"""Construct a DELETE request."""
data = self._encode_json(data, content_type)
return self.generic('DELETE', path, data, content_type,
secure=secure, **extra)
def generic(self, method, path, data='',
content_type='application/octet-stream', secure=False,
**extra):
"""Construct an arbitrary HTTP request."""
parsed = urlparse(str(path)) # path can be lazy
data = force_bytes(data, settings.DEFAULT_CHARSET)
r = {
'PATH_INFO': self._get_path(parsed),
'REQUEST_METHOD': method,
'SERVER_PORT': '443' if secure else '80',
'wsgi.url_scheme': 'https' if secure else 'http',
}
if data:
r.update({
'CONTENT_LENGTH': str(len(data)),
'CONTENT_TYPE': content_type,
'wsgi.input': FakePayload(data),
})
r.update(extra)
# If QUERY_STRING is absent or empty, we want to extract it from the URL.
if not r.get('QUERY_STRING'):
# WSGI requires latin-1 encoded strings. See get_path_info().
query_string = parsed[4].encode().decode('iso-8859-1')
r['QUERY_STRING'] = query_string
return self.request(**r)
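# Editor's sketch (illustrative, not part of Django; requires configured
# settings): the factory builds a WSGIRequest without going through a handler.
def _demo_request_factory():
    rf = RequestFactory()
    request = rf.get('/hello/', {'q': 'django'})
    assert request.method == 'GET'
    assert request.GET['q'] == 'django'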
class Client(RequestFactory):
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(self, enforce_csrf_checks=False, raise_request_exception=True, **defaults):
super().__init__(**defaults)
self.handler = ClientHandler(enforce_csrf_checks)
self.raise_request_exception = raise_request_exception
self.exc_info = None
def store_exc_info(self, **kwargs):
"""Store exceptions when they are generated by a view."""
self.exc_info = sys.exc_info()
@property
def session(self):
"""Return the current session variables."""
engine = import_module(settings.SESSION_ENGINE)
cookie = self.cookies.get(settings.SESSION_COOKIE_NAME)
if cookie:
return engine.SessionStore(cookie.value)
session = engine.SessionStore()
session.save()
self.cookies[settings.SESSION_COOKIE_NAME] = session.session_key
return session
def request(self, **request):
"""
The master request method. Compose the environment dictionary and pass
to the handler, return the result of the handler. Assume defaults for
the query environment, which can be overridden using the arguments to
the request.
"""
environ = self._base_environ(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = partial(store_rendered_templates, data)
signal_uid = "template-render-%s" % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
exception_uid = "request-exception-%s" % id(request)
got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
try:
response = self.handler(environ)
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid=exception_uid)
# Look for a signaled exception, clear the current context exception
# data, then re-raise the signaled exception. Also clear the signaled
# exception from the local cache.
response.exc_info = self.exc_info
if self.exc_info:
_, exc_value, _ = self.exc_info
self.exc_info = None
if self.raise_request_exception:
raise exc_value
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get('templates', [])
response.context = data.get('context')
response.json = partial(self._parse_json, response)
# Attach the ResolverMatch instance to the response.
response.resolver_match = SimpleLazyObject(lambda: resolve(request['PATH_INFO']))
# Flatten a single context. Not really necessary anymore thanks to the
# __getattr__ flattening in ContextList, but has some edge case
# backwards compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
def get(self, path, data=None, follow=False, secure=False, **extra):
"""Request a response from the server using GET."""
response = super().get(path, data=data, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, **extra)
return response
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
follow=False, secure=False, **extra):
"""Request a response from the server using POST."""
response = super().post(path, data=data, content_type=content_type, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
return response
def head(self, path, data=None, follow=False, secure=False, **extra):
"""Request a response from the server using HEAD."""
response = super().head(path, data=data, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, **extra)
return response
def options(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""Request a response from the server using OPTIONS."""
response = super().options(path, data=data, content_type=content_type, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
return response
def put(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""Send a resource to the server using PUT."""
response = super().put(path, data=data, content_type=content_type, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
return response
def patch(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""Send a resource to the server using PATCH."""
response = super().patch(path, data=data, content_type=content_type, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
return response
def delete(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""Send a DELETE request to the server."""
response = super().delete(path, data=data, content_type=content_type, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
return response
def trace(self, path, data='', follow=False, secure=False, **extra):
"""Send a TRACE request to the server."""
response = super().trace(path, data=data, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, **extra)
return response
def login(self, **credentials):
"""
        Set the Client to appear as if it has successfully logged into a site.
Return True if login is possible; False if the provided credentials
are incorrect.
"""
from django.contrib.auth import authenticate
user = authenticate(**credentials)
if user:
self._login(user)
return True
else:
return False
def force_login(self, user, backend=None):
def get_backend():
from django.contrib.auth import load_backend
for backend_path in settings.AUTHENTICATION_BACKENDS:
backend = load_backend(backend_path)
if hasattr(backend, 'get_user'):
return backend_path
if backend is None:
backend = get_backend()
user.backend = backend
self._login(user, backend)
def _login(self, user, backend=None):
from django.contrib.auth import login
engine = import_module(settings.SESSION_ENGINE)
# Create a fake request to store login details.
request = HttpRequest()
if self.session:
request.session = self.session
else:
request.session = engine.SessionStore()
login(request, user, backend)
# Save the session values.
request.session.save()
# Set the cookie to represent the session.
session_cookie = settings.SESSION_COOKIE_NAME
self.cookies[session_cookie] = request.session.session_key
cookie_data = {
'max-age': None,
'path': '/',
'domain': settings.SESSION_COOKIE_DOMAIN,
'secure': settings.SESSION_COOKIE_SECURE or None,
'expires': None,
}
self.cookies[session_cookie].update(cookie_data)
def logout(self):
"""Log out the user by removing the cookies and session object."""
from django.contrib.auth import get_user, logout
request = HttpRequest()
engine = import_module(settings.SESSION_ENGINE)
if self.session:
request.session = self.session
request.user = get_user(request)
else:
request.session = engine.SessionStore()
logout(request)
self.cookies = SimpleCookie()
def _parse_json(self, response, **extra):
if not hasattr(response, '_json'):
if not JSON_CONTENT_TYPE_RE.match(response.get('Content-Type')):
raise ValueError(
'Content-Type header is "{0}", not "application/json"'
.format(response.get('Content-Type'))
)
response._json = json.loads(response.content.decode(response.charset), **extra)
return response._json
def _handle_redirects(self, response, data='', content_type='', **extra):
"""
Follow any redirects by requesting responses from the server using GET.
"""
response.redirect_chain = []
redirect_status_codes = (
HTTPStatus.MOVED_PERMANENTLY,
HTTPStatus.FOUND,
HTTPStatus.SEE_OTHER,
HTTPStatus.TEMPORARY_REDIRECT,
HTTPStatus.PERMANENT_REDIRECT,
)
while response.status_code in redirect_status_codes:
response_url = response.url
redirect_chain = response.redirect_chain
redirect_chain.append((response_url, response.status_code))
url = urlsplit(response_url)
if url.scheme:
extra['wsgi.url_scheme'] = url.scheme
if url.hostname:
extra['SERVER_NAME'] = url.hostname
if url.port:
extra['SERVER_PORT'] = str(url.port)
# Prepend the request path to handle relative path redirects
path = url.path
if not path.startswith('/'):
path = urljoin(response.request['PATH_INFO'], path)
if response.status_code in (HTTPStatus.TEMPORARY_REDIRECT, HTTPStatus.PERMANENT_REDIRECT):
# Preserve request method post-redirect for 307/308 responses.
request_method = getattr(self, response.request['REQUEST_METHOD'].lower())
else:
request_method = self.get
data = QueryDict(url.query)
content_type = None
response = request_method(path, data=data, content_type=content_type, follow=False, **extra)
response.redirect_chain = redirect_chain
if redirect_chain[-1] in redirect_chain[:-1]:
# Check that we're not redirecting to somewhere we've already
# been to, to prevent loops.
raise RedirectCycleError("Redirect loop detected.", last_response=response)
if len(redirect_chain) > 20:
# Such a lengthy chain likely also means a loop, but one with
# a growing path, changing view, or changing query argument;
# 20 is the value of "network.http.redirection-limit" from Firefox.
raise RedirectCycleError("Too many redirects.", last_response=response)
return response
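# Editor's sketch (illustrative, not part of Django; the URL is hypothetical
# and requires a URLconf plus configured settings): following redirects.
def _demo_client_follow():
    client = Client()
    response = client.get('/old-path/', follow=True)
    for url, status_code in response.redirect_chain:
        print(url, status_code)  # Each hop is recorded as (url, status_code).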
|
e8e22095c3b89682047196eb5cb86de377225bf776f3f853cf91122ccfdd9723 | import difflib
import json
import posixpath
import sys
import threading
import unittest
import warnings
from collections import Counter
from contextlib import contextmanager
from copy import copy
from difflib import get_close_matches
from functools import wraps
from unittest.util import safe_repr
from urllib.parse import (
parse_qsl, unquote, urlencode, urljoin, urlparse, urlsplit, urlunparse,
)
from urllib.request import url2pathname
from django.apps import apps
from django.conf import settings
from django.core import mail
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.core.files import locks
from django.core.handlers.wsgi import WSGIHandler, get_path_info
from django.core.management import call_command
from django.core.management.color import no_style
from django.core.management.sql import emit_post_migrate_signal
from django.core.servers.basehttp import ThreadedWSGIServer, WSGIRequestHandler
from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction
from django.forms.fields import CharField
from django.http import QueryDict
from django.http.request import split_domain_port, validate_host
from django.test.client import Client
from django.test.html import HTMLParseError, parse_html
from django.test.signals import setting_changed, template_rendered
from django.test.utils import (
CaptureQueriesContext, ContextList, compare_xml, modify_settings,
override_settings,
)
from django.utils.decorators import classproperty
from django.utils.deprecation import RemovedInDjango31Warning
from django.views.static import serve
__all__ = ('TestCase', 'TransactionTestCase',
'SimpleTestCase', 'skipIfDBFeature', 'skipUnlessDBFeature')
def to_list(value):
"""
Put value into a list if it's not already one. Return an empty list if
value is None.
"""
if value is None:
value = []
elif not isinstance(value, list):
value = [value]
return value
def assert_and_parse_html(self, html, user_msg, msg):
try:
dom = parse_html(html)
except HTMLParseError as e:
standardMsg = '%s\n%s' % (msg, e)
self.fail(self._formatMessage(user_msg, standardMsg))
return dom
class _AssertNumQueriesContext(CaptureQueriesContext):
def __init__(self, test_case, num, connection):
self.test_case = test_case
self.num = num
super().__init__(connection)
def __exit__(self, exc_type, exc_value, traceback):
super().__exit__(exc_type, exc_value, traceback)
if exc_type is not None:
return
executed = len(self)
self.test_case.assertEqual(
executed, self.num,
"%d queries executed, %d expected\nCaptured queries were:\n%s" % (
executed, self.num,
'\n'.join(
'%d. %s' % (i, query['sql']) for i, query in enumerate(self.captured_queries, start=1)
)
)
)
class _AssertTemplateUsedContext:
def __init__(self, test_case, template_name):
self.test_case = test_case
self.template_name = template_name
self.rendered_templates = []
self.rendered_template_names = []
self.context = ContextList()
def on_template_render(self, sender, signal, template, context, **kwargs):
self.rendered_templates.append(template)
self.rendered_template_names.append(template.name)
self.context.append(copy(context))
def test(self):
return self.template_name in self.rendered_template_names
def message(self):
return '%s was not rendered.' % self.template_name
def __enter__(self):
template_rendered.connect(self.on_template_render)
return self
def __exit__(self, exc_type, exc_value, traceback):
template_rendered.disconnect(self.on_template_render)
if exc_type is not None:
return
if not self.test():
message = self.message()
if self.rendered_templates:
message += ' Following templates were rendered: %s' % (
', '.join(self.rendered_template_names)
)
else:
message += ' No template was rendered.'
self.test_case.fail(message)
class _AssertTemplateNotUsedContext(_AssertTemplateUsedContext):
def test(self):
return self.template_name not in self.rendered_template_names
def message(self):
return '%s was rendered.' % self.template_name
class _DatabaseFailure:
def __init__(self, wrapped, message):
self.wrapped = wrapped
self.message = message
def __call__(self):
raise AssertionError(self.message)
class _SimpleTestCaseDatabasesDescriptor:
"""Descriptor for SimpleTestCase.allow_database_queries deprecation."""
def __get__(self, instance, cls=None):
try:
allow_database_queries = cls.allow_database_queries
except AttributeError:
pass
else:
msg = (
'`SimpleTestCase.allow_database_queries` is deprecated. '
'Restrict the databases available during the execution of '
'%s.%s with the `databases` attribute instead.'
) % (cls.__module__, cls.__qualname__)
warnings.warn(msg, RemovedInDjango31Warning)
if allow_database_queries:
return {DEFAULT_DB_ALIAS}
return set()
class SimpleTestCase(unittest.TestCase):
# The class we'll use for the test client self.client.
# Can be overridden in derived classes.
client_class = Client
_overridden_settings = None
_modified_settings = None
databases = _SimpleTestCaseDatabasesDescriptor()
_disallowed_database_msg = (
'Database %(operation)s to %(alias)r are not allowed in SimpleTestCase '
'subclasses. Either subclass TestCase or TransactionTestCase to ensure '
'proper test isolation or add %(alias)r to %(test)s.databases to silence '
'this failure.'
)
_disallowed_connection_methods = [
('connect', 'connections'),
('temporary_connection', 'connections'),
('cursor', 'queries'),
('chunked_cursor', 'queries'),
]
@classmethod
def setUpClass(cls):
super().setUpClass()
if cls._overridden_settings:
cls._cls_overridden_context = override_settings(**cls._overridden_settings)
cls._cls_overridden_context.enable()
if cls._modified_settings:
cls._cls_modified_context = modify_settings(cls._modified_settings)
cls._cls_modified_context.enable()
cls._add_databases_failures()
@classmethod
def _validate_databases(cls):
if cls.databases == '__all__':
return frozenset(connections)
for alias in cls.databases:
if alias not in connections:
message = '%s.%s.databases refers to %r which is not defined in settings.DATABASES.' % (
cls.__module__,
cls.__qualname__,
alias,
)
close_matches = get_close_matches(alias, list(connections))
if close_matches:
message += ' Did you mean %r?' % close_matches[0]
raise ImproperlyConfigured(message)
return frozenset(cls.databases)
@classmethod
def _add_databases_failures(cls):
cls.databases = cls._validate_databases()
for alias in connections:
if alias in cls.databases:
continue
connection = connections[alias]
for name, operation in cls._disallowed_connection_methods:
message = cls._disallowed_database_msg % {
'test': '%s.%s' % (cls.__module__, cls.__qualname__),
'alias': alias,
'operation': operation,
}
method = getattr(connection, name)
setattr(connection, name, _DatabaseFailure(method, message))
@classmethod
def _remove_databases_failures(cls):
for alias in connections:
if alias in cls.databases:
continue
connection = connections[alias]
for name, _ in cls._disallowed_connection_methods:
method = getattr(connection, name)
setattr(connection, name, method.wrapped)
@classmethod
def tearDownClass(cls):
cls._remove_databases_failures()
if hasattr(cls, '_cls_modified_context'):
cls._cls_modified_context.disable()
delattr(cls, '_cls_modified_context')
if hasattr(cls, '_cls_overridden_context'):
cls._cls_overridden_context.disable()
delattr(cls, '_cls_overridden_context')
super().tearDownClass()
def __call__(self, result=None):
"""
Wrapper around default __call__ method to perform common Django test
set up. This means that user-defined Test Cases aren't required to
include a call to super().setUp().
"""
testMethod = getattr(self, self._testMethodName)
skipped = (
getattr(self.__class__, "__unittest_skip__", False) or
getattr(testMethod, "__unittest_skip__", False)
)
if not skipped:
try:
self._pre_setup()
except Exception:
result.addError(self, sys.exc_info())
return
super().__call__(result)
if not skipped:
try:
self._post_teardown()
except Exception:
result.addError(self, sys.exc_info())
return
def _pre_setup(self):
"""
Perform pre-test setup:
* Create a test client.
* Clear the mail test outbox.
"""
self.client = self.client_class()
mail.outbox = []
def _post_teardown(self):
"""Perform post-test things."""
pass
def settings(self, **kwargs):
"""
A context manager that temporarily sets a setting and reverts to the
original value when exiting the context.
"""
return override_settings(**kwargs)
def modify_settings(self, **kwargs):
"""
        A context manager that temporarily applies changes to a list setting
        and reverts to the original value when exiting the context.
"""
return modify_settings(**kwargs)
def assertRedirects(self, response, expected_url, status_code=302,
target_status_code=200, msg_prefix='',
fetch_redirect_response=True):
"""
Assert that a response redirected to a specific URL and that the
redirect URL can be loaded.
Won't work for external links since it uses the test client to do a
request (use fetch_redirect_response=False to check such links without
fetching them).
"""
if msg_prefix:
msg_prefix += ": "
if hasattr(response, 'redirect_chain'):
# The request was a followed redirect
self.assertTrue(
response.redirect_chain,
msg_prefix + "Response didn't redirect as expected: Response code was %d (expected %d)"
% (response.status_code, status_code)
)
self.assertEqual(
response.redirect_chain[0][1], status_code,
msg_prefix + "Initial response didn't redirect as expected: Response code was %d (expected %d)"
% (response.redirect_chain[0][1], status_code)
)
url, status_code = response.redirect_chain[-1]
scheme, netloc, path, query, fragment = urlsplit(url)
self.assertEqual(
response.status_code, target_status_code,
msg_prefix + "Response didn't redirect as expected: Final Response code was %d (expected %d)"
% (response.status_code, target_status_code)
)
else:
# Not a followed redirect
self.assertEqual(
response.status_code, status_code,
msg_prefix + "Response didn't redirect as expected: Response code was %d (expected %d)"
% (response.status_code, status_code)
)
url = response.url
scheme, netloc, path, query, fragment = urlsplit(url)
# Prepend the request path to handle relative path redirects.
if not path.startswith('/'):
url = urljoin(response.request['PATH_INFO'], url)
path = urljoin(response.request['PATH_INFO'], path)
if fetch_redirect_response:
# netloc might be empty, or in cases where Django tests the
# HTTP scheme, the convention is for netloc to be 'testserver'.
# Trust both as "internal" URLs here.
domain, port = split_domain_port(netloc)
if domain and not validate_host(domain, settings.ALLOWED_HOSTS):
raise ValueError(
"The test client is unable to fetch remote URLs (got %s). "
"If the host is served by Django, add '%s' to ALLOWED_HOSTS. "
"Otherwise, use assertRedirects(..., fetch_redirect_response=False)."
% (url, domain)
)
redirect_response = response.client.get(path, QueryDict(query), secure=(scheme == 'https'))
# Get the redirection page, using the same client that was used
# to obtain the original response.
self.assertEqual(
redirect_response.status_code, target_status_code,
msg_prefix + "Couldn't retrieve redirection page '%s': response code was %d (expected %d)"
% (path, redirect_response.status_code, target_status_code)
)
self.assertURLEqual(
url, expected_url,
msg_prefix + "Response redirected to '%s', expected '%s'" % (url, expected_url)
)
def assertURLEqual(self, url1, url2, msg_prefix=''):
"""
Assert that two URLs are the same, ignoring the order of query string
parameters except for parameters with the same name.
For example, /path/?x=1&y=2 is equal to /path/?y=2&x=1, but
/path/?a=1&a=2 isn't equal to /path/?a=2&a=1.
"""
def normalize(url):
"""Sort the URL's query string parameters."""
url = str(url) # Coerce reverse_lazy() URLs.
scheme, netloc, path, params, query, fragment = urlparse(url)
query_parts = sorted(parse_qsl(query))
return urlunparse((scheme, netloc, path, params, urlencode(query_parts), fragment))
self.assertEqual(
normalize(url1), normalize(url2),
msg_prefix + "Expected '%s' to equal '%s'." % (url1, url2)
)
def _assert_contains(self, response, text, status_code, msg_prefix, html):
# If the response supports deferred rendering and hasn't been rendered
# yet, then ensure that it does get rendered before proceeding further.
if hasattr(response, 'render') and callable(response.render) and not response.is_rendered:
response.render()
if msg_prefix:
msg_prefix += ": "
self.assertEqual(
response.status_code, status_code,
msg_prefix + "Couldn't retrieve content: Response code was %d"
" (expected %d)" % (response.status_code, status_code)
)
if response.streaming:
content = b''.join(response.streaming_content)
else:
content = response.content
if not isinstance(text, bytes) or html:
text = str(text)
content = content.decode(response.charset)
text_repr = "'%s'" % text
else:
text_repr = repr(text)
if html:
content = assert_and_parse_html(self, content, None, "Response's content is not valid HTML:")
text = assert_and_parse_html(self, text, None, "Second argument is not valid HTML:")
real_count = content.count(text)
return (text_repr, real_count, msg_prefix)
def assertContains(self, response, text, count=None, status_code=200, msg_prefix='', html=False):
"""
        Assert that a response indicates that some content was retrieved
        successfully (i.e., the HTTP status code was as expected) and that
``text`` occurs ``count`` times in the content of the response.
If ``count`` is None, the count doesn't matter - the assertion is true
if the text occurs at least once in the response.
"""
text_repr, real_count, msg_prefix = self._assert_contains(
response, text, status_code, msg_prefix, html)
if count is not None:
self.assertEqual(
real_count, count,
msg_prefix + "Found %d instances of %s in response (expected %d)" % (real_count, text_repr, count)
)
else:
self.assertTrue(real_count != 0, msg_prefix + "Couldn't find %s in response" % text_repr)
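    # Illustrative calls (the URL and fragments are assumptions): count
    # plain-text occurrences, or compare HTML semantically with html=True.
    #
    #     response = self.client.get('/customers/')
    #     self.assertContains(response, 'Acme', count=2)
    #     self.assertContains(response, '<p class="note">ok</p>', html=True)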
def assertNotContains(self, response, text, status_code=200, msg_prefix='', html=False):
"""
        Assert that a response indicates that some content was retrieved
        successfully (i.e., the HTTP status code was as expected) and that
        ``text`` doesn't occur in the content of the response.
"""
text_repr, real_count, msg_prefix = self._assert_contains(
response, text, status_code, msg_prefix, html)
self.assertEqual(real_count, 0, msg_prefix + "Response should not contain %s" % text_repr)
def assertFormError(self, response, form, field, errors, msg_prefix=''):
"""
Assert that a form used to render the response has a specific field
error.
"""
if msg_prefix:
msg_prefix += ": "
# Put context(s) into a list to simplify processing.
contexts = to_list(response.context)
if not contexts:
self.fail(msg_prefix + "Response did not use any contexts to render the response")
# Put error(s) into a list to simplify processing.
errors = to_list(errors)
# Search all contexts for the error.
found_form = False
for i, context in enumerate(contexts):
if form not in context:
continue
found_form = True
for err in errors:
if field:
if field in context[form].errors:
field_errors = context[form].errors[field]
self.assertTrue(
err in field_errors,
msg_prefix + "The field '%s' on form '%s' in"
" context %d does not contain the error '%s'"
" (actual errors: %s)" %
(field, form, i, err, repr(field_errors))
)
elif field in context[form].fields:
self.fail(
msg_prefix + "The field '%s' on form '%s' in context %d contains no errors" %
(field, form, i)
)
else:
self.fail(
msg_prefix + "The form '%s' in context %d does not contain the field '%s'" %
(form, i, field)
)
else:
non_field_errors = context[form].non_field_errors()
self.assertTrue(
err in non_field_errors,
msg_prefix + "The form '%s' in context %d does not"
" contain the non-field error '%s'"
" (actual errors: %s)" %
(form, i, err, non_field_errors or 'none')
)
if not found_form:
self.fail(msg_prefix + "The form '%s' was not used to render the response" % form)
def assertFormsetError(self, response, formset, form_index, field, errors,
msg_prefix=''):
"""
Assert that a formset used to render the response has a specific error.
For field errors, specify the ``form_index`` and the ``field``.
For non-field errors, specify the ``form_index`` and the ``field`` as
None.
For non-form errors, specify ``form_index`` as None and the ``field``
as None.
"""
# Add punctuation to msg_prefix
if msg_prefix:
msg_prefix += ": "
# Put context(s) into a list to simplify processing.
contexts = to_list(response.context)
if not contexts:
self.fail(msg_prefix + 'Response did not use any contexts to '
'render the response')
# Put error(s) into a list to simplify processing.
errors = to_list(errors)
# Search all contexts for the error.
found_formset = False
for i, context in enumerate(contexts):
if formset not in context:
continue
found_formset = True
for err in errors:
if field is not None:
if field in context[formset].forms[form_index].errors:
field_errors = context[formset].forms[form_index].errors[field]
self.assertTrue(
err in field_errors,
msg_prefix + "The field '%s' on formset '%s', "
"form %d in context %d does not contain the "
"error '%s' (actual errors: %s)" %
(field, formset, form_index, i, err, repr(field_errors))
)
elif field in context[formset].forms[form_index].fields:
self.fail(
msg_prefix + "The field '%s' on formset '%s', form %d in context %d contains no errors"
% (field, formset, form_index, i)
)
else:
self.fail(
msg_prefix + "The formset '%s', form %d in context %d does not contain the field '%s'"
% (formset, form_index, i, field)
)
elif form_index is not None:
non_field_errors = context[formset].forms[form_index].non_field_errors()
self.assertFalse(
not non_field_errors,
msg_prefix + "The formset '%s', form %d in context %d "
"does not contain any non-field errors." % (formset, form_index, i)
)
self.assertTrue(
err in non_field_errors,
msg_prefix + "The formset '%s', form %d in context %d "
"does not contain the non-field error '%s' (actual errors: %s)"
% (formset, form_index, i, err, repr(non_field_errors))
)
else:
non_form_errors = context[formset].non_form_errors()
self.assertFalse(
not non_form_errors,
msg_prefix + "The formset '%s' in context %d does not "
"contain any non-form errors." % (formset, i)
)
self.assertTrue(
err in non_form_errors,
msg_prefix + "The formset '%s' in context %d does not "
"contain the non-form error '%s' (actual errors: %s)"
% (formset, i, err, repr(non_form_errors))
)
if not found_formset:
self.fail(msg_prefix + "The formset '%s' was not used to render the response" % formset)
def _assert_template_used(self, response, template_name, msg_prefix):
if response is None and template_name is None:
raise TypeError('response and/or template_name argument must be provided')
if msg_prefix:
msg_prefix += ": "
if template_name is not None and response is not None and not hasattr(response, 'templates'):
raise ValueError(
"assertTemplateUsed() and assertTemplateNotUsed() are only "
"usable on responses fetched using the Django test Client."
)
if not hasattr(response, 'templates') or (response is None and template_name):
if response:
template_name = response
response = None
# use this template with context manager
return template_name, None, msg_prefix
template_names = [t.name for t in response.templates if t.name is not None]
return None, template_names, msg_prefix
def assertTemplateUsed(self, response=None, template_name=None, msg_prefix='', count=None):
"""
Assert that the template with the provided name was used in rendering
the response. Also usable as context manager.
"""
context_mgr_template, template_names, msg_prefix = self._assert_template_used(
response, template_name, msg_prefix)
if context_mgr_template:
# Use assertTemplateUsed as context manager.
return _AssertTemplateUsedContext(self, context_mgr_template)
if not template_names:
self.fail(msg_prefix + "No templates used to render the response")
self.assertTrue(
template_name in template_names,
msg_prefix + "Template '%s' was not a template used to render"
" the response. Actual template(s) used: %s"
% (template_name, ', '.join(template_names))
)
if count is not None:
self.assertEqual(
template_names.count(template_name), count,
msg_prefix + "Template '%s' was expected to be rendered %d "
"time(s) but was actually rendered %d time(s)."
% (template_name, count, template_names.count(template_name))
)
def assertTemplateNotUsed(self, response=None, template_name=None, msg_prefix=''):
"""
Assert that the template with the provided name was NOT used in
rendering the response. Also usable as context manager.
"""
context_mgr_template, template_names, msg_prefix = self._assert_template_used(
response, template_name, msg_prefix
)
if context_mgr_template:
# Use assertTemplateNotUsed as context manager.
return _AssertTemplateNotUsedContext(self, context_mgr_template)
self.assertFalse(
template_name in template_names,
msg_prefix + "Template '%s' was used unexpectedly in rendering the response" % template_name
)
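    # A short sketch of the context-manager form (the template name is an
    # assumption; render_to_string comes from django.template.loader):
    #
    #     with self.assertTemplateUsed('index.html'):
    #         render_to_string('index.html')
    #     with self.assertTemplateNotUsed('base.html'):
    #         render_to_string('index.html')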
@contextmanager
def _assert_raises_or_warns_cm(self, func, cm_attr, expected_exception, expected_message):
with func(expected_exception) as cm:
yield cm
self.assertIn(expected_message, str(getattr(cm, cm_attr)))
def _assertFooMessage(self, func, cm_attr, expected_exception, expected_message, *args, **kwargs):
callable_obj = None
if args:
callable_obj, *args = args
cm = self._assert_raises_or_warns_cm(func, cm_attr, expected_exception, expected_message)
# Assertion used in context manager fashion.
if callable_obj is None:
return cm
# Assertion was passed a callable.
with cm:
callable_obj(*args, **kwargs)
def assertRaisesMessage(self, expected_exception, expected_message, *args, **kwargs):
"""
Assert that expected_message is found in the message of a raised
exception.
Args:
expected_exception: Exception class expected to be raised.
expected_message: expected error message string value.
args: Function to be called and extra positional args.
kwargs: Extra kwargs.
"""
return self._assertFooMessage(
self.assertRaises, 'exception', expected_exception, expected_message,
*args, **kwargs
)
def assertWarnsMessage(self, expected_warning, expected_message, *args, **kwargs):
"""
Same as assertRaisesMessage but for assertWarns() instead of
assertRaises().
"""
return self._assertFooMessage(
self.assertWarns, 'warning', expected_warning, expected_message,
*args, **kwargs
)
def assertFieldOutput(self, fieldclass, valid, invalid, field_args=None,
field_kwargs=None, empty_value=''):
"""
Assert that a form field behaves correctly with various inputs.
Args:
fieldclass: the class of the field to be tested.
valid: a dictionary mapping valid inputs to their expected
cleaned values.
invalid: a dictionary mapping invalid inputs to one or more
raised error messages.
field_args: the args passed to instantiate the field
field_kwargs: the kwargs passed to instantiate the field
empty_value: the expected clean output for inputs in empty_values
"""
if field_args is None:
field_args = []
if field_kwargs is None:
field_kwargs = {}
required = fieldclass(*field_args, **field_kwargs)
optional = fieldclass(*field_args, **{**field_kwargs, 'required': False})
# test valid inputs
for input, output in valid.items():
self.assertEqual(required.clean(input), output)
self.assertEqual(optional.clean(input), output)
# test invalid inputs
for input, errors in invalid.items():
with self.assertRaises(ValidationError) as context_manager:
required.clean(input)
self.assertEqual(context_manager.exception.messages, errors)
with self.assertRaises(ValidationError) as context_manager:
optional.clean(input)
self.assertEqual(context_manager.exception.messages, errors)
# test required inputs
error_required = [required.error_messages['required']]
for e in required.empty_values:
with self.assertRaises(ValidationError) as context_manager:
required.clean(e)
self.assertEqual(context_manager.exception.messages, error_required)
self.assertEqual(optional.clean(e), empty_value)
# test that max_length and min_length are always accepted
if issubclass(fieldclass, CharField):
field_kwargs.update({'min_length': 2, 'max_length': 20})
self.assertIsInstance(fieldclass(*field_args, **field_kwargs), fieldclass)
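    # Illustrative usage (EmailField comes from django.forms; the error text
    # is an assumption that may vary by Django version):
    #
    #     self.assertFieldOutput(
    #         EmailField,
    #         valid={'a@a.com': 'a@a.com'},
    #         invalid={'aaa': ['Enter a valid email address.']},
    #     )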
def assertHTMLEqual(self, html1, html2, msg=None):
"""
Assert that two HTML snippets are semantically the same.
Whitespace in most cases is ignored, and attribute ordering is not
significant. The arguments must be valid HTML.
"""
dom1 = assert_and_parse_html(self, html1, msg, 'First argument is not valid HTML:')
dom2 = assert_and_parse_html(self, html2, msg, 'Second argument is not valid HTML:')
if dom1 != dom2:
standardMsg = '%s != %s' % (
safe_repr(dom1, True), safe_repr(dom2, True))
diff = ('\n' + '\n'.join(difflib.ndiff(
str(dom1).splitlines(), str(dom2).splitlines(),
)))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertHTMLNotEqual(self, html1, html2, msg=None):
"""Assert that two HTML snippets are not semantically equivalent."""
dom1 = assert_and_parse_html(self, html1, msg, 'First argument is not valid HTML:')
dom2 = assert_and_parse_html(self, html2, msg, 'Second argument is not valid HTML:')
if dom1 == dom2:
standardMsg = '%s == %s' % (
safe_repr(dom1, True), safe_repr(dom2, True))
self.fail(self._formatMessage(msg, standardMsg))
def assertInHTML(self, needle, haystack, count=None, msg_prefix=''):
needle = assert_and_parse_html(self, needle, None, 'First argument is not valid HTML:')
haystack = assert_and_parse_html(self, haystack, None, 'Second argument is not valid HTML:')
real_count = haystack.count(needle)
if count is not None:
self.assertEqual(
real_count, count,
msg_prefix + "Found %d instances of '%s' in response (expected %d)" % (real_count, needle, count)
)
else:
self.assertTrue(real_count != 0, msg_prefix + "Couldn't find '%s' in response" % needle)
def assertJSONEqual(self, raw, expected_data, msg=None):
"""
Assert that the JSON fragments raw and expected_data are equal.
        Usual JSON non-significant whitespace rules apply; the heavy lifting
        is delegated to the json library.
"""
try:
data = json.loads(raw)
except json.JSONDecodeError:
self.fail("First argument is not valid JSON: %r" % raw)
if isinstance(expected_data, str):
try:
expected_data = json.loads(expected_data)
except ValueError:
self.fail("Second argument is not valid JSON: %r" % expected_data)
self.assertEqual(data, expected_data, msg=msg)
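    # A short sketch: the first argument is raw JSON text, the second may be
    # a string or an already-parsed structure.
    #
    #     self.assertJSONEqual('{"a": 1, "b": 2}', {'b': 2, 'a': 1})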
def assertJSONNotEqual(self, raw, expected_data, msg=None):
"""
Assert that the JSON fragments raw and expected_data are not equal.
        Usual JSON non-significant whitespace rules apply; the heavy lifting
        is delegated to the json library.
"""
try:
data = json.loads(raw)
except json.JSONDecodeError:
self.fail("First argument is not valid JSON: %r" % raw)
if isinstance(expected_data, str):
try:
expected_data = json.loads(expected_data)
except json.JSONDecodeError:
self.fail("Second argument is not valid JSON: %r" % expected_data)
self.assertNotEqual(data, expected_data, msg=msg)
def assertXMLEqual(self, xml1, xml2, msg=None):
"""
Assert that two XML snippets are semantically the same.
Whitespace in most cases is ignored and attribute ordering is not
significant. The arguments must be valid XML.
"""
try:
result = compare_xml(xml1, xml2)
except Exception as e:
standardMsg = 'First or second argument is not valid XML\n%s' % e
self.fail(self._formatMessage(msg, standardMsg))
else:
if not result:
standardMsg = '%s != %s' % (safe_repr(xml1, True), safe_repr(xml2, True))
diff = ('\n' + '\n'.join(
difflib.ndiff(xml1.splitlines(), xml2.splitlines())
))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertXMLNotEqual(self, xml1, xml2, msg=None):
"""
Assert that two XML snippets are not semantically equivalent.
Whitespace in most cases is ignored and attribute ordering is not
significant. The arguments must be valid XML.
"""
try:
result = compare_xml(xml1, xml2)
except Exception as e:
standardMsg = 'First or second argument is not valid XML\n%s' % e
self.fail(self._formatMessage(msg, standardMsg))
else:
if result:
standardMsg = '%s == %s' % (safe_repr(xml1, True), safe_repr(xml2, True))
self.fail(self._formatMessage(msg, standardMsg))
class _TransactionTestCaseDatabasesDescriptor:
"""Descriptor for TransactionTestCase.multi_db deprecation."""
msg = (
'`TransactionTestCase.multi_db` is deprecated. Databases available '
'during this test can be defined using %s.%s.databases.'
)
def __get__(self, instance, cls=None):
try:
multi_db = cls.multi_db
except AttributeError:
pass
else:
msg = self.msg % (cls.__module__, cls.__qualname__)
warnings.warn(msg, RemovedInDjango31Warning)
if multi_db:
return set(connections)
return {DEFAULT_DB_ALIAS}
class TransactionTestCase(SimpleTestCase):
# Subclasses can ask for resetting of auto increment sequence before each
# test case
reset_sequences = False
# Subclasses can enable only a subset of apps for faster tests
available_apps = None
# Subclasses can define fixtures which will be automatically installed.
fixtures = None
databases = _TransactionTestCaseDatabasesDescriptor()
_disallowed_database_msg = (
'Database %(operation)s to %(alias)r are not allowed in this test. '
'Add %(alias)r to %(test)s.databases to ensure proper test isolation '
'and silence this failure.'
)
# If transactions aren't available, Django will serialize the database
# contents into a fixture during setup and flush and reload them
# during teardown (as flush does not restore data from migrations).
# This can be slow; this flag allows enabling on a per-case basis.
serialized_rollback = False
def _pre_setup(self):
"""
Perform pre-test setup:
* If the class has an 'available_apps' attribute, restrict the app
registry to these applications, then fire the post_migrate signal --
it must run with the correct set of applications for the test case.
* If the class has a 'fixtures' attribute, install those fixtures.
"""
super()._pre_setup()
if self.available_apps is not None:
apps.set_available_apps(self.available_apps)
setting_changed.send(
sender=settings._wrapped.__class__,
setting='INSTALLED_APPS',
value=self.available_apps,
enter=True,
)
for db_name in self._databases_names(include_mirrors=False):
emit_post_migrate_signal(verbosity=0, interactive=False, db=db_name)
try:
self._fixture_setup()
except Exception:
if self.available_apps is not None:
apps.unset_available_apps()
setting_changed.send(
sender=settings._wrapped.__class__,
setting='INSTALLED_APPS',
value=settings.INSTALLED_APPS,
enter=False,
)
raise
# Clear the queries_log so that it's less likely to overflow (a single
# test probably won't execute 9K queries). If queries_log overflows,
# then assertNumQueries() doesn't work.
for db_name in self._databases_names(include_mirrors=False):
connections[db_name].queries_log.clear()
@classmethod
def _databases_names(cls, include_mirrors=True):
# Only consider allowed database aliases, including mirrors or not.
return [
alias for alias in connections
if alias in cls.databases and (
include_mirrors or not connections[alias].settings_dict['TEST']['MIRROR']
)
]
def _reset_sequences(self, db_name):
conn = connections[db_name]
if conn.features.supports_sequence_reset:
sql_list = conn.ops.sequence_reset_by_name_sql(
no_style(), conn.introspection.sequence_list())
if sql_list:
with transaction.atomic(using=db_name):
with conn.cursor() as cursor:
for sql in sql_list:
cursor.execute(sql)
def _fixture_setup(self):
for db_name in self._databases_names(include_mirrors=False):
# Reset sequences
if self.reset_sequences:
self._reset_sequences(db_name)
# Provide replica initial data from migrated apps, if needed.
if self.serialized_rollback and hasattr(connections[db_name], "_test_serialized_contents"):
if self.available_apps is not None:
apps.unset_available_apps()
connections[db_name].creation.deserialize_db_from_string(
connections[db_name]._test_serialized_contents
)
if self.available_apps is not None:
apps.set_available_apps(self.available_apps)
if self.fixtures:
                # We have to use this slightly awkward syntax because we're
                # using *args and **kwargs together.
call_command('loaddata', *self.fixtures,
**{'verbosity': 0, 'database': db_name})
def _should_reload_connections(self):
return True
def _post_teardown(self):
"""
Perform post-test things:
* Flush the contents of the database to leave a clean slate. If the
class has an 'available_apps' attribute, don't fire post_migrate.
* Force-close the connection so the next test gets a clean cursor.
"""
try:
self._fixture_teardown()
super()._post_teardown()
if self._should_reload_connections():
# Some DB cursors include SQL statements as part of cursor
# creation. If you have a test that does a rollback, the effect
# of these statements is lost, which can affect the operation of
# tests (e.g., losing a timezone setting causing objects to be
# created with the wrong time). To make sure this doesn't
# happen, get a clean connection at the start of every test.
for conn in connections.all():
conn.close()
finally:
if self.available_apps is not None:
apps.unset_available_apps()
setting_changed.send(sender=settings._wrapped.__class__,
setting='INSTALLED_APPS',
value=settings.INSTALLED_APPS,
enter=False)
def _fixture_teardown(self):
# Allow TRUNCATE ... CASCADE and don't emit the post_migrate signal
# when flushing only a subset of the apps
for db_name in self._databases_names(include_mirrors=False):
# Flush the database
inhibit_post_migrate = (
self.available_apps is not None or
( # Inhibit the post_migrate signal when using serialized
# rollback to avoid trying to recreate the serialized data.
self.serialized_rollback and
hasattr(connections[db_name], '_test_serialized_contents')
)
)
call_command('flush', verbosity=0, interactive=False,
database=db_name, reset_sequences=False,
allow_cascade=self.available_apps is not None,
inhibit_post_migrate=inhibit_post_migrate)
def assertQuerysetEqual(self, qs, values, transform=repr, ordered=True, msg=None):
items = map(transform, qs)
if not ordered:
return self.assertEqual(Counter(items), Counter(values), msg=msg)
values = list(values)
        # For example qs.iterator() could be passed as qs, but it does not
        # have an 'ordered' attribute.
if len(values) > 1 and hasattr(qs, 'ordered') and not qs.ordered:
raise ValueError("Trying to compare non-ordered queryset "
"against more than one ordered values")
return self.assertEqual(list(items), values, msg=msg)
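    # Illustrative usage (the Entry model and its __str__ output are
    # assumptions); with the default transform=repr, expected values are the
    # reprs of the objects.
    #
    #     self.assertQuerysetEqual(
    #         Entry.objects.order_by('pk'),
    #         ['<Entry: first>', '<Entry: second>'],
    #     )
    #     # Pass ordered=False to compare as multisets instead.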
def assertNumQueries(self, num, func=None, *args, using=DEFAULT_DB_ALIAS, **kwargs):
conn = connections[using]
context = _AssertNumQueriesContext(self, num, conn)
if func is None:
return context
with context:
func(*args, **kwargs)
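    # Both calling styles, as a sketch (the Entry model is an assumption):
    #
    #     with self.assertNumQueries(1):
    #         Entry.objects.count()
    #     self.assertNumQueries(1, Entry.objects.count)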
def connections_support_transactions(aliases=None):
"""
Return whether or not all (or specified) connections support
transactions.
"""
conns = connections.all() if aliases is None else (connections[alias] for alias in aliases)
return all(conn.features.supports_transactions for conn in conns)
class _TestCaseDatabasesDescriptor(_TransactionTestCaseDatabasesDescriptor):
"""Descriptor for TestCase.multi_db deprecation."""
msg = (
'`TestCase.multi_db` is deprecated. Databases available during this '
'test can be defined using %s.%s.databases.'
)
class TestCase(TransactionTestCase):
"""
Similar to TransactionTestCase, but use `transaction.atomic()` to achieve
test isolation.
In most situations, TestCase should be preferred to TransactionTestCase as
it allows faster execution. However, there are some situations where using
TransactionTestCase might be necessary (e.g. testing some transactional
behavior).
On database backends with no transaction support, TestCase behaves as
TransactionTestCase.
"""
databases = _TestCaseDatabasesDescriptor()
@classmethod
def _enter_atomics(cls):
"""Open atomic blocks for multiple databases."""
atomics = {}
for db_name in cls._databases_names():
atomics[db_name] = transaction.atomic(using=db_name)
atomics[db_name].__enter__()
return atomics
@classmethod
def _rollback_atomics(cls, atomics):
"""Rollback atomic blocks opened by the previous method."""
for db_name in reversed(cls._databases_names()):
transaction.set_rollback(True, using=db_name)
atomics[db_name].__exit__(None, None, None)
@classmethod
def _databases_support_transactions(cls):
return connections_support_transactions(cls.databases)
@classmethod
def setUpClass(cls):
super().setUpClass()
if not cls._databases_support_transactions():
return
cls.cls_atomics = cls._enter_atomics()
if cls.fixtures:
for db_name in cls._databases_names(include_mirrors=False):
try:
call_command('loaddata', *cls.fixtures, **{'verbosity': 0, 'database': db_name})
except Exception:
cls._rollback_atomics(cls.cls_atomics)
cls._remove_databases_failures()
raise
try:
cls.setUpTestData()
except Exception:
cls._rollback_atomics(cls.cls_atomics)
cls._remove_databases_failures()
raise
@classmethod
def tearDownClass(cls):
if cls._databases_support_transactions():
cls._rollback_atomics(cls.cls_atomics)
for conn in connections.all():
conn.close()
super().tearDownClass()
@classmethod
def setUpTestData(cls):
"""Load initial data for the TestCase."""
pass
def _should_reload_connections(self):
if self._databases_support_transactions():
return False
return super()._should_reload_connections()
def _fixture_setup(self):
if not self._databases_support_transactions():
# If the backend does not support transactions, we should reload
# class data before each test
self.setUpTestData()
return super()._fixture_setup()
assert not self.reset_sequences, 'reset_sequences cannot be used on TestCase instances'
self.atomics = self._enter_atomics()
def _fixture_teardown(self):
if not self._databases_support_transactions():
return super()._fixture_teardown()
try:
for db_name in reversed(self._databases_names()):
if self._should_check_constraints(connections[db_name]):
connections[db_name].check_constraints()
finally:
self._rollback_atomics(self.atomics)
def _should_check_constraints(self, connection):
return (
connection.features.can_defer_constraint_checks and
not connection.needs_rollback and connection.is_usable()
)
class CheckCondition:
"""Descriptor class for deferred condition checking."""
def __init__(self, *conditions):
self.conditions = conditions
def add_condition(self, condition, reason):
return self.__class__(*self.conditions, (condition, reason))
def __get__(self, instance, cls=None):
# Trigger access for all bases.
if any(getattr(base, '__unittest_skip__', False) for base in cls.__bases__):
return True
for condition, reason in self.conditions:
if condition():
# Override this descriptor's value and set the skip reason.
cls.__unittest_skip__ = True
cls.__unittest_skip_why__ = reason
return True
return False
def _deferredSkip(condition, reason, name):
def decorator(test_func):
nonlocal condition
if not (isinstance(test_func, type) and
issubclass(test_func, unittest.TestCase)):
@wraps(test_func)
def skip_wrapper(*args, **kwargs):
if (args and isinstance(args[0], unittest.TestCase) and
connection.alias not in getattr(args[0], 'databases', {})):
raise ValueError(
"%s cannot be used on %s as %s doesn't allow queries "
"against the %r database." % (
name,
args[0],
args[0].__class__.__qualname__,
connection.alias,
)
)
if condition():
raise unittest.SkipTest(reason)
return test_func(*args, **kwargs)
test_item = skip_wrapper
else:
# Assume a class is decorated
test_item = test_func
databases = getattr(test_item, 'databases', None)
if not databases or connection.alias not in databases:
# Defer raising to allow importing test class's module.
def condition():
raise ValueError(
"%s cannot be used on %s as it doesn't allow queries "
"against the '%s' database." % (
name, test_item, connection.alias,
)
)
# Retrieve the possibly existing value from the class's dict to
# avoid triggering the descriptor.
skip = test_func.__dict__.get('__unittest_skip__')
if isinstance(skip, CheckCondition):
test_item.__unittest_skip__ = skip.add_condition(condition, reason)
elif skip is not True:
test_item.__unittest_skip__ = CheckCondition((condition, reason))
return test_item
return decorator
def skipIfDBFeature(*features):
"""Skip a test if a database has at least one of the named features."""
return _deferredSkip(
lambda: any(getattr(connection.features, feature, False) for feature in features),
"Database has feature(s) %s" % ", ".join(features),
'skipIfDBFeature',
)
def skipUnlessDBFeature(*features):
"""Skip a test unless a database has all the named features."""
return _deferredSkip(
lambda: not all(getattr(connection.features, feature, False) for feature in features),
"Database doesn't support feature(s): %s" % ", ".join(features),
'skipUnlessDBFeature',
)
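# A short sketch of applying the decorators above (these feature flags exist
# on Django's database feature classes):
#
#     @skipUnlessDBFeature('supports_transactions')
#     def test_transaction_behavior(self):
#         ...
#
#     @skipIfDBFeature('interprets_empty_strings_as_nulls')
#     def test_empty_string(self):
#         ...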
def skipUnlessAnyDBFeature(*features):
"""Skip a test unless a database has any of the named features."""
return _deferredSkip(
lambda: not any(getattr(connection.features, feature, False) for feature in features),
"Database doesn't support any of the feature(s): %s" % ", ".join(features),
'skipUnlessAnyDBFeature',
)
class QuietWSGIRequestHandler(WSGIRequestHandler):
"""
A WSGIRequestHandler that doesn't log to standard output any of the
requests received, so as to not clutter the test result output.
"""
def log_message(*args):
pass
class FSFilesHandler(WSGIHandler):
"""
WSGI middleware that intercepts calls to a directory, as defined by one of
the *_ROOT settings, and serves those files, publishing them under *_URL.
"""
def __init__(self, application):
self.application = application
self.base_url = urlparse(self.get_base_url())
super().__init__()
def _should_handle(self, path):
"""
Check if the path should be handled. Ignore the path if:
* the host is provided as part of the base_url
* the request's path isn't under the media path (or equal)
"""
return path.startswith(self.base_url[2]) and not self.base_url[1]
def file_path(self, url):
"""Return the relative path to the file on disk for the given URL."""
relative_url = url[len(self.base_url[2]):]
return url2pathname(relative_url)
def get_response(self, request):
from django.http import Http404
if self._should_handle(request.path):
try:
return self.serve(request)
except Http404:
pass
return super().get_response(request)
def serve(self, request):
os_rel_path = self.file_path(request.path)
os_rel_path = posixpath.normpath(unquote(os_rel_path))
# Emulate behavior of django.contrib.staticfiles.views.serve() when it
# invokes staticfiles' finders functionality.
# TODO: Modify if/when that internal API is refactored
final_rel_path = os_rel_path.replace('\\', '/').lstrip('/')
return serve(request, final_rel_path, document_root=self.get_base_dir())
def __call__(self, environ, start_response):
if not self._should_handle(get_path_info(environ)):
return self.application(environ, start_response)
return super().__call__(environ, start_response)
class _StaticFilesHandler(FSFilesHandler):
"""
Handler for serving static files. A private class that is meant to be used
solely as a convenience by LiveServerThread.
"""
def get_base_dir(self):
return settings.STATIC_ROOT
def get_base_url(self):
return settings.STATIC_URL
class _MediaFilesHandler(FSFilesHandler):
"""
Handler for serving the media files. A private class that is meant to be
used solely as a convenience by LiveServerThread.
"""
def get_base_dir(self):
return settings.MEDIA_ROOT
def get_base_url(self):
return settings.MEDIA_URL
class LiveServerThread(threading.Thread):
"""Thread for running a live http server while the tests are running."""
def __init__(self, host, static_handler, connections_override=None, port=0):
self.host = host
self.port = port
self.is_ready = threading.Event()
self.error = None
self.static_handler = static_handler
self.connections_override = connections_override
super().__init__()
def run(self):
"""
Set up the live server and databases, and then loop over handling
HTTP requests.
"""
if self.connections_override:
# Override this thread's database connections with the ones
# provided by the main thread.
for alias, conn in self.connections_override.items():
connections[alias] = conn
try:
# Create the handler for serving static and media files
handler = self.static_handler(_MediaFilesHandler(WSGIHandler()))
self.httpd = self._create_server()
# If binding to port zero, assign the port allocated by the OS.
if self.port == 0:
self.port = self.httpd.server_address[1]
self.httpd.set_app(handler)
self.is_ready.set()
self.httpd.serve_forever()
except Exception as e:
self.error = e
self.is_ready.set()
finally:
connections.close_all()
def _create_server(self):
return ThreadedWSGIServer((self.host, self.port), QuietWSGIRequestHandler, allow_reuse_address=False)
def terminate(self):
if hasattr(self, 'httpd'):
# Stop the WSGI server
self.httpd.shutdown()
self.httpd.server_close()
self.join()
class LiveServerTestCase(TransactionTestCase):
"""
Do basically the same as TransactionTestCase but also launch a live HTTP
server in a separate thread so that the tests may use another testing
framework, such as Selenium for example, instead of the built-in dummy
client.
It inherits from TransactionTestCase instead of TestCase because the
    threads don't share the same transactions (unless using in-memory sqlite)
    and each thread needs to commit all its transactions so that the other
    thread can see the changes.
"""
host = 'localhost'
port = 0
server_thread_class = LiveServerThread
static_handler = _StaticFilesHandler
@classproperty
def live_server_url(cls):
return 'http://%s:%s' % (cls.host, cls.server_thread.port)
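    # Illustrative use from a Selenium-based test (the selenium attribute and
    # URL are assumptions, not part of this class):
    #
    #     self.selenium.get('%s%s' % (self.live_server_url, '/login/'))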
@classproperty
def allowed_host(cls):
return cls.host
@classmethod
def setUpClass(cls):
super().setUpClass()
connections_override = {}
for conn in connections.all():
# If using in-memory sqlite databases, pass the connections to
# the server thread.
if conn.vendor == 'sqlite' and conn.is_in_memory_db():
# Explicitly enable thread-shareability for this connection
conn.inc_thread_sharing()
connections_override[conn.alias] = conn
cls._live_server_modified_settings = modify_settings(
ALLOWED_HOSTS={'append': cls.allowed_host},
)
cls._live_server_modified_settings.enable()
cls.server_thread = cls._create_server_thread(connections_override)
cls.server_thread.daemon = True
cls.server_thread.start()
# Wait for the live server to be ready
cls.server_thread.is_ready.wait()
if cls.server_thread.error:
# Clean up behind ourselves, since tearDownClass won't get called in
# case of errors.
cls._tearDownClassInternal()
raise cls.server_thread.error
@classmethod
def _create_server_thread(cls, connections_override):
return cls.server_thread_class(
cls.host,
cls.static_handler,
connections_override=connections_override,
port=cls.port,
)
@classmethod
def _tearDownClassInternal(cls):
        # There may not be a 'server_thread' attribute if setUpClass() raised
        # an exception for some reason.
if hasattr(cls, 'server_thread'):
# Terminate the live server's thread
cls.server_thread.terminate()
# Restore sqlite in-memory database connections' non-shareability.
for conn in cls.server_thread.connections_override.values():
conn.dec_thread_sharing()
@classmethod
def tearDownClass(cls):
cls._tearDownClassInternal()
cls._live_server_modified_settings.disable()
super().tearDownClass()
class SerializeMixin:
"""
Enforce serialization of TestCases that share a common resource.
Define a common 'lockfile' for each set of TestCases to serialize. This
file must exist on the filesystem.
Place it early in the MRO in order to isolate setUpClass()/tearDownClass().
"""
lockfile = None
@classmethod
def setUpClass(cls):
if cls.lockfile is None:
raise ValueError(
"{}.lockfile isn't set. Set it to a unique value "
"in the base class.".format(cls.__name__))
cls._lockfile = open(cls.lockfile)
locks.lock(cls._lockfile, locks.LOCK_EX)
super().setUpClass()
@classmethod
def tearDownClass(cls):
super().tearDownClass()
cls._lockfile.close()
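    # A minimal usage sketch, mirroring the intended pattern (the class name
    # is an assumption): tests sharing a resource lock on a common file.
    #
    #     class TestSharedDisk(SerializeMixin, SimpleTestCase):
    #         lockfile = __file__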
|
3e73a48bc3983833f9c121e5e20a6de980ff8d86a0cf697c8b236642398b8a57 | """Compare two HTML documents."""
import re
from html.parser import HTMLParser
# ASCII whitespace is U+0009 TAB, U+000A LF, U+000C FF, U+000D CR, or U+0020
# SPACE.
# https://infra.spec.whatwg.org/#ascii-whitespace
ASCII_WHITESPACE = re.compile(r'[\t\n\f\r ]+')
def normalize_whitespace(string):
return ASCII_WHITESPACE.sub(' ', string)
class Element:
def __init__(self, name, attributes):
self.name = name
self.attributes = sorted(attributes)
self.children = []
def append(self, element):
if isinstance(element, str):
element = normalize_whitespace(element)
if self.children:
if isinstance(self.children[-1], str):
self.children[-1] += element
self.children[-1] = normalize_whitespace(self.children[-1])
return
elif self.children:
            # Remove the last child if it is only whitespace. This can result
            # in incorrect DOM representations since whitespace between
            # inline tags like <span> is significant.
if isinstance(self.children[-1], str):
if self.children[-1].isspace():
self.children.pop()
if element:
self.children.append(element)
def finalize(self):
def rstrip_last_element(children):
if children:
if isinstance(children[-1], str):
children[-1] = children[-1].rstrip()
if not children[-1]:
children.pop()
children = rstrip_last_element(children)
return children
rstrip_last_element(self.children)
for i, child in enumerate(self.children):
if isinstance(child, str):
self.children[i] = child.strip()
elif hasattr(child, 'finalize'):
child.finalize()
def __eq__(self, element):
if not hasattr(element, 'name') or self.name != element.name:
return False
if len(self.attributes) != len(element.attributes):
return False
if self.attributes != element.attributes:
            # An attribute without a value is the same as an attribute whose
            # value equals the attribute's name:
            # <input checked> == <input checked="checked">
for i in range(len(self.attributes)):
attr, value = self.attributes[i]
other_attr, other_value = element.attributes[i]
if value is None:
value = attr
if other_value is None:
other_value = other_attr
if attr != other_attr or value != other_value:
return False
return self.children == element.children
def __hash__(self):
return hash((self.name, *self.attributes))
def _count(self, element, count=True):
if not isinstance(element, str):
if self == element:
return 1
if isinstance(element, RootElement):
if self.children == element.children:
return 1
i = 0
for child in self.children:
            # If child and element are both text content, do a simple
            # "text in text" containment/count check.
if isinstance(child, str):
if isinstance(element, str):
if count:
i += child.count(element)
elif element in child:
return 1
else:
i += child._count(element, count=count)
if not count and i:
return i
return i
def __contains__(self, element):
return self._count(element, count=False) > 0
def count(self, element):
return self._count(element, count=True)
def __getitem__(self, key):
return self.children[key]
def __str__(self):
output = '<%s' % self.name
for key, value in self.attributes:
if value:
output += ' %s="%s"' % (key, value)
else:
output += ' %s' % key
if self.children:
output += '>\n'
output += ''.join(str(c) for c in self.children)
output += '\n</%s>' % self.name
else:
output += '>'
return output
def __repr__(self):
return str(self)
class RootElement(Element):
def __init__(self):
super().__init__(None, ())
def __str__(self):
return ''.join(str(c) for c in self.children)
class HTMLParseError(Exception):
pass
class Parser(HTMLParser):
# https://html.spec.whatwg.org/#void-elements
SELF_CLOSING_TAGS = {
'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'link', 'meta',
'param', 'source', 'track', 'wbr',
# Deprecated tags
'frame', 'spacer',
}
def __init__(self):
super().__init__()
self.root = RootElement()
self.open_tags = []
self.element_positions = {}
def error(self, msg):
raise HTMLParseError(msg, self.getpos())
def format_position(self, position=None, element=None):
if not position and element:
position = self.element_positions[element]
if position is None:
position = self.getpos()
if hasattr(position, 'lineno'):
position = position.lineno, position.offset
return 'Line %d, Column %d' % position
@property
def current(self):
if self.open_tags:
return self.open_tags[-1]
else:
return self.root
def handle_startendtag(self, tag, attrs):
self.handle_starttag(tag, attrs)
if tag not in self.SELF_CLOSING_TAGS:
self.handle_endtag(tag)
def handle_starttag(self, tag, attrs):
# Special case handling of 'class' attribute, so that comparisons of DOM
# instances are not sensitive to ordering of classes.
attrs = [
(name, ' '.join(sorted(value for value in ASCII_WHITESPACE.split(value) if value)))
if name == "class"
else (name, value)
for name, value in attrs
]
element = Element(tag, attrs)
self.current.append(element)
if tag not in self.SELF_CLOSING_TAGS:
self.open_tags.append(element)
self.element_positions[element] = self.getpos()
def handle_endtag(self, tag):
if not self.open_tags:
self.error("Unexpected end tag `%s` (%s)" % (
tag, self.format_position()))
element = self.open_tags.pop()
while element.name != tag:
if not self.open_tags:
self.error("Unexpected end tag `%s` (%s)" % (
tag, self.format_position()))
element = self.open_tags.pop()
def handle_data(self, data):
self.current.append(data)
def parse_html(html):
"""
Take a string that contains *valid* HTML and turn it into a Python object
structure that can be easily compared against other HTML on semantic
equivalence. Syntactical differences like which quotation is used on
arguments will be ignored.
"""
parser = Parser()
parser.feed(html)
parser.close()
document = parser.root
document.finalize()
    # Remove the root element if it isn't necessary.
if len(document.children) == 1:
if not isinstance(document.children[0], str):
document = document.children[0]
return document
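# A short sketch of semantic HTML comparison with parse_html (the fragments
# are illustrative): quoting style and class ordering don't affect equality.
#
#     parse_html('<p class="a b">x</p>') == parse_html("<p class='b a'>x</p>")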
|
4de3b1a8a3ea332f5dc26f9408a4d7f1e5ad3ab5d20bc193b7594fb628cdedbf | import logging
import re
import sys
import time
import warnings
from contextlib import contextmanager
from functools import wraps
from io import StringIO
from itertools import chain
from types import SimpleNamespace
from unittest import TestCase, skipIf, skipUnless
from xml.dom.minidom import Node, parseString
from django.apps import apps
from django.apps.registry import Apps
from django.conf import UserSettingsHolder, settings
from django.core import mail
from django.core.exceptions import ImproperlyConfigured
from django.core.signals import request_started
from django.db import DEFAULT_DB_ALIAS, connections, reset_queries
from django.db.models.options import Options
from django.template import Template
from django.test.signals import setting_changed, template_rendered
from django.urls import get_script_prefix, set_script_prefix
from django.utils.translation import deactivate
try:
import jinja2
except ImportError:
jinja2 = None
__all__ = (
'Approximate', 'ContextList', 'isolate_lru_cache', 'get_runner',
'modify_settings', 'override_settings',
'requires_tz_support',
'setup_test_environment', 'teardown_test_environment',
)
TZ_SUPPORT = hasattr(time, 'tzset')
class Approximate:
def __init__(self, val, places=7):
self.val = val
self.places = places
def __repr__(self):
return repr(self.val)
def __eq__(self, other):
return self.val == other or round(abs(self.val - other), self.places) == 0
class ContextList(list):
"""
A wrapper that provides direct key access to context items contained
in a list of context objects.
"""
def __getitem__(self, key):
if isinstance(key, str):
for subcontext in self:
if key in subcontext:
return subcontext[key]
raise KeyError(key)
else:
return super().__getitem__(key)
def get(self, key, default=None):
try:
return self.__getitem__(key)
except KeyError:
return default
def __contains__(self, key):
try:
self[key]
except KeyError:
return False
return True
def keys(self):
"""
Flattened keys of subcontexts.
"""
return set(chain.from_iterable(d for subcontext in self for d in subcontext))
def instrumented_test_render(self, context):
"""
An instrumented Template render method, providing a signal that can be
intercepted by the test Client.
"""
template_rendered.send(sender=self, template=self, context=context)
return self.nodelist.render(context)
class _TestState:
pass
def setup_test_environment(debug=None):
"""
Perform global pre-test setup, such as installing the instrumented template
renderer and setting the email backend to the locmem email backend.
"""
if hasattr(_TestState, 'saved_data'):
# Executing this function twice would overwrite the saved values.
raise RuntimeError(
"setup_test_environment() was already called and can't be called "
"again without first calling teardown_test_environment()."
)
if debug is None:
debug = settings.DEBUG
saved_data = SimpleNamespace()
_TestState.saved_data = saved_data
saved_data.allowed_hosts = settings.ALLOWED_HOSTS
# Add the default host of the test client.
settings.ALLOWED_HOSTS = [*settings.ALLOWED_HOSTS, 'testserver']
saved_data.debug = settings.DEBUG
settings.DEBUG = debug
saved_data.email_backend = settings.EMAIL_BACKEND
settings.EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
saved_data.template_render = Template._render
Template._render = instrumented_test_render
mail.outbox = []
deactivate()
def teardown_test_environment():
"""
Perform any global post-test teardown, such as restoring the original
template renderer and restoring the email sending functions.
"""
saved_data = _TestState.saved_data
settings.ALLOWED_HOSTS = saved_data.allowed_hosts
settings.DEBUG = saved_data.debug
settings.EMAIL_BACKEND = saved_data.email_backend
Template._render = saved_data.template_render
del _TestState.saved_data
del mail.outbox
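# A minimal sketch of pairing the two functions above outside a test runner
# (the placeholder body is an assumption):
#
#     setup_test_environment()
#     try:
#         ...  # run tests here
#     finally:
#         teardown_test_environment()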
def setup_databases(verbosity, interactive, keepdb=False, debug_sql=False, parallel=0, aliases=None, **kwargs):
"""Create the test databases."""
test_databases, mirrored_aliases = get_unique_databases_and_mirrors(aliases)
old_names = []
for db_name, aliases in test_databases.values():
first_alias = None
for alias in aliases:
connection = connections[alias]
old_names.append((connection, db_name, first_alias is None))
# Actually create the database for the first connection
if first_alias is None:
first_alias = alias
connection.creation.create_test_db(
verbosity=verbosity,
autoclobber=not interactive,
keepdb=keepdb,
serialize=connection.settings_dict.get('TEST', {}).get('SERIALIZE', True),
)
if parallel > 1:
for index in range(parallel):
connection.creation.clone_test_db(
suffix=str(index + 1),
verbosity=verbosity,
keepdb=keepdb,
)
# Configure all other connections as mirrors of the first one
else:
connections[alias].creation.set_as_test_mirror(connections[first_alias].settings_dict)
# Configure the test mirrors.
for alias, mirror_alias in mirrored_aliases.items():
connections[alias].creation.set_as_test_mirror(
connections[mirror_alias].settings_dict)
if debug_sql:
for alias in connections:
connections[alias].force_debug_cursor = True
return old_names
def dependency_ordered(test_databases, dependencies):
"""
Reorder test_databases into an order that honors the dependencies
described in TEST[DEPENDENCIES].
"""
ordered_test_databases = []
resolved_databases = set()
# Maps db signature to dependencies of all its aliases
dependencies_map = {}
# Check that no database depends on its own alias
for sig, (_, aliases) in test_databases:
all_deps = set()
for alias in aliases:
all_deps.update(dependencies.get(alias, []))
if not all_deps.isdisjoint(aliases):
raise ImproperlyConfigured(
"Circular dependency: databases %r depend on each other, "
"but are aliases." % aliases
)
dependencies_map[sig] = all_deps
while test_databases:
changed = False
deferred = []
# Try to find a DB that has all its dependencies met
for signature, (db_name, aliases) in test_databases:
if dependencies_map[signature].issubset(resolved_databases):
resolved_databases.update(aliases)
ordered_test_databases.append((signature, (db_name, aliases)))
changed = True
else:
deferred.append((signature, (db_name, aliases)))
if not changed:
raise ImproperlyConfigured("Circular dependency in TEST[DEPENDENCIES]")
test_databases = deferred
return ordered_test_databases
def get_unique_databases_and_mirrors(aliases=None):
"""
Figure out which databases actually need to be created.
    Deduplicate entries in DATABASES that correspond to the same database or
    are configured as test mirrors.
Return two values:
- test_databases: ordered mapping of signatures to (name, list of aliases)
where all aliases share the same underlying database.
- mirrored_aliases: mapping of mirror aliases to original aliases.
"""
if aliases is None:
aliases = connections
mirrored_aliases = {}
test_databases = {}
dependencies = {}
default_sig = connections[DEFAULT_DB_ALIAS].creation.test_db_signature()
for alias in connections:
connection = connections[alias]
test_settings = connection.settings_dict['TEST']
if test_settings['MIRROR']:
# If the database is marked as a test mirror, save the alias.
mirrored_aliases[alias] = test_settings['MIRROR']
elif alias in aliases:
# Store a tuple with DB parameters that uniquely identify it.
# If we have two aliases with the same values for that tuple,
# we only need to create the test database once.
item = test_databases.setdefault(
connection.creation.test_db_signature(),
(connection.settings_dict['NAME'], set())
)
item[1].add(alias)
if 'DEPENDENCIES' in test_settings:
dependencies[alias] = test_settings['DEPENDENCIES']
else:
if alias != DEFAULT_DB_ALIAS and connection.creation.test_db_signature() != default_sig:
dependencies[alias] = test_settings.get('DEPENDENCIES', [DEFAULT_DB_ALIAS])
test_databases = dict(dependency_ordered(test_databases.items(), dependencies))
return test_databases, mirrored_aliases
def teardown_databases(old_config, verbosity, parallel=0, keepdb=False):
"""Destroy all the non-mirror databases."""
for connection, old_name, destroy in old_config:
if destroy:
if parallel > 1:
for index in range(parallel):
connection.creation.destroy_test_db(
suffix=str(index + 1),
verbosity=verbosity,
keepdb=keepdb,
)
connection.creation.destroy_test_db(old_name, verbosity, keepdb)
def get_runner(settings, test_runner_class=None):
test_runner_class = test_runner_class or settings.TEST_RUNNER
test_path = test_runner_class.split('.')
# Allow for relative paths
if len(test_path) > 1:
test_module_name = '.'.join(test_path[:-1])
else:
test_module_name = '.'
test_module = __import__(test_module_name, {}, {}, test_path[-1])
return getattr(test_module, test_path[-1])
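# Illustrative use, following the documented pattern (the test label is an
# assumption):
#
#     TestRunner = get_runner(settings)
#     runner = TestRunner(verbosity=1)
#     failures = runner.run_tests(['myapp.tests'])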
class TestContextDecorator:
"""
A base class that can either be used as a context manager during tests
or as a test function or unittest.TestCase subclass decorator to perform
temporary alterations.
`attr_name`: attribute assigned the return value of enable() if used as
a class decorator.
`kwarg_name`: keyword argument passing the return value of enable() if
used as a function decorator.
"""
def __init__(self, attr_name=None, kwarg_name=None):
self.attr_name = attr_name
self.kwarg_name = kwarg_name
def enable(self):
raise NotImplementedError
def disable(self):
raise NotImplementedError
def __enter__(self):
return self.enable()
def __exit__(self, exc_type, exc_value, traceback):
self.disable()
def decorate_class(self, cls):
if issubclass(cls, TestCase):
decorated_setUp = cls.setUp
decorated_tearDown = cls.tearDown
def setUp(inner_self):
context = self.enable()
if self.attr_name:
setattr(inner_self, self.attr_name, context)
try:
decorated_setUp(inner_self)
except Exception:
self.disable()
raise
def tearDown(inner_self):
decorated_tearDown(inner_self)
self.disable()
cls.setUp = setUp
cls.tearDown = tearDown
return cls
raise TypeError('Can only decorate subclasses of unittest.TestCase')
def decorate_callable(self, func):
@wraps(func)
def inner(*args, **kwargs):
with self as context:
if self.kwarg_name:
kwargs[self.kwarg_name] = context
return func(*args, **kwargs)
return inner
def __call__(self, decorated):
if isinstance(decorated, type):
return self.decorate_class(decorated)
elif callable(decorated):
return self.decorate_callable(decorated)
raise TypeError('Cannot decorate object of type %s' % type(decorated))
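# A hedged sketch of a custom TestContextDecorator subclass; acquire() and
# release() are hypothetical helpers, not part of this module:
#
#     class isolate_resource(TestContextDecorator):
#         def enable(self):
#             self.resource = acquire()   # hypothetical
#             return self.resource
#         def disable(self):
#             release(self.resource)      # hypothetical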
class override_settings(TestContextDecorator):
"""
Act as either a decorator or a context manager. If it's a decorator, take a
    function and return a wrapped function. If it's a context manager, use it
with the ``with`` statement. In either event, entering/exiting are called
before and after, respectively, the function/block is executed.
"""
enable_exception = None
def __init__(self, **kwargs):
self.options = kwargs
super().__init__()
def enable(self):
# Keep this code at the beginning to leave the settings unchanged
# in case it raises an exception because INSTALLED_APPS is invalid.
if 'INSTALLED_APPS' in self.options:
try:
apps.set_installed_apps(self.options['INSTALLED_APPS'])
except Exception:
apps.unset_installed_apps()
raise
override = UserSettingsHolder(settings._wrapped)
for key, new_value in self.options.items():
setattr(override, key, new_value)
self.wrapped = settings._wrapped
settings._wrapped = override
for key, new_value in self.options.items():
try:
setting_changed.send(
sender=settings._wrapped.__class__,
setting=key, value=new_value, enter=True,
)
except Exception as exc:
self.enable_exception = exc
self.disable()
def disable(self):
if 'INSTALLED_APPS' in self.options:
apps.unset_installed_apps()
settings._wrapped = self.wrapped
del self.wrapped
responses = []
for key in self.options:
new_value = getattr(settings, key, None)
responses_for_setting = setting_changed.send_robust(
sender=settings._wrapped.__class__,
setting=key, value=new_value, enter=False,
)
responses.extend(responses_for_setting)
if self.enable_exception is not None:
exc = self.enable_exception
self.enable_exception = None
raise exc
for _, response in responses:
if isinstance(response, Exception):
raise response
def save_options(self, test_func):
if test_func._overridden_settings is None:
test_func._overridden_settings = self.options
else:
# Duplicate dict to prevent subclasses from altering their parent.
test_func._overridden_settings = {
**test_func._overridden_settings,
**self.options,
}
def decorate_class(self, cls):
from django.test import SimpleTestCase
if not issubclass(cls, SimpleTestCase):
raise ValueError(
"Only subclasses of Django SimpleTestCase can be decorated "
"with override_settings")
self.save_options(cls)
return cls
class modify_settings(override_settings):
"""
Like override_settings, but makes it possible to append, prepend, or remove
items instead of redefining the entire list.
"""
def __init__(self, *args, **kwargs):
if args:
# Hack used when instantiating from SimpleTestCase.setUpClass.
assert not kwargs
self.operations = args[0]
else:
assert not args
self.operations = list(kwargs.items())
super(override_settings, self).__init__()
def save_options(self, test_func):
if test_func._modified_settings is None:
test_func._modified_settings = self.operations
else:
# Duplicate list to prevent subclasses from altering their parent.
test_func._modified_settings = list(
test_func._modified_settings) + self.operations
def enable(self):
self.options = {}
for name, operations in self.operations:
try:
# When called from SimpleTestCase.setUpClass, values may be
                # overridden several times; accumulate changes.
value = self.options[name]
except KeyError:
value = list(getattr(settings, name, []))
for action, items in operations.items():
                # items may be a single value or an iterable.
if isinstance(items, str):
items = [items]
if action == 'append':
value = value + [item for item in items if item not in value]
elif action == 'prepend':
value = [item for item in items if item not in value] + value
elif action == 'remove':
value = [item for item in value if item not in items]
else:
raise ValueError("Unsupported action: %s" % action)
self.options[name] = value
super().enable()
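# Illustrative sketch ('my_app' is a hypothetical app name): modify_settings
# edits list settings in place instead of redefining them wholesale.
def _modify_settings_usage():
    @modify_settings(INSTALLED_APPS={
        'append': 'my_app',                # added unless already present
        'remove': 'django.contrib.admin',  # dropped if present
    })
    def installed_apps_snapshot():
        from django.conf import settings
        return list(settings.INSTALLED_APPS)
    return installed_apps_snapshot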
class override_system_checks(TestContextDecorator):
"""
    Act as a decorator. Override the list of registered system checks.
    Useful when you override `INSTALLED_APPS`, e.g. if you exclude the `auth`
    app, you also need to exclude its system checks.
"""
def __init__(self, new_checks, deployment_checks=None):
from django.core.checks.registry import registry
self.registry = registry
self.new_checks = new_checks
self.deployment_checks = deployment_checks
super().__init__()
def enable(self):
self.old_checks = self.registry.registered_checks
self.registry.registered_checks = set()
for check in self.new_checks:
self.registry.register(check, *getattr(check, 'tags', ()))
self.old_deployment_checks = self.registry.deployment_checks
if self.deployment_checks is not None:
self.registry.deployment_checks = set()
for check in self.deployment_checks:
self.registry.register(check, *getattr(check, 'tags', ()), deploy=True)
def disable(self):
self.registry.registered_checks = self.old_checks
self.registry.deployment_checks = self.old_deployment_checks
def compare_xml(want, got):
"""
    Try to do an 'xml-comparison' of want and got. Plain string comparison
    doesn't always work because, for example, attribute ordering should not be
    important. Ignore comment nodes, the document type node, and leading and
    trailing whitespace.
Based on https://github.com/lxml/lxml/blob/master/src/lxml/doctestcompare.py
"""
_norm_whitespace_re = re.compile(r'[ \t\n][ \t\n]+')
def norm_whitespace(v):
return _norm_whitespace_re.sub(' ', v)
def child_text(element):
return ''.join(c.data for c in element.childNodes
if c.nodeType == Node.TEXT_NODE)
def children(element):
return [c for c in element.childNodes
if c.nodeType == Node.ELEMENT_NODE]
def norm_child_text(element):
return norm_whitespace(child_text(element))
def attrs_dict(element):
return dict(element.attributes.items())
def check_element(want_element, got_element):
if want_element.tagName != got_element.tagName:
return False
if norm_child_text(want_element) != norm_child_text(got_element):
return False
if attrs_dict(want_element) != attrs_dict(got_element):
return False
want_children = children(want_element)
got_children = children(got_element)
if len(want_children) != len(got_children):
return False
return all(check_element(want, got) for want, got in zip(want_children, got_children))
def first_node(document):
for node in document.childNodes:
if node.nodeType not in (Node.COMMENT_NODE, Node.DOCUMENT_TYPE_NODE):
return node
want = want.strip().replace('\\n', '\n')
got = got.strip().replace('\\n', '\n')
    # If the string is not a complete XML document, we may need to add a
    # root element. This allows us to compare fragments, like "<foo/><bar/>".
if not want.startswith('<?xml'):
wrapper = '<root>%s</root>'
want = wrapper % want
got = wrapper % got
# Parse the want and got strings, and compare the parsings.
want_root = first_node(parseString(want))
got_root = first_node(parseString(got))
return check_element(want_root, got_root)
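# Quick behavior sketch: attribute order and runs of internal whitespace
# don't affect the comparison, while differing text content does.
def _compare_xml_examples():
    assert compare_xml('<a x="1" y="2"/>', '<a y="2" x="1"/>')
    assert compare_xml('<a>x   y</a>', '<a>x  y</a>')
    assert not compare_xml('<a>1</a>', '<a>2</a>')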
class CaptureQueriesContext:
"""
Context manager that captures queries executed by the specified connection.
"""
def __init__(self, connection):
self.connection = connection
def __iter__(self):
return iter(self.captured_queries)
def __getitem__(self, index):
return self.captured_queries[index]
def __len__(self):
return len(self.captured_queries)
@property
def captured_queries(self):
return self.connection.queries[self.initial_queries:self.final_queries]
def __enter__(self):
self.force_debug_cursor = self.connection.force_debug_cursor
self.connection.force_debug_cursor = True
# Run any initialization queries if needed so that they won't be
# included as part of the count.
self.connection.ensure_connection()
self.initial_queries = len(self.connection.queries_log)
self.final_queries = None
request_started.disconnect(reset_queries)
return self
def __exit__(self, exc_type, exc_value, traceback):
self.connection.force_debug_cursor = self.force_debug_cursor
request_started.connect(reset_queries)
if exc_type is not None:
return
self.final_queries = len(self.connection.queries_log)
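# Minimal usage sketch (assumes a configured database; `Book` is a
# hypothetical model from a hypothetical app):
def _capture_queries_usage():
    from django.db import connection
    from myapp.models import Book  # hypothetical import
    with CaptureQueriesContext(connection) as ctx:
        list(Book.objects.all())
    assert len(ctx) == 1
    return ctx.captured_queries[0]['sql']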
class ignore_warnings(TestContextDecorator):
def __init__(self, **kwargs):
self.ignore_kwargs = kwargs
if 'message' in self.ignore_kwargs or 'module' in self.ignore_kwargs:
self.filter_func = warnings.filterwarnings
else:
self.filter_func = warnings.simplefilter
super().__init__()
def enable(self):
self.catch_warnings = warnings.catch_warnings()
self.catch_warnings.__enter__()
self.filter_func('ignore', **self.ignore_kwargs)
def disable(self):
self.catch_warnings.__exit__(*sys.exc_info())
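# Illustrative sketch: silence one warning category for a single callable
# (the decorator works the same way on TestCase subclasses).
def _ignore_warnings_usage():
    @ignore_warnings(category=DeprecationWarning)
    def noisy():
        warnings.warn('old API', DeprecationWarning)
    return noisy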
# On OSes that don't provide tzset (Windows), we can't set the timezone
# in which the program runs. As a consequence, we must skip tests that
# don't enforce a specific timezone (with timezone.override or equivalent),
# or attempt to interpret naive datetimes in the default timezone.
requires_tz_support = skipUnless(
TZ_SUPPORT,
"This test relies on the ability to run a program in an arbitrary "
"time zone, but your operating system isn't able to do that."
)
@contextmanager
def extend_sys_path(*paths):
"""Context manager to temporarily add paths to sys.path."""
_orig_sys_path = sys.path[:]
sys.path.extend(paths)
try:
yield
finally:
sys.path = _orig_sys_path
@contextmanager
def isolate_lru_cache(lru_cache_object):
"""Clear the cache of an LRU cache object on entering and exiting."""
lru_cache_object.cache_clear()
try:
yield
finally:
lru_cache_object.cache_clear()
@contextmanager
def captured_output(stream_name):
"""Return a context manager used by captured_stdout/stdin/stderr
that temporarily replaces the sys stream *stream_name* with a StringIO.
Note: This function and the following ``captured_std*`` are copied
from CPython's ``test.support`` module."""
orig_stdout = getattr(sys, stream_name)
setattr(sys, stream_name, StringIO())
try:
yield getattr(sys, stream_name)
finally:
setattr(sys, stream_name, orig_stdout)
def captured_stdout():
"""Capture the output of sys.stdout:
with captured_stdout() as stdout:
print("hello")
self.assertEqual(stdout.getvalue(), "hello\n")
"""
return captured_output("stdout")
def captured_stderr():
"""Capture the output of sys.stderr:
with captured_stderr() as stderr:
print("hello", file=sys.stderr)
self.assertEqual(stderr.getvalue(), "hello\n")
"""
return captured_output("stderr")
def captured_stdin():
"""Capture the input to sys.stdin:
with captured_stdin() as stdin:
stdin.write('hello\n')
stdin.seek(0)
# call test code that consumes from sys.stdin
captured = input()
self.assertEqual(captured, "hello")
"""
return captured_output("stdin")
@contextmanager
def freeze_time(t):
"""
Context manager to temporarily freeze time.time(). This temporarily
modifies the time function of the time module. Modules which import the
    time function directly (e.g. `from time import time`) won't be affected.
This isn't meant as a public API, but helps reduce some repetitive code in
Django's test suite.
"""
_real_time = time.time
time.time = lambda: t
try:
yield
finally:
time.time = _real_time
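# Usage sketch: inside the block, time.time() returns the frozen value.
def _freeze_time_usage():
    with freeze_time(1_000_000_000):
        assert time.time() == 1_000_000_000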
def require_jinja2(test_func):
"""
Decorator to enable a Jinja2 template engine in addition to the regular
Django template engine for a test or skip it if Jinja2 isn't available.
"""
test_func = skipIf(jinja2 is None, "this test requires jinja2")(test_func)
return override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
}, {
'BACKEND': 'django.template.backends.jinja2.Jinja2',
'APP_DIRS': True,
'OPTIONS': {'keep_trailing_newline': True},
}])(test_func)
class override_script_prefix(TestContextDecorator):
"""Decorator or context manager to temporary override the script prefix."""
def __init__(self, prefix):
self.prefix = prefix
super().__init__()
def enable(self):
self.old_prefix = get_script_prefix()
set_script_prefix(self.prefix)
def disable(self):
set_script_prefix(self.old_prefix)
class LoggingCaptureMixin:
"""
    Capture the output from the 'django' logger and store it on the
    instance's logger_output attribute.
"""
def setUp(self):
self.logger = logging.getLogger('django')
self.old_stream = self.logger.handlers[0].stream
self.logger_output = StringIO()
self.logger.handlers[0].stream = self.logger_output
def tearDown(self):
self.logger.handlers[0].stream = self.old_stream
class isolate_apps(TestContextDecorator):
"""
Act as either a decorator or a context manager to register models defined
in its wrapped context to an isolated registry.
The list of installed apps the isolated registry should contain must be
passed as arguments.
Two optional keyword arguments can be specified:
    `attr_name`: attribute to which the isolated registry is assigned when
    used as a class decorator.
    `kwarg_name`: keyword argument through which the isolated registry is
    passed when used as a function decorator.
"""
def __init__(self, *installed_apps, **kwargs):
self.installed_apps = installed_apps
super().__init__(**kwargs)
def enable(self):
self.old_apps = Options.default_apps
apps = Apps(self.installed_apps)
setattr(Options, 'default_apps', apps)
return apps
def disable(self):
setattr(Options, 'default_apps', self.old_apps)
def tag(*tags):
"""Decorator to add tags to a test class or method."""
def decorator(obj):
if hasattr(obj, 'tags'):
obj.tags = obj.tags.union(tags)
else:
setattr(obj, 'tags', set(tags))
return obj
return decorator
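# Usage sketch: tagged tests can then be selected with
# `manage.py test --tag=slow` or skipped with `--exclude-tag=slow`.
def _tag_usage():
    @tag('slow', 'integration')
    def test_heavy_path():
        pass
    assert test_heavy_path.tags == {'slow', 'integration'}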
@contextmanager
def register_lookup(field, *lookups, lookup_name=None):
"""
Context manager to temporarily register lookups on a model field using
lookup_name (or the lookup's lookup_name if not provided).
"""
try:
for lookup in lookups:
field.register_lookup(lookup, lookup_name)
yield
finally:
for lookup in lookups:
field._unregister_lookup(lookup, lookup_name)
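# Usage sketch: temporarily expose a transform as a lookup on a field class.
# Lower is a real transform; the query in the comment is hypothetical.
def _register_lookup_usage():
    from django.db.models import CharField
    from django.db.models.functions import Lower
    with register_lookup(CharField, Lower):
        # e.g. Author.objects.filter(name__lower='alice') works for any
        # CharField while this context is active.
        pass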
|
import functools
import sys
import threading
import warnings
from collections import Counter, defaultdict
from functools import partial
from django.core.exceptions import AppRegistryNotReady, ImproperlyConfigured
from .config import AppConfig
class Apps:
"""
A registry that stores the configuration of installed applications.
It also keeps track of models, e.g. to provide reverse relations.
"""
def __init__(self, installed_apps=()):
# installed_apps is set to None when creating the master registry
# because it cannot be populated at that point. Other registries must
# provide a list of installed apps and are populated immediately.
if installed_apps is None and hasattr(sys.modules[__name__], 'apps'):
raise RuntimeError("You must supply an installed_apps argument.")
# Mapping of app labels => model names => model classes. Every time a
# model is imported, ModelBase.__new__ calls apps.register_model which
# creates an entry in all_models. All imported models are registered,
# regardless of whether they're defined in an installed application
# and whether the registry has been populated. Since it isn't possible
# to reimport a module safely (it could reexecute initialization code)
# all_models is never overridden or reset.
self.all_models = defaultdict(dict)
# Mapping of labels to AppConfig instances for installed apps.
self.app_configs = {}
# Stack of app_configs. Used to store the current state in
# set_available_apps and set_installed_apps.
self.stored_app_configs = []
# Whether the registry is populated.
self.apps_ready = self.models_ready = self.ready = False
# For the autoreloader.
self.ready_event = threading.Event()
# Lock for thread-safe population.
self._lock = threading.RLock()
self.loading = False
# Maps ("app_label", "modelname") tuples to lists of functions to be
# called when the corresponding model is ready. Used by this class's
# `lazy_model_operation()` and `do_pending_operations()` methods.
self._pending_operations = defaultdict(list)
# Populate apps and models, unless it's the master registry.
if installed_apps is not None:
self.populate(installed_apps)
def populate(self, installed_apps=None):
"""
Load application configurations and models.
Import each application module and then each model module.
It is thread-safe and idempotent, but not reentrant.
"""
if self.ready:
return
# populate() might be called by two threads in parallel on servers
# that create threads before initializing the WSGI callable.
with self._lock:
if self.ready:
return
# An RLock prevents other threads from entering this section. The
# compare and set operation below is atomic.
if self.loading:
# Prevent reentrant calls to avoid running AppConfig.ready()
# methods twice.
raise RuntimeError("populate() isn't reentrant")
self.loading = True
# Phase 1: initialize app configs and import app modules.
for entry in installed_apps:
if isinstance(entry, AppConfig):
app_config = entry
else:
app_config = AppConfig.create(entry)
if app_config.label in self.app_configs:
raise ImproperlyConfigured(
"Application labels aren't unique, "
"duplicates: %s" % app_config.label)
self.app_configs[app_config.label] = app_config
app_config.apps = self
# Check for duplicate app names.
counts = Counter(
app_config.name for app_config in self.app_configs.values())
duplicates = [
name for name, count in counts.most_common() if count > 1]
if duplicates:
raise ImproperlyConfigured(
"Application names aren't unique, "
"duplicates: %s" % ", ".join(duplicates))
self.apps_ready = True
# Phase 2: import models modules.
for app_config in self.app_configs.values():
app_config.import_models()
self.clear_cache()
self.models_ready = True
# Phase 3: run ready() methods of app configs.
for app_config in self.get_app_configs():
app_config.ready()
self.ready = True
self.ready_event.set()
def check_apps_ready(self):
"""Raise an exception if all apps haven't been imported yet."""
if not self.apps_ready:
from django.conf import settings
# If "not ready" is due to unconfigured settings, accessing
# INSTALLED_APPS raises a more helpful ImproperlyConfigured
# exception.
settings.INSTALLED_APPS
raise AppRegistryNotReady("Apps aren't loaded yet.")
def check_models_ready(self):
"""Raise an exception if all models haven't been imported yet."""
if not self.models_ready:
raise AppRegistryNotReady("Models aren't loaded yet.")
def get_app_configs(self):
"""Import applications and return an iterable of app configs."""
self.check_apps_ready()
return self.app_configs.values()
def get_app_config(self, app_label):
"""
        Import applications and return an app config for the given label.
Raise LookupError if no application exists with this label.
"""
self.check_apps_ready()
try:
return self.app_configs[app_label]
except KeyError:
message = "No installed app with label '%s'." % app_label
for app_config in self.get_app_configs():
if app_config.name == app_label:
message += " Did you mean '%s'?" % app_config.label
break
raise LookupError(message)
# This method is performance-critical at least for Django's test suite.
@functools.lru_cache(maxsize=None)
def get_models(self, include_auto_created=False, include_swapped=False):
"""
Return a list of all installed models.
By default, the following models aren't included:
- auto-created models for many-to-many relations without
an explicit intermediate table,
- models that have been swapped out.
Set the corresponding keyword argument to True to include such models.
"""
self.check_models_ready()
result = []
for app_config in self.app_configs.values():
result.extend(app_config.get_models(include_auto_created, include_swapped))
return result
def get_model(self, app_label, model_name=None, require_ready=True):
"""
Return the model matching the given app_label and model_name.
As a shortcut, app_label may be in the form <app_label>.<model_name>.
model_name is case-insensitive.
Raise LookupError if no application exists with this label, or no
model exists with this name in the application. Raise ValueError if
called with a single argument that doesn't contain exactly one dot.
"""
if require_ready:
self.check_models_ready()
else:
self.check_apps_ready()
if model_name is None:
app_label, model_name = app_label.split('.')
app_config = self.get_app_config(app_label)
if not require_ready and app_config.models is None:
app_config.import_models()
return app_config.get_model(model_name, require_ready=require_ready)
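    # Usage sketch (assumes django.setup() has run and the 'auth' app is
    # installed; the two calls below are equivalent):
    #
    #   from django.apps import apps
    #   User = apps.get_model('auth', 'User')
    #   User = apps.get_model('auth.User')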
def register_model(self, app_label, model):
# Since this method is called when models are imported, it cannot
# perform imports because of the risk of import loops. It mustn't
# call get_app_config().
model_name = model._meta.model_name
app_models = self.all_models[app_label]
if model_name in app_models:
if (model.__name__ == app_models[model_name].__name__ and
model.__module__ == app_models[model_name].__module__):
warnings.warn(
"Model '%s.%s' was already registered. "
"Reloading models is not advised as it can lead to inconsistencies, "
"most notably with related models." % (app_label, model_name),
RuntimeWarning, stacklevel=2)
else:
raise RuntimeError(
"Conflicting '%s' models in application '%s': %s and %s." %
(model_name, app_label, app_models[model_name], model))
app_models[model_name] = model
self.do_pending_operations(model)
self.clear_cache()
def is_installed(self, app_name):
"""
Check whether an application with this name exists in the registry.
app_name is the full name of the app e.g. 'django.contrib.admin'.
"""
self.check_apps_ready()
return any(ac.name == app_name for ac in self.app_configs.values())
def get_containing_app_config(self, object_name):
"""
Look for an app config containing a given object.
object_name is the dotted Python path to the object.
Return the app config for the inner application in case of nesting.
Return None if the object isn't in any registered app config.
"""
self.check_apps_ready()
candidates = []
for app_config in self.app_configs.values():
if object_name.startswith(app_config.name):
subpath = object_name[len(app_config.name):]
if subpath == '' or subpath[0] == '.':
candidates.append(app_config)
if candidates:
return sorted(candidates, key=lambda ac: -len(ac.name))[0]
def get_registered_model(self, app_label, model_name):
"""
Similar to get_model(), but doesn't require that an app exists with
the given app_label.
It's safe to call this method at import time, even while the registry
is being populated.
"""
model = self.all_models[app_label].get(model_name.lower())
if model is None:
raise LookupError(
"Model '%s.%s' not registered." % (app_label, model_name))
return model
@functools.lru_cache(maxsize=None)
def get_swappable_settings_name(self, to_string):
"""
        For a given model string (e.g. "auth.User"), return the name of the
        corresponding setting (e.g. "AUTH_USER_MODEL") if it refers to a
        swappable model. If the referred model is not swappable, return None.
This method is decorated with lru_cache because it's performance
critical when it comes to migrations. Since the swappable settings don't
change after Django has loaded the settings, there is no reason to get
the respective settings attribute over and over again.
"""
for model in self.get_models(include_swapped=True):
swapped = model._meta.swapped
# Is this model swapped out for the model given by to_string?
if swapped and swapped == to_string:
return model._meta.swappable
# Is this model swappable and the one given by to_string?
if model._meta.swappable and model._meta.label == to_string:
return model._meta.swappable
return None
def set_available_apps(self, available):
"""
Restrict the set of installed apps used by get_app_config[s].
available must be an iterable of application names.
set_available_apps() must be balanced with unset_available_apps().
Primarily used for performance optimization in TransactionTestCase.
This method is safe in the sense that it doesn't trigger any imports.
"""
available = set(available)
installed = {app_config.name for app_config in self.get_app_configs()}
if not available.issubset(installed):
raise ValueError(
"Available apps isn't a subset of installed apps, extra apps: %s"
% ", ".join(available - installed)
)
self.stored_app_configs.append(self.app_configs)
self.app_configs = {
label: app_config
for label, app_config in self.app_configs.items()
if app_config.name in available
}
self.clear_cache()
def unset_available_apps(self):
"""Cancel a previous call to set_available_apps()."""
self.app_configs = self.stored_app_configs.pop()
self.clear_cache()
def set_installed_apps(self, installed):
"""
Enable a different set of installed apps for get_app_config[s].
installed must be an iterable in the same format as INSTALLED_APPS.
set_installed_apps() must be balanced with unset_installed_apps(),
even if it exits with an exception.
Primarily used as a receiver of the setting_changed signal in tests.
This method may trigger new imports, which may add new models to the
registry of all imported models. They will stay in the registry even
after unset_installed_apps(). Since it isn't possible to replay
imports safely (e.g. that could lead to registering listeners twice),
models are registered when they're imported and never removed.
"""
if not self.ready:
raise AppRegistryNotReady("App registry isn't ready yet.")
self.stored_app_configs.append(self.app_configs)
self.app_configs = {}
self.apps_ready = self.models_ready = self.loading = self.ready = False
self.clear_cache()
self.populate(installed)
def unset_installed_apps(self):
"""Cancel a previous call to set_installed_apps()."""
self.app_configs = self.stored_app_configs.pop()
self.apps_ready = self.models_ready = self.ready = True
self.clear_cache()
def clear_cache(self):
"""
Clear all internal caches, for methods that alter the app registry.
This is mostly used in tests.
"""
# Call expire cache on each model. This will purge
# the relation tree and the fields cache.
self.get_models.cache_clear()
if self.ready:
            # Circumvent self.get_models() to prevent the cache from being
            # refilled. In particular, this prevents an empty value from being
            # cached while cloning.
for app_config in self.app_configs.values():
for model in app_config.get_models(include_auto_created=True):
model._meta._expire_cache()
def lazy_model_operation(self, function, *model_keys):
"""
Take a function and a number of ("app_label", "modelname") tuples, and
when all the corresponding models have been imported and registered,
call the function with the model classes as its arguments.
The function passed to this method must accept exactly n models as
arguments, where n=len(model_keys).
"""
# Base case: no arguments, just execute the function.
if not model_keys:
function()
# Recursive case: take the head of model_keys, wait for the
# corresponding model class to be imported and registered, then apply
# that argument to the supplied function. Pass the resulting partial
# to lazy_model_operation() along with the remaining model args and
# repeat until all models are loaded and all arguments are applied.
else:
next_model, *more_models = model_keys
# This will be executed after the class corresponding to next_model
# has been imported and registered. The `func` attribute provides
# duck-type compatibility with partials.
def apply_next_model(model):
next_function = partial(apply_next_model.func, model)
self.lazy_model_operation(next_function, *more_models)
apply_next_model.func = function
# If the model has already been imported and registered, partially
# apply it to the function now. If not, add it to the list of
# pending operations for the model, where it will be executed with
# the model class as its sole argument once the model is ready.
try:
model_class = self.get_registered_model(*next_model)
except LookupError:
self._pending_operations[next_model].append(apply_next_model)
else:
apply_next_model(model_class)
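    # Usage sketch (hypothetical models/callback): run code once both models
    # have been imported and registered.
    #
    #   def connect(author_model, book_model):
    #       ...
    #   apps.lazy_model_operation(connect, ('myapp', 'author'), ('myapp', 'book'))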
def do_pending_operations(self, model):
"""
Take a newly-prepared model and pass it to each function waiting for
it. This is called at the very end of Apps.register_model().
"""
key = model._meta.app_label, model._meta.model_name
for function in self._pending_operations.pop(key, []):
function(model)
apps = Apps(installed_apps=None)
|
import os
from importlib import import_module
from django.core.exceptions import ImproperlyConfigured
from django.utils.module_loading import module_has_submodule
MODELS_MODULE_NAME = 'models'
class AppConfig:
"""Class representing a Django application and its configuration."""
def __init__(self, app_name, app_module):
# Full Python path to the application e.g. 'django.contrib.admin'.
self.name = app_name
# Root module for the application e.g. <module 'django.contrib.admin'
# from 'django/contrib/admin/__init__.py'>.
self.module = app_module
# Reference to the Apps registry that holds this AppConfig. Set by the
# registry when it registers the AppConfig instance.
self.apps = None
# The following attributes could be defined at the class level in a
# subclass, hence the test-and-set pattern.
# Last component of the Python path to the application e.g. 'admin'.
# This value must be unique across a Django project.
if not hasattr(self, 'label'):
self.label = app_name.rpartition(".")[2]
# Human-readable name for the application e.g. "Admin".
if not hasattr(self, 'verbose_name'):
self.verbose_name = self.label.title()
# Filesystem path to the application directory e.g.
# '/path/to/django/contrib/admin'.
if not hasattr(self, 'path'):
self.path = self._path_from_module(app_module)
# Module containing models e.g. <module 'django.contrib.admin.models'
# from 'django/contrib/admin/models.py'>. Set by import_models().
# None if the application doesn't have a models module.
self.models_module = None
# Mapping of lowercase model names to model classes. Initially set to
# None to prevent accidental access before import_models() runs.
self.models = None
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.label)
def _path_from_module(self, module):
"""Attempt to determine app's filesystem path from its module."""
# See #21874 for extended discussion of the behavior of this method in
# various cases.
# Convert paths to list because Python's _NamespacePath doesn't support
# indexing.
paths = list(getattr(module, '__path__', []))
if len(paths) != 1:
filename = getattr(module, '__file__', None)
if filename is not None:
paths = [os.path.dirname(filename)]
else:
# For unknown reasons, sometimes the list returned by __path__
# contains duplicates that must be removed (#25246).
paths = list(set(paths))
if len(paths) > 1:
raise ImproperlyConfigured(
"The app module %r has multiple filesystem locations (%r); "
"you must configure this app with an AppConfig subclass "
"with a 'path' class attribute." % (module, paths))
elif not paths:
raise ImproperlyConfigured(
"The app module %r has no filesystem location, "
"you must configure this app with an AppConfig subclass "
"with a 'path' class attribute." % (module,))
return paths[0]
@classmethod
def create(cls, entry):
"""
Factory that creates an app config from an entry in INSTALLED_APPS.
"""
try:
# If import_module succeeds, entry is a path to an app module,
# which may specify an app config class with default_app_config.
# Otherwise, entry is a path to an app config class or an error.
module = import_module(entry)
except ImportError:
# Track that importing as an app module failed. If importing as an
# app config class fails too, we'll trigger the ImportError again.
module = None
mod_path, _, cls_name = entry.rpartition('.')
# Raise the original exception when entry cannot be a path to an
# app config class.
if not mod_path:
raise
else:
try:
# If this works, the app module specifies an app config class.
entry = module.default_app_config
except AttributeError:
# Otherwise, it simply uses the default app config class.
return cls(entry, module)
else:
mod_path, _, cls_name = entry.rpartition('.')
# If we're reaching this point, we must attempt to load the app config
# class located at <mod_path>.<cls_name>
mod = import_module(mod_path)
try:
cls = getattr(mod, cls_name)
except AttributeError:
if module is None:
# If importing as an app module failed, check if the module
# contains any valid AppConfigs and show them as choices.
# Otherwise, that error probably contains the most informative
# traceback, so trigger it again.
candidates = sorted(
repr(name) for name, candidate in mod.__dict__.items()
if isinstance(candidate, type) and
issubclass(candidate, AppConfig) and
candidate is not AppConfig
)
if candidates:
raise ImproperlyConfigured(
"'%s' does not contain a class '%s'. Choices are: %s."
% (mod_path, cls_name, ', '.join(candidates))
)
import_module(entry)
else:
raise
# Check for obvious errors. (This check prevents duck typing, but
# it could be removed if it became a problem in practice.)
if not issubclass(cls, AppConfig):
raise ImproperlyConfigured(
"'%s' isn't a subclass of AppConfig." % entry)
# Obtain app name here rather than in AppClass.__init__ to keep
# all error checking for entries in INSTALLED_APPS in one place.
try:
app_name = cls.name
except AttributeError:
raise ImproperlyConfigured(
"'%s' must supply a name attribute." % entry)
# Ensure app_name points to a valid module.
try:
app_module = import_module(app_name)
except ImportError:
raise ImproperlyConfigured(
"Cannot import '%s'. Check that '%s.%s.name' is correct." % (
app_name, mod_path, cls_name,
)
)
# Entry is a path to an app config class.
return cls(app_name, app_module)
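    # Usage sketch: both INSTALLED_APPS entry forms below arrive here
    # ('polls' is a hypothetical app). The second names an AppConfig subclass
    # directly; the first may be redirected through default_app_config.
    #
    #   INSTALLED_APPS = [
    #       'polls',
    #       'polls.apps.PollsConfig',
    #   ]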
def get_model(self, model_name, require_ready=True):
"""
Return the model with the given case-insensitive model_name.
Raise LookupError if no model exists with this name.
"""
if require_ready:
self.apps.check_models_ready()
else:
self.apps.check_apps_ready()
try:
return self.models[model_name.lower()]
except KeyError:
raise LookupError(
"App '%s' doesn't have a '%s' model." % (self.label, model_name))
def get_models(self, include_auto_created=False, include_swapped=False):
"""
Return an iterable of models.
By default, the following models aren't included:
- auto-created models for many-to-many relations without
an explicit intermediate table,
- models that have been swapped out.
Set the corresponding keyword argument to True to include such models.
Keyword arguments aren't documented; they're a private API.
"""
self.apps.check_models_ready()
for model in self.models.values():
if model._meta.auto_created and not include_auto_created:
continue
if model._meta.swapped and not include_swapped:
continue
yield model
def import_models(self):
# Dictionary of models for this app, primarily maintained in the
# 'all_models' attribute of the Apps this AppConfig is attached to.
self.models = self.apps.all_models[self.label]
if module_has_submodule(self.module, MODELS_MODULE_NAME):
models_module_name = '%s.%s' % (self.name, MODELS_MODULE_NAME)
self.models_module = import_module(models_module_name)
def ready(self):
"""
Override this method in subclasses to run code when Django starts.
"""
|
"""
Views and functions for serving static files. These are only to be used
during development, and SHOULD NOT be used in a production setting.
"""
import mimetypes
import posixpath
import re
from pathlib import Path
from django.http import (
FileResponse, Http404, HttpResponse, HttpResponseNotModified,
)
from django.template import Context, Engine, TemplateDoesNotExist, loader
from django.utils._os import safe_join
from django.utils.http import http_date, parse_http_date
from django.utils.translation import gettext as _, gettext_lazy
def serve(request, path, document_root=None, show_indexes=False):
"""
Serve static files below a given point in the directory structure.
To use, put a URL pattern such as::
from django.views.static import serve
path('<path:path>', serve, {'document_root': '/path/to/my/files/'})
in your URLconf. You must provide the ``document_root`` param. You may
also set ``show_indexes`` to ``True`` if you'd like to serve a basic index
of the directory. This index view will use the template hardcoded below,
but if you'd like to override it, you can create a template called
``static/directory_index.html``.
"""
path = posixpath.normpath(path).lstrip('/')
fullpath = Path(safe_join(document_root, path))
if fullpath.is_dir():
if show_indexes:
return directory_index(path, fullpath)
raise Http404(_("Directory indexes are not allowed here."))
if not fullpath.exists():
raise Http404(_('"%(path)s" does not exist') % {'path': fullpath})
# Respect the If-Modified-Since header.
statobj = fullpath.stat()
if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),
statobj.st_mtime, statobj.st_size):
return HttpResponseNotModified()
content_type, encoding = mimetypes.guess_type(str(fullpath))
content_type = content_type or 'application/octet-stream'
response = FileResponse(fullpath.open('rb'), content_type=content_type)
response["Last-Modified"] = http_date(statobj.st_mtime)
if encoding:
response["Content-Encoding"] = encoding
return response
DEFAULT_DIRECTORY_INDEX_TEMPLATE = """
{% load i18n %}
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8">
<meta http-equiv="Content-Language" content="en-us">
<meta name="robots" content="NONE,NOARCHIVE">
<title>{% blocktrans %}Index of {{ directory }}{% endblocktrans %}</title>
</head>
<body>
<h1>{% blocktrans %}Index of {{ directory }}{% endblocktrans %}</h1>
<ul>
{% if directory != "/" %}
<li><a href="../">../</a></li>
{% endif %}
{% for f in file_list %}
<li><a href="{{ f|urlencode }}">{{ f }}</a></li>
{% endfor %}
</ul>
</body>
</html>
"""
template_translatable = gettext_lazy("Index of %(directory)s")
def directory_index(path, fullpath):
try:
t = loader.select_template([
'static/directory_index.html',
'static/directory_index',
])
except TemplateDoesNotExist:
t = Engine(libraries={'i18n': 'django.templatetags.i18n'}).from_string(DEFAULT_DIRECTORY_INDEX_TEMPLATE)
c = Context()
else:
c = {}
files = []
for f in fullpath.iterdir():
if not f.name.startswith('.'):
url = str(f.relative_to(fullpath))
if f.is_dir():
url += '/'
files.append(url)
c.update({
'directory': path + '/',
'file_list': files,
})
return HttpResponse(t.render(c))
def was_modified_since(header=None, mtime=0, size=0):
"""
Was something modified since the user last downloaded it?
header
This is the value of the If-Modified-Since header. If this is None,
I'll just return True.
mtime
This is the modification time of the item we're talking about.
size
This is the size of the item we're talking about.
"""
try:
if header is None:
raise ValueError
matches = re.match(r"^([^;]+)(; length=([0-9]+))?$", header,
re.IGNORECASE)
header_mtime = parse_http_date(matches.group(1))
header_len = matches.group(3)
if header_len and int(header_len) != size:
raise ValueError
if int(mtime) > header_mtime:
raise ValueError
except (AttributeError, ValueError, OverflowError):
return True
return False
|
import itertools
import json
import os
import re
from urllib.parse import unquote
from django.apps import apps
from django.conf import settings
from django.http import HttpResponse, HttpResponseRedirect, JsonResponse
from django.template import Context, Engine
from django.urls import translate_url
from django.utils.formats import get_format
from django.utils.http import is_safe_url
from django.utils.translation import (
LANGUAGE_SESSION_KEY, check_for_language, get_language,
)
from django.utils.translation.trans_real import DjangoTranslation
from django.views.generic import View
LANGUAGE_QUERY_PARAMETER = 'language'
def set_language(request):
"""
Redirect to a given URL while setting the chosen language in the session
(if enabled) and in a cookie. The URL and the language code need to be
specified in the request parameters.
Since this view changes how the user will see the rest of the site, it must
only be accessed as a POST request. If called as a GET request, it will
redirect to the page in the request (the 'next' parameter) without changing
any state.
"""
next = request.POST.get('next', request.GET.get('next'))
if ((next or not request.is_ajax()) and
not is_safe_url(url=next, allowed_hosts={request.get_host()}, require_https=request.is_secure())):
next = request.META.get('HTTP_REFERER')
next = next and unquote(next) # HTTP_REFERER may be encoded.
if not is_safe_url(url=next, allowed_hosts={request.get_host()}, require_https=request.is_secure()):
next = '/'
response = HttpResponseRedirect(next) if next else HttpResponse(status=204)
if request.method == 'POST':
lang_code = request.POST.get(LANGUAGE_QUERY_PARAMETER)
if lang_code and check_for_language(lang_code):
if next:
next_trans = translate_url(next, lang_code)
if next_trans != next:
response = HttpResponseRedirect(next_trans)
if hasattr(request, 'session'):
# Storing the language in the session is deprecated.
# (RemovedInDjango40Warning)
request.session[LANGUAGE_SESSION_KEY] = lang_code
response.set_cookie(
settings.LANGUAGE_COOKIE_NAME, lang_code,
max_age=settings.LANGUAGE_COOKIE_AGE,
path=settings.LANGUAGE_COOKIE_PATH,
domain=settings.LANGUAGE_COOKIE_DOMAIN,
secure=settings.LANGUAGE_COOKIE_SECURE,
httponly=settings.LANGUAGE_COOKIE_HTTPONLY,
samesite=settings.LANGUAGE_COOKIE_SAMESITE,
)
return response
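# Minimal wiring sketch (hypothetical project urls.py): this view is normally
# exposed through django.conf.urls.i18n and reached via a POST form.
def _set_language_urlconf_sketch():
    from django.urls import include, path
    return [
        path('i18n/', include('django.conf.urls.i18n')),
    ]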
def get_formats():
"""Return all formats strings required for i18n to work."""
FORMAT_SETTINGS = (
'DATE_FORMAT', 'DATETIME_FORMAT', 'TIME_FORMAT',
'YEAR_MONTH_FORMAT', 'MONTH_DAY_FORMAT', 'SHORT_DATE_FORMAT',
'SHORT_DATETIME_FORMAT', 'FIRST_DAY_OF_WEEK', 'DECIMAL_SEPARATOR',
'THOUSAND_SEPARATOR', 'NUMBER_GROUPING',
'DATE_INPUT_FORMATS', 'TIME_INPUT_FORMATS', 'DATETIME_INPUT_FORMATS'
)
return {attr: get_format(attr) for attr in FORMAT_SETTINGS}
js_catalog_template = r"""
{% autoescape off %}
(function(globals) {
var django = globals.django || (globals.django = {});
{% if plural %}
django.pluralidx = function(n) {
var v={{ plural }};
if (typeof(v) == 'boolean') {
return v ? 1 : 0;
} else {
return v;
}
};
{% else %}
django.pluralidx = function(count) { return (count == 1) ? 0 : 1; };
{% endif %}
/* gettext library */
django.catalog = django.catalog || {};
{% if catalog_str %}
var newcatalog = {{ catalog_str }};
for (var key in newcatalog) {
django.catalog[key] = newcatalog[key];
}
{% endif %}
if (!django.jsi18n_initialized) {
django.gettext = function(msgid) {
var value = django.catalog[msgid];
if (typeof(value) == 'undefined') {
return msgid;
} else {
return (typeof(value) == 'string') ? value : value[0];
}
};
django.ngettext = function(singular, plural, count) {
var value = django.catalog[singular];
if (typeof(value) == 'undefined') {
return (count == 1) ? singular : plural;
} else {
return value.constructor === Array ? value[django.pluralidx(count)] : value;
}
};
django.gettext_noop = function(msgid) { return msgid; };
django.pgettext = function(context, msgid) {
var value = django.gettext(context + '\x04' + msgid);
if (value.indexOf('\x04') != -1) {
value = msgid;
}
return value;
};
django.npgettext = function(context, singular, plural, count) {
var value = django.ngettext(context + '\x04' + singular, context + '\x04' + plural, count);
if (value.indexOf('\x04') != -1) {
value = django.ngettext(singular, plural, count);
}
return value;
};
django.interpolate = function(fmt, obj, named) {
if (named) {
return fmt.replace(/%\(\w+\)s/g, function(match){return String(obj[match.slice(2,-2)])});
} else {
return fmt.replace(/%s/g, function(match){return String(obj.shift())});
}
};
/* formatting library */
django.formats = {{ formats_str }};
django.get_format = function(format_type) {
var value = django.formats[format_type];
if (typeof(value) == 'undefined') {
return format_type;
} else {
return value;
}
};
/* add to global namespace */
globals.pluralidx = django.pluralidx;
globals.gettext = django.gettext;
globals.ngettext = django.ngettext;
globals.gettext_noop = django.gettext_noop;
globals.pgettext = django.pgettext;
globals.npgettext = django.npgettext;
globals.interpolate = django.interpolate;
globals.get_format = django.get_format;
django.jsi18n_initialized = true;
}
}(this));
{% endautoescape %}
"""
class JavaScriptCatalog(View):
"""
Return the selected language catalog as a JavaScript library.
Receive the list of packages to check for translations in the `packages`
kwarg either from the extra dictionary passed to the url() function or as a
plus-sign delimited string from the request. Default is 'django.conf'.
You can override the gettext domain for this view, but usually you don't
want to do that as JavaScript messages go to the djangojs domain. This
might be needed if you deliver your JavaScript source from Django templates.
"""
domain = 'djangojs'
packages = None
def get(self, request, *args, **kwargs):
locale = get_language()
domain = kwargs.get('domain', self.domain)
# If packages are not provided, default to all installed packages, as
# DjangoTranslation without localedirs harvests them all.
packages = kwargs.get('packages', '')
packages = packages.split('+') if packages else self.packages
paths = self.get_paths(packages) if packages else None
self.translation = DjangoTranslation(locale, domain=domain, localedirs=paths)
context = self.get_context_data(**kwargs)
return self.render_to_response(context)
def get_paths(self, packages):
allowable_packages = {app_config.name: app_config for app_config in apps.get_app_configs()}
app_configs = [allowable_packages[p] for p in packages if p in allowable_packages]
if len(app_configs) < len(packages):
excluded = [p for p in packages if p not in allowable_packages]
raise ValueError(
'Invalid package(s) provided to JavaScriptCatalog: %s' % ','.join(excluded)
)
# paths of requested packages
return [os.path.join(app.path, 'locale') for app in app_configs]
@property
def _num_plurals(self):
"""
Return the number of plurals for this catalog language, or 2 if no
plural string is available.
"""
match = re.search(r'nplurals=\s*(\d+)', self._plural_string or '')
if match:
return int(match.groups()[0])
return 2
@property
def _plural_string(self):
"""
Return the plural string (including nplurals) for this catalog language,
or None if no plural string is available.
"""
if '' in self.translation._catalog:
for line in self.translation._catalog[''].split('\n'):
if line.startswith('Plural-Forms:'):
return line.split(':', 1)[1].strip()
return None
def get_plural(self):
plural = self._plural_string
if plural is not None:
# This should be a compiled function of a typical plural-form:
# Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 :
# n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;
plural = [el.strip() for el in plural.split(';') if el.strip().startswith('plural=')][0].split('=', 1)[1]
return plural
def get_catalog(self):
pdict = {}
num_plurals = self._num_plurals
catalog = {}
trans_cat = self.translation._catalog
trans_fallback_cat = self.translation._fallback._catalog if self.translation._fallback else {}
seen_keys = set()
for key, value in itertools.chain(trans_cat.items(), trans_fallback_cat.items()):
if key == '' or key in seen_keys:
continue
if isinstance(key, str):
catalog[key] = value
elif isinstance(key, tuple):
msgid, cnt = key
pdict.setdefault(msgid, {})[cnt] = value
else:
raise TypeError(key)
seen_keys.add(key)
for k, v in pdict.items():
catalog[k] = [v.get(i, '') for i in range(num_plurals)]
return catalog
def get_context_data(self, **kwargs):
return {
'catalog': self.get_catalog(),
'formats': get_formats(),
'plural': self.get_plural(),
}
def render_to_response(self, context, **response_kwargs):
def indent(s):
return s.replace('\n', '\n ')
template = Engine().from_string(js_catalog_template)
context['catalog_str'] = indent(
json.dumps(context['catalog'], sort_keys=True, indent=2)
) if context['catalog'] else None
context['formats_str'] = indent(json.dumps(context['formats'], sort_keys=True, indent=2))
return HttpResponse(template.render(Context(context)), 'text/javascript; charset="utf-8"')
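# Minimal wiring sketch (hypothetical urls.py; 'myapp' is a placeholder app):
def _javascript_catalog_urlconf_sketch():
    from django.urls import path
    return [
        path('jsi18n/', JavaScriptCatalog.as_view(), name='javascript-catalog'),
        path('jsi18n/myapp/', JavaScriptCatalog.as_view(packages=['myapp'])),
    ]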
class JSONCatalog(JavaScriptCatalog):
"""
Return the selected language catalog as a JSON object.
Receive the same parameters as JavaScriptCatalog and return a response
with a JSON object of the following format:
{
"catalog": {
# Translations catalog
},
"formats": {
# Language formats for date, time, etc.
},
"plural": '...' # Expression for plural forms, or null.
}
"""
def render_to_response(self, context, **response_kwargs):
return JsonResponse(context)
|
from urllib.parse import quote
from django.http import (
HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotFound,
HttpResponseServerError,
)
from django.template import Context, Engine, TemplateDoesNotExist, loader
from django.views.decorators.csrf import requires_csrf_token
ERROR_404_TEMPLATE_NAME = '404.html'
ERROR_403_TEMPLATE_NAME = '403.html'
ERROR_400_TEMPLATE_NAME = '400.html'
ERROR_500_TEMPLATE_NAME = '500.html'
ERROR_PAGE_TEMPLATE = """
<!doctype html>
<html lang="en">
<head>
<title>%(title)s</title>
</head>
<body>
<h1>%(title)s</h1><p>%(details)s</p>
</body>
</html>
"""
# This can be called when CsrfViewMiddleware.process_view() has not run,
# so @requires_csrf_token is needed in case the template uses
# {% csrf_token %}.
@requires_csrf_token
def page_not_found(request, exception, template_name=ERROR_404_TEMPLATE_NAME):
"""
Default 404 handler.
Templates: :template:`404.html`
Context:
request_path
The path of the requested URL (e.g., '/app/pages/bad_page/'). It's
quoted to prevent a content injection attack.
exception
The message from the exception which triggered the 404 (if one was
supplied), or the exception class name
"""
exception_repr = exception.__class__.__name__
# Try to get an "interesting" exception message, if any (and not the ugly
# Resolver404 dictionary)
try:
message = exception.args[0]
except (AttributeError, IndexError):
pass
else:
if isinstance(message, str):
exception_repr = message
context = {
'request_path': quote(request.path),
'exception': exception_repr,
}
try:
template = loader.get_template(template_name)
body = template.render(context, request)
content_type = None # Django will use 'text/html'.
except TemplateDoesNotExist:
if template_name != ERROR_404_TEMPLATE_NAME:
# Reraise if it's a missing custom template.
raise
# Render template (even though there are no substitutions) to allow
# inspecting the context in tests.
template = Engine().from_string(
ERROR_PAGE_TEMPLATE % {
'title': 'Not Found',
'details': 'The requested resource was not found on this server.',
},
)
body = template.render(Context(context))
content_type = 'text/html'
return HttpResponseNotFound(body, content_type=content_type)
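# Wiring sketch: a root URLconf can point handler404 at this view or at a
# custom view with the same signature (the module path in the comment is
# hypothetical):
#
#   # urls.py
#   handler404 = 'myproject.views.custom_page_not_found'
#
def _custom_page_not_found_sketch(request, exception, template_name=ERROR_404_TEMPLATE_NAME):
    # A pass-through handler, shown only to illustrate the expected signature.
    return page_not_found(request, exception, template_name=template_name)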
@requires_csrf_token
def server_error(request, template_name=ERROR_500_TEMPLATE_NAME):
"""
500 error handler.
Templates: :template:`500.html`
Context: None
"""
try:
template = loader.get_template(template_name)
except TemplateDoesNotExist:
if template_name != ERROR_500_TEMPLATE_NAME:
# Reraise if it's a missing custom template.
raise
return HttpResponseServerError(
ERROR_PAGE_TEMPLATE % {'title': 'Server Error (500)', 'details': ''},
content_type='text/html',
)
return HttpResponseServerError(template.render())
@requires_csrf_token
def bad_request(request, exception, template_name=ERROR_400_TEMPLATE_NAME):
"""
400 error handler.
Templates: :template:`400.html`
Context: None
"""
try:
template = loader.get_template(template_name)
except TemplateDoesNotExist:
if template_name != ERROR_400_TEMPLATE_NAME:
# Reraise if it's a missing custom template.
raise
return HttpResponseBadRequest(
ERROR_PAGE_TEMPLATE % {'title': 'Bad Request (400)', 'details': ''},
content_type='text/html',
)
# No exception content is passed to the template, to not disclose any sensitive information.
return HttpResponseBadRequest(template.render())
# This can be called when CsrfViewMiddleware.process_view() has not run,
# so @requires_csrf_token is needed in case the template uses
# {% csrf_token %}.
@requires_csrf_token
def permission_denied(request, exception, template_name=ERROR_403_TEMPLATE_NAME):
"""
Permission denied (403) handler.
Templates: :template:`403.html`
Context: None
    If the template does not exist, an HttpResponseForbidden response
    containing the text "403 Forbidden" (as per RFC 7231) will be returned.
"""
try:
template = loader.get_template(template_name)
except TemplateDoesNotExist:
if template_name != ERROR_403_TEMPLATE_NAME:
# Reraise if it's a missing custom template.
raise
return HttpResponseForbidden(
ERROR_PAGE_TEMPLATE % {'title': '403 Forbidden', 'details': ''},
content_type='text/html',
)
return HttpResponseForbidden(
template.render(request=request, context={'exception': str(exception)})
)
|
import functools
import re
import sys
import types
from pathlib import Path
from django.conf import settings
from django.http import HttpResponse, HttpResponseNotFound
from django.template import Context, Engine, TemplateDoesNotExist
from django.template.defaultfilters import pprint
from django.urls import Resolver404, resolve
from django.utils import timezone
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_str
from django.utils.module_loading import import_string
from django.utils.version import get_docs_version
# Minimal Django templates engine to render the error templates
# regardless of the project's TEMPLATES setting. Templates are
# read directly from the filesystem so that the error handler
# works even if the template loader is broken.
DEBUG_ENGINE = Engine(
debug=True,
libraries={'i18n': 'django.templatetags.i18n'},
)
HIDDEN_SETTINGS = re.compile('API|TOKEN|KEY|SECRET|PASS|SIGNATURE', flags=re.IGNORECASE)
CLEANSED_SUBSTITUTE = '********************'
CURRENT_DIR = Path(__file__).parent
class CallableSettingWrapper:
"""
    Object to wrap a callable appearing in settings.
    * Prevents it from being called in the debug page (#21345).
    * Keeps the debug page from breaking if the callable forbids setting
      attributes (#23070).
"""
def __init__(self, callable_setting):
self._wrapped = callable_setting
def __repr__(self):
return repr(self._wrapped)
def cleanse_setting(key, value):
"""
Cleanse an individual setting key/value of sensitive content. If the value
is a dictionary, recursively cleanse the keys in that dictionary.
"""
try:
if HIDDEN_SETTINGS.search(key):
cleansed = CLEANSED_SUBSTITUTE
else:
if isinstance(value, dict):
cleansed = {k: cleanse_setting(k, v) for k, v in value.items()}
else:
cleansed = value
except TypeError:
# If the key isn't regex-able, just return as-is.
cleansed = value
if callable(cleansed):
# For fixing #21345 and #23070
cleansed = CallableSettingWrapper(cleansed)
return cleansed
def get_safe_settings():
"""
Return a dictionary of the settings module with values of sensitive
settings replaced with stars (*********).
"""
settings_dict = {}
for k in dir(settings):
if k.isupper():
settings_dict[k] = cleanse_setting(k, getattr(settings, k))
return settings_dict
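# Quick behavior sketch: names matching HIDDEN_SETTINGS are starred out,
# recursively for dict values; everything else passes through unchanged.
def _cleanse_setting_examples():
    assert cleanse_setting('SECRET_KEY', 'abc') == CLEANSED_SUBSTITUTE
    assert cleanse_setting('DEBUG', True) is True
    assert cleanse_setting('DATABASES', {'PASSWORD': 'x'}) == {'PASSWORD': CLEANSED_SUBSTITUTE}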
def technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""
Create a technical server error response. The last three arguments are
the values returned from sys.exc_info() and friends.
"""
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
if request.is_ajax():
text = reporter.get_traceback_text()
return HttpResponse(text, status=status_code, content_type='text/plain; charset=utf-8')
else:
html = reporter.get_traceback_html()
return HttpResponse(html, status=status_code, content_type='text/html')
@functools.lru_cache()
def get_default_exception_reporter_filter():
# Instantiate the default filter for the first time and cache it.
return import_string(settings.DEFAULT_EXCEPTION_REPORTER_FILTER)()
def get_exception_reporter_filter(request):
default_filter = get_default_exception_reporter_filter()
return getattr(request, 'exception_reporter_filter', default_filter)
class ExceptionReporterFilter:
"""
Base for all exception reporter filter classes. All overridable hooks
contain lenient default behaviors.
"""
def get_post_parameters(self, request):
if request is None:
return {}
else:
return request.POST
def get_traceback_frame_variables(self, request, tb_frame):
return list(tb_frame.f_locals.items())
class SafeExceptionReporterFilter(ExceptionReporterFilter):
"""
Use annotations made by the sensitive_post_parameters and
sensitive_variables decorators to filter out sensitive information.
"""
def is_active(self, request):
"""
This filter is to add safety in production environments (i.e. DEBUG
is False). If DEBUG is True then your site is not safe anyway.
This hook is provided as a convenience to easily activate or
deactivate the filter on a per request basis.
"""
return settings.DEBUG is False
def get_cleansed_multivaluedict(self, request, multivaluedict):
"""
Replace the keys in a MultiValueDict marked as sensitive with stars.
This mitigates leaking sensitive POST parameters if something like
request.POST['nonexistent_key'] throws an exception (#21098).
"""
sensitive_post_parameters = getattr(request, 'sensitive_post_parameters', [])
if self.is_active(request) and sensitive_post_parameters:
multivaluedict = multivaluedict.copy()
for param in sensitive_post_parameters:
if param in multivaluedict:
multivaluedict[param] = CLEANSED_SUBSTITUTE
return multivaluedict
def get_post_parameters(self, request):
"""
Replace the values of POST parameters marked as sensitive with
stars (*********).
"""
if request is None:
return {}
else:
sensitive_post_parameters = getattr(request, 'sensitive_post_parameters', [])
if self.is_active(request) and sensitive_post_parameters:
cleansed = request.POST.copy()
if sensitive_post_parameters == '__ALL__':
# Cleanse all parameters.
for k in cleansed:
cleansed[k] = CLEANSED_SUBSTITUTE
return cleansed
else:
# Cleanse only the specified parameters.
for param in sensitive_post_parameters:
if param in cleansed:
cleansed[param] = CLEANSED_SUBSTITUTE
return cleansed
else:
return request.POST
def cleanse_special_types(self, request, value):
try:
            # If value is lazy or a complex object of another kind, this check
            # might raise an exception. The isinstance() call forces lazy
            # MultiValueDicts to evaluate so the check yields a result.
is_multivalue_dict = isinstance(value, MultiValueDict)
except Exception as e:
return '{!r} while evaluating {!r}'.format(e, value)
if is_multivalue_dict:
# Cleanse MultiValueDicts (request.POST is the one we usually care about)
value = self.get_cleansed_multivaluedict(request, value)
return value
def get_traceback_frame_variables(self, request, tb_frame):
"""
Replace the values of variables marked as sensitive with
stars (*********).
"""
# Loop through the frame's callers to see if the sensitive_variables
# decorator was used.
current_frame = tb_frame.f_back
sensitive_variables = None
while current_frame is not None:
if (current_frame.f_code.co_name == 'sensitive_variables_wrapper' and
'sensitive_variables_wrapper' in current_frame.f_locals):
# The sensitive_variables decorator was used, so we take note
# of the sensitive variables' names.
wrapper = current_frame.f_locals['sensitive_variables_wrapper']
sensitive_variables = getattr(wrapper, 'sensitive_variables', None)
break
current_frame = current_frame.f_back
cleansed = {}
if self.is_active(request) and sensitive_variables:
if sensitive_variables == '__ALL__':
# Cleanse all variables
for name in tb_frame.f_locals:
cleansed[name] = CLEANSED_SUBSTITUTE
else:
# Cleanse specified variables
for name, value in tb_frame.f_locals.items():
if name in sensitive_variables:
value = CLEANSED_SUBSTITUTE
else:
value = self.cleanse_special_types(request, value)
cleansed[name] = value
else:
# Potentially cleanse the request and any MultiValueDicts if they
# are one of the frame variables.
for name, value in tb_frame.f_locals.items():
cleansed[name] = self.cleanse_special_types(request, value)
if (tb_frame.f_code.co_name == 'sensitive_variables_wrapper' and
'sensitive_variables_wrapper' in tb_frame.f_locals):
# For good measure, obfuscate the decorated function's arguments in
# the sensitive_variables decorator's frame, in case the variables
# associated with those arguments were meant to be obfuscated from
# the decorated function's frame.
cleansed['func_args'] = CLEANSED_SUBSTITUTE
cleansed['func_kwargs'] = CLEANSED_SUBSTITUTE
return cleansed.items()
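    # The decorator this method scans the frames for lives in
    # django.views.decorators.debug. An illustrative pairing (the names
    # are examples, not part of this module):
    #
    #   from django.views.decorators.debug import sensitive_variables
    #
    #   @sensitive_variables('api_key')
    #   def charge(request):
    #       api_key = load_key()  # rendered as ********* in tracebacks
    #       ...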
class ExceptionReporter:
"""Organize and coordinate reporting on exceptions."""
def __init__(self, request, exc_type, exc_value, tb, is_email=False):
self.request = request
self.filter = get_exception_reporter_filter(self.request)
self.exc_type = exc_type
self.exc_value = exc_value
self.tb = tb
self.is_email = is_email
self.template_info = getattr(self.exc_value, 'template_debug', None)
self.template_does_not_exist = False
self.postmortem = None
def get_traceback_data(self):
"""Return a dictionary containing traceback information."""
if self.exc_type and issubclass(self.exc_type, TemplateDoesNotExist):
self.template_does_not_exist = True
self.postmortem = self.exc_value.chain or [self.exc_value]
frames = self.get_traceback_frames()
for i, frame in enumerate(frames):
if 'vars' in frame:
frame_vars = []
for k, v in frame['vars']:
v = pprint(v)
# Trim large blobs of data
if len(v) > 4096:
v = '%s… <trimmed %d bytes string>' % (v[0:4096], len(v))
frame_vars.append((k, v))
frame['vars'] = frame_vars
frames[i] = frame
unicode_hint = ''
if self.exc_type and issubclass(self.exc_type, UnicodeError):
start = getattr(self.exc_value, 'start', None)
end = getattr(self.exc_value, 'end', None)
if start is not None and end is not None:
unicode_str = self.exc_value.args[1]
unicode_hint = force_str(
unicode_str[max(start - 5, 0):min(end + 5, len(unicode_str))],
'ascii', errors='replace'
)
from django import get_version
if self.request is None:
user_str = None
else:
try:
user_str = str(self.request.user)
except Exception:
# request.user may raise OperationalError if the database is
# unavailable, for example.
user_str = '[unable to retrieve the current user]'
c = {
'is_email': self.is_email,
'unicode_hint': unicode_hint,
'frames': frames,
'request': self.request,
'user_str': user_str,
'filtered_POST_items': list(self.filter.get_post_parameters(self.request).items()),
'settings': get_safe_settings(),
'sys_executable': sys.executable,
'sys_version_info': '%d.%d.%d' % sys.version_info[0:3],
'server_time': timezone.now(),
'django_version_info': get_version(),
'sys_path': sys.path,
'template_info': self.template_info,
'template_does_not_exist': self.template_does_not_exist,
'postmortem': self.postmortem,
}
if self.request is not None:
c['request_GET_items'] = self.request.GET.items()
c['request_FILES_items'] = self.request.FILES.items()
c['request_COOKIES_items'] = self.request.COOKIES.items()
# Check whether exception info is available
if self.exc_type:
c['exception_type'] = self.exc_type.__name__
if self.exc_value:
c['exception_value'] = str(self.exc_value)
if frames:
c['lastframe'] = frames[-1]
return c
def get_traceback_html(self):
"""Return HTML version of debug 500 HTTP error page."""
with Path(CURRENT_DIR, 'templates', 'technical_500.html').open(encoding='utf-8') as fh:
t = DEBUG_ENGINE.from_string(fh.read())
c = Context(self.get_traceback_data(), use_l10n=False)
return t.render(c)
def get_traceback_text(self):
"""Return plain text version of debug 500 HTTP error page."""
with Path(CURRENT_DIR, 'templates', 'technical_500.txt').open(encoding='utf-8') as fh:
t = DEBUG_ENGINE.from_string(fh.read())
c = Context(self.get_traceback_data(), autoescape=False, use_l10n=False)
return t.render(c)
def _get_lines_from_file(self, filename, lineno, context_lines, loader=None, module_name=None):
"""
Return context_lines before and after lineno from file.
Return (pre_context_lineno, pre_context, context_line, post_context).
"""
source = None
if hasattr(loader, 'get_source'):
try:
source = loader.get_source(module_name)
except ImportError:
pass
if source is not None:
source = source.splitlines()
if source is None:
try:
with open(filename, 'rb') as fp:
source = fp.read().splitlines()
except OSError:
pass
if source is None:
return None, [], None, []
# If we just read the source from a file, or if the loader did not
# apply tokenize.detect_encoding to decode the source into a
# string, then we should do that ourselves.
if isinstance(source[0], bytes):
encoding = 'ascii'
for line in source[:2]:
# File coding may be specified. Match pattern from PEP-263
# (https://www.python.org/dev/peps/pep-0263/)
match = re.search(br'coding[:=]\s*([-\w.]+)', line)
if match:
encoding = match.group(1).decode('ascii')
break
source = [str(sline, encoding, 'replace') for sline in source]
lower_bound = max(0, lineno - context_lines)
upper_bound = lineno + context_lines
pre_context = source[lower_bound:lineno]
context_line = source[lineno]
post_context = source[lineno + 1:upper_bound]
return lower_bound, pre_context, context_line, post_context
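    # For reference, the PEP 263 declarations matched above look like the
    # following (illustrative lines, not read from any real file):
    #
    #   # -*- coding: latin-1 -*-
    #   # coding=utf-8
    #
    # Either form, on one of the first two lines, selects the encoding used
    # to decode the bytes read from disk.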
def get_traceback_frames(self):
def explicit_or_implicit_cause(exc_value):
explicit = getattr(exc_value, '__cause__', None)
implicit = getattr(exc_value, '__context__', None)
return explicit or implicit
# Get the exception and all its causes
exceptions = []
exc_value = self.exc_value
while exc_value:
exceptions.append(exc_value)
exc_value = explicit_or_implicit_cause(exc_value)
if exc_value in exceptions:
# Avoid infinite loop if there's a cyclic reference (#29393).
break
frames = []
# No exceptions were supplied to ExceptionReporter
if not exceptions:
return frames
# In case there's just one exception, take the traceback from self.tb
exc_value = exceptions.pop()
tb = self.tb if not exceptions else exc_value.__traceback__
while tb is not None:
# Support for __traceback_hide__ which is used by a few libraries
# to hide internal frames.
if tb.tb_frame.f_locals.get('__traceback_hide__'):
tb = tb.tb_next
continue
filename = tb.tb_frame.f_code.co_filename
function = tb.tb_frame.f_code.co_name
lineno = tb.tb_lineno - 1
loader = tb.tb_frame.f_globals.get('__loader__')
module_name = tb.tb_frame.f_globals.get('__name__') or ''
pre_context_lineno, pre_context, context_line, post_context = self._get_lines_from_file(
filename, lineno, 7, loader, module_name,
)
if pre_context_lineno is None:
pre_context_lineno = lineno
pre_context = []
context_line = '<source code not available>'
post_context = []
frames.append({
'exc_cause': explicit_or_implicit_cause(exc_value),
'exc_cause_explicit': getattr(exc_value, '__cause__', True),
'tb': tb,
'type': 'django' if module_name.startswith('django.') else 'user',
'filename': filename,
'function': function,
'lineno': lineno + 1,
'vars': self.filter.get_traceback_frame_variables(self.request, tb.tb_frame),
'id': id(tb),
'pre_context': pre_context,
'context_line': context_line,
'post_context': post_context,
'pre_context_lineno': pre_context_lineno + 1,
})
# If the traceback for current exception is consumed, try the
# other exception.
if not tb.tb_next and exceptions:
exc_value = exceptions.pop()
tb = exc_value.__traceback__
else:
tb = tb.tb_next
return frames
def technical_404_response(request, exception):
"""Create a technical 404 error response. `exception` is the Http404."""
try:
error_url = exception.args[0]['path']
except (IndexError, TypeError, KeyError):
error_url = request.path_info[1:] # Trim leading slash
try:
tried = exception.args[0]['tried']
except (IndexError, TypeError, KeyError):
tried = []
else:
if (not tried or ( # empty URLconf
request.path == '/' and
len(tried) == 1 and # default URLconf
len(tried[0]) == 1 and
getattr(tried[0][0], 'app_name', '') == getattr(tried[0][0], 'namespace', '') == 'admin'
)):
return default_urlconf(request)
urlconf = getattr(request, 'urlconf', settings.ROOT_URLCONF)
if isinstance(urlconf, types.ModuleType):
urlconf = urlconf.__name__
caller = ''
try:
resolver_match = resolve(request.path)
except Resolver404:
pass
else:
obj = resolver_match.func
if hasattr(obj, '__name__'):
caller = obj.__name__
elif hasattr(obj, '__class__') and hasattr(obj.__class__, '__name__'):
caller = obj.__class__.__name__
if hasattr(obj, '__module__'):
module = obj.__module__
caller = '%s.%s' % (module, caller)
with Path(CURRENT_DIR, 'templates', 'technical_404.html').open(encoding='utf-8') as fh:
t = DEBUG_ENGINE.from_string(fh.read())
c = Context({
'urlconf': urlconf,
'root_urlconf': settings.ROOT_URLCONF,
'request_path': error_url,
'urlpatterns': tried,
'reason': str(exception),
'request': request,
'settings': get_safe_settings(),
'raising_view_name': caller,
})
return HttpResponseNotFound(t.render(c), content_type='text/html')
def default_urlconf(request):
"""Create an empty URLconf 404 error response."""
with Path(CURRENT_DIR, 'templates', 'default_urlconf.html').open(encoding='utf-8') as fh:
t = DEBUG_ENGINE.from_string(fh.read())
c = Context({
'version': get_docs_version(),
})
return HttpResponse(t.render(c), content_type='text/html')
|
9662293ead6c67168203f66d5f5a74512945101cae642a20ad936697c84bd4db | """
Settings and configuration for Django.
Read values from the module specified by the DJANGO_SETTINGS_MODULE environment
variable, and then from django.conf.global_settings; see the global_settings.py
for a list of all possible variables.
"""
import importlib
import os
import time
import traceback
import warnings
from pathlib import Path
import django
from django.conf import global_settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.deprecation import RemovedInDjango31Warning
from django.utils.functional import LazyObject, empty
ENVIRONMENT_VARIABLE = "DJANGO_SETTINGS_MODULE"
FILE_CHARSET_DEPRECATED_MSG = (
'The FILE_CHARSET setting is deprecated. Starting with Django 3.1, all '
'files read from disk must be UTF-8 encoded.'
)
class SettingsReference(str):
"""
String subclass which references a current settings value. It's treated as
the value in memory but serializes to a settings.NAME attribute reference.
"""
    def __new__(cls, value, setting_name):
        return str.__new__(cls, value)
def __init__(self, value, setting_name):
self.setting_name = setting_name
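# A sketch of the intent (the setting name is illustrative): in memory,
# SettingsReference('auth.User', 'AUTH_USER_MODEL') compares and behaves
# exactly like the plain string 'auth.User', while a serializer (e.g. the
# migration writer) can read .setting_name and emit a reference to
# settings.AUTH_USER_MODEL instead of the raw value.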
class LazySettings(LazyObject):
"""
A lazy proxy for either global Django settings or a custom settings object.
The user can manually configure settings prior to using them. Otherwise,
Django uses the settings module pointed to by DJANGO_SETTINGS_MODULE.
"""
def _setup(self, name=None):
"""
Load the settings module pointed to by the environment variable. This
is used the first time settings are needed, if the user hasn't
configured settings manually.
"""
settings_module = os.environ.get(ENVIRONMENT_VARIABLE)
if not settings_module:
desc = ("setting %s" % name) if name else "settings"
raise ImproperlyConfigured(
"Requested %s, but settings are not configured. "
"You must either define the environment variable %s "
"or call settings.configure() before accessing settings."
% (desc, ENVIRONMENT_VARIABLE))
self._wrapped = Settings(settings_module)
def __repr__(self):
# Hardcode the class name as otherwise it yields 'Settings'.
if self._wrapped is empty:
return '<LazySettings [Unevaluated]>'
return '<LazySettings "%(settings_module)s">' % {
'settings_module': self._wrapped.SETTINGS_MODULE,
}
def __getattr__(self, name):
"""Return the value of a setting and cache it in self.__dict__."""
if self._wrapped is empty:
self._setup(name)
val = getattr(self._wrapped, name)
self.__dict__[name] = val
return val
def __setattr__(self, name, value):
"""
Set the value of setting. Clear all cached values if _wrapped changes
(@override_settings does this) or clear single values when set.
"""
if name == '_wrapped':
self.__dict__.clear()
else:
self.__dict__.pop(name, None)
super().__setattr__(name, value)
def __delattr__(self, name):
"""Delete a setting and clear it from cache if needed."""
super().__delattr__(name)
self.__dict__.pop(name, None)
def configure(self, default_settings=global_settings, **options):
"""
Called to manually configure the settings. The 'default_settings'
parameter sets where to retrieve any unspecified values from (its
argument must support attribute access (__getattr__)).
"""
if self._wrapped is not empty:
raise RuntimeError('Settings already configured.')
holder = UserSettingsHolder(default_settings)
for name, value in options.items():
if not name.isupper():
raise TypeError('Setting %r must be uppercase.' % name)
setattr(holder, name, value)
self._wrapped = holder
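    # Typical standalone use, e.g. a script that never sets
    # DJANGO_SETTINGS_MODULE (the values shown are illustrative):
    #
    #   from django.conf import settings
    #   settings.configure(DEBUG=True, ALLOWED_HOSTS=['localhost'])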
@property
def configured(self):
"""Return True if the settings have already been configured."""
return self._wrapped is not empty
@property
def FILE_CHARSET(self):
stack = traceback.extract_stack()
# Show a warning if the setting is used outside of Django.
# Stack index: -1 this line, -2 the caller.
filename, _line_number, _function_name, _text = stack[-2]
if not filename.startswith(os.path.dirname(django.__file__)):
warnings.warn(
FILE_CHARSET_DEPRECATED_MSG,
RemovedInDjango31Warning,
stacklevel=2,
)
return self.__getattr__('FILE_CHARSET')
class Settings:
def __init__(self, settings_module):
# update this dict from global settings (but only for ALL_CAPS settings)
for setting in dir(global_settings):
if setting.isupper():
setattr(self, setting, getattr(global_settings, setting))
# store the settings module in case someone later cares
self.SETTINGS_MODULE = settings_module
mod = importlib.import_module(self.SETTINGS_MODULE)
tuple_settings = (
"INSTALLED_APPS",
"TEMPLATE_DIRS",
"LOCALE_PATHS",
)
self._explicit_settings = set()
for setting in dir(mod):
if setting.isupper():
setting_value = getattr(mod, setting)
if (setting in tuple_settings and
not isinstance(setting_value, (list, tuple))):
raise ImproperlyConfigured("The %s setting must be a list or a tuple. " % setting)
setattr(self, setting, setting_value)
self._explicit_settings.add(setting)
if not self.SECRET_KEY:
raise ImproperlyConfigured("The SECRET_KEY setting must not be empty.")
if self.is_overridden('FILE_CHARSET'):
warnings.warn(FILE_CHARSET_DEPRECATED_MSG, RemovedInDjango31Warning)
if hasattr(time, 'tzset') and self.TIME_ZONE:
# When we can, attempt to validate the timezone. If we can't find
# this file, no check happens and it's harmless.
zoneinfo_root = Path('/usr/share/zoneinfo')
zone_info_file = zoneinfo_root.joinpath(*self.TIME_ZONE.split('/'))
if zoneinfo_root.exists() and not zone_info_file.exists():
raise ValueError("Incorrect timezone setting: %s" % self.TIME_ZONE)
# Move the time zone info into os.environ. See ticket #2315 for why
# we don't do this unconditionally (breaks Windows).
os.environ['TZ'] = self.TIME_ZONE
time.tzset()
def is_overridden(self, setting):
return setting in self._explicit_settings
def __repr__(self):
return '<%(cls)s "%(settings_module)s">' % {
'cls': self.__class__.__name__,
'settings_module': self.SETTINGS_MODULE,
}
class UserSettingsHolder:
"""Holder for user configured settings."""
# SETTINGS_MODULE doesn't make much sense in the manually configured
# (standalone) case.
SETTINGS_MODULE = None
def __init__(self, default_settings):
"""
Requests for configuration variables not in this class are satisfied
from the module specified in default_settings (if possible).
"""
self.__dict__['_deleted'] = set()
self.default_settings = default_settings
def __getattr__(self, name):
if not name.isupper() or name in self._deleted:
raise AttributeError
return getattr(self.default_settings, name)
def __setattr__(self, name, value):
self._deleted.discard(name)
if name == 'FILE_CHARSET':
warnings.warn(FILE_CHARSET_DEPRECATED_MSG, RemovedInDjango31Warning)
super().__setattr__(name, value)
def __delattr__(self, name):
self._deleted.add(name)
if hasattr(self, name):
super().__delattr__(name)
def __dir__(self):
return sorted(
s for s in [*self.__dict__, *dir(self.default_settings)]
if s not in self._deleted
)
def is_overridden(self, setting):
deleted = (setting in self._deleted)
set_locally = (setting in self.__dict__)
set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting)
return deleted or set_locally or set_on_default
def __repr__(self):
return '<%(cls)s>' % {
'cls': self.__class__.__name__,
}
settings = LazySettings()
|
23983060bbb5d33d72f580ba15ac168e5d553a49b716f5590b3da2c12dc31fea | """
Default Django settings. Override these with settings in the module pointed to
by the DJANGO_SETTINGS_MODULE environment variable.
"""
# This is defined here as a do-nothing function because we can't import
# django.utils.translation -- that module depends on the settings.
def gettext_noop(s):
return s
####################
# CORE #
####################
DEBUG = False
# Whether the framework should propagate raw exceptions rather than catching
# them. This is useful under some testing situations and should never be used
# on a live site.
DEBUG_PROPAGATE_EXCEPTIONS = False
# People who get code error notifications.
# In the format [('Full Name', '[email protected]'), ('Full Name', '[email protected]')]
ADMINS = []
# List of IP addresses, as strings, that:
# * See debug comments, when DEBUG is true
# * Receive x-headers
INTERNAL_IPS = []
# Hosts/domain names that are valid for this site.
# "*" matches anything, ".example.com" matches example.com and all subdomains
ALLOWED_HOSTS = []
# Local time zone for this installation. All choices can be found here:
# https://en.wikipedia.org/wiki/List_of_tz_zones_by_name (although not all
# systems may support all possibilities). When USE_TZ is True, this is
# interpreted as the default user time zone.
TIME_ZONE = 'America/Chicago'
# If you set this to True, Django will use timezone-aware datetimes.
USE_TZ = False
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# Languages we provide translations for, out of the box.
LANGUAGES = [
('af', gettext_noop('Afrikaans')),
('ar', gettext_noop('Arabic')),
('ast', gettext_noop('Asturian')),
('az', gettext_noop('Azerbaijani')),
('bg', gettext_noop('Bulgarian')),
('be', gettext_noop('Belarusian')),
('bn', gettext_noop('Bengali')),
('br', gettext_noop('Breton')),
('bs', gettext_noop('Bosnian')),
('ca', gettext_noop('Catalan')),
('cs', gettext_noop('Czech')),
('cy', gettext_noop('Welsh')),
('da', gettext_noop('Danish')),
('de', gettext_noop('German')),
('dsb', gettext_noop('Lower Sorbian')),
('el', gettext_noop('Greek')),
('en', gettext_noop('English')),
('en-au', gettext_noop('Australian English')),
('en-gb', gettext_noop('British English')),
('eo', gettext_noop('Esperanto')),
('es', gettext_noop('Spanish')),
('es-ar', gettext_noop('Argentinian Spanish')),
('es-co', gettext_noop('Colombian Spanish')),
('es-mx', gettext_noop('Mexican Spanish')),
('es-ni', gettext_noop('Nicaraguan Spanish')),
('es-ve', gettext_noop('Venezuelan Spanish')),
('et', gettext_noop('Estonian')),
('eu', gettext_noop('Basque')),
('fa', gettext_noop('Persian')),
('fi', gettext_noop('Finnish')),
('fr', gettext_noop('French')),
('fy', gettext_noop('Frisian')),
('ga', gettext_noop('Irish')),
('gd', gettext_noop('Scottish Gaelic')),
('gl', gettext_noop('Galician')),
('he', gettext_noop('Hebrew')),
('hi', gettext_noop('Hindi')),
('hr', gettext_noop('Croatian')),
('hsb', gettext_noop('Upper Sorbian')),
('hu', gettext_noop('Hungarian')),
('hy', gettext_noop('Armenian')),
('ia', gettext_noop('Interlingua')),
('id', gettext_noop('Indonesian')),
('io', gettext_noop('Ido')),
('is', gettext_noop('Icelandic')),
('it', gettext_noop('Italian')),
('ja', gettext_noop('Japanese')),
('ka', gettext_noop('Georgian')),
('kab', gettext_noop('Kabyle')),
('kk', gettext_noop('Kazakh')),
('km', gettext_noop('Khmer')),
('kn', gettext_noop('Kannada')),
('ko', gettext_noop('Korean')),
('lb', gettext_noop('Luxembourgish')),
('lt', gettext_noop('Lithuanian')),
('lv', gettext_noop('Latvian')),
('mk', gettext_noop('Macedonian')),
('ml', gettext_noop('Malayalam')),
('mn', gettext_noop('Mongolian')),
('mr', gettext_noop('Marathi')),
('my', gettext_noop('Burmese')),
('nb', gettext_noop('Norwegian Bokmål')),
('ne', gettext_noop('Nepali')),
('nl', gettext_noop('Dutch')),
('nn', gettext_noop('Norwegian Nynorsk')),
('os', gettext_noop('Ossetic')),
('pa', gettext_noop('Punjabi')),
('pl', gettext_noop('Polish')),
('pt', gettext_noop('Portuguese')),
('pt-br', gettext_noop('Brazilian Portuguese')),
('ro', gettext_noop('Romanian')),
('ru', gettext_noop('Russian')),
('sk', gettext_noop('Slovak')),
('sl', gettext_noop('Slovenian')),
('sq', gettext_noop('Albanian')),
('sr', gettext_noop('Serbian')),
('sr-latn', gettext_noop('Serbian Latin')),
('sv', gettext_noop('Swedish')),
('sw', gettext_noop('Swahili')),
('ta', gettext_noop('Tamil')),
('te', gettext_noop('Telugu')),
('th', gettext_noop('Thai')),
('tr', gettext_noop('Turkish')),
('tt', gettext_noop('Tatar')),
('udm', gettext_noop('Udmurt')),
('uk', gettext_noop('Ukrainian')),
('ur', gettext_noop('Urdu')),
('vi', gettext_noop('Vietnamese')),
('zh-hans', gettext_noop('Simplified Chinese')),
('zh-hant', gettext_noop('Traditional Chinese')),
]
# Languages using BiDi (right-to-left) layout
LANGUAGES_BIDI = ["he", "ar", "fa", "ur"]
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
LOCALE_PATHS = []
# Settings for language cookie
LANGUAGE_COOKIE_NAME = 'django_language'
LANGUAGE_COOKIE_AGE = None
LANGUAGE_COOKIE_DOMAIN = None
LANGUAGE_COOKIE_PATH = '/'
LANGUAGE_COOKIE_SECURE = False
LANGUAGE_COOKIE_HTTPONLY = False
LANGUAGE_COOKIE_SAMESITE = None
# If you set this to True, Django will format dates, numbers and calendars
# according to the user's current locale.
USE_L10N = False
# Not-necessarily-technical managers of the site. They get broken link
# notifications and other various emails.
MANAGERS = ADMINS
# Default charset to use for all HttpResponse objects, if a MIME type isn't
# manually specified. It's used to construct the Content-Type header.
DEFAULT_CHARSET = 'utf-8'
# Encoding of files read from disk (template and initial SQL files).
FILE_CHARSET = 'utf-8'
# Email address that error messages come from.
SERVER_EMAIL = 'root@localhost'
# Database connection info. If left empty, will default to the dummy backend.
DATABASES = {}
# Classes used to implement DB routing behavior.
DATABASE_ROUTERS = []
# The email backend to use. For possible shortcuts see django.core.mail.
# The default is to use the SMTP backend.
# Third-party backends can be specified by providing a Python path
# to a module that defines an EmailBackend class.
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# Host for sending email.
EMAIL_HOST = 'localhost'
# Port for sending email.
EMAIL_PORT = 25
# Whether to send SMTP 'Date' header in the local time zone or in UTC.
EMAIL_USE_LOCALTIME = False
# Optional SMTP authentication information for EMAIL_HOST.
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = False
EMAIL_USE_SSL = False
EMAIL_SSL_CERTFILE = None
EMAIL_SSL_KEYFILE = None
EMAIL_TIMEOUT = None
# List of strings representing installed apps.
INSTALLED_APPS = []
TEMPLATES = []
# Default form rendering class.
FORM_RENDERER = 'django.forms.renderers.DjangoTemplates'
# Default email address to use for various automated correspondence from
# the site managers.
DEFAULT_FROM_EMAIL = 'webmaster@localhost'
# Subject-line prefix for email messages sent with django.core.mail.mail_admins
# or ...mail_managers. Make sure to include the trailing space.
EMAIL_SUBJECT_PREFIX = '[Django] '
# Whether to append trailing slashes to URLs.
APPEND_SLASH = True
# Whether to prepend the "www." subdomain to URLs that don't have it.
PREPEND_WWW = False
# Override the server-derived value of SCRIPT_NAME
FORCE_SCRIPT_NAME = None
# List of compiled regular expression objects representing User-Agent strings
# that are not allowed to visit any page, systemwide. Use this for bad
# robots/crawlers. Here are a few examples:
# import re
# DISALLOWED_USER_AGENTS = [
# re.compile(r'^NaverBot.*'),
# re.compile(r'^EmailSiphon.*'),
# re.compile(r'^SiteSucker.*'),
# re.compile(r'^sohu-search'),
# ]
DISALLOWED_USER_AGENTS = []
ABSOLUTE_URL_OVERRIDES = {}
# List of compiled regular expression objects representing URLs that need not
# be reported by BrokenLinkEmailsMiddleware. Here are a few examples:
# import re
# IGNORABLE_404_URLS = [
# re.compile(r'^/apple-touch-icon.*\.png$'),
# re.compile(r'^/favicon.ico$'),
# re.compile(r'^/robots.txt$'),
# re.compile(r'^/phpmyadmin/'),
# re.compile(r'\.(cgi|php|pl)$'),
# ]
IGNORABLE_404_URLS = []
# A secret key for this particular Django installation. Used in secret-key
# hashing algorithms. Set this in your settings, or Django will complain
# loudly.
SECRET_KEY = ''
# Default file storage mechanism that holds media.
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = None
# URL that handles the static files served from STATIC_ROOT.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = None
# List of upload handler classes to be applied in order.
FILE_UPLOAD_HANDLERS = [
'django.core.files.uploadhandler.MemoryFileUploadHandler',
'django.core.files.uploadhandler.TemporaryFileUploadHandler',
]
# Maximum size, in bytes, of a request before it will be streamed to the
# file system instead of into memory.
FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
# Maximum size in bytes of request data (excluding file uploads) that will be
# read before a SuspiciousOperation (RequestDataTooBig) is raised.
DATA_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
# Maximum number of GET/POST parameters that will be read before a
# SuspiciousOperation (TooManyFieldsSent) is raised.
DATA_UPLOAD_MAX_NUMBER_FIELDS = 1000
# Directory in which upload streamed files will be temporarily saved. A value of
# `None` will make Django use the operating system's default temporary directory
# (i.e. "/tmp" on *nix systems).
FILE_UPLOAD_TEMP_DIR = None
# The numeric mode to set newly-uploaded files to. The value should be a mode
# you'd pass directly to os.chmod; see https://docs.python.org/library/os.html#files-and-directories.
FILE_UPLOAD_PERMISSIONS = 0o644
# The numeric mode to assign to newly-created directories, when uploading files.
# The value should be a mode as you'd pass to os.chmod;
# see https://docs.python.org/library/os.html#files-and-directories.
FILE_UPLOAD_DIRECTORY_PERMISSIONS = None
# Python module path where the user will place custom format definitions.
# The package this setting points to should contain subdirectories named
# after the locales, each containing a formats.py file
# (e.g. "myproject.locale" to use myproject/locale/en/formats.py).
FORMAT_MODULE_PATH = None
# Default formatting for date objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'N j, Y'
# Default formatting for datetime objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATETIME_FORMAT = 'N j, Y, P'
# Default formatting for time objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
TIME_FORMAT = 'P'
# Default formatting for date objects when only the year and month are relevant.
# See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
YEAR_MONTH_FORMAT = 'F Y'
# Default formatting for date objects when only the month and day are relevant.
# See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
MONTH_DAY_FORMAT = 'F j'
# Default short formatting for date objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATE_FORMAT = 'm/d/Y'
# Default short formatting for datetime objects.
# See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATETIME_FORMAT = 'm/d/Y P'
# Default formats to be used when parsing dates from input boxes, in order.
# See all available format strings here:
# https://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones used to display dates
DATE_INPUT_FORMATS = [
'%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06'
'%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
'%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
'%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
'%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
]
# Default formats to be used when parsing times from input boxes, in order.
# See all available format strings here:
# https://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones used to display times
TIME_INPUT_FORMATS = [
'%H:%M:%S', # '14:30:59'
'%H:%M:%S.%f', # '14:30:59.000200'
'%H:%M', # '14:30'
]
# Default formats to be used when parsing dates and times from input boxes,
# in order.
# See all available format strings here:
# https://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones used to display dates
DATETIME_INPUT_FORMATS = [
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
'%m/%d/%Y %H:%M:%S.%f', # '10/25/2006 14:30:59.000200'
'%m/%d/%Y %H:%M', # '10/25/2006 14:30'
'%m/%d/%Y', # '10/25/2006'
'%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
'%m/%d/%y %H:%M:%S.%f', # '10/25/06 14:30:59.000200'
'%m/%d/%y %H:%M', # '10/25/06 14:30'
'%m/%d/%y', # '10/25/06'
]
# First day of week, to be used on calendars
# 0 means Sunday, 1 means Monday...
FIRST_DAY_OF_WEEK = 0
# Decimal separator symbol
DECIMAL_SEPARATOR = '.'
# Boolean that sets whether to add thousand separator when formatting numbers
USE_THOUSAND_SEPARATOR = False
# Number of digits that will be grouped together when splitting them by
# THOUSAND_SEPARATOR. 0 means no grouping, 3 means splitting by thousands...
NUMBER_GROUPING = 0
# Thousand separator symbol
THOUSAND_SEPARATOR = ','
# The tablespaces to use for each model when not specified otherwise.
DEFAULT_TABLESPACE = ''
DEFAULT_INDEX_TABLESPACE = ''
# Default X-Frame-Options header value
X_FRAME_OPTIONS = 'SAMEORIGIN'
USE_X_FORWARDED_HOST = False
USE_X_FORWARDED_PORT = False
# The Python dotted path to the WSGI application that Django's internal server
# (runserver) will use. If `None`, the return value of
# 'django.core.wsgi.get_wsgi_application' is used, thus preserving the same
# behavior as previous versions of Django. Otherwise this should point to an
# actual WSGI application object.
WSGI_APPLICATION = None
# If your Django app is behind a proxy that sets a header to specify secure
# connections, AND that proxy ensures that user-submitted headers with the
# same name are ignored (so that people can't spoof it), set this value to
# a tuple of (header_name, header_value). For any requests that come in with
# that header/value, request.is_secure() will return True.
# WARNING! Only set this if you fully understand what you're doing. Otherwise,
# you may be opening yourself up to a security risk.
SECURE_PROXY_SSL_HEADER = None
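# A common value when running behind a proxy that sets X-Forwarded-Proto
# (an example only; make sure the proxy strips the client-supplied header):
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')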
##############
# MIDDLEWARE #
##############
# List of middleware to use. Order is important; in the request phase, these
# middleware will be applied in the order given, and in the response
# phase the middleware will be applied in reverse order.
MIDDLEWARE = []
############
# SESSIONS #
############
# Cache to store session data if using the cache session backend.
SESSION_CACHE_ALIAS = 'default'
# Cookie name. This can be whatever you want.
SESSION_COOKIE_NAME = 'sessionid'
# Age of cookie, in seconds (default: 2 weeks).
SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2
# A string like "example.com", or None for standard domain cookie.
SESSION_COOKIE_DOMAIN = None
# Whether the session cookie should be secure (https:// only).
SESSION_COOKIE_SECURE = False
# The path of the session cookie.
SESSION_COOKIE_PATH = '/'
# Whether to use the HttpOnly flag.
SESSION_COOKIE_HTTPONLY = True
# Whether to set the flag restricting cookie leaks on cross-site requests.
# This can be 'Lax', 'Strict', or None to disable the flag.
SESSION_COOKIE_SAMESITE = 'Lax'
# Whether to save the session data on every request.
SESSION_SAVE_EVERY_REQUEST = False
# Whether a user's session cookie expires when the Web browser is closed.
SESSION_EXPIRE_AT_BROWSER_CLOSE = False
# The module to store session data
SESSION_ENGINE = 'django.contrib.sessions.backends.db'
# Directory to store session files if using the file session module. If None,
# the backend will use a sensible default.
SESSION_FILE_PATH = None
# class to serialize session data
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
#########
# CACHE #
#########
# The cache backends to use.
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
CACHE_MIDDLEWARE_KEY_PREFIX = ''
CACHE_MIDDLEWARE_SECONDS = 600
CACHE_MIDDLEWARE_ALIAS = 'default'
##################
# AUTHENTICATION #
##################
AUTH_USER_MODEL = 'auth.User'
AUTHENTICATION_BACKENDS = ['django.contrib.auth.backends.ModelBackend']
LOGIN_URL = '/accounts/login/'
LOGIN_REDIRECT_URL = '/accounts/profile/'
LOGOUT_REDIRECT_URL = None
# The number of days a password reset link is valid for
PASSWORD_RESET_TIMEOUT_DAYS = 3
# The first hasher in this list is the preferred algorithm. Any
# password using different algorithms will be converted automatically
# upon login.
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
'django.contrib.auth.hashers.Argon2PasswordHasher',
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
]
AUTH_PASSWORD_VALIDATORS = []
###########
# SIGNING #
###########
SIGNING_BACKEND = 'django.core.signing.TimestampSigner'
########
# CSRF #
########
# Dotted path to callable to be used as view when a request is
# rejected by the CSRF middleware.
CSRF_FAILURE_VIEW = 'django.views.csrf.csrf_failure'
# Settings for CSRF cookie.
CSRF_COOKIE_NAME = 'csrftoken'
CSRF_COOKIE_AGE = 60 * 60 * 24 * 7 * 52
CSRF_COOKIE_DOMAIN = None
CSRF_COOKIE_PATH = '/'
CSRF_COOKIE_SECURE = False
CSRF_COOKIE_HTTPONLY = False
CSRF_COOKIE_SAMESITE = 'Lax'
CSRF_HEADER_NAME = 'HTTP_X_CSRFTOKEN'
CSRF_TRUSTED_ORIGINS = []
CSRF_USE_SESSIONS = False
############
# MESSAGES #
############
# Class to use as messages backend
MESSAGE_STORAGE = 'django.contrib.messages.storage.fallback.FallbackStorage'
# Default values of MESSAGE_LEVEL and MESSAGE_TAGS are defined within
# django.contrib.messages to avoid imports in this settings file.
###########
# LOGGING #
###########
# The callable to use to configure logging
LOGGING_CONFIG = 'logging.config.dictConfig'
# Custom logging configuration.
LOGGING = {}
# Default exception reporter filter class used in case none has been
# specifically assigned to the HttpRequest instance.
DEFAULT_EXCEPTION_REPORTER_FILTER = 'django.views.debug.SafeExceptionReporterFilter'
###########
# TESTING #
###########
# The name of the class to use to run the test suite
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# Apps that don't need to be serialized at test database creation time
# (only apps with migrations are to start with)
TEST_NON_SERIALIZED_APPS = []
############
# FIXTURES #
############
# The list of directories to search for fixtures
FIXTURE_DIRS = []
###############
# STATICFILES #
###############
# A list of locations of additional static files
STATICFILES_DIRS = []
# The default file storage backend used during the build process
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
]
##############
# MIGRATIONS #
##############
# Migration module overrides for apps, by app label.
MIGRATION_MODULES = {}
#################
# SYSTEM CHECKS #
#################
# List of all issues generated by system checks that should be silenced. Light
# issues like warnings, infos or debugs will not generate a message. Silencing
# serious issues like errors and criticals does not result in hiding the
# message, but Django will not stop you from e.g. running the server.
SILENCED_SYSTEM_CHECKS = []
#######################
# SECURITY MIDDLEWARE #
#######################
SECURE_BROWSER_XSS_FILTER = False
SECURE_CONTENT_TYPE_NOSNIFF = False
SECURE_HSTS_INCLUDE_SUBDOMAINS = False
SECURE_HSTS_PRELOAD = False
SECURE_HSTS_SECONDS = 0
SECURE_REDIRECT_EXEMPT = []
SECURE_SSL_HOST = None
SECURE_SSL_REDIRECT = False
|
12780d2ad9360c5074ee5c1ded5285f6d432f6d21281961c1fb94adeedfc9c8a | "Functions that help with dynamically creating decorators for views."
from functools import partial, update_wrapper, wraps
class classonlymethod(classmethod):
def __get__(self, instance, cls=None):
if instance is not None:
raise AttributeError("This method is available only on the class, not on instances.")
return super().__get__(instance, cls)
def _update_method_wrapper(_wrapper, decorator):
# _multi_decorate()'s bound_method isn't available in this scope. Cheat by
# using it on a dummy function.
@decorator
def dummy(*args, **kwargs):
pass
update_wrapper(_wrapper, dummy)
def _multi_decorate(decorators, method):
"""
Decorate `method` with one or more function decorators. `decorators` can be
a single decorator or an iterable of decorators.
"""
if hasattr(decorators, '__iter__'):
# Apply a list/tuple of decorators if 'decorators' is one. Decorator
# functions are applied so that the call order is the same as the
# order in which they appear in the iterable.
decorators = decorators[::-1]
else:
decorators = [decorators]
def _wrapper(self, *args, **kwargs):
# bound_method has the signature that 'decorator' expects i.e. no
# 'self' argument, but it's a closure over self so it can call
# 'func'. Also, wrap method.__get__() in a function because new
# attributes can't be set on bound method objects, only on functions.
bound_method = partial(method.__get__(self, type(self)))
for dec in decorators:
bound_method = dec(bound_method)
return bound_method(*args, **kwargs)
# Copy any attributes that a decorator adds to the function it decorates.
for dec in decorators:
_update_method_wrapper(_wrapper, dec)
# Preserve any existing attributes of 'method', including the name.
update_wrapper(_wrapper, method)
return _wrapper
def method_decorator(decorator, name=''):
"""
Convert a function decorator into a method decorator
"""
# 'obj' can be a class or a function. If 'obj' is a function at the time it
# is passed to _dec, it will eventually be a method of the class it is
# defined on. If 'obj' is a class, the 'name' is required to be the name
# of the method that will be decorated.
def _dec(obj):
if not isinstance(obj, type):
return _multi_decorate(decorator, obj)
if not (name and hasattr(obj, name)):
raise ValueError(
"The keyword argument `name` must be the name of a method "
"of the decorated class: %s. Got '%s' instead." % (obj, name)
)
method = getattr(obj, name)
if not callable(method):
raise TypeError(
"Cannot decorate '%s' as it isn't a callable attribute of "
"%s (%s)." % (name, obj, method)
)
_wrapper = _multi_decorate(decorator, method)
setattr(obj, name, _wrapper)
return obj
# Don't worry about making _dec look similar to a list/tuple as it's rather
# meaningless.
if not hasattr(decorator, '__iter__'):
update_wrapper(_dec, decorator)
# Change the name to aid debugging.
obj = decorator if hasattr(decorator, '__name__') else decorator.__class__
_dec.__name__ = 'method_decorator(%s)' % obj.__name__
return _dec
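# Typical use on a class-based view; login_required stands in for any
# function decorator:
#
#   from django.utils.decorators import method_decorator
#
#   @method_decorator(login_required, name='dispatch')
#   class ProtectedView(View):
#       ...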
def decorator_from_middleware_with_args(middleware_class):
"""
Like decorator_from_middleware, but return a function
that accepts the arguments to be passed to the middleware_class.
Use like::
cache_page = decorator_from_middleware_with_args(CacheMiddleware)
# ...
@cache_page(3600)
def my_view(request):
# ...
"""
return make_middleware_decorator(middleware_class)
def decorator_from_middleware(middleware_class):
"""
Given a middleware class (not an instance), return a view decorator. This
lets you use middleware functionality on a per-view basis. The middleware
is created with no params passed.
"""
return make_middleware_decorator(middleware_class)()
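# Django's gzip_page decorator, for example, is built exactly this way:
#   gzip_page = decorator_from_middleware(GZipMiddleware)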
def make_middleware_decorator(middleware_class):
def _make_decorator(*m_args, **m_kwargs):
middleware = middleware_class(*m_args, **m_kwargs)
def _decorator(view_func):
@wraps(view_func)
def _wrapped_view(request, *args, **kwargs):
if hasattr(middleware, 'process_request'):
result = middleware.process_request(request)
if result is not None:
return result
if hasattr(middleware, 'process_view'):
result = middleware.process_view(request, view_func, args, kwargs)
if result is not None:
return result
try:
response = view_func(request, *args, **kwargs)
except Exception as e:
if hasattr(middleware, 'process_exception'):
result = middleware.process_exception(request, e)
if result is not None:
return result
raise
if hasattr(response, 'render') and callable(response.render):
if hasattr(middleware, 'process_template_response'):
response = middleware.process_template_response(request, response)
# Defer running of process_response until after the template
# has been rendered:
if hasattr(middleware, 'process_response'):
def callback(response):
return middleware.process_response(request, response)
response.add_post_render_callback(callback)
else:
if hasattr(middleware, 'process_response'):
return middleware.process_response(request, response)
return response
return _wrapped_view
return _decorator
return _make_decorator
class classproperty:
def __init__(self, method=None):
self.fget = method
def __get__(self, instance, cls=None):
return self.fget(cls)
def getter(self, method):
self.fget = method
return self
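# Minimal illustration (the class and property names are examples):
#
#   class Config:
#       @classproperty
#       def verbose_name(cls):
#           return cls.__name__.lower()
#
#   Config.verbose_name  # 'config', no instance required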
|
f6cbe3b09f4d5603ae4a682879c9818f63a5024ea46d90653cb9cf4f18f1c9f7 | # These classes override date and datetime to ensure that strftime('%Y')
# returns four digits (with leading zeros) on years < 1000.
# https://bugs.python.org/issue13305
#
# Based on code submitted to comp.lang.python by Andrew Dalke
#
# >>> datetime_safe.date(10, 8, 2).strftime("%Y/%m/%d was a %A")
# '0010/08/02 was a Monday'
import re
import time as ttime
from datetime import (
date as real_date, datetime as real_datetime, time as real_time,
)
class date(real_date):
def strftime(self, fmt):
return strftime(self, fmt)
class datetime(real_datetime):
def strftime(self, fmt):
return strftime(self, fmt)
@classmethod
def combine(cls, date, time):
return cls(date.year, date.month, date.day,
time.hour, time.minute, time.second,
time.microsecond, time.tzinfo)
def date(self):
return date(self.year, self.month, self.day)
class time(real_time):
pass
def new_date(d):
"Generate a safe date from a datetime.date object."
return date(d.year, d.month, d.day)
def new_datetime(d):
"""
Generate a safe datetime from a datetime.date or datetime.datetime object.
"""
kw = [d.year, d.month, d.day]
if isinstance(d, real_datetime):
kw.extend([d.hour, d.minute, d.second, d.microsecond, d.tzinfo])
return datetime(*kw)
# This library does not support strftime's "%s" or "%y" format strings.
# Allowed if there's an even number of "%"s because they are escaped.
_illegal_formatting = re.compile(r"((^|[^%])(%%)*%[sy])")
def _findall(text, substr):
# Also finds overlaps
sites = []
i = 0
while True:
i = text.find(substr, i)
if i == -1:
break
sites.append(i)
i += 1
return sites
def strftime(dt, fmt):
if dt.year >= 1000:
return super(type(dt), dt).strftime(fmt)
illegal_formatting = _illegal_formatting.search(fmt)
if illegal_formatting:
raise TypeError("strftime of dates before 1000 does not handle " + illegal_formatting.group(0))
year = dt.year
# For every non-leap year century, advance by
# 6 years to get into the 28-year repeat cycle
delta = 2000 - year
off = 6 * (delta // 100 + delta // 400)
year = year + off
# Move to around the year 2000
year = year + ((2000 - year) // 28) * 28
timetuple = dt.timetuple()
s1 = ttime.strftime(fmt, (year,) + timetuple[1:])
sites1 = _findall(s1, str(year))
s2 = ttime.strftime(fmt, (year + 28,) + timetuple[1:])
sites2 = _findall(s2, str(year + 28))
sites = []
for site in sites1:
if site in sites2:
sites.append(site)
s = s1
syear = "%04d" % (dt.year,)
for site in sites:
s = s[:site] + syear + s[site + 4:]
return s
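# Why render twice: positions of the proxy year are found in both the
# `year` and `year + 28` outputs, and only offsets present in both (i.e.
# places where the year itself was printed, not a coincidental digit run
# or a literal year in the format string) are rewritten with the real
# zero-padded year.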
|
2c568b0bcb48d21483919ec9f06b3c71e389675cff4a484beb6578be2f50e234 | import functools
import itertools
import logging
import os
import pathlib
import signal
import subprocess
import sys
import threading
import time
import traceback
import weakref
from collections import defaultdict
from pathlib import Path
from types import ModuleType
from zipimport import zipimporter
from django.apps import apps
from django.core.signals import request_finished
from django.dispatch import Signal
from django.utils.functional import cached_property
from django.utils.version import get_version_tuple
autoreload_started = Signal()
file_changed = Signal(providing_args=['file_path', 'kind'])
DJANGO_AUTORELOAD_ENV = 'RUN_MAIN'
logger = logging.getLogger('django.utils.autoreload')
# If an error is raised while importing a file, it's not placed in sys.modules.
# This means that any future modifications aren't caught. Keep a list of these
# file paths to allow watching them in the future.
_error_files = []
_exception = None
try:
import termios
except ImportError:
termios = None
try:
import pywatchman
except ImportError:
pywatchman = None
def check_errors(fn):
@functools.wraps(fn)
def wrapper(*args, **kwargs):
global _exception
try:
fn(*args, **kwargs)
except Exception:
_exception = sys.exc_info()
et, ev, tb = _exception
if getattr(ev, 'filename', None) is None:
# get the filename from the last item in the stack
filename = traceback.extract_tb(tb)[-1][0]
else:
filename = ev.filename
if filename not in _error_files:
_error_files.append(filename)
raise
return wrapper
def raise_last_exception():
global _exception
if _exception is not None:
raise _exception[1]
def ensure_echo_on():
"""
    Ensure that echo mode is enabled. Some tools such as PDB disable
    it, which causes usability issues after a reload.
"""
if not termios or not sys.stdin.isatty():
return
attr_list = termios.tcgetattr(sys.stdin)
if not attr_list[3] & termios.ECHO:
attr_list[3] |= termios.ECHO
if hasattr(signal, 'SIGTTOU'):
old_handler = signal.signal(signal.SIGTTOU, signal.SIG_IGN)
else:
old_handler = None
termios.tcsetattr(sys.stdin, termios.TCSANOW, attr_list)
if old_handler is not None:
signal.signal(signal.SIGTTOU, old_handler)
def iter_all_python_module_files():
# This is a hot path during reloading. Create a stable sorted list of
# modules based on the module name and pass it to iter_modules_and_files().
# This ensures cached results are returned in the usual case that modules
# aren't loaded on the fly.
keys = sorted(sys.modules)
modules = tuple(m for m in map(sys.modules.__getitem__, keys) if not isinstance(m, weakref.ProxyTypes))
return iter_modules_and_files(modules, frozenset(_error_files))
@functools.lru_cache(maxsize=1)
def iter_modules_and_files(modules, extra_files):
"""Iterate through all modules needed to be watched."""
sys_file_paths = []
for module in modules:
# During debugging (with PyDev) the 'typing.io' and 'typing.re' objects
# are added to sys.modules, however they are types not modules and so
# cause issues here.
if not isinstance(module, ModuleType):
continue
if module.__name__ == '__main__':
# __main__ (usually manage.py) doesn't always have a __spec__ set.
# Handle this by falling back to using __file__, resolved below.
# See https://docs.python.org/reference/import.html#main-spec
sys_file_paths.append(module.__file__)
continue
if getattr(module, '__spec__', None) is None:
continue
spec = module.__spec__
# Modules could be loaded from places without a concrete location. If
# this is the case, skip them.
if spec.has_location:
origin = spec.loader.archive if isinstance(spec.loader, zipimporter) else spec.origin
sys_file_paths.append(origin)
results = set()
for filename in itertools.chain(sys_file_paths, extra_files):
if not filename:
continue
path = pathlib.Path(filename)
if not path.exists():
# The module could have been removed, don't fail loudly if this
# is the case.
continue
results.add(path.resolve().absolute())
return frozenset(results)
@functools.lru_cache(maxsize=1)
def common_roots(paths):
"""
Return a tuple of common roots that are shared between the given paths.
File system watchers operate on directories and aren't cheap to create.
Try to find the minimum set of directories to watch that encompass all of
the files that need to be watched.
"""
# Inspired from Werkzeug:
# https://github.com/pallets/werkzeug/blob/7477be2853df70a022d9613e765581b9411c3c39/werkzeug/_reloader.py
# Create a sorted list of the path components, longest first.
path_parts = sorted([x.parts for x in paths], key=len, reverse=True)
tree = {}
for chunks in path_parts:
node = tree
# Add each part of the path to the tree.
for chunk in chunks:
node = node.setdefault(chunk, {})
# Clear the last leaf in the tree.
node.clear()
# Turn the tree into a list of Path instances.
def _walk(node, path):
for prefix, child in node.items():
yield from _walk(child, path + (prefix,))
if not node:
yield Path(*path)
return tuple(_walk(tree, ()))
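# A worked example (paths are illustrative):
#
#   common_roots(frozenset({Path('/a/b'), Path('/a/b/d'), Path('/x/y')}))
#   # -> (Path('/a/b'), Path('/x/y'))
#
# /a/b/d collapses into /a/b, so only two watches are needed.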
def sys_path_directories():
"""
Yield absolute directories from sys.path, ignoring entries that don't
exist.
"""
for path in sys.path:
path = Path(path)
if not path.exists():
continue
path = path.resolve().absolute()
# If the path is a file (like a zip file), watch the parent directory.
if path.is_file():
yield path.parent
else:
yield path
def get_child_arguments():
"""
    Return the arguments needed to re-execute the current program. This
    contains a workaround for Windows if the executable is reported to not
    have the .exe extension, which can cause bugs on reloading.
"""
import django.__main__
args = [sys.executable] + ['-W%s' % o for o in sys.warnoptions]
if sys.argv[0] == django.__main__.__file__:
# The server was started with `python -m django runserver`.
args += ['-m', 'django']
args += sys.argv[1:]
else:
args += sys.argv
return args
def trigger_reload(filename):
logger.info('%s changed, reloading.', filename)
sys.exit(3)
def restart_with_reloader():
new_environ = {**os.environ, DJANGO_AUTORELOAD_ENV: 'true'}
args = get_child_arguments()
while True:
exit_code = subprocess.call(args, env=new_environ, close_fds=False)
if exit_code != 3:
return exit_code
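# The parent/child protocol: the child process runs the real server and
# calls trigger_reload() -> sys.exit(3) when a file changes; the loop above
# sees exit code 3 and spawns a fresh child. Any other exit code is treated
# as a genuine exit and returned to the caller.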
class BaseReloader:
def __init__(self):
self.extra_files = set()
self.directory_globs = defaultdict(set)
self._stop_condition = threading.Event()
def watch_dir(self, path, glob):
path = Path(path)
if not path.is_absolute():
raise ValueError('%s must be absolute.' % path)
logger.debug('Watching dir %s with glob %s.', path, glob)
self.directory_globs[path].add(glob)
def watch_file(self, path):
path = Path(path)
if not path.is_absolute():
raise ValueError('%s must be absolute.' % path)
logger.debug('Watching file %s.', path)
self.extra_files.add(path)
def watched_files(self, include_globs=True):
"""
Yield all files that need to be watched, including module files and
files within globs.
"""
yield from iter_all_python_module_files()
yield from self.extra_files
if include_globs:
for directory, patterns in self.directory_globs.items():
for pattern in patterns:
yield from directory.glob(pattern)
def wait_for_apps_ready(self, app_reg, django_main_thread):
"""
Wait until Django reports that the apps have been loaded. If the given
thread has terminated before the apps are ready, then a SyntaxError or
other non-recoverable error has been raised. In that case, stop waiting
for the apps_ready event and continue processing.
Return True if the thread is alive and the ready event has been
triggered, or False if the thread is terminated while waiting for the
event.
"""
while django_main_thread.is_alive():
if app_reg.ready_event.wait(timeout=0.1):
return True
else:
logger.debug('Main Django thread has terminated before apps are ready.')
return False
def run(self, django_main_thread):
logger.debug('Waiting for apps ready_event.')
self.wait_for_apps_ready(apps, django_main_thread)
from django.urls import get_resolver
# Prevent a race condition where URL modules aren't loaded when the
# reloader starts by accessing the urlconf_module property.
try:
get_resolver().urlconf_module
except Exception:
# Loading the urlconf can result in errors during development.
# If this occurs then swallow the error and continue.
pass
logger.debug('Apps ready_event triggered. Sending autoreload_started signal.')
autoreload_started.send(sender=self)
self.run_loop()
def run_loop(self):
ticker = self.tick()
while not self.should_stop:
try:
next(ticker)
except StopIteration:
break
self.stop()
def tick(self):
"""
This generator is called in a loop from run_loop. It's important that
the method takes care of pausing or otherwise waiting for a period of
time. This split between run_loop() and tick() is to improve the
testability of the reloader implementations by decoupling the work they
do from the loop.
"""
raise NotImplementedError('subclasses must implement tick().')
@classmethod
def check_availability(cls):
raise NotImplementedError('subclasses must implement check_availability().')
def notify_file_changed(self, path):
results = file_changed.send(sender=self, file_path=path)
logger.debug('%s notified as changed. Signal results: %s.', path, results)
if not any(res[1] for res in results):
trigger_reload(path)
# These are primarily used for testing.
@property
def should_stop(self):
return self._stop_condition.is_set()
def stop(self):
self._stop_condition.set()
class StatReloader(BaseReloader):
SLEEP_TIME = 1 # Check for changes once per second.
def tick(self):
mtimes = {}
while True:
for filepath, mtime in self.snapshot_files():
old_time = mtimes.get(filepath)
mtimes[filepath] = mtime
if old_time is None:
logger.debug('File %s first seen with mtime %s', filepath, mtime)
continue
elif mtime > old_time:
logger.debug('File %s previous mtime: %s, current mtime: %s', filepath, old_time, mtime)
self.notify_file_changed(filepath)
time.sleep(self.SLEEP_TIME)
yield
def snapshot_files(self):
# watched_files may produce duplicate paths if globs overlap.
seen_files = set()
for file in self.watched_files():
if file in seen_files:
continue
try:
mtime = file.stat().st_mtime
except OSError:
# This is thrown when the file does not exist.
continue
seen_files.add(file)
yield file, mtime
@classmethod
def check_availability(cls):
return True
class WatchmanUnavailable(RuntimeError):
pass
class WatchmanReloader(BaseReloader):
def __init__(self):
self.roots = defaultdict(set)
self.processed_request = threading.Event()
self.client_timeout = int(os.environ.get('DJANGO_WATCHMAN_TIMEOUT', 5))
super().__init__()
@cached_property
def client(self):
return pywatchman.client(timeout=self.client_timeout)
def _watch_root(self, root):
        # In practice this shouldn't occur, however, it's possible that a
        # directory that doesn't exist yet is being watched. If it's outside
        # of sys.path then this will end up as a new root. How to handle this
        # isn't clear: not adding the root will likely break when subscribing
        # to the changes. However, as this is currently an internal API, no
        # files outside of sys.path will be watched. Fixing this by checking
        # inside watch_glob() and watch_dir() is expensive; instead, this
        # could fall back to the StatReloader if this case is detected. For
        # now, watching the parent, if possible, is sufficient.
if not root.exists():
if not root.parent.exists():
                logger.warning('Unable to watch root dir %s as neither it nor its parent exists.', root)
return
root = root.parent
result = self.client.query('watch-project', str(root.absolute()))
if 'warning' in result:
logger.warning('Watchman warning: %s', result['warning'])
logger.debug('Watchman watch-project result: %s', result)
return result['watch'], result.get('relative_path')
@functools.lru_cache()
def _get_clock(self, root):
return self.client.query('clock', root)['clock']
def _subscribe(self, directory, name, expression):
root, rel_path = self._watch_root(directory)
query = {
'expression': expression,
'fields': ['name'],
'since': self._get_clock(root),
'dedup_results': True,
}
if rel_path:
query['relative_root'] = rel_path
logger.debug('Issuing watchman subscription %s, for root %s. Query: %s', name, root, query)
self.client.query('subscribe', root, name, query)
def _subscribe_dir(self, directory, filenames):
if not directory.exists():
if not directory.parent.exists():
logger.warning('Unable to watch directory %s as neither it nor its parent exists.', directory)
return
prefix = 'files-parent-%s' % directory.name
filenames = ['%s/%s' % (directory.name, filename) for filename in filenames]
directory = directory.parent
expression = ['name', filenames, 'wholename']
else:
prefix = 'files'
expression = ['name', filenames]
self._subscribe(directory, '%s:%s' % (prefix, directory), expression)
def _watch_glob(self, directory, patterns):
"""
Watch a directory with a specific glob. If the directory doesn't yet
exist, attempt to watch the parent directory and amend the patterns to
include this. It's important that this method isn't called more than once
per directory when updating all subscriptions. Subsequent calls will
overwrite the named subscription, so it must include all possible glob
expressions.
"""
prefix = 'glob'
if not directory.exists():
if not directory.parent.exists():
logger.warning('Unable to watch directory %s as neither it nor its parent exists.', directory)
return
prefix = 'glob-parent-%s' % directory.name
patterns = ['%s/%s' % (directory.name, pattern) for pattern in patterns]
directory = directory.parent
expression = ['anyof']
for pattern in patterns:
expression.append(['match', pattern, 'wholename'])
self._subscribe(directory, '%s:%s' % (prefix, directory), expression)
def watched_roots(self, watched_files):
extra_directories = self.directory_globs.keys()
watched_file_dirs = [f.parent for f in watched_files]
sys_paths = list(sys_path_directories())
return frozenset((*extra_directories, *watched_file_dirs, *sys_paths))
def _update_watches(self):
watched_files = list(self.watched_files(include_globs=False))
found_roots = common_roots(self.watched_roots(watched_files))
logger.debug('Watching %s files', len(watched_files))
logger.debug('Found common roots: %s', found_roots)
# Set up initial roots for performance, shortest roots first.
for root in sorted(found_roots):
self._watch_root(root)
for directory, patterns in self.directory_globs.items():
self._watch_glob(directory, patterns)
# Group sorted watched_files by their parent directory.
sorted_files = sorted(watched_files, key=lambda p: p.parent)
for directory, group in itertools.groupby(sorted_files, key=lambda p: p.parent):
# These paths need to be relative to the parent directory.
self._subscribe_dir(directory, [str(p.relative_to(directory)) for p in group])
def update_watches(self):
try:
self._update_watches()
except Exception as ex:
# If the service is still available, raise the original exception.
if self.check_server_status(ex):
raise
def _check_subscription(self, sub):
subscription = self.client.getSubscription(sub)
if not subscription:
return
logger.debug('Watchman subscription %s has results.', sub)
for result in subscription:
# When using watch-project, it's not simple to get the relative
# directory without storing some specific state. Store the full
# path to the directory in the subscription name, prefixed by its
# type (glob, files).
root_directory = Path(result['subscription'].split(':', 1)[1])
logger.debug('Found root directory %s', root_directory)
for file in result.get('files', []):
self.notify_file_changed(root_directory / file)
def request_processed(self, **kwargs):
logger.debug('Request processed. Setting update_watches event.')
self.processed_request.set()
def tick(self):
request_finished.connect(self.request_processed)
self.update_watches()
while True:
if self.processed_request.is_set():
self.update_watches()
self.processed_request.clear()
try:
self.client.receive()
except pywatchman.SocketTimeout:
pass
except pywatchman.WatchmanError as ex:
logger.debug('Watchman error: %s, checking server status.', ex)
self.check_server_status(ex)
else:
for sub in list(self.client.subs.keys()):
self._check_subscription(sub)
yield
def stop(self):
self.client.close()
super().stop()
def check_server_status(self, inner_ex=None):
"""Return True if the server is available."""
try:
self.client.query('version')
except Exception:
raise WatchmanUnavailable(str(inner_ex)) from inner_ex
return True
@classmethod
def check_availability(cls):
if not pywatchman:
raise WatchmanUnavailable('pywatchman not installed.')
client = pywatchman.client(timeout=0.1)
try:
result = client.capabilityCheck()
except Exception:
# The service is down?
raise WatchmanUnavailable('Cannot connect to the watchman service.')
version = get_version_tuple(result['version'])
# Watchman 4.9 includes multiple improvements to watching project
# directories as well as case insensitive filesystems.
logger.debug('Watchman version %s', version)
if version < (4, 9):
raise WatchmanUnavailable('Watchman 4.9 or later is required.')
def get_reloader():
"""Return the most suitable reloader for this environment."""
try:
WatchmanReloader.check_availability()
except WatchmanUnavailable:
return StatReloader()
return WatchmanReloader()
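# Illustrative sketch (ours, not part of the module): get_reloader() prefers
# WatchmanReloader and silently falls back to StatReloader when pywatchman or
# the watchman service is unavailable:
# >>> type(get_reloader()).__name__  # 'WatchmanReloader' if watchman runs
# 'StatReloader'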
def start_django(reloader, main_func, *args, **kwargs):
ensure_echo_on()
main_func = check_errors(main_func)
django_main_thread = threading.Thread(target=main_func, args=args, kwargs=kwargs, name='django-main-thread')
django_main_thread.daemon = True
django_main_thread.start()
while not reloader.should_stop:
try:
reloader.run(django_main_thread)
except WatchmanUnavailable as ex:
# It's possible that the watchman service shuts down or otherwise
# becomes unavailable. In that case, use the StatReloader.
reloader = StatReloader()
logger.error('Error connecting to Watchman: %s', ex)
logger.info('Watching for file changes with %s', reloader.__class__.__name__)
def run_with_reloader(main_func, *args, **kwargs):
signal.signal(signal.SIGTERM, lambda *args: sys.exit(0))
try:
if os.environ.get(DJANGO_AUTORELOAD_ENV) == 'true':
reloader = get_reloader()
logger.info('Watching for file changes with %s', reloader.__class__.__name__)
start_django(reloader, main_func, *args, **kwargs)
else:
exit_code = restart_with_reloader()
sys.exit(exit_code)
except KeyboardInterrupt:
pass
|
d87aa50f4588a9a2c39f97ab71d56a75879b5c47633a46ddbb65428a868c721e | import html.entities
import re
import unicodedata
import warnings
from gzip import GzipFile
from io import BytesIO
from django.utils.deprecation import RemovedInDjango40Warning
from django.utils.functional import SimpleLazyObject, keep_lazy_text, lazy
from django.utils.translation import gettext as _, gettext_lazy, pgettext
@keep_lazy_text
def capfirst(x):
"""Capitalize the first letter of a string."""
return x and str(x)[0].upper() + str(x)[1:]
# Set up regular expressions
re_words = re.compile(r'<.*?>|((?:\w[-\w]*|&.*?;)+)', re.S)
re_chars = re.compile(r'<.*?>|(.)', re.S)
re_tag = re.compile(r'<(/)?(\S+?)(?:(\s*/)|\s.*?)?>', re.S)
re_newlines = re.compile(r'\r\n|\r') # Used in normalize_newlines
re_camel_case = re.compile(r'(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))')
@keep_lazy_text
def wrap(text, width):
"""
A word-wrap function that preserves existing line breaks. Expects that
existing line breaks are posix newlines.
Preserve all white space, except that added line breaks consume the space
on which they break the line.
Don't wrap long words, thus the output text may have lines longer than
``width``.
"""
def _generator():
for line in text.splitlines(True): # True keeps trailing linebreaks
max_width = min((line.endswith('\n') and width + 1 or width), width)
while len(line) > max_width:
space = line[:max_width + 1].rfind(' ') + 1
if space == 0:
space = line.find(' ') + 1
if space == 0:
yield line
line = ''
break
yield '%s\n' % line[:space - 1]
line = line[space:]
max_width = min((line.endswith('\n') and width + 1 or width), width)
if line:
yield line
return ''.join(_generator())
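# Illustrative doctest-style sketch (ours, not part of the module); note
# that existing newlines are kept and words are never split:
# >>> wrap('this is a short paragraph of text', 10)
# 'this is a\nshort\nparagraph\nof text'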
class Truncator(SimpleLazyObject):
"""
An object used to truncate text, either by characters or words.
"""
def __init__(self, text):
super().__init__(lambda: str(text))
def add_truncation_text(self, text, truncate=None):
if truncate is None:
truncate = pgettext(
'String to return when truncating text',
'%(truncated_text)s…')
if '%(truncated_text)s' in truncate:
return truncate % {'truncated_text': text}
# The truncation text didn't contain the %(truncated_text)s string
# replacement argument so just append it to the text.
if text.endswith(truncate):
# But don't append the truncation text if the current text already
# ends in this.
return text
return '%s%s' % (text, truncate)
def chars(self, num, truncate=None, html=False):
"""
Return the text truncated to be no longer than the specified number
of characters.
`truncate` specifies what should be used to notify that the string has
been truncated, defaulting to a translatable string of an ellipsis.
"""
self._setup()
length = int(num)
text = unicodedata.normalize('NFC', self._wrapped)
# Calculate the length to truncate to (max length - end_text length)
truncate_len = length
for char in self.add_truncation_text('', truncate):
if not unicodedata.combining(char):
truncate_len -= 1
if truncate_len == 0:
break
if html:
return self._truncate_html(length, truncate, text, truncate_len, False)
return self._text_chars(length, truncate, text, truncate_len)
def _text_chars(self, length, truncate, text, truncate_len):
"""Truncate a string after a certain number of chars."""
s_len = 0
end_index = None
for i, char in enumerate(text):
if unicodedata.combining(char):
# Don't consider combining characters
# as adding to the string length
continue
s_len += 1
if end_index is None and s_len > truncate_len:
end_index = i
if s_len > length:
# Return the truncated string
return self.add_truncation_text(text[:end_index or 0],
truncate)
# Return the original string since no truncation was necessary
return text
def words(self, num, truncate=None, html=False):
"""
Truncate a string after a certain number of words. `truncate` specifies
what should be used to notify that the string has been truncated,
defaulting to ellipsis.
"""
self._setup()
length = int(num)
if html:
return self._truncate_html(length, truncate, self._wrapped, length, True)
return self._text_words(length, truncate)
def _text_words(self, length, truncate):
"""
Truncate a string after a certain number of words.
Strip newlines in the string.
"""
words = self._wrapped.split()
if len(words) > length:
words = words[:length]
return self.add_truncation_text(' '.join(words), truncate)
return ' '.join(words)
def _truncate_html(self, length, truncate, text, truncate_len, words):
"""
Truncate HTML to a certain number of chars (not counting tags and
comments), or, if words is True, then to a certain number of words.
Close opened tags if they were correctly closed in the given HTML.
Preserve newlines in the HTML.
"""
if words and length <= 0:
return ''
html4_singlets = (
'br', 'col', 'link', 'base', 'img',
'param', 'area', 'hr', 'input'
)
# Count non-HTML chars/words and keep note of open tags
pos = 0
end_text_pos = 0
current_len = 0
open_tags = []
regex = re_words if words else re_chars
while current_len <= length:
m = regex.search(text, pos)
if not m:
# Checked through whole string
break
pos = m.end(0)
if m.group(1):
# It's an actual non-HTML word or char
current_len += 1
if current_len == truncate_len:
end_text_pos = pos
continue
# Check for tag
tag = re_tag.match(m.group(0))
if not tag or current_len >= truncate_len:
# Don't worry about non tags or tags after our truncate point
continue
closing_tag, tagname, self_closing = tag.groups()
# Element names are always case-insensitive
tagname = tagname.lower()
if self_closing or tagname in html4_singlets:
pass
elif closing_tag:
# Check for match in open tags list
try:
i = open_tags.index(tagname)
except ValueError:
pass
else:
# SGML: An end tag closes, back to the matching start tag,
# all unclosed intervening start tags with omitted end tags
open_tags = open_tags[i + 1:]
else:
# Add it to the start of the open tags list
open_tags.insert(0, tagname)
if current_len <= length:
return text
out = text[:end_text_pos]
truncate_text = self.add_truncation_text('', truncate)
if truncate_text:
out += truncate_text
# Close any tags still open
for tag in open_tags:
out += '</%s>' % tag
# Return string
return out
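# Illustrative doctest-style sketch (ours, not part of the module); under
# the default English locale the ellipsis truncation text is appended, and
# html mode re-closes tags left open at the cut:
# >>> Truncator('The quick brown fox').words(2)
# 'The quick…'
# >>> Truncator('<p>The quick brown fox</p>').words(2, html=True)
# '<p>The quick…</p>'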
@keep_lazy_text
def get_valid_filename(s):
"""
Return the given string converted to a string that can be used for a clean
filename. Remove leading and trailing spaces; convert other spaces to
underscores; and remove anything that is not an alphanumeric, dash,
underscore, or dot.
>>> get_valid_filename("john's portrait in 2004.jpg")
'johns_portrait_in_2004.jpg'
"""
s = str(s).strip().replace(' ', '_')
return re.sub(r'(?u)[^-\w.]', '', s)
@keep_lazy_text
def get_text_list(list_, last_word=gettext_lazy('or')):
"""
>>> get_text_list(['a', 'b', 'c', 'd'])
'a, b, c or d'
>>> get_text_list(['a', 'b', 'c'], 'and')
'a, b and c'
>>> get_text_list(['a', 'b'], 'and')
'a and b'
>>> get_text_list(['a'])
'a'
>>> get_text_list([])
''
"""
if not list_:
return ''
if len(list_) == 1:
return str(list_[0])
return '%s %s %s' % (
# Translators: This string is used as a separator between list elements
_(', ').join(str(i) for i in list_[:-1]), str(last_word), str(list_[-1])
)
@keep_lazy_text
def normalize_newlines(text):
"""Normalize CRLF and CR newlines to just LF."""
return re_newlines.sub('\n', str(text))
@keep_lazy_text
def phone2numeric(phone):
"""Convert a phone number with letters into its numeric equivalent."""
char2number = {
'a': '2', 'b': '2', 'c': '2', 'd': '3', 'e': '3', 'f': '3', 'g': '4',
'h': '4', 'i': '4', 'j': '5', 'k': '5', 'l': '5', 'm': '6', 'n': '6',
'o': '6', 'p': '7', 'q': '7', 'r': '7', 's': '7', 't': '8', 'u': '8',
'v': '8', 'w': '9', 'x': '9', 'y': '9', 'z': '9',
}
return ''.join(char2number.get(c, c) for c in phone.lower())
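# Illustrative doctest-style sketch (ours); non-letter characters pass
# through unchanged:
# >>> phone2numeric('1-800-FLOWERS')
# '1-800-3569377'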
# From http://www.xhaus.com/alan/python/httpcomp.html#gzip
# Used with permission.
def compress_string(s):
zbuf = BytesIO()
with GzipFile(mode='wb', compresslevel=6, fileobj=zbuf, mtime=0) as zfile:
zfile.write(s)
return zbuf.getvalue()
class StreamingBuffer(BytesIO):
def read(self):
ret = self.getvalue()
self.seek(0)
self.truncate()
return ret
# Like compress_string, but for iterators of strings.
def compress_sequence(sequence):
buf = StreamingBuffer()
with GzipFile(mode='wb', compresslevel=6, fileobj=buf, mtime=0) as zfile:
# Output headers...
yield buf.read()
for item in sequence:
zfile.write(item)
data = buf.read()
if data:
yield data
yield buf.read()
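# Illustrative sketch (ours): because compress_sequence() is a generator, a
# streaming response can be gzipped chunk by chunk, and the joined chunks
# form one valid gzip stream:
# >>> gzipped = b''.join(compress_sequence([b'hello ', b'world']))
# >>> import gzip; gzip.decompress(gzipped)
# b'hello world'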
# Expression to match some_token and some_token="with spaces" (and similarly
# for single-quoted strings).
smart_split_re = re.compile(r"""
((?:
[^\s'"]*
(?:
(?:"(?:[^"\\]|\\.)*" | '(?:[^'\\]|\\.)*')
[^\s'"]*
)+
) | \S+)
""", re.VERBOSE)
def smart_split(text):
r"""
Generator that splits a string by spaces, leaving quoted phrases together.
Supports both single and double quotes, and supports escaping quotes with
backslashes. In the output, strings will keep their initial and trailing
quote marks and escaped quotes will remain escaped (the results can then
be further processed with unescape_string_literal()).
>>> list(smart_split(r'This is "a person\'s" test.'))
['This', 'is', '"a person\\\'s"', 'test.']
>>> list(smart_split(r"Another 'person\'s' test."))
['Another', "'person\\'s'", 'test.']
>>> list(smart_split(r'A "\"funky\" style" test.'))
['A', '"\\"funky\\" style"', 'test.']
"""
for bit in smart_split_re.finditer(str(text)):
yield bit.group(0)
def _replace_entity(match):
text = match.group(1)
if text[0] == '#':
text = text[1:]
try:
if text[0] in 'xX':
c = int(text[1:], 16)
else:
c = int(text)
return chr(c)
except ValueError:
return match.group(0)
else:
try:
return chr(html.entities.name2codepoint[text])
except (ValueError, KeyError):
return match.group(0)
_entity_re = re.compile(r"&(#?[xX]?(?:[0-9a-fA-F]+|\w{1,8}));")
@keep_lazy_text
def unescape_entities(text):
warnings.warn(
'django.utils.text.unescape_entities() is deprecated in favor of '
'html.unescape().',
RemovedInDjango40Warning, stacklevel=2,
)
return _entity_re.sub(_replace_entity, str(text))
@keep_lazy_text
def unescape_string_literal(s):
r"""
Convert quoted string literals to unquoted strings with escaped quotes and
backslashes unquoted::
>>> unescape_string_literal('"abc"')
'abc'
>>> unescape_string_literal("'abc'")
'abc'
>>> unescape_string_literal('"a \"bc\""')
'a "bc"'
>>> unescape_string_literal("'\'ab\' c'")
"'ab' c"
"""
if s[0] not in "\"'" or s[-1] != s[0]:
raise ValueError("Not a string literal: %r" % s)
quote = s[0]
return s[1:-1].replace(r'\%s' % quote, quote).replace(r'\\', '\\')
@keep_lazy_text
def slugify(value, allow_unicode=False):
"""
Convert to ASCII if 'allow_unicode' is False. Convert spaces to hyphens.
Remove characters that aren't alphanumerics, underscores, or hyphens.
Convert to lowercase. Also strip leading and trailing whitespace.
"""
value = str(value)
if allow_unicode:
value = unicodedata.normalize('NFKC', value)
else:
value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
value = re.sub(r'[^\w\s-]', '', value).strip().lower()
return re.sub(r'[-\s]+', '-', value)
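# Illustrative doctest-style sketch (ours, not part of the module):
# >>> slugify(' Joel is a slug ')
# 'joel-is-a-slug'
# >>> slugify('straße', allow_unicode=True)
# 'straße'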
def camel_case_to_spaces(value):
"""
Split CamelCase and convert to lowercase. Strip surrounding whitespace.
"""
return re_camel_case.sub(r' \1', value).strip().lower()
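# Illustrative doctest-style sketch (ours, not part of the module):
# >>> camel_case_to_spaces('CamelCaseValue')
# 'camel case value'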
def _format_lazy(format_string, *args, **kwargs):
"""
Apply str.format() on 'format_string' where format_string, args,
and/or kwargs might be lazy.
"""
return format_string.format(*args, **kwargs)
format_lazy = lazy(_format_lazy, str)
|
06e74f6fc9ef5765ef461183d0452ee062675441e205685c4465ef436d549a7f | """Functions to parse datetime objects."""
# We're using regular expressions rather than time.strptime because:
# - They provide both validation and parsing.
# - They're more flexible for datetimes.
# - The date/datetime/time constructors produce friendlier error messages.
import datetime
import re
from django.utils.timezone import get_fixed_timezone, utc
date_re = re.compile(
r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})$'
)
time_re = re.compile(
r'(?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
)
datetime_re = re.compile(
r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
)
standard_duration_re = re.compile(
r'^'
r'(?:(?P<days>-?\d+) (days?, )?)?'
r'(?P<sign>-?)'
r'((?:(?P<hours>\d+):)(?=\d+:\d+))?'
r'(?:(?P<minutes>\d+):)?'
r'(?P<seconds>\d+)'
r'(?:\.(?P<microseconds>\d{1,6})\d{0,6})?'
r'$'
)
# Support the sections of ISO 8601 date representation that are accepted by
# timedelta
iso8601_duration_re = re.compile(
r'^(?P<sign>[-+]?)'
r'P'
r'(?:(?P<days>\d+(\.\d+)?)D)?'
r'(?:T'
r'(?:(?P<hours>\d+(\.\d+)?)H)?'
r'(?:(?P<minutes>\d+(\.\d+)?)M)?'
r'(?:(?P<seconds>\d+(\.\d+)?)S)?'
r')?'
r'$'
)
# Support PostgreSQL's day-time interval format, e.g. "3 days 04:05:06". The
# year-month and mixed intervals cannot be converted to a timedelta and thus
# aren't accepted.
postgres_interval_re = re.compile(
r'^'
r'(?:(?P<days>-?\d+) (days? ?))?'
r'(?:(?P<sign>[-+])?'
r'(?P<hours>\d+):'
r'(?P<minutes>\d\d):'
r'(?P<seconds>\d\d)'
r'(?:\.(?P<microseconds>\d{1,6}))?'
r')?$'
)
def parse_date(value):
"""Parse a string and return a datetime.date.
Raise ValueError if the input is well formatted but not a valid date.
Return None if the input isn't well formatted.
"""
match = date_re.match(value)
if match:
kw = {k: int(v) for k, v in match.groupdict().items()}
return datetime.date(**kw)
def parse_time(value):
"""Parse a string and return a datetime.time.
This function doesn't support time zone offsets.
Raise ValueError if the input is well formatted but not a valid time.
Return None if the input isn't well formatted, in particular if it
contains an offset.
"""
match = time_re.match(value)
if match:
kw = match.groupdict()
kw['microsecond'] = kw['microsecond'] and kw['microsecond'].ljust(6, '0')
kw = {k: int(v) for k, v in kw.items() if v is not None}
return datetime.time(**kw)
def parse_datetime(value):
"""Parse a string and return a datetime.datetime.
This function supports time zone offsets. When the input contains one,
the output uses a timezone with a fixed offset from UTC.
Raise ValueError if the input is well formatted but not a valid datetime.
Return None if the input isn't well formatted.
"""
match = datetime_re.match(value)
if match:
kw = match.groupdict()
kw['microsecond'] = kw['microsecond'] and kw['microsecond'].ljust(6, '0')
tzinfo = kw.pop('tzinfo')
if tzinfo == 'Z':
tzinfo = utc
elif tzinfo is not None:
offset_mins = int(tzinfo[-2:]) if len(tzinfo) > 3 else 0
offset = 60 * int(tzinfo[1:3]) + offset_mins
if tzinfo[0] == '-':
offset = -offset
tzinfo = get_fixed_timezone(offset)
kw = {k: int(v) for k, v in kw.items() if v is not None}
kw['tzinfo'] = tzinfo
return datetime.datetime(**kw)
def parse_duration(value):
"""Parse a duration string and return a datetime.timedelta.
The preferred format for durations in Django is '%d %H:%M:%S.%f'.
Also supports ISO 8601 representation and PostgreSQL's day-time interval
format.
"""
match = (
standard_duration_re.match(value) or
iso8601_duration_re.match(value) or
postgres_interval_re.match(value)
)
if match:
kw = match.groupdict()
days = datetime.timedelta(float(kw.pop('days', 0) or 0))
sign = -1 if kw.pop('sign', '+') == '-' else 1
if kw.get('microseconds'):
kw['microseconds'] = kw['microseconds'].ljust(6, '0')
if kw.get('seconds') and kw.get('microseconds') and kw['seconds'].startswith('-'):
kw['microseconds'] = '-' + kw['microseconds']
kw = {k: float(v) for k, v in kw.items() if v is not None}
return days + sign * datetime.timedelta(**kw)
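# Illustrative doctest-style sketches (ours, not part of the module):
# >>> parse_date('2019-05-31')
# datetime.date(2019, 5, 31)
# >>> parse_duration('3 days, 04:05:06') == datetime.timedelta(
# ...     days=3, hours=4, minutes=5, seconds=6)
# True
# >>> parse_datetime('2016-04-27 13:30:00+02:00').utcoffset() == datetime.timedelta(hours=2)
# True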
|
00156bb4c5f555bbf7cfc04cf3ea1cd818b51719b04e0bd2a9a7d93348315633 | """
Utilities for XML generation/parsing.
"""
import re
from xml.sax.saxutils import XMLGenerator
class UnserializableContentError(ValueError):
pass
class SimplerXMLGenerator(XMLGenerator):
def addQuickElement(self, name, contents=None, attrs=None):
"Convenience method for adding an element with no children"
if attrs is None:
attrs = {}
self.startElement(name, attrs)
if contents is not None:
self.characters(contents)
self.endElement(name)
def characters(self, content):
if content and re.search(r'[\x00-\x08\x0B-\x0C\x0E-\x1F]', content):
# Fail loudly when content has control chars (unsupported in XML 1.0)
# See https://www.w3.org/International/questions/qa-controls
raise UnserializableContentError("Control characters are not supported in XML 1.0")
XMLGenerator.characters(self, content)
def startElement(self, name, attrs):
# Sort attrs for a deterministic output.
sorted_attrs = dict(sorted(attrs.items())) if attrs else attrs
super().startElement(name, sorted_attrs)
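# Illustrative sketch (ours, not part of the module): writing a tiny
# document; the stream and element names here are arbitrary.
# >>> from io import StringIO
# >>> out = StringIO()
# >>> xml = SimplerXMLGenerator(out, 'utf-8')
# >>> xml.startDocument()
# >>> xml.addQuickElement('greeting', 'hello', {'lang': 'en'})
# >>> xml.endDocument()
# >>> out.getvalue()
# '<?xml version="1.0" encoding="utf-8"?>\n<greeting lang="en">hello</greeting>'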
|
0301789af270e025e11602f203b1c2812658ebcb21f1a419c4368bf4d7b22190 | class CyclicDependencyError(ValueError):
pass
def topological_sort_as_sets(dependency_graph):
"""
Variation of Kahn's algorithm (1962) that returns sets.
Take a dependency graph as a dictionary of node => dependencies.
Yield sets of items in topological order, where the first set contains
all nodes without dependencies, and each following set contains all
nodes that may depend on the nodes only in the previously yielded sets.
"""
todo = dependency_graph.copy()
while todo:
current = {node for node, deps in todo.items() if not deps}
if not current:
raise CyclicDependencyError('Cyclic dependency in graph: {}'.format(
', '.join(repr(x) for x in todo.items())))
yield current
# remove current from todo's nodes & dependencies
todo = {node: (dependencies - current) for node, dependencies in
todo.items() if node not in current}
def stable_topological_sort(l, dependency_graph):
result = []
for layer in topological_sort_as_sets(dependency_graph):
for node in l:
if node in layer:
result.append(node)
return result
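# Illustrative doctest-style sketch (ours); each node maps to the set of
# nodes it depends on:
# >>> graph = {1: set(), 2: {1}, 3: {1, 2}}
# >>> list(topological_sort_as_sets(graph))
# [{1}, {2}, {3}]
# >>> stable_topological_sort([3, 2, 1], graph)
# [1, 2, 3]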
|
291576f9f948f8408000fd552bb99ab93b5c325c001a96ee49d92f22ec70c415 | from decimal import Decimal
from django.conf import settings
from django.utils.safestring import mark_safe
def format(number, decimal_sep, decimal_pos=None, grouping=0, thousand_sep='',
force_grouping=False, use_l10n=None):
"""
Get a number (as a number or string), and return it as a string,
using formats defined as arguments:
* decimal_sep: Decimal separator symbol (for example ".")
* decimal_pos: Number of decimal positions
* grouping: Number of digits in every group limited by thousand separator.
For non-uniform digit grouping, it can be a sequence with the number
of digit group sizes following the format used by the Python locale
module in locale.localeconv() LC_NUMERIC grouping (e.g. (3, 2, 0)).
* thousand_sep: Thousand separator symbol (for example ",")
"""
use_grouping = (use_l10n or (use_l10n is None and settings.USE_L10N)) and settings.USE_THOUSAND_SEPARATOR
use_grouping = use_grouping or force_grouping
use_grouping = use_grouping and grouping != 0
# Make the common case fast
if isinstance(number, int) and not use_grouping and not decimal_pos:
return mark_safe(number)
# sign
sign = ''
if isinstance(number, Decimal):
if decimal_pos is not None:
# If the provided number is too small to affect any of the visible
# decimal places, consider it equal to '0'.
cutoff = Decimal('0.' + '1'.rjust(decimal_pos, '0'))
if abs(number) < cutoff:
number = Decimal('0')
# Format values with more than 200 digits (an arbitrary cutoff) using
# scientific notation to avoid high memory usage in '{:f}'.format().
_, digits, exponent = number.as_tuple()
if abs(exponent) + len(digits) > 200:
number = '{:e}'.format(number)
coefficient, exponent = number.split('e')
# Format the coefficient.
coefficient = format(
coefficient, decimal_sep, decimal_pos, grouping,
thousand_sep, force_grouping, use_l10n,
)
return '{}e{}'.format(coefficient, exponent)
else:
str_number = '{:f}'.format(number)
else:
str_number = str(number)
if str_number[0] == '-':
sign = '-'
str_number = str_number[1:]
# decimal part
if '.' in str_number:
int_part, dec_part = str_number.split('.')
if decimal_pos is not None:
dec_part = dec_part[:decimal_pos]
else:
int_part, dec_part = str_number, ''
if decimal_pos is not None:
dec_part = dec_part + ('0' * (decimal_pos - len(dec_part)))
dec_part = dec_part and decimal_sep + dec_part
# grouping
if use_grouping:
try:
# if grouping is a sequence
intervals = list(grouping)
except TypeError:
# grouping is a single value
intervals = [grouping, 0]
active_interval = intervals.pop(0)
int_part_gd = ''
cnt = 0
for digit in int_part[::-1]:
if cnt and cnt == active_interval:
if intervals:
active_interval = intervals.pop(0) or active_interval
int_part_gd += thousand_sep[::-1]
cnt = 0
int_part_gd += digit
cnt += 1
int_part = int_part_gd[::-1]
return sign + int_part + dec_part
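# Illustrative sketch (ours, not part of the module): passing use_l10n=False
# keeps the call independent of Django settings, while force_grouping turns
# the thousand separator on:
# >>> format(1234567.8901, '.', decimal_pos=2, grouping=3,
# ...        thousand_sep=',', force_grouping=True, use_l10n=False)
# '1,234,567.89'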
|
da2d59bce066531c6e7dfea743e435828ed56788d2941231ade224bab0fcf50d | """
Timezone-related classes and functions.
"""
import functools
import warnings
from contextlib import ContextDecorator
from datetime import datetime, timedelta, timezone, tzinfo
from threading import local
import pytz
from django.conf import settings
from django.utils.deprecation import RemovedInDjango31Warning
__all__ = [
'utc', 'get_fixed_timezone',
'get_default_timezone', 'get_default_timezone_name',
'get_current_timezone', 'get_current_timezone_name',
'activate', 'deactivate', 'override',
'localtime', 'now',
'is_aware', 'is_naive', 'make_aware', 'make_naive',
]
# UTC and local time zones
ZERO = timedelta(0)
class FixedOffset(tzinfo):
"""
Fixed offset in minutes east from UTC. Taken from Python's docs.
Kept as close as possible to the reference version. __init__ was changed
to make its arguments optional, according to Python's requirement that
tzinfo subclasses can be instantiated without arguments.
"""
def __init__(self, offset=None, name=None):
warnings.warn(
'FixedOffset is deprecated in favor of datetime.timezone',
RemovedInDjango31Warning, stacklevel=2,
)
if offset is not None:
self.__offset = timedelta(minutes=offset)
if name is not None:
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
return ZERO
# UTC time zone as a tzinfo instance.
utc = pytz.utc
def get_fixed_timezone(offset):
"""Return a tzinfo instance with a fixed offset from UTC."""
if isinstance(offset, timedelta):
offset = offset.total_seconds() // 60
sign = '-' if offset < 0 else '+'
hhmm = '%02d%02d' % divmod(abs(offset), 60)
name = sign + hhmm
return timezone(timedelta(minutes=offset), name)
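# Illustrative doctest-style sketch (ours, not part of the module):
# >>> get_fixed_timezone(330).tzname(None)
# '+0530'
# >>> get_fixed_timezone(timedelta(hours=-5, minutes=-30)).tzname(None)
# '-0530'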
# In order to avoid accessing settings at compile time,
# wrap the logic in a function and cache the result.
@functools.lru_cache()
def get_default_timezone():
"""
Return the default time zone as a tzinfo instance.
This is the time zone defined by settings.TIME_ZONE.
"""
return pytz.timezone(settings.TIME_ZONE)
# This function exists for consistency with get_current_timezone_name
def get_default_timezone_name():
"""Return the name of the default time zone."""
return _get_timezone_name(get_default_timezone())
_active = local()
def get_current_timezone():
"""Return the currently active time zone as a tzinfo instance."""
return getattr(_active, "value", get_default_timezone())
def get_current_timezone_name():
"""Return the name of the currently active time zone."""
return _get_timezone_name(get_current_timezone())
def _get_timezone_name(timezone):
"""Return the name of ``timezone``."""
return timezone.tzname(None)
# Timezone selection functions.
# These functions don't change os.environ['TZ'] or call time.tzset()
# because that isn't thread safe.
def activate(timezone):
"""
Set the time zone for the current thread.
The ``timezone`` argument must be an instance of a tzinfo subclass or a
time zone name.
"""
if isinstance(timezone, tzinfo):
_active.value = timezone
elif isinstance(timezone, str):
_active.value = pytz.timezone(timezone)
else:
raise ValueError("Invalid timezone: %r" % timezone)
def deactivate():
"""
Unset the time zone for the current thread.
Django will then use the time zone defined by settings.TIME_ZONE.
"""
if hasattr(_active, "value"):
del _active.value
class override(ContextDecorator):
"""
Temporarily set the time zone for the current thread.
This is a context manager that uses django.utils.timezone.activate()
to set the timezone on entry and restores the previously active timezone
on exit.
The ``timezone`` argument must be an instance of a ``tzinfo`` subclass, a
time zone name, or ``None``. If it is ``None``, Django enables the default
time zone.
"""
def __init__(self, timezone):
self.timezone = timezone
def __enter__(self):
self.old_timezone = getattr(_active, 'value', None)
if self.timezone is None:
deactivate()
else:
activate(self.timezone)
def __exit__(self, exc_type, exc_value, traceback):
if self.old_timezone is None:
deactivate()
else:
_active.value = self.old_timezone
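# Illustrative sketch (ours): because override subclasses ContextDecorator
# it also works as a decorator; the zone name here is arbitrary.
# with override('Asia/Tokyo'):
#     ...  # get_current_timezone() returns the Asia/Tokyo pytz zone here.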
# Templates
def template_localtime(value, use_tz=None):
"""
Check if value is a datetime and convert it to local time if necessary.
If use_tz is provided and is not None, that will force the value to
be converted (or not), overriding the value of settings.USE_TZ.
This function is designed for use by the template engine.
"""
should_convert = (
isinstance(value, datetime) and
(settings.USE_TZ if use_tz is None else use_tz) and
not is_naive(value) and
getattr(value, 'convert_to_local_time', True)
)
return localtime(value) if should_convert else value
# Utilities
def localtime(value=None, timezone=None):
"""
Convert an aware datetime.datetime to local time.
Only aware datetimes are allowed. When value is omitted, it defaults to
now().
Local time is defined by the current time zone, unless another time zone
is specified.
"""
if value is None:
value = now()
if timezone is None:
timezone = get_current_timezone()
# Emulate the behavior of astimezone() on Python < 3.6.
if is_naive(value):
raise ValueError("localtime() cannot be applied to a naive datetime")
return value.astimezone(timezone)
def localdate(value=None, timezone=None):
"""
Convert an aware datetime to local time and return the value's date.
Only aware datetimes are allowed. When value is omitted, it defaults to
now().
Local time is defined by the current time zone, unless another time zone is
specified.
"""
return localtime(value, timezone).date()
def now():
"""
Return an aware or naive datetime.datetime, depending on settings.USE_TZ.
"""
if settings.USE_TZ:
# timeit shows that datetime.now(tz=utc) is 24% slower
return datetime.utcnow().replace(tzinfo=utc)
else:
return datetime.now()
# By design, these four functions don't perform any checks on their arguments.
# The caller should ensure that they don't receive an invalid value like None.
def is_aware(value):
"""
Determine if a given datetime.datetime is aware.
The concept is defined in Python's docs:
https://docs.python.org/library/datetime.html#datetime.tzinfo
Assuming value.tzinfo is either None or a proper datetime.tzinfo,
value.utcoffset() implements the appropriate logic.
"""
return value.utcoffset() is not None
def is_naive(value):
"""
Determine if a given datetime.datetime is naive.
The concept is defined in Python's docs:
https://docs.python.org/library/datetime.html#datetime.tzinfo
Assuming value.tzinfo is either None or a proper datetime.tzinfo,
value.utcoffset() implements the appropriate logic.
"""
return value.utcoffset() is None
def make_aware(value, timezone=None, is_dst=None):
"""Make a naive datetime.datetime in a given time zone aware."""
if timezone is None:
timezone = get_current_timezone()
if hasattr(timezone, 'localize'):
# This method is available for pytz time zones.
return timezone.localize(value, is_dst=is_dst)
else:
# Check that we won't overwrite the timezone of an aware datetime.
if is_aware(value):
raise ValueError(
"make_aware expects a naive datetime, got %s" % value)
# This may be wrong around DST changes!
return value.replace(tzinfo=timezone)
def make_naive(value, timezone=None):
"""Make an aware datetime.datetime naive in a given time zone."""
if timezone is None:
timezone = get_current_timezone()
# Emulate the behavior of astimezone() on Python < 3.6.
if is_naive(value):
raise ValueError("make_naive() cannot be applied to a naive datetime")
return value.astimezone(timezone).replace(tzinfo=None)
|
1ca8be0c5921e8f0a65b1c1084a46f30f0cfe40b9f0ff058dc3a593358af9cda | """
A class for storing a tree graph. Primarily used for filter constructs in the
ORM.
"""
import copy
from django.utils.hashable import make_hashable
class Node:
"""
A single internal node in the tree graph. A Node should be viewed as a
connection (the root) with the children being either leaf nodes or other
Node instances.
"""
# Standard connector type. Clients usually won't use this at all and
# subclasses will usually override the value.
default = 'DEFAULT'
def __init__(self, children=None, connector=None, negated=False):
"""Construct a new Node. If no connector is given, use the default."""
self.children = children[:] if children else []
self.connector = connector or self.default
self.negated = negated
# Required because django.db.models.query_utils.Q's __init__() is
# problematic, but Q is a natural Node subclass in all other respects.
@classmethod
def _new_instance(cls, children=None, connector=None, negated=False):
"""
Create a new instance of this class when new Nodes (or subclasses) are
needed in the internal code in this class. Normally, it just shadows
__init__(). However, subclasses with an __init__ signature that aren't
an extension of Node.__init__ might need to implement this method to
allow a Node to create a new instance of them (if they have any extra
setting up to do).
"""
obj = Node(children, connector, negated)
obj.__class__ = cls
return obj
def __str__(self):
template = '(NOT (%s: %s))' if self.negated else '(%s: %s)'
return template % (self.connector, ', '.join(str(c) for c in self.children))
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
def __deepcopy__(self, memodict):
obj = Node(connector=self.connector, negated=self.negated)
obj.__class__ = self.__class__
obj.children = copy.deepcopy(self.children, memodict)
return obj
def __len__(self):
"""Return the number of children this node has."""
return len(self.children)
def __bool__(self):
"""Return whether or not this node has children."""
return bool(self.children)
def __contains__(self, other):
"""Return True if 'other' is a direct child of this instance."""
return other in self.children
def __eq__(self, other):
return (
self.__class__ == other.__class__ and
(self.connector, self.negated) == (other.connector, other.negated) and
self.children == other.children
)
def __hash__(self):
return hash((self.__class__, self.connector, self.negated, *make_hashable(self.children)))
def add(self, data, conn_type, squash=True):
"""
Combine this tree and the data represented by data using the
connector conn_type. The combine is done by squashing the other node
away if possible.
This tree (self) will never be pushed to a child node of the
combined tree, nor will the connector or negated properties change.
Return a node which can be used in place of data regardless of whether
the other node got squashed or not.
If `squash` is False the data is prepared and added as a child to
this tree without further logic.
"""
if data in self.children:
return data
if not squash:
self.children.append(data)
return data
if self.connector == conn_type:
# We can reuse self.children to append or squash the other node.
if (isinstance(data, Node) and not data.negated and
(data.connector == conn_type or len(data) == 1)):
# We can squash the other node's children directly into this
# node. We are just doing (AB)(CD) == (ABCD) here, with the
# addition that if the length of the other node is 1 the
# connector doesn't matter. However, for the len(self) == 1
# case we don't want to do the squashing, as it would alter
# self.connector.
self.children.extend(data.children)
return self
else:
# Perhaps we could use additional logic here to see if some
# children could be used for pushdown.
self.children.append(data)
return data
else:
obj = self._new_instance(self.children, self.connector,
self.negated)
self.connector = conn_type
self.children = [obj, data]
return data
def negate(self):
"""Negate the sense of the root connector."""
self.negated = not self.negated
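# Illustrative doctest-style sketch (ours, not part of the module):
# >>> n = Node(['a', 'b'], connector='AND')
# >>> n.add('c', 'AND')
# 'c'
# >>> str(n)
# '(AND: a, b, c)'
# >>> n.negate(); str(n)
# '(NOT (AND: a, b, c))'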
|
69013e25931607ef326d8fe39299137ad21ca1f748313b680f19f3824d674aec | import inspect
import warnings
class RemovedInDjango31Warning(DeprecationWarning):
pass
class RemovedInDjango40Warning(PendingDeprecationWarning):
pass
RemovedInNextVersionWarning = RemovedInDjango31Warning
class warn_about_renamed_method:
def __init__(self, class_name, old_method_name, new_method_name, deprecation_warning):
self.class_name = class_name
self.old_method_name = old_method_name
self.new_method_name = new_method_name
self.deprecation_warning = deprecation_warning
def __call__(self, f):
def wrapped(*args, **kwargs):
warnings.warn(
"`%s.%s` is deprecated, use `%s` instead." %
(self.class_name, self.old_method_name, self.new_method_name),
self.deprecation_warning, 2)
return f(*args, **kwargs)
return wrapped
class RenameMethodsBase(type):
"""
Handles the deprecation paths when renaming a method.
It does the following:
1) Define the new method if missing and complain about it.
2) Define the old method if missing.
3) Complain whenever an old method is called.
See #15363 for more details.
"""
renamed_methods = ()
def __new__(cls, name, bases, attrs):
new_class = super().__new__(cls, name, bases, attrs)
for base in inspect.getmro(new_class):
class_name = base.__name__
for renamed_method in cls.renamed_methods:
old_method_name = renamed_method[0]
old_method = base.__dict__.get(old_method_name)
new_method_name = renamed_method[1]
new_method = base.__dict__.get(new_method_name)
deprecation_warning = renamed_method[2]
wrapper = warn_about_renamed_method(class_name, *renamed_method)
# Define the new method if missing and complain about it
if not new_method and old_method:
warnings.warn(
"`%s.%s` method should be renamed `%s`." %
(class_name, old_method_name, new_method_name),
deprecation_warning, 2)
setattr(base, new_method_name, old_method)
setattr(base, old_method_name, wrapper(old_method))
# Define the old method as a wrapped call to the new method.
if not old_method and new_method:
setattr(base, old_method_name, wrapper(new_method))
return new_class
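# Illustrative sketch (ours; all names below are hypothetical): renaming
# old() to new() while keeping a deprecation path for callers of old().
# class RenameOldMethod(RenameMethodsBase):
#     renamed_methods = (('old', 'new', RemovedInDjango40Warning),)
# class Example(metaclass=RenameOldMethod):
#     def new(self):
#         return 'result'
# Example().old() emits RemovedInDjango40Warning and delegates to new().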
class DeprecationInstanceCheck(type):
def __instancecheck__(self, instance):
warnings.warn(
"`%s` is deprecated, use `%s` instead." % (self.__name__, self.alternative),
self.deprecation_warning, 2
)
return super().__instancecheck__(instance)
class MiddlewareMixin:
def __init__(self, get_response=None):
self.get_response = get_response
super().__init__()
def __call__(self, request):
response = None
if hasattr(self, 'process_request'):
response = self.process_request(request)
response = response or self.get_response(request)
if hasattr(self, 'process_response'):
response = self.process_response(request, response)
return response
|
65be19f7d0da7912e4f914c68abe0b07e2467edef0d74224fd1c148c973eeec9 | import codecs
import datetime
import locale
import warnings
from decimal import Decimal
from urllib.parse import quote
from django.utils.deprecation import RemovedInDjango40Warning
from django.utils.functional import Promise
class DjangoUnicodeDecodeError(UnicodeDecodeError):
def __init__(self, obj, *args):
self.obj = obj
super().__init__(*args)
def __str__(self):
return '%s. You passed in %r (%s)' % (super().__str__(), self.obj, type(self.obj))
def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
"""
Return a string representing 's'. Treat bytestrings using the 'encoding'
codec.
If strings_only is True, don't convert (some) non-string-like objects.
"""
if isinstance(s, Promise):
# The input is the result of a gettext_lazy() call.
return s
return force_str(s, encoding, strings_only, errors)
_PROTECTED_TYPES = (
type(None), int, float, Decimal, datetime.datetime, datetime.date, datetime.time,
)
def is_protected_type(obj):
"""Determine if the object instance is of a protected type.
Objects of protected types are preserved as-is when passed to
force_str(strings_only=True).
"""
return isinstance(obj, _PROTECTED_TYPES)
def force_str(s, encoding='utf-8', strings_only=False, errors='strict'):
"""
Similar to smart_str(), except that lazy instances are resolved to
strings, rather than kept as lazy objects.
If strings_only is True, don't convert (some) non-string-like objects.
"""
# Handle the common case first for performance reasons.
if issubclass(type(s), str):
return s
if strings_only and is_protected_type(s):
return s
try:
if isinstance(s, bytes):
s = str(s, encoding, errors)
else:
s = str(s)
except UnicodeDecodeError as e:
raise DjangoUnicodeDecodeError(s, *e.args)
return s
def smart_bytes(s, encoding='utf-8', strings_only=False, errors='strict'):
"""
Return a bytestring version of 's', encoded as specified in 'encoding'.
If strings_only is True, don't convert (some) non-string-like objects.
"""
if isinstance(s, Promise):
# The input is the result of a gettext_lazy() call.
return s
return force_bytes(s, encoding, strings_only, errors)
def force_bytes(s, encoding='utf-8', strings_only=False, errors='strict'):
"""
Similar to smart_bytes, except that lazy instances are resolved to
strings, rather than kept as lazy objects.
If strings_only is True, don't convert (some) non-string-like objects.
"""
# Handle the common case first for performance reasons.
if isinstance(s, bytes):
if encoding == 'utf-8':
return s
else:
return s.decode('utf-8', errors).encode(encoding, errors)
if strings_only and is_protected_type(s):
return s
if isinstance(s, memoryview):
return bytes(s)
return str(s).encode(encoding, errors)
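# Illustrative doctest-style sketches (ours, not part of the module):
# >>> force_str(b'caf\xc3\xa9')
# 'café'
# >>> force_bytes('café')
# b'caf\xc3\xa9'
# >>> force_str(None, strings_only=True) is None
# True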
def smart_text(s, encoding='utf-8', strings_only=False, errors='strict'):
warnings.warn(
'smart_text() is deprecated in favor of smart_str().',
RemovedInDjango40Warning, stacklevel=2,
)
return smart_str(s, encoding, strings_only, errors)
def force_text(s, encoding='utf-8', strings_only=False, errors='strict'):
warnings.warn(
'force_text() is deprecated in favor of force_str().',
RemovedInDjango40Warning, stacklevel=2,
)
return force_str(s, encoding, strings_only, errors)
def iri_to_uri(iri):
"""
Convert an Internationalized Resource Identifier (IRI) portion to a URI
portion that is suitable for inclusion in a URL.
This is the algorithm from section 3.1 of RFC 3987, slightly simplified
since the input is assumed to be a string rather than an arbitrary byte
stream.
Take an IRI (string or UTF-8 bytes, e.g. '/I ♥ Django/' or
b'/I \xe2\x99\xa5 Django/') and return a string containing the encoded
result with ASCII chars only (e.g. '/I%20%E2%99%A5%20Django/').
"""
# The list of safe characters here is constructed from the "reserved" and
# "unreserved" characters specified in sections 2.2 and 2.3 of RFC 3986:
# reserved = gen-delims / sub-delims
# gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@"
# sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
# / "*" / "+" / "," / ";" / "="
# unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
# Of the unreserved characters, urllib.parse.quote() already considers all
# but the ~ safe.
# The % character is also added to the list of safe characters here, as the
# end of section 3.1 of RFC 3987 specifically mentions that % must not be
# converted.
if iri is None:
return iri
elif isinstance(iri, Promise):
iri = str(iri)
return quote(iri, safe="/#%[]=:;$&()+,!?*@'~")
# List of byte values that uri_to_iri() decodes from percent encoding.
# First, the unreserved characters from RFC 3986:
_ascii_ranges = [[45, 46, 95, 126], range(65, 91), range(97, 123)]
_hextobyte = {
(fmt % char).encode(): bytes((char,))
for ascii_range in _ascii_ranges
for char in ascii_range
for fmt in ['%02x', '%02X']
}
# And then everything above 128, because bytes ≥ 128 are part of multibyte
# unicode characters.
_hexdig = '0123456789ABCDEFabcdef'
_hextobyte.update({
(a + b).encode(): bytes.fromhex(a + b)
for a in _hexdig[8:] for b in _hexdig
})
def uri_to_iri(uri):
"""
Convert a Uniform Resource Identifier (URI) into an Internationalized
Resource Identifier (IRI).
This is the algorithm from section 3.2 of RFC 3987, excluding step 4.
Take a URI in ASCII bytes (e.g. '/I%20%E2%99%A5%20Django/') and return
a string containing the encoded result (e.g. '/I%20♥%20Django/').
"""
if uri is None:
return uri
uri = force_bytes(uri)
# Fast selective unquote: First, split on '%' and then starting with the
# second block, decode the first 2 bytes if they represent a hex code to
# decode. The rest of the block is the part after '%AB', not containing
# any '%'. Add that to the output without further processing.
bits = uri.split(b'%')
if len(bits) == 1:
iri = uri
else:
parts = [bits[0]]
append = parts.append
hextobyte = _hextobyte
for item in bits[1:]:
hex = item[:2]
if hex in hextobyte:
append(hextobyte[item[:2]])
append(item[2:])
else:
append(b'%')
append(item)
iri = b''.join(parts)
return repercent_broken_unicode(iri).decode()
def escape_uri_path(path):
"""
Escape the unsafe characters from the path portion of a Uniform Resource
Identifier (URI).
"""
# These are the "reserved" and "unreserved" characters specified in
# sections 2.2 and 2.3 of RFC 2396:
# reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | "$" | ","
# unreserved = alphanum | mark
# mark = "-" | "_" | "." | "!" | "~" | "*" | "'" | "(" | ")"
# The list of safe characters here is constructed subtracting ";", "=",
# and "?" according to section 3.3 of RFC 2396.
# The reason for not subtracting and escaping "/" is that we are escaping
# the entire path, not a path segment.
return quote(path, safe="/:@&+$,-_.!~*'()")
def repercent_broken_unicode(path):
"""
As per section 3.2 of RFC 3987, step three of converting a URI into an IRI,
repercent-encode any octet produced that is not part of a strictly legal
UTF-8 octet sequence.
"""
try:
path.decode()
except UnicodeDecodeError as e:
repercent = quote(path[e.start:e.end], safe=b"/#%[]=:;$&()+,!?*@'~")
path = repercent_broken_unicode(
path[:e.start] + force_bytes(repercent) + path[e.end:])
return path
def filepath_to_uri(path):
"""Convert a file system path to a URI portion that is suitable for
inclusion in a URL.
Encode certain chars that would normally be recognized as special chars
for URIs. Do not encode the ' character, as it is a valid character
within URIs. See the encodeURIComponent() JavaScript function for details.
"""
if path is None:
return path
# I know about `os.sep` and `os.altsep` but I want to leave
# some flexibility for hardcoding separators.
return quote(path.replace("\\", "/"), safe="/~!*()'")
def get_system_encoding():
"""
The encoding of the default system locale. Fallback to 'ascii' if the
encoding is unsupported by Python or could not be determined. See tickets
#10335 and #5846.
"""
try:
encoding = locale.getdefaultlocale()[1] or 'ascii'
codecs.lookup(encoding)
except Exception:
encoding = 'ascii'
return encoding
DEFAULT_LOCALE_ENCODING = get_system_encoding()
|
a35e19cc4c804fddfd4ebc6b9461390bc4f005949893e3e05797681063123edf | """
Django's standard crypto functions and utilities.
"""
import hashlib
import hmac
import secrets
from django.conf import settings
from django.utils.encoding import force_bytes
def salted_hmac(key_salt, value, secret=None):
"""
Return the HMAC-SHA1 of 'value', using a key generated from key_salt and a
secret (which defaults to settings.SECRET_KEY).
A different key_salt should be passed in for every application of HMAC.
"""
if secret is None:
secret = settings.SECRET_KEY
key_salt = force_bytes(key_salt)
secret = force_bytes(secret)
# We need to generate a derived key from our base key. We can do this by
# passing the key_salt and our base key through a pseudo-random function;
# SHA1 works nicely.
key = hashlib.sha1(key_salt + secret).digest()
# If len(key_salt + secret) > sha_constructor().block_size, the above
# line is redundant and could be replaced by key = key_salt + secret, since
# the hmac module does the same thing for keys longer than the block size.
# However, we need to ensure that we *always* do this.
return hmac.new(key, msg=force_bytes(value), digestmod=hashlib.sha1)
def get_random_string(length=12,
allowed_chars='abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'):
"""
Return a securely generated random string.
The default length of 12 with the a-z, A-Z, 0-9 character set returns
a 71-bit value. log_2((26+26+10)^12) =~ 71 bits
"""
return ''.join(secrets.choice(allowed_chars) for i in range(length))
def constant_time_compare(val1, val2):
"""Return True if the two strings are equal, False otherwise."""
return secrets.compare_digest(force_bytes(val1), force_bytes(val2))
def pbkdf2(password, salt, iterations, dklen=0, digest=None):
"""Return the hash of password using pbkdf2."""
if digest is None:
digest = hashlib.sha256
dklen = dklen or None
password = force_bytes(password)
salt = force_bytes(salt)
return hashlib.pbkdf2_hmac(digest().name, password, salt, iterations, dklen)
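# Illustrative sketch (ours): the secret is passed explicitly so the calls
# don't require configured Django settings; all values are arbitrary.
# >>> mac = salted_hmac('myapp.token', 'some value', secret='not-a-real-key')
# >>> len(mac.hexdigest())  # HMAC-SHA1 digest is 20 bytes / 40 hex chars
# 40
# >>> len(pbkdf2('password', 'salt', 1000))  # SHA-256 derived key
# 32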
|
42591a8ddcd755fb5995ff55205faf94e2bd33f52912b2b60c7fba7024c998b4 | import calendar
import datetime
from django.utils.html import avoid_wrapping
from django.utils.timezone import is_aware, utc
from django.utils.translation import gettext, ngettext_lazy
TIME_STRINGS = {
'year': ngettext_lazy('%d year', '%d years'),
'month': ngettext_lazy('%d month', '%d months'),
'week': ngettext_lazy('%d week', '%d weeks'),
'day': ngettext_lazy('%d day', '%d days'),
'hour': ngettext_lazy('%d hour', '%d hours'),
'minute': ngettext_lazy('%d minute', '%d minutes'),
}
TIMESINCE_CHUNKS = (
(60 * 60 * 24 * 365, 'year'),
(60 * 60 * 24 * 30, 'month'),
(60 * 60 * 24 * 7, 'week'),
(60 * 60 * 24, 'day'),
(60 * 60, 'hour'),
(60, 'minute'),
)
def timesince(d, now=None, reversed=False, time_strings=None):
"""
Take two datetime objects and return the time between d and now as a nicely
formatted string, e.g. "10 minutes". If d occurs after now, return
"0 minutes".
Units used are years, months, weeks, days, hours, and minutes.
Seconds and microseconds are ignored. Up to two adjacent units will be
displayed. For example, "2 weeks, 3 days" and "1 year, 3 months" are
possible outputs, but "2 weeks, 3 hours" and "1 year, 5 days" are not.
`time_strings` is an optional dict of strings to replace the default
TIME_STRINGS dict.
Adapted from
https://web.archive.org/web/20060617175230/http://blog.natbat.co.uk/archive/2003/Jun/14/time_since
"""
if time_strings is None:
time_strings = TIME_STRINGS
# Convert datetime.date to datetime.datetime for comparison.
if not isinstance(d, datetime.datetime):
d = datetime.datetime(d.year, d.month, d.day)
if now and not isinstance(now, datetime.datetime):
now = datetime.datetime(now.year, now.month, now.day)
now = now or datetime.datetime.now(utc if is_aware(d) else None)
if reversed:
d, now = now, d
delta = now - d
# Deal with leap years by subtracting the number of leap days
leapdays = calendar.leapdays(d.year, now.year)
if leapdays != 0:
if calendar.isleap(d.year):
leapdays -= 1
elif calendar.isleap(now.year):
leapdays += 1
delta -= datetime.timedelta(leapdays)
# ignore microseconds
since = delta.days * 24 * 60 * 60 + delta.seconds
if since <= 0:
# d is in the future compared to now, stop processing.
return avoid_wrapping(gettext('0 minutes'))
for i, (seconds, name) in enumerate(TIMESINCE_CHUNKS):
count = since // seconds
if count != 0:
break
result = avoid_wrapping(time_strings[name] % count)
if i + 1 < len(TIMESINCE_CHUNKS):
# Now get the second item
seconds2, name2 = TIMESINCE_CHUNKS[i + 1]
count2 = (since - (seconds * count)) // seconds2
if count2 != 0:
result += gettext(', ') + avoid_wrapping(time_strings[name2] % count2)
return result
def timeuntil(d, now=None, time_strings=None):
"""
Like timesince, but return a string measuring the time until the given time.
"""
return timesince(d, now, reversed=True, time_strings=time_strings)
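# Illustrative doctest-style sketch (ours; assumes a configured Django
# settings module and the default English locale). avoid_wrapping() joins
# the number and unit with a non-breaking space (\xa0):
# >>> timesince(datetime.datetime(2019, 1, 1), datetime.datetime(2019, 1, 15, 3))
# '2\xa0weeks'
# >>> timeuntil(datetime.datetime(2019, 1, 2), datetime.datetime(2019, 1, 1))
# '1\xa0day'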
|