c3142bdfde38b3b2a6150733c7ab5429527712d74e8eb778cfe87247af180ece
from __future__ import unicode_literals

import json

from django.contrib.gis.geos import LinearRing, Point, Polygon
from django.core import serializers
from django.test import TestCase, skipUnlessDBFeature

from .models import City, MultiFields, PennsylvaniaCity


@skipUnlessDBFeature("gis_enabled")
class GeoJSONSerializerTests(TestCase):
    fixtures = ['initial']

    def test_builtin_serializers(self):
        """
        'geojson' should be listed in available serializers.
        """
        all_formats = set(serializers.get_serializer_formats())
        public_formats = set(serializers.get_public_serializer_formats())

        self.assertIn('geojson', all_formats)
        self.assertIn('geojson', public_formats)

    def test_serialization_base(self):
        geojson = serializers.serialize('geojson', City.objects.all().order_by('name'))
        geodata = json.loads(geojson)
        self.assertEqual(len(geodata['features']), len(City.objects.all()))
        self.assertEqual(geodata['features'][0]['geometry']['type'], 'Point')
        self.assertEqual(geodata['features'][0]['properties']['name'], 'Chicago')
        first_city = City.objects.all().order_by('name').first()
        self.assertEqual(geodata['features'][0]['properties']['pk'], str(first_city.pk))

    def test_geometry_field_option(self):
        """
        When a model has several geometry fields, the 'geometry_field'
        option can be used to specify the field to use as the 'geometry' key.
        """
        MultiFields.objects.create(
            city=City.objects.first(), name='Name', point=Point(5, 23),
            poly=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))))

        geojson = serializers.serialize('geojson', MultiFields.objects.all())
        geodata = json.loads(geojson)
        self.assertEqual(geodata['features'][0]['geometry']['type'], 'Point')

        geojson = serializers.serialize(
            'geojson',
            MultiFields.objects.all(),
            geometry_field='poly'
        )
        geodata = json.loads(geojson)
        self.assertEqual(geodata['features'][0]['geometry']['type'], 'Polygon')

        # geometry_field is considered even if not in fields (#26138).
        geojson = serializers.serialize(
            'geojson',
            MultiFields.objects.all(),
            geometry_field='poly',
            fields=('city',)
        )
        geodata = json.loads(geojson)
        self.assertEqual(geodata['features'][0]['geometry']['type'], 'Polygon')

    def test_fields_option(self):
        """
        The fields option allows defining a subset of fields to be present
        in the 'properties' of the generated output.
        """
        PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)')
        geojson = serializers.serialize(
            'geojson', PennsylvaniaCity.objects.all(),
            fields=('county', 'point'),
        )
        geodata = json.loads(geojson)
        self.assertIn('county', geodata['features'][0]['properties'])
        self.assertNotIn('founded', geodata['features'][0]['properties'])
        self.assertNotIn('pk', geodata['features'][0]['properties'])

    def test_srid_option(self):
        geojson = serializers.serialize('geojson', City.objects.all().order_by('name'), srid=2847)
        geodata = json.loads(geojson)
        self.assertEqual(
            [int(c) for c in geodata['features'][0]['geometry']['coordinates']],
            [1564802, 5613214]
        )

    def test_deserialization_exception(self):
        """
        GeoJSON cannot be deserialized.
        """
        with self.assertRaises(serializers.base.SerializerDoesNotExist):
            serializers.deserialize('geojson', '{}')
1948b3b927ada0d1c014cce0a0c09e9eca9f37fd611291d143413e51ebcb6b48
from __future__ import unicode_literals

from django.conf.urls import url
from django.contrib.gis import views as gis_views
from django.contrib.gis.sitemaps import views as gis_sitemap_views
from django.contrib.sitemaps import views as sitemap_views

from .feeds import feed_dict
from .sitemaps import sitemaps

urlpatterns = [
    url(r'^feeds/(?P<url>.*)/$', gis_views.feed, {'feed_dict': feed_dict}),
]

urlpatterns += [
    url(r'^sitemaps/(?P<section>\w+)\.xml$', sitemap_views.sitemap, {'sitemaps': sitemaps}),
]

urlpatterns += [
    url(r'^sitemaps/kml/(?P<label>\w+)/(?P<model>\w+)/(?P<field_name>\w+)\.kml$',
        gis_sitemap_views.kml,
        name='django.contrib.gis.sitemaps.views.kml'),
    url(r'^sitemaps/kml/(?P<label>\w+)/(?P<model>\w+)/(?P<field_name>\w+)\.kmz$',
        gis_sitemap_views.kmz,
        name='django.contrib.gis.sitemaps.views.kmz'),
]
ed7e7806a78787c5b03cd98bdd568e6735c1121f4c774f70cac577c400fa4b1d
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals

from datetime import datetime

from django.contrib.gis.db.models import Extent
from django.contrib.gis.shortcuts import render_to_kmz
from django.db.models import Count, Min
from django.test import TestCase, skipUnlessDBFeature

from ..utils import no_oracle
from .models import City, PennsylvaniaCity, State, Truth


@skipUnlessDBFeature("gis_enabled")
class GeoRegressionTests(TestCase):
    fixtures = ['initial']

    def test_update(self):
        "Testing GeoQuerySet.update(). See #10411."
        pnt = City.objects.get(name='Pueblo').point
        bak = pnt.clone()
        pnt.y += 0.005
        pnt.x += 0.005

        City.objects.filter(name='Pueblo').update(point=pnt)
        self.assertEqual(pnt, City.objects.get(name='Pueblo').point)
        City.objects.filter(name='Pueblo').update(point=bak)
        self.assertEqual(bak, City.objects.get(name='Pueblo').point)

    def test_kmz(self):
        "Testing `render_to_kmz` with non-ASCII data. See #11624."
        name = "Åland Islands"
        places = [{
            'name': name,
            'description': name,
            'kml': '<Point><coordinates>5.0,23.0</coordinates></Point>'
        }]
        render_to_kmz('gis/kml/placemarks.kml', {'places': places})

    @skipUnlessDBFeature("supports_extent_aggr")
    def test_extent(self):
        "Testing `extent` on a table with a single point. See #11827."
        pnt = City.objects.get(name='Pueblo').point
        ref_ext = (pnt.x, pnt.y, pnt.x, pnt.y)
        extent = City.objects.filter(name='Pueblo').aggregate(Extent('point'))['point__extent']
        for ref_val, val in zip(ref_ext, extent):
            self.assertAlmostEqual(ref_val, val, 4)

    def test_unicode_date(self):
        "Testing dates are converted properly, even on SpatiaLite. See #16408."
        founded = datetime(1857, 5, 23)
        PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga',
                                        point='POINT(-77.071445 41.823881)', founded=founded)
        self.assertEqual(founded, PennsylvaniaCity.objects.datetimes('founded', 'day')[0])
        self.assertEqual(founded, PennsylvaniaCity.objects.aggregate(Min('founded'))['founded__min'])

    def test_empty_count(self):
        "Testing that PostGISAdapter.__eq__ does check empty strings. See #13670."
        # contrived example, but need a geo lookup paired with an id__in lookup
        pueblo = City.objects.get(name='Pueblo')
        state = State.objects.filter(poly__contains=pueblo.point)
        cities_within_state = City.objects.filter(id__in=state)

        # .count() should not throw TypeError in __eq__
        self.assertEqual(cities_within_state.count(), 1)

    # TODO: fix on Oracle -- get the following error because the SQL is ordered
    # by a geometry object, which Oracle apparently doesn't like:
    #  ORA-22901: cannot compare nested table or VARRAY or LOB attributes of an object type
    @no_oracle
    def test_defer_or_only_with_annotate(self):
        "Regression for #16409. Make sure defer() and only() work with annotate()"
        self.assertIsInstance(list(City.objects.annotate(Count('point')).defer('name')), list)
        self.assertIsInstance(list(City.objects.annotate(Count('point')).only('name')), list)

    def test_boolean_conversion(self):
        "Testing Boolean value conversion with the spatial backend, see #15169."
        t1 = Truth.objects.create(val=True)
        t2 = Truth.objects.create(val=False)

        val1 = Truth.objects.get(pk=t1.pk).val
        val2 = Truth.objects.get(pk=t2.pk).val
        # verify types -- shouldn't be 0/1
        self.assertIsInstance(val1, bool)
        self.assertIsInstance(val2, bool)
        # verify values
        self.assertIs(val1, True)
        self.assertIs(val2, False)
d084b91842e17f6ba66860e77def24234cfe9ca8bceb4615d4e4d9f5384497ad
from __future__ import unicode_literals

import zipfile
from io import BytesIO
from xml.dom import minidom

from django.conf import settings
from django.contrib.sites.models import Site
from django.test import (
    TestCase, modify_settings, override_settings, skipUnlessDBFeature,
)

from .models import City, Country


@modify_settings(INSTALLED_APPS={'append': ['django.contrib.sites', 'django.contrib.sitemaps']})
@override_settings(ROOT_URLCONF='gis_tests.geoapp.urls')
@skipUnlessDBFeature("gis_enabled")
class GeoSitemapTest(TestCase):

    def setUp(self):
        super(GeoSitemapTest, self).setUp()
        Site(id=settings.SITE_ID, domain="example.com", name="example.com").save()

    def assertChildNodes(self, elem, expected):
        "Taken from syndication/tests.py."
        actual = set(n.nodeName for n in elem.childNodes)
        expected = set(expected)
        self.assertEqual(actual, expected)

    def test_geositemap_kml(self):
        "Tests KML/KMZ geographic sitemaps."
        for kml_type in ('kml', 'kmz'):
            doc = minidom.parseString(self.client.get('/sitemaps/%s.xml' % kml_type).content)

            # Ensuring the right sitemaps namespace is present.
            urlset = doc.firstChild
            self.assertEqual(urlset.getAttribute('xmlns'), 'http://www.sitemaps.org/schemas/sitemap/0.9')

            urls = urlset.getElementsByTagName('url')
            self.assertEqual(2, len(urls))  # Should only be 2 sitemaps.
            for url in urls:
                self.assertChildNodes(url, ['loc'])

                # Getting the relative URL since we don't have a real site.
                kml_url = url.getElementsByTagName('loc')[0].childNodes[0].data.split('http://example.com')[1]

                if kml_type == 'kml':
                    kml_doc = minidom.parseString(self.client.get(kml_url).content)
                elif kml_type == 'kmz':
                    # Have to decompress KMZ before parsing.
                    buf = BytesIO(self.client.get(kml_url).content)
                    with zipfile.ZipFile(buf) as zf:
                        self.assertEqual(1, len(zf.filelist))
                        self.assertEqual('doc.kml', zf.filelist[0].filename)
                        kml_doc = minidom.parseString(zf.read('doc.kml'))

                # Ensuring the correct number of placemarks are in the KML doc.
                if 'city' in kml_url:
                    model = City
                elif 'country' in kml_url:
                    model = Country
                self.assertEqual(model.objects.count(), len(kml_doc.getElementsByTagName('Placemark')))
14bf71c43ca9e70ced98d779f9908c10576798855f44839116f2358ebb2861e1
""" Tests for geography support in PostGIS """ from __future__ import unicode_literals import os from unittest import skipUnless from django.contrib.gis.db import models from django.contrib.gis.db.models.functions import Area, Distance from django.contrib.gis.measure import D from django.db import connection from django.db.models.functions import Cast from django.test import TestCase, ignore_warnings, skipUnlessDBFeature from django.utils._os import upath from django.utils.deprecation import RemovedInDjango20Warning from ..utils import oracle, postgis from .models import City, County, Zipcode @skipUnlessDBFeature("gis_enabled") class GeographyTest(TestCase): fixtures = ['initial'] def test01_fixture_load(self): "Ensure geography features loaded properly." self.assertEqual(8, City.objects.count()) @skipUnlessDBFeature("supports_distances_lookups", "supports_distance_geodetic") def test02_distance_lookup(self): "Testing GeoQuerySet distance lookup support on non-point geography fields." z = Zipcode.objects.get(code='77002') cities1 = list(City.objects .filter(point__distance_lte=(z.poly, D(mi=500))) .order_by('name') .values_list('name', flat=True)) cities2 = list(City.objects .filter(point__dwithin=(z.poly, D(mi=500))) .order_by('name') .values_list('name', flat=True)) for cities in [cities1, cities2]: self.assertEqual(['Dallas', 'Houston', 'Oklahoma City'], cities) @skipUnlessDBFeature("has_distance_method", "supports_distance_geodetic") @ignore_warnings(category=RemovedInDjango20Warning) def test03_distance_method(self): "Testing GeoQuerySet.distance() support on non-point geography fields." # `GeoQuerySet.distance` is not allowed geometry fields. htown = City.objects.get(name='Houston') Zipcode.objects.distance(htown.point) @skipUnless(postgis, "This is a PostGIS-specific test") def test04_invalid_operators_functions(self): "Ensuring exceptions are raised for operators & functions invalid on geography fields." # Only a subset of the geometry functions & operator are available # to PostGIS geography types. For more information, visit: # http://postgis.refractions.net/documentation/manual-1.5/ch08.html#PostGIS_GeographyFunctions z = Zipcode.objects.get(code='77002') # ST_Within not available. with self.assertRaises(ValueError): City.objects.filter(point__within=z.poly).count() # `@` operator not available. with self.assertRaises(ValueError): City.objects.filter(point__contained=z.poly).count() # Regression test for #14060, `~=` was never really implemented for PostGIS. htown = City.objects.get(name='Houston') with self.assertRaises(ValueError): City.objects.get(point__exact=htown.point) def test05_geography_layermapping(self): "Testing LayerMapping support on models with geography fields." # There is a similar test in `layermap` that uses the same data set, # but the County model here is a bit different. from django.contrib.gis.utils import LayerMapping # Getting the shapefile and mapping dictionary. shp_path = os.path.realpath(os.path.join(os.path.dirname(upath(__file__)), '..', 'data')) co_shp = os.path.join(shp_path, 'counties', 'counties.shp') co_mapping = {'name': 'Name', 'state': 'State', 'mpoly': 'MULTIPOLYGON', } # Reference county names, number of polygons, and state names. names = ['Bexar', 'Galveston', 'Harris', 'Honolulu', 'Pueblo'] num_polys = [1, 2, 1, 19, 1] # Number of polygons for each. 
st_names = ['Texas', 'Texas', 'Texas', 'Hawaii', 'Colorado'] lm = LayerMapping(County, co_shp, co_mapping, source_srs=4269, unique='name') lm.save(silent=True, strict=True) for c, name, num_poly, state in zip(County.objects.order_by('name'), names, num_polys, st_names): self.assertEqual(4326, c.mpoly.srid) self.assertEqual(num_poly, len(c.mpoly)) self.assertEqual(name, c.name) self.assertEqual(state, c.state) @skipUnlessDBFeature("has_area_method", "supports_distance_geodetic") @ignore_warnings(category=RemovedInDjango20Warning) def test06_geography_area(self): "Testing that Area calculations work on geography columns." # SELECT ST_Area(poly) FROM geogapp_zipcode WHERE code='77002'; z = Zipcode.objects.area().get(code='77002') # Round to the nearest thousand as possible values (depending on # the database and geolib) include 5439084, 5439100, 5439101. rounded_value = z.area.sq_m rounded_value -= z.area.sq_m % 1000 self.assertEqual(rounded_value, 5439000) @skipUnlessDBFeature("gis_enabled") class GeographyFunctionTests(TestCase): fixtures = ['initial'] @skipUnlessDBFeature("supports_extent_aggr") def test_cast_aggregate(self): """ Cast a geography to a geometry field for an aggregate function that expects a geometry input. """ if not connection.ops.geography: self.skipTest("This test needs geography support") expected = (-96.8016128540039, 29.7633724212646, -95.3631439208984, 32.782058715820) res = City.objects.filter( name__in=('Houston', 'Dallas') ).aggregate(extent=models.Extent(Cast('point', models.PointField()))) for val, exp in zip(res['extent'], expected): self.assertAlmostEqual(exp, val, 4) @skipUnlessDBFeature("has_Distance_function", "supports_distance_geodetic") def test_distance_function(self): """ Testing Distance() support on non-point geography fields. """ if oracle: ref_dists = [0, 4899.68, 8081.30, 9115.15] else: ref_dists = [0, 4891.20, 8071.64, 9123.95] htown = City.objects.get(name='Houston') qs = Zipcode.objects.annotate(distance=Distance('poly', htown.point)) for z, ref in zip(qs, ref_dists): self.assertAlmostEqual(z.distance.m, ref, 2) # Distance function in combination with a lookup. hzip = Zipcode.objects.get(code='77002') self.assertEqual(qs.get(distance__lte=0), hzip) @skipUnlessDBFeature("has_Area_function", "supports_distance_geodetic") def test_geography_area(self): """ Testing that Area calculations work on geography columns. """ # SELECT ST_Area(poly) FROM geogapp_zipcode WHERE code='77002'; z = Zipcode.objects.annotate(area=Area('poly')).get(code='77002') # Round to the nearest thousand as possible values (depending on # the database and geolib) include 5439084, 5439100, 5439101. rounded_value = z.area.sq_m rounded_value -= z.area.sq_m % 1000 self.assertEqual(rounded_value, 5439000)
57b4c3bd7e19fa83774116333a45232d4ddc694c89c79373b6c816488ab13ca1
from django.utils.encoding import python_2_unicode_compatible

from ..models import models


@python_2_unicode_compatible
class NamedModel(models.Model):
    name = models.CharField(max_length=30)

    objects = models.GeoManager()

    class Meta:
        abstract = True
        required_db_features = ['gis_enabled']

    def __str__(self):
        return self.name


class City(NamedModel):
    point = models.PointField(geography=True)

    class Meta:
        app_label = 'geogapp'
        required_db_features = ['gis_enabled']


class Zipcode(NamedModel):
    code = models.CharField(max_length=10)
    poly = models.PolygonField(geography=True)


class County(NamedModel):
    state = models.CharField(max_length=20)
    mpoly = models.MultiPolygonField(geography=True)

    class Meta:
        app_label = 'geogapp'
        required_db_features = ['gis_enabled']

    def __str__(self):
        return ' County, '.join([self.name, self.state])
c6d69b3d6e2bfdc03c018ccccc01a20875f75fdc334f9a4f4ff4ba4579060a61
from __future__ import unicode_literals

from django.core.management import call_command
from django.db import connection
from django.test import TransactionTestCase, skipUnlessDBFeature


@skipUnlessDBFeature("gis_enabled")
class MigrateTests(TransactionTestCase):
    """
    Tests running the migrate command in Geodjango.
    """
    available_apps = ["gis_tests.gis_migrations"]

    def get_table_description(self, table):
        with connection.cursor() as cursor:
            return connection.introspection.get_table_description(cursor, table)

    def assertTableExists(self, table):
        with connection.cursor() as cursor:
            self.assertIn(table, connection.introspection.table_names(cursor))

    def assertTableNotExists(self, table):
        with connection.cursor() as cursor:
            self.assertNotIn(table, connection.introspection.table_names(cursor))

    def test_migrate_gis(self):
        """
        Tests basic usage of the migrate command when a model uses Geodjango
        fields. Regression test for ticket #22001:
        https://code.djangoproject.com/ticket/22001

        It's also used to showcase an error in migrations where spatialite is
        enabled and geo tables are renamed resulting in unique constraint
        failure on geometry_columns. Regression for ticket #23030:
        https://code.djangoproject.com/ticket/23030
        """
        # Make sure the right tables exist
        self.assertTableExists("gis_migrations_neighborhood")
        self.assertTableExists("gis_migrations_household")
        self.assertTableExists("gis_migrations_family")
        if connection.features.supports_raster:
            self.assertTableExists("gis_migrations_heatmap")
        # Unmigrate everything
        call_command("migrate", "gis_migrations", "zero", verbosity=0)
        # Make sure it's all gone
        self.assertTableNotExists("gis_migrations_neighborhood")
        self.assertTableNotExists("gis_migrations_household")
        self.assertTableNotExists("gis_migrations_family")
        if connection.features.supports_raster:
            self.assertTableNotExists("gis_migrations_heatmap")
        # Even geometry columns metadata
        try:
            GeoColumn = connection.ops.geometry_columns()
        except NotImplementedError:
            # Not all GIS backends have geometry columns model
            pass
        else:
            qs = GeoColumn.objects.filter(
                **{'%s__in' % GeoColumn.table_name_col(): ["gis_neighborhood", "gis_household"]}
            )
            self.assertEqual(qs.count(), 0)
        # Revert the "unmigration"
        call_command("migrate", "gis_migrations", verbosity=0)
9802f9cc57ed43659aacf706a743bfd0c2042889e29aa561706a53bd7eae72f5
from __future__ import unicode_literals

from unittest import skipIf

from django.contrib.gis.db.models import fields
from django.contrib.gis.geos import MultiPolygon, Polygon
from django.core.exceptions import ImproperlyConfigured
from django.db import connection, migrations, models
from django.db.migrations.migration import Migration
from django.db.migrations.state import ProjectState
from django.test import (
    TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature,
)

from ..utils import mysql, spatialite

if connection.features.gis_enabled:
    try:
        GeometryColumns = connection.ops.geometry_columns()
        HAS_GEOMETRY_COLUMNS = True
    except NotImplementedError:
        HAS_GEOMETRY_COLUMNS = False


@skipUnlessDBFeature('gis_enabled')
class OperationTestCase(TransactionTestCase):
    available_apps = ['gis_tests.gis_migrations']

    def tearDown(self):
        # Delete table after testing
        if hasattr(self, 'current_state'):
            self.apply_operations('gis', self.current_state, [migrations.DeleteModel('Neighborhood')])
        super(OperationTestCase, self).tearDown()

    @property
    def has_spatial_indexes(self):
        if mysql:
            with connection.cursor() as cursor:
                return connection.introspection.supports_spatial_index(cursor, 'gis_neighborhood')
        return True

    def get_table_description(self, table):
        with connection.cursor() as cursor:
            return connection.introspection.get_table_description(cursor, table)

    def assertColumnExists(self, table, column):
        self.assertIn(column, [c.name for c in self.get_table_description(table)])

    def assertColumnNotExists(self, table, column):
        self.assertNotIn(column, [c.name for c in self.get_table_description(table)])

    def apply_operations(self, app_label, project_state, operations):
        migration = Migration('name', app_label)
        migration.operations = operations
        with connection.schema_editor() as editor:
            return migration.apply(project_state, editor)

    def set_up_test_model(self, force_raster_creation=False):
        test_fields = [
            ('id', models.AutoField(primary_key=True)),
            ('name', models.CharField(max_length=100, unique=True)),
            ('geom', fields.MultiPolygonField(srid=4326))
        ]
        if connection.features.supports_raster or force_raster_creation:
            test_fields += [('rast', fields.RasterField(srid=4326, null=True))]
        operations = [migrations.CreateModel('Neighborhood', test_fields)]
        self.current_state = self.apply_operations('gis', ProjectState(), operations)

    def assertGeometryColumnsCount(self, expected_count):
        table_name = 'gis_neighborhood'
        if connection.features.uppercases_column_names:
            table_name = table_name.upper()
        self.assertEqual(
            GeometryColumns.objects.filter(**{
                GeometryColumns.table_name_col(): table_name,
            }).count(),
            expected_count
        )

    def assertSpatialIndexExists(self, table, column, raster=False):
        with connection.cursor() as cursor:
            constraints = connection.introspection.get_constraints(cursor, table)
        if raster:
            self.assertTrue(any(
                'st_convexhull(%s)' % column in c['definition']
                for c in constraints.values()
                if c['definition'] is not None
            ))
        else:
            self.assertIn([column], [c['columns'] for c in constraints.values()])

    def alter_gis_model(self, migration_class, model_name, field_name,
                        blank=False, field_class=None, field_class_kwargs=None):
        args = [model_name, field_name]
        if field_class:
            field_class_kwargs = field_class_kwargs or {'srid': 4326, 'blank': blank}
            args.append(field_class(**field_class_kwargs))
        operation = migration_class(*args)
        old_state = self.current_state.clone()
        operation.state_forwards('gis', self.current_state)
        with connection.schema_editor() as editor:
            operation.database_forwards('gis', editor, old_state, self.current_state)


class OperationTests(OperationTestCase):

    def setUp(self):
        super(OperationTests, self).setUp()
        self.set_up_test_model()

    def test_add_geom_field(self):
        """
        Test the AddField operation with a geometry-enabled column.
        """
        self.alter_gis_model(migrations.AddField, 'Neighborhood', 'path', False, fields.LineStringField)
        self.assertColumnExists('gis_neighborhood', 'path')

        # Test GeometryColumns when available
        if HAS_GEOMETRY_COLUMNS:
            self.assertGeometryColumnsCount(2)

        # Test spatial indices when available
        if self.has_spatial_indexes:
            self.assertSpatialIndexExists('gis_neighborhood', 'path')

    @skipUnlessDBFeature('supports_raster')
    def test_add_raster_field(self):
        """
        Test the AddField operation with a raster-enabled column.
        """
        self.alter_gis_model(migrations.AddField, 'Neighborhood', 'heatmap', False, fields.RasterField)
        self.assertColumnExists('gis_neighborhood', 'heatmap')

        # Test spatial indices when available
        if self.has_spatial_indexes:
            self.assertSpatialIndexExists('gis_neighborhood', 'heatmap', raster=True)

    def test_add_blank_geom_field(self):
        """
        Should be able to add a GeometryField with blank=True.
        """
        self.alter_gis_model(migrations.AddField, 'Neighborhood', 'path', True, fields.LineStringField)
        self.assertColumnExists('gis_neighborhood', 'path')

        # Test GeometryColumns when available
        if HAS_GEOMETRY_COLUMNS:
            self.assertGeometryColumnsCount(2)

        # Test spatial indices when available
        if self.has_spatial_indexes:
            self.assertSpatialIndexExists('gis_neighborhood', 'path')

    @skipUnlessDBFeature('supports_raster')
    def test_add_blank_raster_field(self):
        """
        Should be able to add a RasterField with blank=True.
        """
        self.alter_gis_model(migrations.AddField, 'Neighborhood', 'heatmap', True, fields.RasterField)
        self.assertColumnExists('gis_neighborhood', 'heatmap')

        # Test spatial indices when available
        if self.has_spatial_indexes:
            self.assertSpatialIndexExists('gis_neighborhood', 'heatmap', raster=True)

    def test_remove_geom_field(self):
        """
        Test the RemoveField operation with a geometry-enabled column.
        """
        self.alter_gis_model(migrations.RemoveField, 'Neighborhood', 'geom')
        self.assertColumnNotExists('gis_neighborhood', 'geom')

        # Test GeometryColumns when available
        if HAS_GEOMETRY_COLUMNS:
            self.assertGeometryColumnsCount(0)

    @skipUnlessDBFeature('supports_raster')
    def test_remove_raster_field(self):
        """
        Test the RemoveField operation with a raster-enabled column.
        """
        self.alter_gis_model(migrations.RemoveField, 'Neighborhood', 'rast')
        self.assertColumnNotExists('gis_neighborhood', 'rast')

    def test_create_model_spatial_index(self):
        if not self.has_spatial_indexes:
            self.skipTest('No support for Spatial indexes')

        self.assertSpatialIndexExists('gis_neighborhood', 'geom')

        if connection.features.supports_raster:
            self.assertSpatialIndexExists('gis_neighborhood', 'rast', raster=True)

    @skipUnlessDBFeature("supports_3d_storage")
    @skipIf(spatialite, "Django currently doesn't support altering Spatialite geometry fields")
    def test_alter_geom_field_dim(self):
        Neighborhood = self.current_state.apps.get_model('gis', 'Neighborhood')
        p1 = Polygon(((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)))
        Neighborhood.objects.create(name='TestDim', geom=MultiPolygon(p1, p1))
        # Add 3rd dimension.
        self.alter_gis_model(
            migrations.AlterField, 'Neighborhood', 'geom', False,
            fields.MultiPolygonField, field_class_kwargs={'srid': 4326, 'dim': 3}
        )
        self.assertTrue(Neighborhood.objects.first().geom.hasz)
        # Rewind to 2 dimensions.
        self.alter_gis_model(
            migrations.AlterField, 'Neighborhood', 'geom', False,
            fields.MultiPolygonField, field_class_kwargs={'srid': 4326, 'dim': 2}
        )
        self.assertFalse(Neighborhood.objects.first().geom.hasz)


@skipIfDBFeature('supports_raster')
class NoRasterSupportTests(OperationTestCase):
    def test_create_raster_model_on_db_without_raster_support(self):
        msg = 'Raster fields require backends with raster support.'
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            self.set_up_test_model(force_raster_creation=True)

    def test_add_raster_field_on_db_without_raster_support(self):
        msg = 'Raster fields require backends with raster support.'
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            self.set_up_test_model()
            self.alter_gis_model(
                migrations.AddField, 'Neighborhood', 'heatmap',
                False, fields.RasterField
            )
e473d9e4461c90fdfd3b0ad816006183410ab3003664f65b58105741e7fdb04b
# Copyright (c) 2008-2009 Aryeh Leib Taurog, http://www.aryehleib.com
# All rights reserved.
#
# Modified from original contribution by Aryeh Leib Taurog, which was
# released under the New BSD license.
import unittest

from django.contrib.gis.geos.mutable_list import ListMixin
from django.utils import six


class UserListA(ListMixin):
    _mytype = tuple

    def __init__(self, i_list, *args, **kwargs):
        self._list = self._mytype(i_list)
        super(UserListA, self).__init__(*args, **kwargs)

    def __len__(self):
        return len(self._list)

    def __str__(self):
        return str(self._list)

    def __repr__(self):
        return repr(self._list)

    def _set_list(self, length, items):
        # this would work:
        # self._list = self._mytype(items)
        # but then we wouldn't be testing length parameter
        itemList = ['x'] * length
        for i, v in enumerate(items):
            itemList[i] = v

        self._list = self._mytype(itemList)

    def _get_single_external(self, index):
        return self._list[index]


class UserListB(UserListA):
    _mytype = list

    def _set_single(self, index, value):
        self._list[index] = value


def nextRange(length):
    nextRange.start += 100
    return range(nextRange.start, nextRange.start + length)

nextRange.start = 0


class ListMixinTest(unittest.TestCase):
    """
    Tests base class ListMixin by comparing a list clone which is
    a ListMixin subclass with a real Python list.
    """
    limit = 3
    listType = UserListA

    def lists_of_len(self, length=None):
        if length is None:
            length = self.limit
        pl = list(range(length))
        return pl, self.listType(pl)

    def limits_plus(self, b):
        return range(-self.limit - b, self.limit + b)

    def step_range(self):
        return list(range(-1 - self.limit, 0)) + list(range(1, 1 + self.limit))

    def test01_getslice(self):
        'Slice retrieval'
        pl, ul = self.lists_of_len()
        for i in self.limits_plus(1):
            self.assertEqual(pl[i:], ul[i:], 'slice [%d:]' % (i))
            self.assertEqual(pl[:i], ul[:i], 'slice [:%d]' % (i))

            for j in self.limits_plus(1):
                self.assertEqual(pl[i:j], ul[i:j], 'slice [%d:%d]' % (i, j))
                for k in self.step_range():
                    self.assertEqual(pl[i:j:k], ul[i:j:k], 'slice [%d:%d:%d]' % (i, j, k))

            for k in self.step_range():
                self.assertEqual(pl[i::k], ul[i::k], 'slice [%d::%d]' % (i, k))
                self.assertEqual(pl[:i:k], ul[:i:k], 'slice [:%d:%d]' % (i, k))

        for k in self.step_range():
            self.assertEqual(pl[::k], ul[::k], 'slice [::%d]' % (k))

    def test02_setslice(self):
        'Slice assignment'
        def setfcn(x, i, j, k, L):
            x[i:j:k] = range(L)
        pl, ul = self.lists_of_len()
        for slen in range(self.limit + 1):
            ssl = nextRange(slen)
            ul[:] = ssl
            pl[:] = ssl
            self.assertEqual(pl, ul[:], 'set slice [:]')

            for i in self.limits_plus(1):
                ssl = nextRange(slen)
                ul[i:] = ssl
                pl[i:] = ssl
                self.assertEqual(pl, ul[:], 'set slice [%d:]' % (i))

                ssl = nextRange(slen)
                ul[:i] = ssl
                pl[:i] = ssl
                self.assertEqual(pl, ul[:], 'set slice [:%d]' % (i))

                for j in self.limits_plus(1):
                    ssl = nextRange(slen)
                    ul[i:j] = ssl
                    pl[i:j] = ssl
                    self.assertEqual(pl, ul[:], 'set slice [%d:%d]' % (i, j))

                    for k in self.step_range():
                        ssl = nextRange(len(ul[i:j:k]))
                        ul[i:j:k] = ssl
                        pl[i:j:k] = ssl
                        self.assertEqual(pl, ul[:], 'set slice [%d:%d:%d]' % (i, j, k))

                        sliceLen = len(ul[i:j:k])
                        with self.assertRaises(ValueError):
                            setfcn(ul, i, j, k, sliceLen + 1)
                        if sliceLen > 2:
                            with self.assertRaises(ValueError):
                                setfcn(ul, i, j, k, sliceLen - 1)

                for k in self.step_range():
                    ssl = nextRange(len(ul[i::k]))
                    ul[i::k] = ssl
                    pl[i::k] = ssl
                    self.assertEqual(pl, ul[:], 'set slice [%d::%d]' % (i, k))

                    ssl = nextRange(len(ul[:i:k]))
                    ul[:i:k] = ssl
                    pl[:i:k] = ssl
                    self.assertEqual(pl, ul[:], 'set slice [:%d:%d]' % (i, k))

            for k in self.step_range():
                ssl = nextRange(len(ul[::k]))
                ul[::k] = ssl
                pl[::k] = ssl
                self.assertEqual(pl, ul[:], 'set slice [::%d]' % (k))

    def test03_delslice(self):
        'Delete slice'
        for Len in range(self.limit):
            pl, ul = self.lists_of_len(Len)
            del pl[:]
            del ul[:]
            self.assertEqual(pl[:], ul[:], 'del slice [:]')
            for i in range(-Len - 1, Len + 1):
                pl, ul = self.lists_of_len(Len)
                del pl[i:]
                del ul[i:]
                self.assertEqual(pl[:], ul[:], 'del slice [%d:]' % (i))
                pl, ul = self.lists_of_len(Len)
                del pl[:i]
                del ul[:i]
                self.assertEqual(pl[:], ul[:], 'del slice [:%d]' % (i))
                for j in range(-Len - 1, Len + 1):
                    pl, ul = self.lists_of_len(Len)
                    del pl[i:j]
                    del ul[i:j]
                    self.assertEqual(pl[:], ul[:], 'del slice [%d:%d]' % (i, j))
                    for k in list(range(-Len - 1, 0)) + list(range(1, Len)):
                        pl, ul = self.lists_of_len(Len)
                        del pl[i:j:k]
                        del ul[i:j:k]
                        self.assertEqual(pl[:], ul[:], 'del slice [%d:%d:%d]' % (i, j, k))

                for k in list(range(-Len - 1, 0)) + list(range(1, Len)):
                    pl, ul = self.lists_of_len(Len)
                    del pl[:i:k]
                    del ul[:i:k]
                    self.assertEqual(pl[:], ul[:], 'del slice [:%d:%d]' % (i, k))

                    pl, ul = self.lists_of_len(Len)
                    del pl[i::k]
                    del ul[i::k]
                    self.assertEqual(pl[:], ul[:], 'del slice [%d::%d]' % (i, k))

            for k in list(range(-Len - 1, 0)) + list(range(1, Len)):
                pl, ul = self.lists_of_len(Len)
                del pl[::k]
                del ul[::k]
                self.assertEqual(pl[:], ul[:], 'del slice [::%d]' % (k))

    def test04_get_set_del_single(self):
        'Get/set/delete single item'
        pl, ul = self.lists_of_len()
        for i in self.limits_plus(0):
            self.assertEqual(pl[i], ul[i], 'get single item [%d]' % i)

        for i in self.limits_plus(0):
            pl, ul = self.lists_of_len()
            pl[i] = 100
            ul[i] = 100
            self.assertEqual(pl[:], ul[:], 'set single item [%d]' % i)

        for i in self.limits_plus(0):
            pl, ul = self.lists_of_len()
            del pl[i]
            del ul[i]
            self.assertEqual(pl[:], ul[:], 'del single item [%d]' % i)

    def test05_out_of_range_exceptions(self):
        'Out of range exceptions'
        def setfcn(x, i):
            x[i] = 20

        def getfcn(x, i):
            return x[i]

        def delfcn(x, i):
            del x[i]
        pl, ul = self.lists_of_len()
        for i in (-1 - self.limit, self.limit):
            with self.assertRaises(IndexError):  # 'set index %d' % i)
                setfcn(ul, i)
            with self.assertRaises(IndexError):  # 'get index %d' % i)
                getfcn(ul, i)
            with self.assertRaises(IndexError):  # 'del index %d' % i)
                delfcn(ul, i)

    def test06_list_methods(self):
        'List methods'
        pl, ul = self.lists_of_len()
        pl.append(40)
        ul.append(40)
        self.assertEqual(pl[:], ul[:], 'append')

        pl.extend(range(50, 55))
        ul.extend(range(50, 55))
        self.assertEqual(pl[:], ul[:], 'extend')

        pl.reverse()
        ul.reverse()
        self.assertEqual(pl[:], ul[:], 'reverse')

        for i in self.limits_plus(1):
            pl, ul = self.lists_of_len()
            pl.insert(i, 50)
            ul.insert(i, 50)
            self.assertEqual(pl[:], ul[:], 'insert at %d' % i)

        for i in self.limits_plus(0):
            pl, ul = self.lists_of_len()
            self.assertEqual(pl.pop(i), ul.pop(i), 'popped value at %d' % i)
            self.assertEqual(pl[:], ul[:], 'after pop at %d' % i)

        pl, ul = self.lists_of_len()
        self.assertEqual(pl.pop(), ul.pop(), 'popped value')
        self.assertEqual(pl[:], ul[:], 'after pop')

        pl, ul = self.lists_of_len()

        def popfcn(x, i):
            x.pop(i)
        with self.assertRaises(IndexError):
            popfcn(ul, self.limit)
        with self.assertRaises(IndexError):
            popfcn(ul, -1 - self.limit)

        pl, ul = self.lists_of_len()
        for val in range(self.limit):
            self.assertEqual(pl.index(val), ul.index(val), 'index of %d' % val)

        for val in self.limits_plus(2):
            self.assertEqual(pl.count(val), ul.count(val), 'count %d' % val)

        for val in range(self.limit):
            pl, ul = self.lists_of_len()
            pl.remove(val)
            ul.remove(val)
            self.assertEqual(pl[:], ul[:], 'after remove val %d' % val)

        def indexfcn(x, v):
            return x.index(v)

        def removefcn(x, v):
            return x.remove(v)
        with self.assertRaises(ValueError):
            indexfcn(ul, 40)
        with self.assertRaises(ValueError):
            removefcn(ul, 40)

    def test07_allowed_types(self):
        'Type-restricted list'
        pl, ul = self.lists_of_len()
        ul._allowed = six.integer_types
        ul[1] = 50
        ul[:2] = [60, 70, 80]

        def setfcn(x, i, v):
            x[i] = v
        with self.assertRaises(TypeError):
            setfcn(ul, 2, 'hello')
        with self.assertRaises(TypeError):
            setfcn(ul, slice(0, 3, 2), ('hello', 'goodbye'))

    def test08_min_length(self):
        'Length limits'
        pl, ul = self.lists_of_len(5)
        ul._minlength = 3

        def delfcn(x, i):
            del x[:i]

        def setfcn(x, i):
            x[:i] = []
        for i in range(len(ul) - ul._minlength + 1, len(ul)):
            with self.assertRaises(ValueError):
                delfcn(ul, i)
            with self.assertRaises(ValueError):
                setfcn(ul, i)
        del ul[:len(ul) - ul._minlength]

        ul._maxlength = 4
        for i in range(0, ul._maxlength - len(ul)):
            ul.append(i)
        with self.assertRaises(ValueError):
            ul.append(10)

    def test09_iterable_check(self):
        'Error on assigning non-iterable to slice'
        pl, ul = self.lists_of_len(self.limit + 1)

        def setfcn(x, i, v):
            x[i] = v
        with self.assertRaises(TypeError):
            setfcn(ul, slice(0, 3, 2), 2)

    def test10_checkindex(self):
        'Index check'
        pl, ul = self.lists_of_len()
        for i in self.limits_plus(0):
            if i < 0:
                self.assertEqual(ul._checkindex(i), i + self.limit, '_checkindex(neg index)')
            else:
                self.assertEqual(ul._checkindex(i), i, '_checkindex(pos index)')

        for i in (-self.limit - 1, self.limit):
            with self.assertRaises(IndexError):
                ul._checkindex(i)

    def test_11_sorting(self):
        'Sorting'
        pl, ul = self.lists_of_len()
        pl.insert(0, pl.pop())
        ul.insert(0, ul.pop())
        pl.sort()
        ul.sort()
        self.assertEqual(pl[:], ul[:], 'sort')

        mid = pl[len(pl) // 2]
        pl.sort(key=lambda x: (mid - x) ** 2)
        ul.sort(key=lambda x: (mid - x) ** 2)
        self.assertEqual(pl[:], ul[:], 'sort w/ key')

        pl.insert(0, pl.pop())
        ul.insert(0, ul.pop())
        pl.sort(reverse=True)
        ul.sort(reverse=True)
        self.assertEqual(pl[:], ul[:], 'sort w/ reverse')

        mid = pl[len(pl) // 2]
        pl.sort(key=lambda x: (mid - x) ** 2)
        ul.sort(key=lambda x: (mid - x) ** 2)
        self.assertEqual(pl[:], ul[:], 'sort w/ key')

    def test_12_arithmetic(self):
        'Arithmetic'
        pl, ul = self.lists_of_len()
        al = list(range(10, 14))
        self.assertEqual(list(pl + al), list(ul + al), 'add')
        self.assertEqual(type(ul), type(ul + al), 'type of add result')
        self.assertEqual(list(al + pl), list(al + ul), 'radd')
        self.assertEqual(type(al), type(al + ul), 'type of radd result')
        objid = id(ul)
        pl += al
        ul += al
        self.assertEqual(pl[:], ul[:], 'in-place add')
        self.assertEqual(objid, id(ul), 'in-place add id')

        for n in (-1, 0, 1, 3):
            pl, ul = self.lists_of_len()
            self.assertEqual(list(pl * n), list(ul * n), 'mul by %d' % n)
            self.assertEqual(type(ul), type(ul * n), 'type of mul by %d result' % n)
            self.assertEqual(list(n * pl), list(n * ul), 'rmul by %d' % n)
            self.assertEqual(type(ul), type(n * ul), 'type of rmul by %d result' % n)
            objid = id(ul)
            pl *= n
            ul *= n
            self.assertEqual(pl[:], ul[:], 'in-place mul by %d' % n)
            self.assertEqual(objid, id(ul), 'in-place mul by %d id' % n)

        pl, ul = self.lists_of_len()
        self.assertEqual(pl, ul, 'cmp for equal')
        self.assertNotEqual(ul, pl + [2], 'cmp for not equal')
        self.assertGreaterEqual(pl, ul, 'cmp for gte self')
        self.assertLessEqual(pl, ul, 'cmp for lte self')
        self.assertGreaterEqual(ul, pl, 'cmp for self gte')
        self.assertLessEqual(ul, pl, 'cmp for self lte')

        self.assertGreater(pl + [5], ul, 'cmp')
        self.assertGreaterEqual(pl + [5], ul, 'cmp')
        self.assertLess(pl, ul + [2], 'cmp')
        self.assertLessEqual(pl, ul + [2], 'cmp')
        self.assertGreater(ul + [5], pl, 'cmp')
        self.assertGreaterEqual(ul + [5], pl, 'cmp')
        self.assertLess(ul, pl + [2], 'cmp')
        self.assertLessEqual(ul, pl + [2], 'cmp')

        # Also works with a custom IndexError
        ul_longer = ul + [2]
        ul_longer._IndexError = TypeError
        ul._IndexError = TypeError
        self.assertNotEqual(ul_longer, pl)
        self.assertGreater(ul_longer, ul)

        pl[1] = 20
        self.assertGreater(pl, ul, 'cmp for gt self')
        self.assertLess(ul, pl, 'cmp for self lt')
        pl[1] = -20
        self.assertLess(pl, ul, 'cmp for lt self')
        self.assertGreater(ul, pl, 'cmp for gt self')


class ListMixinTestSingle(ListMixinTest):
    listType = UserListB
eba06760d17416e4d70b733d1189b9e32d763d7836d6ed21a669ad22919323d1
# Copyright (c) 2008-2009 Aryeh Leib Taurog, all rights reserved.
# Modified from original contribution by Aryeh Leib Taurog, which was
# released under the New BSD license.
import unittest
from unittest import skipUnless

from django.contrib.gis.geos import (
    HAS_GEOS, LinearRing, LineString, MultiPoint, Point, Polygon, fromstr,
)


def api_get_distance(x):
    return x.distance(Point(-200, -200))


def api_get_buffer(x):
    return x.buffer(10)


def api_get_geom_typeid(x):
    return x.geom_typeid


def api_get_num_coords(x):
    return x.num_coords


def api_get_centroid(x):
    return x.centroid


def api_get_empty(x):
    return x.empty


def api_get_valid(x):
    return x.valid


def api_get_simple(x):
    return x.simple


def api_get_ring(x):
    return x.ring


def api_get_boundary(x):
    return x.boundary


def api_get_convex_hull(x):
    return x.convex_hull


def api_get_extent(x):
    return x.extent


def api_get_area(x):
    return x.area


def api_get_length(x):
    return x.length

geos_function_tests = [
    val for name, val in vars().items()
    if hasattr(val, '__call__') and name.startswith('api_get_')
]


@skipUnless(HAS_GEOS, "Geos is required.")
class GEOSMutationTest(unittest.TestCase):
    """
    Tests Pythonic Mutability of Python GEOS geometry wrappers
    get/set/delitem on a slice, normal list methods
    """

    def test00_GEOSIndexException(self):
        'Testing Geometry IndexError'
        p = Point(1, 2)
        for i in range(-2, 2):
            p._checkindex(i)
        with self.assertRaises(IndexError):
            p._checkindex(2)
        with self.assertRaises(IndexError):
            p._checkindex(-3)

    def test01_PointMutations(self):
        'Testing Point mutations'
        for p in (Point(1, 2, 3), fromstr('POINT (1 2 3)')):
            self.assertEqual(p._get_single_external(1), 2.0, 'Point _get_single_external')

            # _set_single
            p._set_single(0, 100)
            self.assertEqual(p.coords, (100.0, 2.0, 3.0), 'Point _set_single')

            # _set_list
            p._set_list(2, (50, 3141))
            self.assertEqual(p.coords, (50.0, 3141.0), 'Point _set_list')

    def test02_PointExceptions(self):
        'Testing Point exceptions'
        with self.assertRaises(TypeError):
            Point(range(1))
        with self.assertRaises(TypeError):
            Point(range(4))

    def test03_PointApi(self):
        'Testing Point API'
        q = Point(4, 5, 3)
        for p in (Point(1, 2, 3), fromstr('POINT (1 2 3)')):
            p[0:2] = [4, 5]
            for f in geos_function_tests:
                self.assertEqual(f(q), f(p), 'Point ' + f.__name__)

    def test04_LineStringMutations(self):
        'Testing LineString mutations'
        for ls in (LineString((1, 0), (4, 1), (6, -1)),
                   fromstr('LINESTRING (1 0,4 1,6 -1)')):
            self.assertEqual(ls._get_single_external(1), (4.0, 1.0), 'LineString _get_single_external')

            # _set_single
            ls._set_single(0, (-50, 25))
            self.assertEqual(ls.coords, ((-50.0, 25.0), (4.0, 1.0), (6.0, -1.0)), 'LineString _set_single')

            # _set_list
            ls._set_list(2, ((-50.0, 25.0), (6.0, -1.0)))
            self.assertEqual(ls.coords, ((-50.0, 25.0), (6.0, -1.0)), 'LineString _set_list')

            lsa = LineString(ls.coords)
            for f in geos_function_tests:
                self.assertEqual(f(lsa), f(ls), 'LineString ' + f.__name__)

    def test05_Polygon(self):
        'Testing Polygon mutations'
        for pg in (Polygon(((1, 0), (4, 1), (6, -1), (8, 10), (1, 0)),
                           ((5, 4), (6, 4), (6, 3), (5, 4))),
                   fromstr('POLYGON ((1 0,4 1,6 -1,8 10,1 0),(5 4,6 4,6 3,5 4))')):
            self.assertEqual(pg._get_single_external(0),
                             LinearRing((1, 0), (4, 1), (6, -1), (8, 10), (1, 0)),
                             'Polygon _get_single_external(0)')
            self.assertEqual(pg._get_single_external(1),
                             LinearRing((5, 4), (6, 4), (6, 3), (5, 4)),
                             'Polygon _get_single_external(1)')

            # _set_list
            pg._set_list(2, (((1, 2), (10, 0), (12, 9), (-1, 15), (1, 2)),
                             ((4, 2), (5, 2), (5, 3), (4, 2))))
            self.assertEqual(
                pg.coords,
                (((1.0, 2.0), (10.0, 0.0), (12.0, 9.0), (-1.0, 15.0), (1.0, 2.0)),
                 ((4.0, 2.0), (5.0, 2.0), (5.0, 3.0), (4.0, 2.0))),
                'Polygon _set_list')

            lsa = Polygon(*pg.coords)
            for f in geos_function_tests:
                self.assertEqual(f(lsa), f(pg), 'Polygon ' + f.__name__)

    def test06_Collection(self):
        'Testing Collection mutations'
        points = (
            MultiPoint(*map(Point, ((3, 4), (-1, 2), (5, -4), (2, 8)))),
            fromstr('MULTIPOINT (3 4,-1 2,5 -4,2 8)'),
        )
        for mp in points:
            self.assertEqual(mp._get_single_external(2), Point(5, -4), 'Collection _get_single_external')

            mp._set_list(3, map(Point, ((5, 5), (3, -2), (8, 1))))
            self.assertEqual(mp.coords, ((5.0, 5.0), (3.0, -2.0), (8.0, 1.0)), 'Collection _set_list')

            lsa = MultiPoint(*map(Point, ((5, 5), (3, -2), (8, 1))))
            for f in geos_function_tests:
                self.assertEqual(f(lsa), f(mp), 'MultiPoint ' + f.__name__)
9d29690ec550fefeb1250ba6c38e7ff0296c929f3a4c94bdadbb391ee52865cc
from __future__ import unicode_literals

import binascii
from unittest import skipUnless

from django.contrib.gis.geos import (
    HAS_GEOS, GEOSGeometry, Point, WKBReader, WKBWriter, WKTReader, WKTWriter,
)
from django.test import SimpleTestCase
from django.utils.six import memoryview


@skipUnless(HAS_GEOS, "Geos is required.")
class GEOSIOTest(SimpleTestCase):

    def test01_wktreader(self):
        # Creating a WKTReader instance
        wkt_r = WKTReader()
        wkt = 'POINT (5 23)'

        # read() should return a GEOSGeometry
        ref = GEOSGeometry(wkt)
        g1 = wkt_r.read(wkt.encode())
        g2 = wkt_r.read(wkt)

        for geom in (g1, g2):
            self.assertEqual(ref, geom)

        # Should only accept six.string_types objects.
        with self.assertRaises(TypeError):
            wkt_r.read(1)
        with self.assertRaises(TypeError):
            wkt_r.read(memoryview(b'foo'))

    def test02_wktwriter(self):
        # Creating a WKTWriter instance, testing its ptr property.
        wkt_w = WKTWriter()
        with self.assertRaises(TypeError):
            wkt_w._set_ptr(WKTReader.ptr_type())

        ref = GEOSGeometry('POINT (5 23)')
        ref_wkt = 'POINT (5.0000000000000000 23.0000000000000000)'
        self.assertEqual(ref_wkt, wkt_w.write(ref).decode())

    def test_wktwriter_constructor_arguments(self):
        wkt_w = WKTWriter(dim=3, trim=True, precision=3)
        ref = GEOSGeometry('POINT (5.34562 23 1.5)')
        ref_wkt = 'POINT Z (5.35 23 1.5)'
        self.assertEqual(ref_wkt, wkt_w.write(ref).decode())

    def test03_wkbreader(self):
        # Creating a WKBReader instance
        wkb_r = WKBReader()

        hex = b'000000000140140000000000004037000000000000'
        wkb = memoryview(binascii.a2b_hex(hex))
        ref = GEOSGeometry(hex)

        # read() should return a GEOSGeometry on either a hex string or
        # a WKB buffer.
        g1 = wkb_r.read(wkb)
        g2 = wkb_r.read(hex)
        for geom in (g1, g2):
            self.assertEqual(ref, geom)

        bad_input = (1, 5.23, None, False)
        for bad_wkb in bad_input:
            with self.assertRaises(TypeError):
                wkb_r.read(bad_wkb)

    def test04_wkbwriter(self):
        wkb_w = WKBWriter()

        # Representations of 'POINT (5 23)' in hex -- one normal and
        # the other with the byte order changed.
        g = GEOSGeometry('POINT (5 23)')
        hex1 = b'010100000000000000000014400000000000003740'
        wkb1 = memoryview(binascii.a2b_hex(hex1))
        hex2 = b'000000000140140000000000004037000000000000'
        wkb2 = memoryview(binascii.a2b_hex(hex2))

        self.assertEqual(hex1, wkb_w.write_hex(g))
        self.assertEqual(wkb1, wkb_w.write(g))

        # Ensuring bad byteorders are not accepted.
        for bad_byteorder in (-1, 2, 523, 'foo', None):
            # Equivalent of `wkb_w.byteorder = bad_byteorder`
            with self.assertRaises(ValueError):
                wkb_w._set_byteorder(bad_byteorder)

        # Setting the byteorder to 0 (for Big Endian)
        wkb_w.byteorder = 0
        self.assertEqual(hex2, wkb_w.write_hex(g))
        self.assertEqual(wkb2, wkb_w.write(g))

        # Back to Little Endian
        wkb_w.byteorder = 1

        # Now, trying out the 3D and SRID flags.
        g = GEOSGeometry('POINT (5 23 17)')
        g.srid = 4326

        hex3d = b'0101000080000000000000144000000000000037400000000000003140'
        wkb3d = memoryview(binascii.a2b_hex(hex3d))
        hex3d_srid = b'01010000A0E6100000000000000000144000000000000037400000000000003140'
        wkb3d_srid = memoryview(binascii.a2b_hex(hex3d_srid))

        # Ensuring bad output dimensions are not accepted
        for bad_outdim in (-1, 0, 1, 4, 423, 'foo', None):
            with self.assertRaisesMessage(ValueError, 'WKB output dimension must be 2 or 3'):
                wkb_w.outdim = bad_outdim

        # Now setting the output dimensions to be 3
        wkb_w.outdim = 3

        self.assertEqual(hex3d, wkb_w.write_hex(g))
        self.assertEqual(wkb3d, wkb_w.write(g))

        # Telling the WKBWriter to include the srid in the representation.
        wkb_w.srid = True
        self.assertEqual(hex3d_srid, wkb_w.write_hex(g))
        self.assertEqual(wkb3d_srid, wkb_w.write(g))

    def test_wkt_writer_trim(self):
        wkt_w = WKTWriter()
        self.assertFalse(wkt_w.trim)
        self.assertEqual(wkt_w.write(Point(1, 1)), b'POINT (1.0000000000000000 1.0000000000000000)')

        wkt_w.trim = True
        self.assertTrue(wkt_w.trim)
        self.assertEqual(wkt_w.write(Point(1, 1)), b'POINT (1 1)')
        self.assertEqual(wkt_w.write(Point(1.1, 1)), b'POINT (1.1 1)')
        self.assertEqual(wkt_w.write(Point(1. / 3, 1)), b'POINT (0.3333333333333333 1)')

        wkt_w.trim = False
        self.assertFalse(wkt_w.trim)
        self.assertEqual(wkt_w.write(Point(1, 1)), b'POINT (1.0000000000000000 1.0000000000000000)')

    def test_wkt_writer_precision(self):
        wkt_w = WKTWriter()
        self.assertIsNone(wkt_w.precision)
        self.assertEqual(wkt_w.write(Point(1. / 3, 2. / 3)), b'POINT (0.3333333333333333 0.6666666666666666)')

        wkt_w.precision = 1
        self.assertEqual(wkt_w.precision, 1)
        self.assertEqual(wkt_w.write(Point(1. / 3, 2. / 3)), b'POINT (0.3 0.7)')

        wkt_w.precision = 0
        self.assertEqual(wkt_w.precision, 0)
        self.assertEqual(wkt_w.write(Point(1. / 3, 2. / 3)), b'POINT (0 1)')

        wkt_w.precision = None
        self.assertIsNone(wkt_w.precision)
        self.assertEqual(wkt_w.write(Point(1. / 3, 2. / 3)), b'POINT (0.3333333333333333 0.6666666666666666)')

        with self.assertRaisesMessage(AttributeError, 'WKT output rounding precision must be '):
            wkt_w.precision = 'potato'
8d18c4a8d43ba970401243ebbf4f20c0e4672bc11d0c72ffbc3a2031bc0eb25a
from __future__ import unicode_literals

import ctypes
import json
import random
from binascii import a2b_hex, b2a_hex
from io import BytesIO
from unittest import skipUnless

from django.contrib.gis import gdal
from django.contrib.gis.gdal import HAS_GDAL
from django.contrib.gis.geos import (
    HAS_GEOS, GeometryCollection, GEOSException, GEOSGeometry, LinearRing,
    LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon,
    fromfile, fromstr,
)
from django.contrib.gis.geos.base import GEOSBase
from django.contrib.gis.geos.libgeos import geos_version_info
from django.contrib.gis.shortcuts import numpy
from django.template import Context
from django.template.engine import Engine
from django.test import SimpleTestCase, ignore_warnings, mock
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_bytes
from django.utils.six.moves import range

from ..test_data import TestDataMixin


@skipUnless(HAS_GEOS, "Geos is required.")
class GEOSTest(SimpleTestCase, TestDataMixin):

    def test_base(self):
        "Tests out the GEOSBase class."
        # Testing out GEOSBase class, which provides a `ptr` property
        # that abstracts out access to underlying C pointers.
        class FakeGeom1(GEOSBase):
            pass

        # This one only accepts pointers to floats
        c_float_p = ctypes.POINTER(ctypes.c_float)

        class FakeGeom2(GEOSBase):
            ptr_type = c_float_p

        # Default ptr_type is `c_void_p`.
        fg1 = FakeGeom1()
        # Default ptr_type is C float pointer
        fg2 = FakeGeom2()

        # These assignments are OK -- None is allowed because
        # it's equivalent to the NULL pointer.
        fg1.ptr = ctypes.c_void_p()
        fg1.ptr = None
        fg2.ptr = c_float_p(ctypes.c_float(5.23))
        fg2.ptr = None

        # Because pointers have been set to NULL, an exception should be
        # raised when we try to access it. Raising an exception is
        # preferable to a segmentation fault that commonly occurs when
        # a C method is given a NULL memory reference.
        for fg in (fg1, fg2):
            # Equivalent to `fg.ptr`
            with self.assertRaises(GEOSException):
                fg._get_ptr()

        # Anything that is either not None or the acceptable pointer type
        # will result in a TypeError when trying to assign it to the `ptr`
        # property. Thus, memory addresses (integers) and pointers of the
        # incorrect type (in `bad_ptrs`) will not be allowed.
        bad_ptrs = (5, ctypes.c_char_p(b'foobar'))
        for bad_ptr in bad_ptrs:
            # Equivalent to `fg.ptr = bad_ptr`
            with self.assertRaises(TypeError):
                fg1._set_ptr(bad_ptr)
            with self.assertRaises(TypeError):
                fg2._set_ptr(bad_ptr)

    def test_wkt(self):
        "Testing WKT output."
        for g in self.geometries.wkt_out:
            geom = fromstr(g.wkt)
            if geom.hasz:
                self.assertEqual(g.ewkt, geom.wkt)

    def test_hex(self):
        "Testing HEX output."
        for g in self.geometries.hex_wkt:
            geom = fromstr(g.wkt)
            self.assertEqual(g.hex, geom.hex.decode())

    def test_hexewkb(self):
        "Testing (HEX)EWKB output."
        # For testing HEX(EWKB).
        ogc_hex = b'01010000000000000000000000000000000000F03F'
        ogc_hex_3d = b'01010000800000000000000000000000000000F03F0000000000000040'
        # `SELECT ST_AsHEXEWKB(ST_GeomFromText('POINT(0 1)', 4326));`
        hexewkb_2d = b'0101000020E61000000000000000000000000000000000F03F'
        # `SELECT ST_AsHEXEWKB(ST_GeomFromEWKT('SRID=4326;POINT(0 1 2)'));`
        hexewkb_3d = b'01010000A0E61000000000000000000000000000000000F03F0000000000000040'

        pnt_2d = Point(0, 1, srid=4326)
        pnt_3d = Point(0, 1, 2, srid=4326)

        # OGC-compliant HEX will not have SRID value.
        self.assertEqual(ogc_hex, pnt_2d.hex)
        self.assertEqual(ogc_hex_3d, pnt_3d.hex)

        # HEXEWKB should be appropriate for its dimension -- have to use
        # a WKBWriter w/dimension set accordingly, else GEOS will insert
        # garbage into 3D coordinate if there is none.
        self.assertEqual(hexewkb_2d, pnt_2d.hexewkb)
        self.assertEqual(hexewkb_3d, pnt_3d.hexewkb)
        self.assertIs(GEOSGeometry(hexewkb_3d).hasz, True)

        # Same for EWKB.
        self.assertEqual(six.memoryview(a2b_hex(hexewkb_2d)), pnt_2d.ewkb)
        self.assertEqual(six.memoryview(a2b_hex(hexewkb_3d)), pnt_3d.ewkb)

        # Redundant sanity check.
        self.assertEqual(4326, GEOSGeometry(hexewkb_2d).srid)

    def test_kml(self):
        "Testing KML output."
        for tg in self.geometries.wkt_out:
            geom = fromstr(tg.wkt)
            kml = getattr(tg, 'kml', False)
            if kml:
                self.assertEqual(kml, geom.kml)

    def test_errors(self):
        "Testing the Error handlers."
        # string-based
        for err in self.geometries.errors:
            with self.assertRaises((GEOSException, ValueError)):
                fromstr(err.wkt)

        # Bad WKB
        with self.assertRaises(GEOSException):
            GEOSGeometry(six.memoryview(b'0'))

        class NotAGeometry(object):
            pass

        # Some other object
        with self.assertRaises(TypeError):
            GEOSGeometry(NotAGeometry())
        # None
        with self.assertRaises(TypeError):
            GEOSGeometry(None)

    def test_wkb(self):
        "Testing WKB output."
        for g in self.geometries.hex_wkt:
            geom = fromstr(g.wkt)
            wkb = geom.wkb
            self.assertEqual(b2a_hex(wkb).decode().upper(), g.hex)

    def test_create_hex(self):
        "Testing creation from HEX."
        for g in self.geometries.hex_wkt:
            geom_h = GEOSGeometry(g.hex)
            # we need to do this so decimal places get normalized
            geom_t = fromstr(g.wkt)
            self.assertEqual(geom_t.wkt, geom_h.wkt)

    def test_create_wkb(self):
        "Testing creation from WKB."
        for g in self.geometries.hex_wkt:
            wkb = six.memoryview(a2b_hex(g.hex.encode()))
            geom_h = GEOSGeometry(wkb)
            # we need to do this so decimal places get normalized
            geom_t = fromstr(g.wkt)
            self.assertEqual(geom_t.wkt, geom_h.wkt)

    def test_ewkt(self):
        "Testing EWKT."
        srids = (-1, 32140)
        for srid in srids:
            for p in self.geometries.polygons:
                ewkt = 'SRID=%d;%s' % (srid, p.wkt)
                poly = fromstr(ewkt)
                self.assertEqual(srid, poly.srid)
                self.assertEqual(srid, poly.shell.srid)
                self.assertEqual(srid, fromstr(poly.ewkt).srid)  # Checking export

    @skipUnless(HAS_GDAL, "GDAL is required.")
    def test_json(self):
        "Testing GeoJSON input/output (via GDAL)."
        for g in self.geometries.json_geoms:
            geom = GEOSGeometry(g.wkt)
            if not hasattr(g, 'not_equal'):
                # Loading jsons to prevent decimal differences
                self.assertEqual(json.loads(g.json), json.loads(geom.json))
                self.assertEqual(json.loads(g.json), json.loads(geom.geojson))
            self.assertEqual(GEOSGeometry(g.wkt), GEOSGeometry(geom.json))

    def test_fromfile(self):
        "Testing the fromfile() factory."
        ref_pnt = GEOSGeometry('POINT(5 23)')

        wkt_f = BytesIO()
        wkt_f.write(force_bytes(ref_pnt.wkt))
        wkb_f = BytesIO()
        wkb_f.write(bytes(ref_pnt.wkb))

        # Other tests use `fromfile()` on string filenames so those
        # aren't tested here.
        for fh in (wkt_f, wkb_f):
            fh.seek(0)
            pnt = fromfile(fh)
            self.assertEqual(ref_pnt, pnt)

    def test_eq(self):
        "Testing equivalence."
        p = fromstr('POINT(5 23)')
        self.assertEqual(p, p.wkt)
        self.assertNotEqual(p, 'foo')
        ls = fromstr('LINESTRING(0 0, 1 1, 5 5)')
        self.assertEqual(ls, ls.wkt)
        self.assertNotEqual(p, 'bar')
        # Error shouldn't be raised on equivalence testing with
        # an invalid type.
        for g in (p, ls):
            self.assertNotEqual(g, None)
            self.assertNotEqual(g, {'foo': 'bar'})
            self.assertNotEqual(g, False)

    def test_points(self):
        "Testing Point objects."
        prev = fromstr('POINT(0 0)')
        for p in self.geometries.points:
            # Creating the point from the WKT
            pnt = fromstr(p.wkt)
            self.assertEqual(pnt.geom_type, 'Point')
            self.assertEqual(pnt.geom_typeid, 0)
            self.assertEqual(pnt.dims, 0)
            self.assertEqual(p.x, pnt.x)
            self.assertEqual(p.y, pnt.y)
            self.assertEqual(pnt, fromstr(p.wkt))
            self.assertEqual(False, pnt == prev)  # Use assertEqual to test __eq__

            # Making sure that the point's X, Y components are what we expect
            self.assertAlmostEqual(p.x, pnt.tuple[0], 9)
            self.assertAlmostEqual(p.y, pnt.tuple[1], 9)

            # Testing the third dimension, and getting the tuple arguments
            if hasattr(p, 'z'):
                self.assertIs(pnt.hasz, True)
                self.assertEqual(p.z, pnt.z)
                self.assertAlmostEqual(p.z, pnt.tuple[2], 9)
                tup_args = (p.x, p.y, p.z)
                set_tup1 = (2.71, 3.14, 5.23)
                set_tup2 = (5.23, 2.71, 3.14)
            else:
                self.assertIs(pnt.hasz, False)
                self.assertIsNone(pnt.z)
                tup_args = (p.x, p.y)
                set_tup1 = (2.71, 3.14)
                set_tup2 = (3.14, 2.71)

            # Centroid operation on point should be point itself
            self.assertEqual(p.centroid, pnt.centroid.tuple)

            # Now testing the different constructors
            pnt2 = Point(tup_args)  # e.g., Point((1, 2))
            pnt3 = Point(*tup_args)  # e.g., Point(1, 2)
            self.assertEqual(pnt, pnt2)
            self.assertEqual(pnt, pnt3)

            # Now testing setting the x and y
            pnt.y = 3.14
            pnt.x = 2.71
            self.assertEqual(3.14, pnt.y)
            self.assertEqual(2.71, pnt.x)

            # Setting via the tuple/coords property
            pnt.tuple = set_tup1
            self.assertEqual(set_tup1, pnt.tuple)
            pnt.coords = set_tup2
            self.assertEqual(set_tup2, pnt.coords)

            prev = pnt  # setting the previous geometry

    def test_multipoints(self):
        "Testing MultiPoint objects."
        for mp in self.geometries.multipoints:
            mpnt = fromstr(mp.wkt)
            self.assertEqual(mpnt.geom_type, 'MultiPoint')
            self.assertEqual(mpnt.geom_typeid, 4)
            self.assertEqual(mpnt.dims, 0)

            self.assertAlmostEqual(mp.centroid[0], mpnt.centroid.tuple[0], 9)
            self.assertAlmostEqual(mp.centroid[1], mpnt.centroid.tuple[1], 9)

            with self.assertRaises(IndexError):
                mpnt.__getitem__(len(mpnt))
            self.assertEqual(mp.centroid, mpnt.centroid.tuple)
            self.assertEqual(mp.coords, tuple(m.tuple for m in mpnt))
            for p in mpnt:
                self.assertEqual(p.geom_type, 'Point')
                self.assertEqual(p.geom_typeid, 0)
                self.assertIs(p.empty, False)
                self.assertIs(p.valid, True)

    def test_linestring(self):
        "Testing LineString objects."
prev = fromstr('POINT(0 0)') for l in self.geometries.linestrings: ls = fromstr(l.wkt) self.assertEqual(ls.geom_type, 'LineString') self.assertEqual(ls.geom_typeid, 1) self.assertEqual(ls.dims, 1) self.assertIs(ls.empty, False) self.assertIs(ls.ring, False) if hasattr(l, 'centroid'): self.assertEqual(l.centroid, ls.centroid.tuple) if hasattr(l, 'tup'): self.assertEqual(l.tup, ls.tuple) self.assertEqual(ls, fromstr(l.wkt)) self.assertEqual(False, ls == prev) # Use assertEqual to test __eq__ with self.assertRaises(IndexError): ls.__getitem__(len(ls)) prev = ls # Creating a LineString from a tuple, list, and numpy array self.assertEqual(ls, LineString(ls.tuple)) # tuple self.assertEqual(ls, LineString(*ls.tuple)) # as individual arguments self.assertEqual(ls, LineString([list(tup) for tup in ls.tuple])) # as list # Point individual arguments self.assertEqual(ls.wkt, LineString(*tuple(Point(tup) for tup in ls.tuple)).wkt) if numpy: self.assertEqual(ls, LineString(numpy.array(ls.tuple))) # as numpy array with self.assertRaisesMessage(TypeError, 'Each coordinate should be a sequence (list or tuple)'): LineString((0, 0)) with self.assertRaisesMessage(ValueError, 'LineString requires at least 2 points, got 1.'): LineString([(0, 0)]) if numpy: with self.assertRaisesMessage(ValueError, 'LineString requires at least 2 points, got 1.'): LineString(numpy.array([(0, 0)])) with mock.patch('django.contrib.gis.geos.linestring.numpy', False): with self.assertRaisesMessage(TypeError, 'Invalid initialization input for LineStrings.'): LineString('wrong input') def test_multilinestring(self): "Testing MultiLineString objects." prev = fromstr('POINT(0 0)') for l in self.geometries.multilinestrings: ml = fromstr(l.wkt) self.assertEqual(ml.geom_type, 'MultiLineString') self.assertEqual(ml.geom_typeid, 5) self.assertEqual(ml.dims, 1) self.assertAlmostEqual(l.centroid[0], ml.centroid.x, 9) self.assertAlmostEqual(l.centroid[1], ml.centroid.y, 9) self.assertEqual(ml, fromstr(l.wkt)) self.assertEqual(False, ml == prev) # Use assertEqual to test __eq__ prev = ml for ls in ml: self.assertEqual(ls.geom_type, 'LineString') self.assertEqual(ls.geom_typeid, 1) self.assertIs(ls.empty, False) with self.assertRaises(IndexError): ml.__getitem__(len(ml)) self.assertEqual(ml.wkt, MultiLineString(*tuple(s.clone() for s in ml)).wkt) self.assertEqual(ml, MultiLineString(*tuple(LineString(s.tuple) for s in ml))) def test_linearring(self): "Testing LinearRing objects." for rr in self.geometries.linearrings: lr = fromstr(rr.wkt) self.assertEqual(lr.geom_type, 'LinearRing') self.assertEqual(lr.geom_typeid, 2) self.assertEqual(lr.dims, 1) self.assertEqual(rr.n_p, len(lr)) self.assertIs(lr.valid, True) self.assertIs(lr.empty, False) # Creating a LinearRing from a tuple, list, and numpy array self.assertEqual(lr, LinearRing(lr.tuple)) self.assertEqual(lr, LinearRing(*lr.tuple)) self.assertEqual(lr, LinearRing([list(tup) for tup in lr.tuple])) if numpy: self.assertEqual(lr, LinearRing(numpy.array(lr.tuple))) with self.assertRaisesMessage(ValueError, 'LinearRing requires at least 4 points, got 3.'): LinearRing((0, 0), (1, 1), (0, 0)) with self.assertRaisesMessage(ValueError, 'LinearRing requires at least 4 points, got 1.'): LinearRing([(0, 0)]) if numpy: with self.assertRaisesMessage(ValueError, 'LinearRing requires at least 4 points, got 1.'): LinearRing(numpy.array([(0, 0)])) def test_polygons_from_bbox(self): "Testing `from_bbox` class method." 
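        # Illustrative sketch: the bbox argument is (xmin, ymin, xmax, ymax)
        # and is assumed to round-trip through the polygon's extent, e.g.:
        #   Polygon.from_bbox((0, 0, 1, 1)).extent == (0.0, 0.0, 1.0, 1.0)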
bbox = (-180, -90, 180, 90) p = Polygon.from_bbox(bbox) self.assertEqual(bbox, p.extent) # Testing numerical precision x = 3.14159265358979323 bbox = (0, 0, 1, x) p = Polygon.from_bbox(bbox) y = p.extent[-1] self.assertEqual(format(x, '.13f'), format(y, '.13f')) def test_polygons(self): "Testing Polygon objects." prev = fromstr('POINT(0 0)') for p in self.geometries.polygons: # Creating the Polygon, testing its properties. poly = fromstr(p.wkt) self.assertEqual(poly.geom_type, 'Polygon') self.assertEqual(poly.geom_typeid, 3) self.assertEqual(poly.dims, 2) self.assertIs(poly.empty, False) self.assertIs(poly.ring, False) self.assertEqual(p.n_i, poly.num_interior_rings) self.assertEqual(p.n_i + 1, len(poly)) # Testing __len__ self.assertEqual(p.n_p, poly.num_points) # Area & Centroid self.assertAlmostEqual(p.area, poly.area, 9) self.assertAlmostEqual(p.centroid[0], poly.centroid.tuple[0], 9) self.assertAlmostEqual(p.centroid[1], poly.centroid.tuple[1], 9) # Testing the geometry equivalence self.assertEqual(poly, fromstr(p.wkt)) # Should not be equal to previous geometry self.assertEqual(False, poly == prev) # Use assertEqual to test __eq__ self.assertNotEqual(poly, prev) # Use assertNotEqual to test __ne__ # Testing the exterior ring ring = poly.exterior_ring self.assertEqual(ring.geom_type, 'LinearRing') self.assertEqual(ring.geom_typeid, 2) if p.ext_ring_cs: self.assertEqual(p.ext_ring_cs, ring.tuple) self.assertEqual(p.ext_ring_cs, poly[0].tuple) # Testing __getitem__ # Testing __getitem__ and __setitem__ on invalid indices with self.assertRaises(IndexError): poly.__getitem__(len(poly)) with self.assertRaises(IndexError): poly.__setitem__(len(poly), False) with self.assertRaises(IndexError): poly.__getitem__(-1 * len(poly) - 1) # Testing __iter__ for r in poly: self.assertEqual(r.geom_type, 'LinearRing') self.assertEqual(r.geom_typeid, 2) # Testing polygon construction. with self.assertRaises(TypeError): Polygon(0, [1, 2, 3]) with self.assertRaises(TypeError): Polygon('foo') # Polygon(shell, (hole1, ... holeN)) rings = tuple(r for r in poly) self.assertEqual(poly, Polygon(rings[0], rings[1:])) # Polygon(shell_tuple, hole_tuple1, ... , hole_tupleN) ring_tuples = tuple(r.tuple for r in poly) self.assertEqual(poly, Polygon(*ring_tuples)) # Constructing with tuples of LinearRings. self.assertEqual(poly.wkt, Polygon(*tuple(r for r in poly)).wkt) self.assertEqual(poly.wkt, Polygon(*tuple(LinearRing(r.tuple) for r in poly)).wkt) def test_polygons_templates(self): # Accessing Polygon attributes in templates should work. engine = Engine() template = engine.from_string('{{ polygons.0.wkt }}') polygons = [fromstr(p.wkt) for p in self.geometries.multipolygons[:2]] content = template.render(Context({'polygons': polygons})) self.assertIn('MULTIPOLYGON (((100', content) def test_polygon_comparison(self): p1 = Polygon(((0, 0), (0, 1), (1, 1), (1, 0), (0, 0))) p2 = Polygon(((0, 0), (0, 1), (1, 0), (0, 0))) self.assertGreater(p1, p2) self.assertLess(p2, p1) p3 = Polygon(((0, 0), (0, 1), (1, 1), (2, 0), (0, 0))) p4 = Polygon(((0, 0), (0, 1), (2, 2), (1, 0), (0, 0))) self.assertGreater(p4, p3) self.assertLess(p3, p4) def test_multipolygons(self): "Testing MultiPolygon objects." 
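        # Illustrative sketch: a MultiPolygon is an indexable, iterable
        # collection of Polygon members, so given polygons p1 and p2 it is
        # assumed that:
        #   mp = MultiPolygon(p1, p2); mp[0] == p1 and len(mp) == 2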
fromstr('POINT (0 0)') for mp in self.geometries.multipolygons: mpoly = fromstr(mp.wkt) self.assertEqual(mpoly.geom_type, 'MultiPolygon') self.assertEqual(mpoly.geom_typeid, 6) self.assertEqual(mpoly.dims, 2) self.assertEqual(mp.valid, mpoly.valid) if mp.valid: self.assertEqual(mp.num_geom, mpoly.num_geom) self.assertEqual(mp.n_p, mpoly.num_coords) self.assertEqual(mp.num_geom, len(mpoly)) with self.assertRaises(IndexError): mpoly.__getitem__(len(mpoly)) for p in mpoly: self.assertEqual(p.geom_type, 'Polygon') self.assertEqual(p.geom_typeid, 3) self.assertIs(p.valid, True) self.assertEqual(mpoly.wkt, MultiPolygon(*tuple(poly.clone() for poly in mpoly)).wkt) def test_memory_hijinks(self): "Testing Geometry __del__() on rings and polygons." # #### Memory issues with rings and poly # These tests are needed to ensure sanity with writable geometries. # Getting a polygon with interior rings, and pulling out the interior rings poly = fromstr(self.geometries.polygons[1].wkt) ring1 = poly[0] ring2 = poly[1] # These deletes should be 'harmless' since they are done on child geometries del ring1 del ring2 ring1 = poly[0] ring2 = poly[1] # Deleting the polygon del poly # Access to these rings is OK since they are clones. str(ring1) str(ring2) def test_coord_seq(self): "Testing Coordinate Sequence objects." for p in self.geometries.polygons: if p.ext_ring_cs: # Constructing the polygon and getting the coordinate sequence poly = fromstr(p.wkt) cs = poly.exterior_ring.coord_seq self.assertEqual(p.ext_ring_cs, cs.tuple) # done in the Polygon test too. self.assertEqual(len(p.ext_ring_cs), len(cs)) # Making sure __len__ works # Checks __getitem__ and __setitem__ for i in range(len(p.ext_ring_cs)): c1 = p.ext_ring_cs[i] # Expected value c2 = cs[i] # Value from coordseq self.assertEqual(c1, c2) # Constructing the test value to set the coordinate sequence with if len(c1) == 2: tset = (5, 23) else: tset = (5, 23, 8) cs[i] = tset # Making sure every set point matches what we expect for j in range(len(tset)): cs[i] = tset self.assertEqual(tset[j], cs[i][j]) def test_relate_pattern(self): "Testing relate() and relate_pattern()." g = fromstr('POINT (0 0)') with self.assertRaises(GEOSException): g.relate_pattern(0, 'invalid pattern, yo') for rg in self.geometries.relate_geoms: a = fromstr(rg.wkt_a) b = fromstr(rg.wkt_b) self.assertEqual(rg.result, a.relate_pattern(b, rg.pattern)) self.assertEqual(rg.pattern, a.relate(b)) def test_intersection(self): "Testing intersects() and intersection()." for i in range(len(self.geometries.topology_geoms)): a = fromstr(self.geometries.topology_geoms[i].wkt_a) b = fromstr(self.geometries.topology_geoms[i].wkt_b) i1 = fromstr(self.geometries.intersect_geoms[i].wkt) self.assertIs(a.intersects(b), True) i2 = a.intersection(b) self.assertEqual(i1, i2) self.assertEqual(i1, a & b) # __and__ is intersection operator a &= b # testing __iand__ self.assertEqual(i1, a) def test_union(self): "Testing union()." for i in range(len(self.geometries.topology_geoms)): a = fromstr(self.geometries.topology_geoms[i].wkt_a) b = fromstr(self.geometries.topology_geoms[i].wkt_b) u1 = fromstr(self.geometries.union_geoms[i].wkt) u2 = a.union(b) self.assertEqual(u1, u2) self.assertEqual(u1, a | b) # __or__ is union operator a |= b # testing __ior__ self.assertEqual(u1, a) def test_unary_union(self): "Testing unary_union." 
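        # Illustrative sketch: for geometries a and b, the collection-level
        # union is assumed spatially equal to the pairwise one, i.e.:
        #   GeometryCollection(a, b).unary_union.equals(a.union(b))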
for i in range(len(self.geometries.topology_geoms)): a = fromstr(self.geometries.topology_geoms[i].wkt_a) b = fromstr(self.geometries.topology_geoms[i].wkt_b) u1 = fromstr(self.geometries.union_geoms[i].wkt) u2 = GeometryCollection(a, b).unary_union self.assertTrue(u1.equals(u2)) def test_difference(self): "Testing difference()." for i in range(len(self.geometries.topology_geoms)): a = fromstr(self.geometries.topology_geoms[i].wkt_a) b = fromstr(self.geometries.topology_geoms[i].wkt_b) d1 = fromstr(self.geometries.diff_geoms[i].wkt) d2 = a.difference(b) self.assertEqual(d1, d2) self.assertEqual(d1, a - b) # __sub__ is difference operator a -= b # testing __isub__ self.assertEqual(d1, a) def test_symdifference(self): "Testing sym_difference()." for i in range(len(self.geometries.topology_geoms)): a = fromstr(self.geometries.topology_geoms[i].wkt_a) b = fromstr(self.geometries.topology_geoms[i].wkt_b) d1 = fromstr(self.geometries.sdiff_geoms[i].wkt) d2 = a.sym_difference(b) self.assertEqual(d1, d2) self.assertEqual(d1, a ^ b) # __xor__ is symmetric difference operator a ^= b # testing __ixor__ self.assertEqual(d1, a) def test_buffer(self): "Testing buffer()." for bg in self.geometries.buffer_geoms: g = fromstr(bg.wkt) # The buffer we expect exp_buf = fromstr(bg.buffer_wkt) quadsegs = bg.quadsegs width = bg.width # Can't use a floating-point for the number of quadsegs. with self.assertRaises(ctypes.ArgumentError): g.buffer(width, float(quadsegs)) # Constructing our buffer buf = g.buffer(width, quadsegs) self.assertEqual(exp_buf.num_coords, buf.num_coords) self.assertEqual(len(exp_buf), len(buf)) # Now assuring that each point in the buffer is almost equal for j in range(len(exp_buf)): exp_ring = exp_buf[j] buf_ring = buf[j] self.assertEqual(len(exp_ring), len(buf_ring)) for k in range(len(exp_ring)): # Asserting the X, Y of each point are almost equal (due to floating point imprecision) self.assertAlmostEqual(exp_ring[k][0], buf_ring[k][0], 9) self.assertAlmostEqual(exp_ring[k][1], buf_ring[k][1], 9) def test_covers(self): poly = Polygon(((0, 0), (0, 10), (10, 10), (10, 0), (0, 0))) self.assertTrue(poly.covers(Point(5, 5))) self.assertFalse(poly.covers(Point(100, 100))) def test_closed(self): ls_closed = LineString((0, 0), (1, 1), (0, 0)) ls_not_closed = LineString((0, 0), (1, 1)) self.assertFalse(ls_not_closed.closed) self.assertTrue(ls_closed.closed) if geos_version_info()['version'] >= '3.5': self.assertFalse(MultiLineString(ls_closed, ls_not_closed).closed) self.assertTrue(MultiLineString(ls_closed, ls_closed).closed) with mock.patch('django.contrib.gis.geos.collections.geos_version_info', lambda: {'version': '3.4.9'}): with self.assertRaisesMessage(GEOSException, "MultiLineString.closed requires GEOS >= 3.5.0."): MultiLineString().closed def test_srid(self): "Testing the SRID property and keyword." # Testing SRID keyword on Point pnt = Point(5, 23, srid=4326) self.assertEqual(4326, pnt.srid) pnt.srid = 3084 self.assertEqual(3084, pnt.srid) with self.assertRaises(ctypes.ArgumentError): pnt.srid = '4326' # Testing SRID keyword on fromstr(), and on Polygon rings. 
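        # Illustrative note: the keyword form fromstr(wkt, srid=4269) is
        # assumed equivalent to embedding the SRID in EWKT, e.g.:
        #   fromstr('SRID=4269;POINT(5 23)').srid == 4269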
poly = fromstr(self.geometries.polygons[1].wkt, srid=4269) self.assertEqual(4269, poly.srid) for ring in poly: self.assertEqual(4269, ring.srid) poly.srid = 4326 self.assertEqual(4326, poly.shell.srid) # Testing SRID keyword on GeometryCollection gc = GeometryCollection(Point(5, 23), LineString((0, 0), (1.5, 1.5), (3, 3)), srid=32021) self.assertEqual(32021, gc.srid) for i in range(len(gc)): self.assertEqual(32021, gc[i].srid) # GEOS may get the SRID from HEXEWKB # 'POINT(5 23)' at SRID=4326 in hex form -- obtained from PostGIS # using `SELECT GeomFromText('POINT (5 23)', 4326);`. hex = '0101000020E610000000000000000014400000000000003740' p1 = fromstr(hex) self.assertEqual(4326, p1.srid) p2 = fromstr(p1.hex) self.assertIsNone(p2.srid) p3 = fromstr(p1.hex, srid=-1) # -1 is intended. self.assertEqual(-1, p3.srid) # Testing that geometry SRID could be set to its own value pnt_wo_srid = Point(1, 1) pnt_wo_srid.srid = pnt_wo_srid.srid @skipUnless(HAS_GDAL, "GDAL is required.") def test_custom_srid(self): """Test with a null srid and a srid unknown to GDAL.""" for srid in [None, 999999]: pnt = Point(111200, 220900, srid=srid) self.assertTrue(pnt.ewkt.startswith(("SRID=%s;" % srid if srid else '') + "POINT (111200")) self.assertIsInstance(pnt.ogr, gdal.OGRGeometry) self.assertIsNone(pnt.srs) # Test conversion from custom to a known srid c2w = gdal.CoordTransform( gdal.SpatialReference( '+proj=mill +lat_0=0 +lon_0=0 +x_0=0 +y_0=0 +R_A +ellps=WGS84 ' '+datum=WGS84 +units=m +no_defs' ), gdal.SpatialReference(4326)) new_pnt = pnt.transform(c2w, clone=True) self.assertEqual(new_pnt.srid, 4326) self.assertAlmostEqual(new_pnt.x, 1, 3) self.assertAlmostEqual(new_pnt.y, 2, 3) def test_mutable_geometries(self): "Testing the mutability of Polygons and Geometry Collections." # ### Testing the mutability of Polygons ### for p in self.geometries.polygons: poly = fromstr(p.wkt) # Should only be able to use __setitem__ with LinearRing geometries. with self.assertRaises(TypeError): poly.__setitem__(0, LineString((1, 1), (2, 2))) # Constructing the new shell by adding 500 to every point in the old shell. shell_tup = poly.shell.tuple new_coords = [] for point in shell_tup: new_coords.append((point[0] + 500., point[1] + 500.)) new_shell = LinearRing(*tuple(new_coords)) # Assigning polygon's exterior ring w/the new shell poly.exterior_ring = new_shell str(new_shell) # new shell is still accessible self.assertEqual(poly.exterior_ring, new_shell) self.assertEqual(poly[0], new_shell) # ### Testing the mutability of Geometry Collections for tg in self.geometries.multipoints: mp = fromstr(tg.wkt) for i in range(len(mp)): # Creating a random point. pnt = mp[i] new = Point(random.randint(21, 100), random.randint(21, 100)) # Testing the assignment mp[i] = new str(new) # what was used for the assignment is still accessible self.assertEqual(mp[i], new) self.assertEqual(mp[i].wkt, new.wkt) self.assertNotEqual(pnt, mp[i]) # MultiPolygons involve much more memory management because each # Polygon w/in the collection has its own rings. for tg in self.geometries.multipolygons: mpoly = fromstr(tg.wkt) for i in range(len(mpoly)): poly = mpoly[i] old_poly = mpoly[i] # Offsetting the each ring in the polygon by 500. for j in range(len(poly)): r = poly[j] for k in range(len(r)): r[k] = (r[k][0] + 500., r[k][1] + 500.) poly[j] = r self.assertNotEqual(mpoly[i], poly) # Testing the assignment mpoly[i] = poly str(poly) # Still accessible self.assertEqual(mpoly[i], poly) self.assertNotEqual(mpoly[i], old_poly) # Extreme (!!) 
__setitem__ -- no longer works, have to detect # in the first object that __setitem__ is called in the subsequent # objects -- maybe mpoly[0, 0, 0] = (3.14, 2.71)? # mpoly[0][0][0] = (3.14, 2.71) # self.assertEqual((3.14, 2.71), mpoly[0][0][0]) # Doing it more slowly.. # self.assertEqual((3.14, 2.71), mpoly[0].shell[0]) # del mpoly def test_point_list_assignment(self): p = Point(0, 0) p[:] = (1, 2, 3) self.assertEqual(p, Point(1, 2, 3)) p[:] = () self.assertEqual(p.wkt, Point()) p[:] = (1, 2) self.assertEqual(p.wkt, Point(1, 2)) with self.assertRaises(ValueError): p[:] = (1,) with self.assertRaises(ValueError): p[:] = (1, 2, 3, 4, 5) def test_linestring_list_assignment(self): ls = LineString((0, 0), (1, 1)) ls[:] = () self.assertEqual(ls, LineString()) ls[:] = ((0, 0), (1, 1), (2, 2)) self.assertEqual(ls, LineString((0, 0), (1, 1), (2, 2))) with self.assertRaises(ValueError): ls[:] = (1,) def test_linearring_list_assignment(self): ls = LinearRing((0, 0), (0, 1), (1, 1), (0, 0)) ls[:] = () self.assertEqual(ls, LinearRing()) ls[:] = ((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)) self.assertEqual(ls, LinearRing((0, 0), (0, 1), (1, 1), (1, 0), (0, 0))) with self.assertRaises(ValueError): ls[:] = ((0, 0), (1, 1), (2, 2)) def test_polygon_list_assignment(self): pol = Polygon() pol[:] = (((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)),) self.assertEqual(pol, Polygon(((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)),)) pol[:] = () self.assertEqual(pol, Polygon()) def test_geometry_collection_list_assignment(self): p = Point() gc = GeometryCollection() gc[:] = [p] self.assertEqual(gc, GeometryCollection(p)) gc[:] = () self.assertEqual(gc, GeometryCollection()) def test_threed(self): "Testing three-dimensional geometries." # Testing a 3D Point pnt = Point(2, 3, 8) self.assertEqual((2., 3., 8.), pnt.coords) with self.assertRaises(TypeError): pnt.tuple = (1., 2.) pnt.coords = (1., 2., 3.) self.assertEqual((1., 2., 3.), pnt.coords) # Testing a 3D LineString ls = LineString((2., 3., 8.), (50., 250., -117.)) self.assertEqual(((2., 3., 8.), (50., 250., -117.)), ls.tuple) with self.assertRaises(TypeError): ls.__setitem__(0, (1., 2.)) ls[0] = (1., 2., 3.) self.assertEqual((1., 2., 3.), ls[0]) def test_distance(self): "Testing the distance() function." # Distance to self should be 0. pnt = Point(0, 0) self.assertEqual(0.0, pnt.distance(Point(0, 0))) # Distance should be 1 self.assertEqual(1.0, pnt.distance(Point(0, 1))) # Distance should be ~ sqrt(2) self.assertAlmostEqual(1.41421356237, pnt.distance(Point(1, 1)), 11) # Distances are from the closest vertex in each geometry -- # should be 3 (distance from (2, 2) to (5, 2)). ls1 = LineString((0, 0), (1, 1), (2, 2)) ls2 = LineString((5, 2), (6, 1), (7, 0)) self.assertEqual(3, ls1.distance(ls2)) def test_length(self): "Testing the length property." # Points have 0 length. pnt = Point(0, 0) self.assertEqual(0.0, pnt.length) # Should be ~ sqrt(2) ls = LineString((0, 0), (1, 1)) self.assertAlmostEqual(1.41421356237, ls.length, 11) # Should be circumference of Polygon poly = Polygon(LinearRing((0, 0), (0, 1), (1, 1), (1, 0), (0, 0))) self.assertEqual(4.0, poly.length) # Should be sum of each element's length in collection. mpoly = MultiPolygon(poly.clone(), poly) self.assertEqual(8.0, mpoly.length) def test_emptyCollections(self): "Testing empty geometries and collections." 
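        # Illustrative note: 'POINT EMPTY' and Point() are assumed to be two
        # spellings of the same empty geometry, so both should have
        # .empty == True and behave identically in the checks below.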
geoms = [ GeometryCollection([]), fromstr('GEOMETRYCOLLECTION EMPTY'), GeometryCollection(), fromstr('POINT EMPTY'), Point(), fromstr('LINESTRING EMPTY'), LineString(), fromstr('POLYGON EMPTY'), Polygon(), fromstr('MULTILINESTRING EMPTY'), MultiLineString(), fromstr('MULTIPOLYGON EMPTY'), MultiPolygon(()), MultiPolygon(), ] if numpy: geoms.append(LineString(numpy.array([]))) for g in geoms: self.assertIs(g.empty, True) # Testing len() and num_geom. if isinstance(g, Polygon): self.assertEqual(1, len(g)) # Has one empty linear ring self.assertEqual(1, g.num_geom) self.assertEqual(0, len(g[0])) elif isinstance(g, (Point, LineString)): self.assertEqual(1, g.num_geom) self.assertEqual(0, len(g)) else: self.assertEqual(0, g.num_geom) self.assertEqual(0, len(g)) # Testing __getitem__ (doesn't work on Point or Polygon) if isinstance(g, Point): with self.assertRaises(IndexError): g.x elif isinstance(g, Polygon): lr = g.shell self.assertEqual('LINEARRING EMPTY', lr.wkt) self.assertEqual(0, len(lr)) self.assertIs(lr.empty, True) with self.assertRaises(IndexError): lr.__getitem__(0) else: with self.assertRaises(IndexError): g.__getitem__(0) def test_collection_dims(self): gc = GeometryCollection([]) self.assertEqual(gc.dims, -1) gc = GeometryCollection(Point(0, 0)) self.assertEqual(gc.dims, 0) gc = GeometryCollection(LineString((0, 0), (1, 1)), Point(0, 0)) self.assertEqual(gc.dims, 1) gc = GeometryCollection(LineString((0, 0), (1, 1)), Polygon(((0, 0), (0, 1), (1, 1), (0, 0))), Point(0, 0)) self.assertEqual(gc.dims, 2) def test_collections_of_collections(self): "Testing GeometryCollection handling of other collections." # Creating a GeometryCollection WKT string composed of other # collections and polygons. coll = [mp.wkt for mp in self.geometries.multipolygons if mp.valid] coll.extend(mls.wkt for mls in self.geometries.multilinestrings) coll.extend(p.wkt for p in self.geometries.polygons) coll.extend(mp.wkt for mp in self.geometries.multipoints) gc_wkt = 'GEOMETRYCOLLECTION(%s)' % ','.join(coll) # Should construct ok from WKT gc1 = GEOSGeometry(gc_wkt) # Should also construct ok from individual geometry arguments. gc2 = GeometryCollection(*tuple(g for g in gc1)) # And, they should be equal. self.assertEqual(gc1, gc2) @skipUnless(HAS_GDAL, "GDAL is required.") def test_gdal(self): "Testing `ogr` and `srs` properties." g1 = fromstr('POINT(5 23)') self.assertIsInstance(g1.ogr, gdal.OGRGeometry) self.assertIsNone(g1.srs) g1_3d = fromstr('POINT(5 23 8)') self.assertIsInstance(g1_3d.ogr, gdal.OGRGeometry) self.assertEqual(g1_3d.ogr.z, 8) g2 = fromstr('LINESTRING(0 0, 5 5, 23 23)', srid=4326) self.assertIsInstance(g2.ogr, gdal.OGRGeometry) self.assertIsInstance(g2.srs, gdal.SpatialReference) self.assertEqual(g2.hex, g2.ogr.hex) self.assertEqual('WGS 84', g2.srs.name) def test_copy(self): "Testing use with the Python `copy` module." import copy poly = GEOSGeometry('POLYGON((0 0, 0 23, 23 23, 23 0, 0 0), (5 5, 5 10, 10 10, 10 5, 5 5))') cpy1 = copy.copy(poly) cpy2 = copy.deepcopy(poly) self.assertNotEqual(poly._ptr, cpy1._ptr) self.assertNotEqual(poly._ptr, cpy2._ptr) @skipUnless(HAS_GDAL, "GDAL is required to transform geometries") def test_transform(self): "Testing `transform` method." orig = GEOSGeometry('POINT (-104.609 38.255)', 4326) trans = GEOSGeometry('POINT (992385.4472045 481455.4944650)', 2774) # Using a srid, a SpatialReference object, and a CoordTransform object # for transformations. 
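        # That is, the three call forms assumed equivalent here are
        # (src_srs/dst_srs are placeholder names):
        #   geom.transform(2774)                                   # EPSG code
        #   geom.transform(gdal.SpatialReference('EPSG:2774'))     # SRS object
        #   geom.transform(gdal.CoordTransform(src_srs, dst_srs))  # transform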
t1, t2, t3 = orig.clone(), orig.clone(), orig.clone() t1.transform(trans.srid) t2.transform(gdal.SpatialReference('EPSG:2774')) ct = gdal.CoordTransform(gdal.SpatialReference('WGS84'), gdal.SpatialReference(2774)) t3.transform(ct) # Testing use of the `clone` keyword. k1 = orig.clone() k2 = k1.transform(trans.srid, clone=True) self.assertEqual(k1, orig) self.assertNotEqual(k1, k2) prec = 3 for p in (t1, t2, t3, k2): self.assertAlmostEqual(trans.x, p.x, prec) self.assertAlmostEqual(trans.y, p.y, prec) @skipUnless(HAS_GDAL, "GDAL is required to transform geometries") def test_transform_3d(self): p3d = GEOSGeometry('POINT (5 23 100)', 4326) p3d.transform(2774) self.assertEqual(p3d.z, 100) @skipUnless(HAS_GDAL, "GDAL is required.") def test_transform_noop(self): """ Testing `transform` method (SRID match) """ # transform() should no-op if source & dest SRIDs match, # regardless of whether GDAL is available. g = GEOSGeometry('POINT (-104.609 38.255)', 4326) gt = g.tuple g.transform(4326) self.assertEqual(g.tuple, gt) self.assertEqual(g.srid, 4326) g = GEOSGeometry('POINT (-104.609 38.255)', 4326) g1 = g.transform(4326, clone=True) self.assertEqual(g1.tuple, g.tuple) self.assertEqual(g1.srid, 4326) self.assertIsNot(g1, g, "Clone didn't happen") @skipUnless(HAS_GDAL, "GDAL is required.") def test_transform_nosrid(self): """ Testing `transform` method (no SRID or negative SRID) """ g = GEOSGeometry('POINT (-104.609 38.255)', srid=None) with self.assertRaises(GEOSException): g.transform(2774) g = GEOSGeometry('POINT (-104.609 38.255)', srid=None) with self.assertRaises(GEOSException): g.transform(2774, clone=True) g = GEOSGeometry('POINT (-104.609 38.255)', srid=-1) with self.assertRaises(GEOSException): g.transform(2774) g = GEOSGeometry('POINT (-104.609 38.255)', srid=-1) with self.assertRaises(GEOSException): g.transform(2774, clone=True) def test_extent(self): "Testing `extent` method." # The xmin, ymin, xmax, ymax of the MultiPoint should be returned. mp = MultiPoint(Point(5, 23), Point(0, 0), Point(10, 50)) self.assertEqual((0.0, 0.0, 10.0, 50.0), mp.extent) pnt = Point(5.23, 17.8) # Extent of points is just the point itself repeated. self.assertEqual((5.23, 17.8, 5.23, 17.8), pnt.extent) # Testing on the 'real world' Polygon. poly = fromstr(self.geometries.polygons[3].wkt) ring = poly.shell x, y = ring.x, ring.y xmin, ymin = min(x), min(y) xmax, ymax = max(x), max(y) self.assertEqual((xmin, ymin, xmax, ymax), poly.extent) def test_pickle(self): "Testing pickling and unpickling support." # Using both pickle and cPickle -- just 'cause. from django.utils.six.moves import cPickle import pickle # Creating a list of test geometries for pickling, # and setting the SRID on some of them. def get_geoms(lst, srid=None): return [GEOSGeometry(tg.wkt, srid) for tg in lst] tgeoms = get_geoms(self.geometries.points) tgeoms.extend(get_geoms(self.geometries.multilinestrings, 4326)) tgeoms.extend(get_geoms(self.geometries.polygons, 3084)) tgeoms.extend(get_geoms(self.geometries.multipolygons, 3857)) for geom in tgeoms: s1, s2 = cPickle.dumps(geom), pickle.dumps(geom) g1, g2 = cPickle.loads(s1), pickle.loads(s2) for tmpg in (g1, g2): self.assertEqual(geom, tmpg) self.assertEqual(geom.srid, tmpg.srid) def test_prepared(self): "Testing PreparedGeometry support." # Creating a simple multipolygon and getting a prepared version. mpoly = GEOSGeometry('MULTIPOLYGON(((0 0,0 5,5 5,5 0,0 0)),((5 5,5 10,10 10,10 5,5 5)))') prep = mpoly.prepared # A set of test points. 
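        # Illustrative note: `prepared` is assumed to return a
        # PreparedGeometry whose cached spatial indexes speed up repeated
        # predicate calls while agreeing with the unprepared results below.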
pnts = [Point(5, 5), Point(7.5, 7.5), Point(2.5, 7.5)] for pnt in pnts: # Results should be the same (but faster) self.assertEqual(mpoly.contains(pnt), prep.contains(pnt)) self.assertEqual(mpoly.intersects(pnt), prep.intersects(pnt)) self.assertEqual(mpoly.covers(pnt), prep.covers(pnt)) self.assertTrue(prep.crosses(fromstr('LINESTRING(1 1, 15 15)'))) self.assertTrue(prep.disjoint(Point(-5, -5))) poly = Polygon(((-1, -1), (1, 1), (1, 0), (-1, -1))) self.assertTrue(prep.overlaps(poly)) poly = Polygon(((-5, 0), (-5, 5), (0, 5), (-5, 0))) self.assertTrue(prep.touches(poly)) poly = Polygon(((-1, -1), (-1, 11), (11, 11), (11, -1), (-1, -1))) self.assertTrue(prep.within(poly)) # Original geometry deletion should not crash the prepared one (#21662) del mpoly self.assertTrue(prep.covers(Point(5, 5))) def test_line_merge(self): "Testing line merge support" ref_geoms = (fromstr('LINESTRING(1 1, 1 1, 3 3)'), fromstr('MULTILINESTRING((1 1, 3 3), (3 3, 4 2))'), ) ref_merged = (fromstr('LINESTRING(1 1, 3 3)'), fromstr('LINESTRING (1 1, 3 3, 4 2)'), ) for geom, merged in zip(ref_geoms, ref_merged): self.assertEqual(merged, geom.merged) def test_valid_reason(self): "Testing IsValidReason support" g = GEOSGeometry("POINT(0 0)") self.assertTrue(g.valid) self.assertIsInstance(g.valid_reason, six.string_types) self.assertEqual(g.valid_reason, "Valid Geometry") g = GEOSGeometry("LINESTRING(0 0, 0 0)") self.assertFalse(g.valid) self.assertIsInstance(g.valid_reason, six.string_types) self.assertTrue(g.valid_reason.startswith("Too few points in geometry component")) def test_linearref(self): "Testing linear referencing" ls = fromstr('LINESTRING(0 0, 0 10, 10 10, 10 0)') mls = fromstr('MULTILINESTRING((0 0, 0 10), (10 0, 10 10))') self.assertEqual(ls.project(Point(0, 20)), 10.0) self.assertEqual(ls.project(Point(7, 6)), 24) self.assertEqual(ls.project_normalized(Point(0, 20)), 1.0 / 3) self.assertEqual(ls.interpolate(10), Point(0, 10)) self.assertEqual(ls.interpolate(24), Point(10, 6)) self.assertEqual(ls.interpolate_normalized(1.0 / 3), Point(0, 10)) self.assertEqual(mls.project(Point(0, 20)), 10) self.assertEqual(mls.project(Point(7, 6)), 16) self.assertEqual(mls.interpolate(9), Point(0, 9)) self.assertEqual(mls.interpolate(17), Point(10, 7)) def test_deconstructible(self): """ Geometry classes should be deconstructible. 
""" point = Point(4.337844, 50.827537, srid=4326) path, args, kwargs = point.deconstruct() self.assertEqual(path, 'django.contrib.gis.geos.point.Point') self.assertEqual(args, (4.337844, 50.827537)) self.assertEqual(kwargs, {'srid': 4326}) ls = LineString(((0, 0), (1, 1))) path, args, kwargs = ls.deconstruct() self.assertEqual(path, 'django.contrib.gis.geos.linestring.LineString') self.assertEqual(args, (((0, 0), (1, 1)),)) self.assertEqual(kwargs, {}) ls2 = LineString([Point(0, 0), Point(1, 1)], srid=4326) path, args, kwargs = ls2.deconstruct() self.assertEqual(path, 'django.contrib.gis.geos.linestring.LineString') self.assertEqual(args, ([Point(0, 0), Point(1, 1)],)) self.assertEqual(kwargs, {'srid': 4326}) ext_coords = ((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)) int_coords = ((0.4, 0.4), (0.4, 0.6), (0.6, 0.6), (0.6, 0.4), (0.4, 0.4)) poly = Polygon(ext_coords, int_coords) path, args, kwargs = poly.deconstruct() self.assertEqual(path, 'django.contrib.gis.geos.polygon.Polygon') self.assertEqual(args, (ext_coords, int_coords)) self.assertEqual(kwargs, {}) lr = LinearRing((0, 0), (0, 1), (1, 1), (0, 0)) path, args, kwargs = lr.deconstruct() self.assertEqual(path, 'django.contrib.gis.geos.linestring.LinearRing') self.assertEqual(args, ((0, 0), (0, 1), (1, 1), (0, 0))) self.assertEqual(kwargs, {}) mp = MultiPoint(Point(0, 0), Point(1, 1)) path, args, kwargs = mp.deconstruct() self.assertEqual(path, 'django.contrib.gis.geos.collections.MultiPoint') self.assertEqual(args, (Point(0, 0), Point(1, 1))) self.assertEqual(kwargs, {}) ls1 = LineString((0, 0), (1, 1)) ls2 = LineString((2, 2), (3, 3)) mls = MultiLineString(ls1, ls2) path, args, kwargs = mls.deconstruct() self.assertEqual(path, 'django.contrib.gis.geos.collections.MultiLineString') self.assertEqual(args, (ls1, ls2)) self.assertEqual(kwargs, {}) p1 = Polygon(((0, 0), (0, 1), (1, 1), (0, 0))) p2 = Polygon(((1, 1), (1, 2), (2, 2), (1, 1))) mp = MultiPolygon(p1, p2) path, args, kwargs = mp.deconstruct() self.assertEqual(path, 'django.contrib.gis.geos.collections.MultiPolygon') self.assertEqual(args, (p1, p2, )) self.assertEqual(kwargs, {}) poly = Polygon(((0, 0), (0, 1), (1, 1), (0, 0))) gc = GeometryCollection(Point(0, 0), MultiPoint(Point(0, 0), Point(1, 1)), poly) path, args, kwargs = gc.deconstruct() self.assertEqual(path, 'django.contrib.gis.geos.collections.GeometryCollection') self.assertEqual(args, (Point(0, 0), MultiPoint(Point(0, 0), Point(1, 1)), poly)) self.assertEqual(kwargs, {}) def test_geos_version(self): """Testing the GEOS version regular expression.""" from django.contrib.gis.geos.libgeos import version_regex versions = [('3.0.0rc4-CAPI-1.3.3', '3.0.0', '1.3.3'), ('3.0.0-CAPI-1.4.1', '3.0.0', '1.4.1'), ('3.4.0dev-CAPI-1.8.0', '3.4.0', '1.8.0'), ('3.4.0dev-CAPI-1.8.0 r0', '3.4.0', '1.8.0')] for v_init, v_geos, v_capi in versions: m = version_regex.match(v_init) self.assertTrue(m, msg="Unable to parse the version string '%s'" % v_init) self.assertEqual(m.group('version'), v_geos) self.assertEqual(m.group('capi_version'), v_capi) def test_from_gml(self): self.assertEqual( GEOSGeometry('POINT(0 0)'), GEOSGeometry.from_gml( '<gml:Point gml:id="p21" srsName="http://www.opengis.net/def/crs/EPSG/0/4326">' ' <gml:pos srsDimension="2">0 0</gml:pos>' '</gml:Point>' ), ) @ignore_warnings(category=RemovedInDjango20Warning) def test_deprecated_srid_getters_setters(self): p = Point(1, 2, srid=123) self.assertEqual(p.get_srid(), p.srid) p.set_srid(321) self.assertEqual(p.srid, 321) @ignore_warnings(category=RemovedInDjango20Warning) def 
test_deprecated_point_coordinate_getters_setters(self): p = Point(1, 2, 3) self.assertEqual((p.get_x(), p.get_y(), p.get_z()), (p.x, p.y, p.z)) p.set_x(3) p.set_y(2) p.set_z(1) self.assertEqual((p.x, p.y, p.z), (3, 2, 1)) @ignore_warnings(category=RemovedInDjango20Warning) def test_deprecated_point_tuple_getters_setters(self): p = Point(1, 2, 3) self.assertEqual(p.get_coords(), (p.x, p.y, p.z)) p.set_coords((3, 2, 1)) self.assertEqual(p.get_coords(), (3, 2, 1)) @ignore_warnings(category=RemovedInDjango20Warning) def test_deprecated_cascaded_union(self): for geom in self.geometries.multipolygons: mpoly = GEOSGeometry(geom.wkt) self.assertEqual(mpoly.cascaded_union, mpoly.unary_union)
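# Usage note (illustrative): `cascaded_union` is deprecated in favor of
# `unary_union`; for any MultiPolygon `mpoly` the two results are assumed
# spatially equal, which is what the last test above asserts via
#   mpoly.cascaded_union == mpoly.unary_union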
4ee3f6b764093177eca4289967d4791c96326a4005dc9becf97f912812b4e64d
from __future__ import unicode_literals

from django.contrib.gis.db.models import Collect, Count, Extent, F, Union
from django.contrib.gis.geometry.backend import Geometry
from django.contrib.gis.geos import GEOSGeometry, MultiPoint, Point
from django.db import connection
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import override_settings
from django.utils import timezone

from ..utils import no_oracle
from .models import (
    Article, Author, Book, City, DirectoryEntry, Event, Location, Parcel,
)


@skipUnlessDBFeature("gis_enabled")
class RelatedGeoModelTest(TestCase):
    fixtures = ['initial']

    def test02_select_related(self):
        "Testing `select_related` on geographic models (see #7126)."
        qs1 = City.objects.order_by('id')
        qs2 = City.objects.order_by('id').select_related()
        qs3 = City.objects.order_by('id').select_related('location')

        # Reference data for what's in the fixtures.
        cities = (
            ('Aurora', 'TX', -97.516111, 33.058333),
            ('Roswell', 'NM', -104.528056, 33.387222),
            ('Kecksburg', 'PA', -79.460734, 40.18476),
        )

        for qs in (qs1, qs2, qs3):
            for ref, c in zip(cities, qs):
                nm, st, lon, lat = ref
                self.assertEqual(nm, c.name)
                self.assertEqual(st, c.state)
                self.assertEqual(Point(lon, lat), c.location.point)

    @skipUnlessDBFeature("has_transform_method")
    def test03_transform_related(self):
        "Testing the `transform` GeoQuerySet method on related geographic models."
        # All the transformations are to state plane coordinate systems using
        # US Survey Feet (thus a tolerance of 0 implies error w/in 1 survey foot).
        tol = 0

        def check_pnt(ref, pnt):
            self.assertAlmostEqual(ref.x, pnt.x, tol)
            self.assertAlmostEqual(ref.y, pnt.y, tol)
            self.assertEqual(ref.srid, pnt.srid)

        # Each city transformed to the SRID of their state plane coordinate system.
        transformed = (('Kecksburg', 2272, 'POINT(1490553.98959621 314792.131023984)'),
                       ('Roswell', 2257, 'POINT(481902.189077221 868477.766629735)'),
                       ('Aurora', 2276, 'POINT(2269923.2484839 7069381.28722222)'),
                       )

        for name, srid, wkt in transformed:
            # Doing this implicitly makes `select_related` select the location.
            # TODO: Fix why this breaks on Oracle.
            qs = list(City.objects.filter(name=name).transform(srid, field_name='location__point'))
            check_pnt(GEOSGeometry(wkt, srid), qs[0].location.point)

        # Relations more than one level deep can be queried.
        self.assertEqual(list(Parcel.objects.transform(srid, field_name='city__location__point')), [])

    @skipUnlessDBFeature("supports_extent_aggr")
    def test_related_extent_aggregate(self):
        "Testing the `Extent` aggregate on related geographic models."
        # This combines the Extent and Union aggregates into one query
        aggs = City.objects.aggregate(Extent('location__point'))

        # One for all locations, one that excludes New Mexico (Roswell).
        all_extent = (-104.528056, 29.763374, -79.460734, 40.18476)
        txpa_extent = (-97.516111, 29.763374, -79.460734, 40.18476)
        e1 = City.objects.aggregate(Extent('location__point'))['location__point__extent']
        e2 = City.objects.exclude(state='NM').aggregate(Extent('location__point'))['location__point__extent']
        e3 = aggs['location__point__extent']

        # The tolerance value is to four decimal places because of differences
        # between the Oracle and PostGIS spatial backends on the extent calculation.
        tol = 4
        for ref, e in [(all_extent, e1), (txpa_extent, e2), (all_extent, e3)]:
            for ref_val, e_val in zip(ref, e):
                self.assertAlmostEqual(ref_val, e_val, tol)

    @skipUnlessDBFeature("supports_extent_aggr")
    def test_related_extent_annotate(self):
        """
        Test annotation with Extent GeoAggregate.
        """
        cities = City.objects.annotate(points_extent=Extent('location__point')).order_by('name')
        tol = 4
        self.assertAlmostEqual(
            cities[0].points_extent,
            (-97.516111, 33.058333, -97.516111, 33.058333),
            tol
        )

    @skipUnlessDBFeature("has_unionagg_method")
    def test_related_union_aggregate(self):
        "Testing the `Union` aggregate on related geographic models."
        # This combines the Extent and Union aggregates into one query
        aggs = City.objects.aggregate(Union('location__point'))

        # These are the points that are components of the aggregate geographic
        # union that is returned. Each point corresponds to a City PK.
        p1 = Point(-104.528056, 33.387222)
        p2 = Point(-97.516111, 33.058333)
        p3 = Point(-79.460734, 40.18476)
        p4 = Point(-96.801611, 32.782057)
        p5 = Point(-95.363151, 29.763374)

        # The second union aggregate is for a union
        # query that includes limiting information in the WHERE clause (in other
        # words a `.filter()` precedes the call to `.aggregate(Union())`).
        ref_u1 = MultiPoint(p1, p2, p4, p5, p3, srid=4326)
        ref_u2 = MultiPoint(p2, p3, srid=4326)

        u1 = City.objects.aggregate(Union('location__point'))['location__point__union']
        u2 = City.objects.exclude(
            name__in=('Roswell', 'Houston', 'Dallas', 'Fort Worth'),
        ).aggregate(Union('location__point'))['location__point__union']
        u3 = aggs['location__point__union']

        self.assertEqual(type(u1), MultiPoint)
        self.assertEqual(type(u3), MultiPoint)

        # Ordering of points in the result of the union is not defined and
        # implementation-dependent (DB backend, GEOS version)
        self.assertSetEqual({p.ewkt for p in ref_u1}, {p.ewkt for p in u1})
        self.assertSetEqual({p.ewkt for p in ref_u2}, {p.ewkt for p in u2})
        self.assertSetEqual({p.ewkt for p in ref_u1}, {p.ewkt for p in u3})

    def test05_select_related_fk_to_subclass(self):
        "Testing that calling select_related on a query over a model with an FK to a model subclass works"
        # Regression test for #9752.
        list(DirectoryEntry.objects.all().select_related())

    def test06_f_expressions(self):
        "Testing F() expressions on GeometryFields."
        # Constructing a dummy parcel border and getting the City instance for
        # assigning the FK.
        b1 = GEOSGeometry(
            'POLYGON((-97.501205 33.052520,-97.501205 33.052576,'
            '-97.501150 33.052576,-97.501150 33.052520,-97.501205 33.052520))',
            srid=4326
        )
        pcity = City.objects.get(name='Aurora')

        # First parcel has incorrect center point that is equal to the City;
        # it also has a second border that is different from the first as a
        # 100ft buffer around the City.
        c1 = pcity.location.point
        c2 = c1.transform(2276, clone=True)
        b2 = c2.buffer(100)
        Parcel.objects.create(name='P1', city=pcity, center1=c1, center2=c2, border1=b1, border2=b2)

        # Now creating a second Parcel where the borders are the same, just
        # in different coordinate systems. The center points are also the
        # same (but in different coordinate systems), and this time they
        # actually correspond to the centroid of the border.
        c1 = b1.centroid
        c2 = c1.transform(2276, clone=True)
        Parcel.objects.create(name='P2', city=pcity, center1=c1, center2=c2, border1=b1, border2=b1)

        # Should return the second Parcel, which has the center within the
        # border.
        qs = Parcel.objects.filter(center1__within=F('border1'))
        self.assertEqual(1, len(qs))
        self.assertEqual('P2', qs[0].name)

        if connection.features.supports_transform:
            # This time center2 is in a different coordinate system and needs
            # to be wrapped in transformation SQL.
qs = Parcel.objects.filter(center2__within=F('border1')) self.assertEqual(1, len(qs)) self.assertEqual('P2', qs[0].name) # Should return the first Parcel, which has the center point equal # to the point in the City ForeignKey. qs = Parcel.objects.filter(center1=F('city__location__point')) self.assertEqual(1, len(qs)) self.assertEqual('P1', qs[0].name) if connection.features.supports_transform: # This time the city column should be wrapped in transformation SQL. qs = Parcel.objects.filter(border2__contains=F('city__location__point')) self.assertEqual(1, len(qs)) self.assertEqual('P1', qs[0].name) def test07_values(self): "Testing values() and values_list() and GeoQuerySets." gqs = Location.objects.all() gvqs = Location.objects.values() gvlqs = Location.objects.values_list() # Incrementing through each of the models, dictionaries, and tuples # returned by the different types of GeoQuerySets. for m, d, t in zip(gqs, gvqs, gvlqs): # The values should be Geometry objects and not raw strings returned # by the spatial database. self.assertIsInstance(d['point'], Geometry) self.assertIsInstance(t[1], Geometry) self.assertEqual(m.point, d['point']) self.assertEqual(m.point, t[1]) @override_settings(USE_TZ=True) def test_07b_values(self): "Testing values() and values_list() with aware datetime. See #21565." Event.objects.create(name="foo", when=timezone.now()) list(Event.objects.values_list('when')) def test08_defer_only(self): "Testing defer() and only() on Geographic models." qs = Location.objects.all() def_qs = Location.objects.defer('point') for loc, def_loc in zip(qs, def_qs): self.assertEqual(loc.point, def_loc.point) def test09_pk_relations(self): "Ensuring correct primary key column is selected across relations. See #10757." # The expected ID values -- notice the last two location IDs # are out of order. Dallas and Houston have location IDs that differ # from their PKs -- this is done to ensure that the related location # ID column is selected instead of ID column for the city. city_ids = (1, 2, 3, 4, 5) loc_ids = (1, 2, 3, 5, 4) ids_qs = City.objects.order_by('id').values('id', 'location__id') for val_dict, c_id, l_id in zip(ids_qs, city_ids, loc_ids): self.assertEqual(val_dict['id'], c_id) self.assertEqual(val_dict['location__id'], l_id) # TODO: fix on Oracle -- qs2 returns an empty result for an unknown reason @no_oracle def test10_combine(self): "Testing the combination of two GeoQuerySets. See #10807." buf1 = City.objects.get(name='Aurora').location.point.buffer(0.1) buf2 = City.objects.get(name='Kecksburg').location.point.buffer(0.1) qs1 = City.objects.filter(location__point__within=buf1) qs2 = City.objects.filter(location__point__within=buf2) combined = qs1 | qs2 names = [c.name for c in combined] self.assertEqual(2, len(names)) self.assertIn('Aurora', names) self.assertIn('Kecksburg', names) # TODO: fix on Oracle -- get the following error because the SQL is ordered # by a geometry object, which Oracle apparently doesn't like: # ORA-22901: cannot compare nested table or VARRAY or LOB attributes of an object type @no_oracle def test12a_count(self): "Testing `Count` aggregate on geo-fields." # The City, 'Fort Worth' uses the same location as Dallas. dallas = City.objects.get(name='Dallas') # Count annotation should be 2 for the Dallas location now. loc = Location.objects.annotate(num_cities=Count('city')).get(id=dallas.location.id) self.assertEqual(2, loc.num_cities) def test12b_count(self): "Testing `Count` aggregate on non geo-fields." 
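        # Illustrative note: 'books' is the reverse relation from Author to
        # Book declared via related_name, so the annotation below attaches a
        # per-author book count:
        #   Author.objects.annotate(num_books=Count('books'))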
        # Should only be one author (Trevor Paglen) returned by this query, and
        # the annotation should have 3 for the number of books, see #11087.
        # Also testing with a values(), see #11489.
        qs = Author.objects.annotate(num_books=Count('books')).filter(num_books__gt=1)
        vqs = Author.objects.values('name').annotate(num_books=Count('books')).filter(num_books__gt=1)
        self.assertEqual(1, len(qs))
        self.assertEqual(3, qs[0].num_books)
        self.assertEqual(1, len(vqs))
        self.assertEqual(3, vqs[0]['num_books'])

    # TODO: fix on Oracle -- get the following error because the SQL is ordered
    # by a geometry object, which Oracle apparently doesn't like:
    #  ORA-22901: cannot compare nested table or VARRAY or LOB attributes of an object type
    @no_oracle
    def test13c_count(self):
        "Testing `Count` aggregate with `.values()`. See #15305."
        qs = Location.objects.filter(id=5).annotate(num_cities=Count('city')).values('id', 'point', 'num_cities')
        self.assertEqual(1, len(qs))
        self.assertEqual(2, qs[0]['num_cities'])
        self.assertIsInstance(qs[0]['point'], GEOSGeometry)

    # TODO: The phantom model does appear on Oracle.
    @no_oracle
    def test13_select_related_null_fk(self):
        "Testing `select_related` on a nullable ForeignKey."
        Book.objects.create(title='Without Author')
        b = Book.objects.select_related('author').get(title='Without Author')
        # Should be `None`, and not a 'dummy' model.
        self.assertIsNone(b.author)

    @skipUnlessDBFeature("supports_collect_aggr")
    def test_collect(self):
        """
        Testing the `Collect` aggregate.
        """
        # Reference query:
        # SELECT AsText(ST_Collect("relatedapp_location"."point")) FROM "relatedapp_city" LEFT OUTER JOIN
        #    "relatedapp_location" ON ("relatedapp_city"."location_id" = "relatedapp_location"."id")
        #    WHERE "relatedapp_city"."state" = 'TX';
        ref_geom = GEOSGeometry(
            'MULTIPOINT(-97.516111 33.058333,-96.801611 32.782057,'
            '-95.363151 29.763374,-96.801611 32.782057)'
        )

        coll = City.objects.filter(state='TX').aggregate(Collect('location__point'))['location__point__collect']
        # Even though Dallas and Ft. Worth share the same point, Collect doesn't
        # consolidate -- that's why there are 4 points in the MultiPoint.
        self.assertEqual(4, len(coll))
        self.assertTrue(ref_geom.equals(coll))

    def test15_invalid_select_related(self):
        "Testing doing select_related on the related name manager of a unique FK. See #13934."
        qs = Article.objects.select_related('author__article')
        # This triggers a TypeError when `get_default_columns` has no
        # `local_only` keyword. The TypeError is swallowed if the QuerySet is
        # actually evaluated, as list generation swallows TypeError in CPython.
        str(qs.query)

    def test16_annotated_date_queryset(self):
        "Ensure annotated date querysets work if spatial backend is used. See #14648."
        birth_years = [dt.year for dt in
                       list(Author.objects.annotate(num_books=Count('books')).dates('dob', 'year'))]
        birth_years.sort()
        self.assertEqual([1950, 1974], birth_years)

    # TODO: Related tests for KML, GML, and distance lookups.
179a1b295cee20c6c05224ad2196f3e3e5913caa6fbce6633f1e68c836889e83
from django.utils.encoding import python_2_unicode_compatible from ..models import models class SimpleModel(models.Model): objects = models.GeoManager() class Meta: abstract = True required_db_features = ['gis_enabled'] @python_2_unicode_compatible class Location(SimpleModel): point = models.PointField() def __str__(self): return self.point.wkt @python_2_unicode_compatible class City(SimpleModel): name = models.CharField(max_length=50) state = models.CharField(max_length=2) location = models.ForeignKey(Location, models.CASCADE) def __str__(self): return self.name class AugmentedLocation(Location): extra_text = models.TextField(blank=True) class DirectoryEntry(SimpleModel): listing_text = models.CharField(max_length=50) location = models.ForeignKey(AugmentedLocation, models.CASCADE) @python_2_unicode_compatible class Parcel(SimpleModel): name = models.CharField(max_length=30) city = models.ForeignKey(City, models.CASCADE) center1 = models.PointField() # Throwing a curveball w/`db_column` here. center2 = models.PointField(srid=2276, db_column='mycenter') border1 = models.PolygonField() border2 = models.PolygonField(srid=2276) def __str__(self): return self.name class Author(SimpleModel): name = models.CharField(max_length=100) dob = models.DateField() class Article(SimpleModel): title = models.CharField(max_length=100) author = models.ForeignKey(Author, models.CASCADE, unique=True) class Book(SimpleModel): title = models.CharField(max_length=100) author = models.ForeignKey(Author, models.SET_NULL, related_name='books', null=True) class Event(SimpleModel): name = models.CharField(max_length=100) when = models.DateTimeField()
115ba04799d5b40f2052a62375dc1c2df75353c2d37a4412fa4384cf90139e6d
""" gdalinfo tests/gis_tests/data/rasters/raster.tif: Driver: GTiff/GeoTIFF Files: tests/gis_tests/data/rasters/raster.tif Size is 163, 174 Coordinate System is: PROJCS["NAD83 / Florida GDL Albers", GEOGCS["NAD83", DATUM["North_American_Datum_1983", SPHEROID["GRS 1980",6378137,298.2572221010002, AUTHORITY["EPSG","7019"]], TOWGS84[0,0,0,0,0,0,0], AUTHORITY["EPSG","6269"]], PRIMEM["Greenwich",0], UNIT["degree",0.0174532925199433], AUTHORITY["EPSG","4269"]], PROJECTION["Albers_Conic_Equal_Area"], PARAMETER["standard_parallel_1",24], PARAMETER["standard_parallel_2",31.5], PARAMETER["latitude_of_center",24], PARAMETER["longitude_of_center",-84], PARAMETER["false_easting",400000], PARAMETER["false_northing",0], UNIT["metre",1, AUTHORITY["EPSG","9001"]], AUTHORITY["EPSG","3086"]] Origin = (511700.468070655711927,435103.377123198588379) Pixel Size = (100.000000000000000,-100.000000000000000) Metadata: AREA_OR_POINT=Area Image Structure Metadata: INTERLEAVE=BAND Corner Coordinates: Upper Left ( 511700.468, 435103.377) ( 82d51'46.16"W, 27d55' 1.53"N) Lower Left ( 511700.468, 417703.377) ( 82d51'52.04"W, 27d45'37.50"N) Upper Right ( 528000.468, 435103.377) ( 82d41'48.81"W, 27d54'56.30"N) Lower Right ( 528000.468, 417703.377) ( 82d41'55.54"W, 27d45'32.28"N) Center ( 519850.468, 426403.377) ( 82d46'50.64"W, 27d50'16.99"N) Band 1 Block=163x50 Type=Byte, ColorInterp=Gray NoData Value=15 """ import os import struct import tempfile import unittest from django.contrib.gis.gdal import HAS_GDAL from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.shortcuts import numpy from django.test import SimpleTestCase from django.utils import six from django.utils._os import upath from ..data.rasters.textrasters import JSON_RASTER if HAS_GDAL: from django.contrib.gis.gdal import GDALRaster, GDAL_VERSION from django.contrib.gis.gdal.raster.band import GDALBand @unittest.skipUnless(HAS_GDAL, "GDAL is required") class GDALRasterTests(unittest.TestCase): """ Test a GDALRaster instance created from a file (GeoTiff). 
""" def setUp(self): self.rs_path = os.path.join(os.path.dirname(upath(__file__)), '../data/rasters/raster.tif') self.rs = GDALRaster(self.rs_path) def test_rs_name_repr(self): self.assertEqual(self.rs_path, self.rs.name) six.assertRegex(self, repr(self.rs), r"<Raster object at 0x\w+>") def test_rs_driver(self): self.assertEqual(self.rs.driver.name, 'GTiff') def test_rs_size(self): self.assertEqual(self.rs.width, 163) self.assertEqual(self.rs.height, 174) def test_rs_srs(self): self.assertEqual(self.rs.srs.srid, 3086) self.assertEqual(self.rs.srs.units, (1.0, 'metre')) def test_rs_srid(self): rast = GDALRaster({ 'width': 16, 'height': 16, 'srid': 4326, }) self.assertEqual(rast.srid, 4326) rast.srid = 3086 self.assertEqual(rast.srid, 3086) def test_geotransform_and_friends(self): # Assert correct values for file based raster self.assertEqual( self.rs.geotransform, [511700.4680706557, 100.0, 0.0, 435103.3771231986, 0.0, -100.0] ) self.assertEqual(self.rs.origin, [511700.4680706557, 435103.3771231986]) self.assertEqual(self.rs.origin.x, 511700.4680706557) self.assertEqual(self.rs.origin.y, 435103.3771231986) self.assertEqual(self.rs.scale, [100.0, -100.0]) self.assertEqual(self.rs.scale.x, 100.0) self.assertEqual(self.rs.scale.y, -100.0) self.assertEqual(self.rs.skew, [0, 0]) self.assertEqual(self.rs.skew.x, 0) self.assertEqual(self.rs.skew.y, 0) # Create in-memory rasters and change gtvalues rsmem = GDALRaster(JSON_RASTER) rsmem.geotransform = range(6) self.assertEqual(rsmem.geotransform, [float(x) for x in range(6)]) self.assertEqual(rsmem.origin, [0, 3]) self.assertEqual(rsmem.origin.x, 0) self.assertEqual(rsmem.origin.y, 3) self.assertEqual(rsmem.scale, [1, 5]) self.assertEqual(rsmem.scale.x, 1) self.assertEqual(rsmem.scale.y, 5) self.assertEqual(rsmem.skew, [2, 4]) self.assertEqual(rsmem.skew.x, 2) self.assertEqual(rsmem.skew.y, 4) self.assertEqual(rsmem.width, 5) self.assertEqual(rsmem.height, 5) def test_rs_extent(self): self.assertEqual( self.rs.extent, (511700.4680706557, 417703.3771231986, 528000.4680706557, 435103.3771231986) ) def test_rs_bands(self): self.assertEqual(len(self.rs.bands), 1) self.assertIsInstance(self.rs.bands[0], GDALBand) def test_memory_based_raster_creation(self): # Create uint8 raster with full pixel data range (0-255) rast = GDALRaster({ 'datatype': 1, 'width': 16, 'height': 16, 'srid': 4326, 'bands': [{ 'data': range(256), 'nodata_value': 255, }], }) # Get array from raster result = rast.bands[0].data() if numpy: result = result.flatten().tolist() # Assert data is same as original input self.assertEqual(result, list(range(256))) def test_file_based_raster_creation(self): # Prepare tempfile rstfile = tempfile.NamedTemporaryFile(suffix='.tif') # Create file-based raster from scratch GDALRaster({ 'datatype': self.rs.bands[0].datatype(), 'driver': 'tif', 'name': rstfile.name, 'width': 163, 'height': 174, 'nr_of_bands': 1, 'srid': self.rs.srs.wkt, 'origin': (self.rs.origin.x, self.rs.origin.y), 'scale': (self.rs.scale.x, self.rs.scale.y), 'skew': (self.rs.skew.x, self.rs.skew.y), 'bands': [{ 'data': self.rs.bands[0].data(), 'nodata_value': self.rs.bands[0].nodata_value, }], }) # Reload newly created raster from file restored_raster = GDALRaster(rstfile.name) self.assertEqual(restored_raster.srs.wkt, self.rs.srs.wkt) self.assertEqual(restored_raster.geotransform, self.rs.geotransform) if numpy: numpy.testing.assert_equal( restored_raster.bands[0].data(), self.rs.bands[0].data() ) else: self.assertEqual(restored_raster.bands[0].data(), self.rs.bands[0].data()) def 
test_raster_warp(self):
        # Create an in-memory raster.
        source = GDALRaster({
            'datatype': 1,
            'driver': 'MEM',
            'name': 'sourceraster',
            'width': 4,
            'height': 4,
            'nr_of_bands': 1,
            'srid': 3086,
            'origin': (500000, 400000),
            'scale': (100, -100),
            'skew': (0, 0),
            'bands': [{
                'data': range(16),
                'nodata_value': 255,
            }],
        })

        # Test altering the scale, width, and height of a raster
        data = {
            'scale': [200, -200],
            'width': 2,
            'height': 2,
        }
        target = source.warp(data)
        self.assertEqual(target.width, data['width'])
        self.assertEqual(target.height, data['height'])
        self.assertEqual(target.scale, data['scale'])
        self.assertEqual(target.bands[0].datatype(), source.bands[0].datatype())
        self.assertEqual(target.name, 'sourceraster_copy.MEM')
        result = target.bands[0].data()
        if numpy:
            result = result.flatten().tolist()
        self.assertEqual(result, [5, 7, 13, 15])

        # Test altering the name and datatype (to float)
        data = {
            'name': '/path/to/targetraster.tif',
            'datatype': 6,
        }
        target = source.warp(data)
        self.assertEqual(target.bands[0].datatype(), 6)
        self.assertEqual(target.name, '/path/to/targetraster.tif')
        self.assertEqual(target.driver.name, 'MEM')
        result = target.bands[0].data()
        if numpy:
            result = result.flatten().tolist()
        self.assertEqual(
            result,
            [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0,
             8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0]
        )

    def test_raster_transform(self):
        if GDAL_VERSION < (1, 8, 1):
            self.skipTest("GDAL >= 1.8.1 is required for this test")
        # Prepare tempfile and nodata value.
        rstfile = tempfile.NamedTemporaryFile(suffix='.tif')
        ndv = 99

        # Create a file-based raster.
        source = GDALRaster({
            'datatype': 1,
            'driver': 'tif',
            'name': rstfile.name,
            'width': 5,
            'height': 5,
            'nr_of_bands': 1,
            'srid': 4326,
            'origin': (-5, 5),
            'scale': (2, -2),
            'skew': (0, 0),
            'bands': [{
                'data': range(25),
                'nodata_value': ndv,
            }],
        })

        # Transform the raster into srid 3086.
        target = source.transform(3086)

        # Reload data from disk.
        target = GDALRaster(target.name)

        self.assertEqual(target.srs.srid, 3086)
        self.assertEqual(target.width, 7)
        self.assertEqual(target.height, 7)
        self.assertEqual(target.bands[0].datatype(), source.bands[0].datatype())
        self.assertEqual(target.origin, [9124842.791079799, 1589911.6476407414])
        self.assertEqual(target.scale, [223824.82664250192, -223824.82664250192])
        self.assertEqual(target.skew, [0, 0])

        result = target.bands[0].data()
        if numpy:
            result = result.flatten().tolist()

        # The reprojection of a raster that spans over a large area
        # skews the data matrix and might introduce nodata values.
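        # Illustratively, the 5x5 source becomes a 7x7 target here, so the
        # cells falling outside the source footprint are assumed to be
        # filled with the nodata value `ndv` declared above.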
        self.assertEqual(
            result,
            [
                ndv, ndv, ndv, ndv, 4, ndv, ndv,
                ndv, ndv, 2, 3, 9, ndv, ndv,
                ndv, 1, 2, 8, 13, 19, ndv,
                0, 6, 6, 12, 18, 18, 24,
                ndv, 10, 11, 16, 22, 23, ndv,
                ndv, ndv, 15, 21, 22, ndv, ndv,
                ndv, ndv, 20, ndv, ndv, ndv, ndv,
            ]
        )


@unittest.skipUnless(HAS_GDAL, "GDAL is required")
class GDALBandTests(SimpleTestCase):
    def setUp(self):
        self.rs_path = os.path.join(os.path.dirname(upath(__file__)), '../data/rasters/raster.tif')
        rs = GDALRaster(self.rs_path)
        self.band = rs.bands[0]

    def test_band_data(self):
        pam_file = self.rs_path + '.aux.xml'
        self.assertEqual(self.band.width, 163)
        self.assertEqual(self.band.height, 174)
        self.assertEqual(self.band.description, '')
        self.assertEqual(self.band.datatype(), 1)
        self.assertEqual(self.band.datatype(as_string=True), 'GDT_Byte')
        self.assertEqual(self.band.nodata_value, 15)
        if numpy:
            data = self.band.data()
            assert_array = numpy.loadtxt(
                os.path.join(os.path.dirname(upath(__file__)), '../data/rasters/raster.numpy.txt')
            )
            numpy.testing.assert_equal(data, assert_array)
            self.assertEqual(data.shape, (self.band.height, self.band.width))
        try:
            smin, smax, smean, sstd = self.band.statistics(approximate=True)
            self.assertEqual(smin, 0)
            self.assertEqual(smax, 9)
            self.assertAlmostEqual(smean, 2.842331288343558)
            self.assertAlmostEqual(sstd, 2.3965567248965356)

            smin, smax, smean, sstd = self.band.statistics(approximate=False, refresh=True)
            self.assertEqual(smin, 0)
            self.assertEqual(smax, 9)
            self.assertAlmostEqual(smean, 2.828326634228898)
            self.assertAlmostEqual(sstd, 2.4260526986669095)

            self.assertEqual(self.band.min, 0)
            self.assertEqual(self.band.max, 9)
            self.assertAlmostEqual(self.band.mean, 2.828326634228898)
            self.assertAlmostEqual(self.band.std, 2.4260526986669095)

            # Check that statistics are persisted into PAM file on band close
            self.band = None
            self.assertTrue(os.path.isfile(pam_file))
        finally:
            # Close band and remove file if created
            self.band = None
            if os.path.isfile(pam_file):
                os.remove(pam_file)

    def test_read_mode_error(self):
        # Open raster in read mode
        rs = GDALRaster(self.rs_path, write=False)
        band = rs.bands[0]

        # Setting attributes on a read-only raster raises an exception in
        # the _flush method.
        with self.assertRaises(GDALException):
            setattr(band, 'nodata_value', 10)

    def test_band_data_setters(self):
        # Create in-memory raster and get band
        rsmem = GDALRaster({
            'datatype': 1,
            'driver': 'MEM',
            'name': 'mem_rst',
            'width': 10,
            'height': 10,
            'nr_of_bands': 1,
            'srid': 4326,
        })
        bandmem = rsmem.bands[0]

        # Set nodata value
        bandmem.nodata_value = 99
        self.assertEqual(bandmem.nodata_value, 99)

        # Set data for entire dataset
        bandmem.data(range(100))
        if numpy:
            numpy.testing.assert_equal(bandmem.data(), numpy.arange(100).reshape(10, 10))
        else:
            self.assertEqual(bandmem.data(), list(range(100)))

        # Prepare data for setting values in subsequent tests
        block = list(range(100, 104))
        packed_block = struct.pack('<' + 'B B B B', *block)

        # Set data from list
        bandmem.data(block, (1, 1), (2, 2))
        result = bandmem.data(offset=(1, 1), size=(2, 2))
        if numpy:
            numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2))
        else:
            self.assertEqual(result, block)

        # Set data from packed block
        bandmem.data(packed_block, (1, 1), (2, 2))
        result = bandmem.data(offset=(1, 1), size=(2, 2))
        if numpy:
            numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2))
        else:
            self.assertEqual(result, block)

        # Set data from bytes
        bandmem.data(bytes(packed_block), (1, 1), (2, 2))
        result = bandmem.data(offset=(1, 1), size=(2, 2))
        if numpy:
            numpy.testing.assert_equal(result, numpy.array(block).reshape(2,
2)) else: self.assertEqual(result, block) # Set data from bytearray bandmem.data(bytearray(packed_block), (1, 1), (2, 2)) result = bandmem.data(offset=(1, 1), size=(2, 2)) if numpy: numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2)) else: self.assertEqual(result, block) # Set data from memoryview bandmem.data(six.memoryview(packed_block), (1, 1), (2, 2)) result = bandmem.data(offset=(1, 1), size=(2, 2)) if numpy: numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2)) else: self.assertEqual(result, block) # Set data from numpy array if numpy: bandmem.data(numpy.array(block, dtype='int8').reshape(2, 2), (1, 1), (2, 2)) numpy.testing.assert_equal( bandmem.data(offset=(1, 1), size=(2, 2)), numpy.array(block).reshape(2, 2) ) # Test json input data rsmemjson = GDALRaster(JSON_RASTER) bandmemjson = rsmemjson.bands[0] if numpy: numpy.testing.assert_equal( bandmemjson.data(), numpy.array(range(25)).reshape(5, 5) ) else: self.assertEqual(bandmemjson.data(), list(range(25))) def test_band_statistics_automatic_refresh(self): rsmem = GDALRaster({ 'srid': 4326, 'width': 2, 'height': 2, 'bands': [{'data': [0] * 4, 'nodata_value': 99}], }) band = rsmem.bands[0] # Populate statistics cache self.assertEqual(band.statistics(), (0, 0, 0, 0)) # Change data band.data([1, 1, 0, 0]) # Statistics are properly updated self.assertEqual(band.statistics(), (0.0, 1.0, 0.5, 0.5)) # Change nodata_value band.nodata_value = 0 # Statistics are properly updated self.assertEqual(band.statistics(), (1.0, 1.0, 1.0, 0.0)) def test_band_statistics_empty_band(self): rsmem = GDALRaster({ 'srid': 4326, 'width': 1, 'height': 1, 'bands': [{'data': [0], 'nodata_value': 0}], }) self.assertEqual(rsmem.bands[0].statistics(), (None, None, None, None)) def test_band_delete_nodata(self): rsmem = GDALRaster({ 'srid': 4326, 'width': 1, 'height': 1, 'bands': [{'data': [0], 'nodata_value': 1}], }) if GDAL_VERSION < (2, 1): msg = 'GDAL >= 2.1 required to delete nodata values.' with self.assertRaisesMessage(ValueError, msg): rsmem.bands[0].nodata_value = None else: rsmem.bands[0].nodata_value = None self.assertIsNone(rsmem.bands[0].nodata_value) def test_band_data_replication(self): band = GDALRaster({ 'srid': 4326, 'width': 3, 'height': 3, 'bands': [{'data': range(10, 19), 'nodata_value': 0}], }).bands[0] # Variations for input (data, shape, expected result). combos = ( ([1], (1, 1), [1] * 9), (range(3), (1, 3), [0, 0, 0, 1, 1, 1, 2, 2, 2]), (range(3), (3, 1), [0, 1, 2, 0, 1, 2, 0, 1, 2]), ) for combo in combos: band.data(combo[0], shape=combo[1]) if numpy: numpy.testing.assert_equal(band.data(), numpy.array(combo[2]).reshape(3, 3)) else: self.assertEqual(band.data(), list(combo[2]))
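

# A minimal usage sketch of the GDALRaster warp/transform API exercised by the
# tests above, assuming GDAL is installed and Django settings are configured.
# The raster parameters here are illustrative, not taken from the test data.
def _warp_example():
    from django.contrib.gis.gdal import GDALRaster
    rst = GDALRaster({
        'driver': 'MEM', 'name': 'example', 'srid': 4326,
        'width': 4, 'height': 4, 'nr_of_bands': 1,
        'origin': (0, 0), 'scale': (1, -1), 'skew': (0, 0),
        'bands': [{'data': range(16)}],
    })
    # warp() resamples the raster onto a new grid; transform() reprojects it
    # into another spatial reference system.
    halved = rst.warp({'width': 2, 'height': 2, 'scale': [2, -2]})
    reprojected = rst.transform(3857)
    return halved, reprojected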
d7a203b3a743d5654e00f1a1d8877f49ff56a4554602d7e672e4d49275d87a83
import os import unittest from unittest import skipUnless from django.contrib.gis.gdal import HAS_GDAL from ..test_data import TEST_DATA, TestDS, get_ds_file if HAS_GDAL: from django.contrib.gis.gdal import DataSource, Envelope, OGRGeometry, GDALException, OGRIndexError, GDAL_VERSION from django.contrib.gis.gdal.field import OFTReal, OFTInteger, OFTString # List of acceptable data sources. ds_list = ( TestDS( 'test_point', nfeat=5, nfld=3, geom='POINT', gtype=1, driver='ESRI Shapefile', fields={'dbl': OFTReal, 'int': OFTInteger, 'str': OFTString}, extent=(-1.35011, 0.166623, -0.524093, 0.824508), # Got extent from QGIS srs_wkt=( 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_1984",' '6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",' '0.017453292519943295]]' ), field_values={ 'dbl': [float(i) for i in range(1, 6)], 'int': list(range(1, 6)), 'str': [str(i) for i in range(1, 6)], }, fids=range(5) ), TestDS( 'test_vrt', ext='vrt', nfeat=3, nfld=3, geom='POINT', gtype='Point25D', driver='OGR_VRT' if GDAL_VERSION >= (2, 0) else 'VRT', fields={ 'POINT_X': OFTString, 'POINT_Y': OFTString, 'NUM': OFTString, }, # VRT uses CSV, which all types are OFTString. extent=(1.0, 2.0, 100.0, 523.5), # Min/Max from CSV field_values={ 'POINT_X': ['1.0', '5.0', '100.0'], 'POINT_Y': ['2.0', '23.0', '523.5'], 'NUM': ['5', '17', '23'], }, fids=range(1, 4) ), TestDS( 'test_poly', nfeat=3, nfld=3, geom='POLYGON', gtype=3, driver='ESRI Shapefile', fields={'float': OFTReal, 'int': OFTInteger, 'str': OFTString}, extent=(-1.01513, -0.558245, 0.161876, 0.839637), # Got extent from QGIS srs_wkt=( 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_1984",' '6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",' '0.017453292519943295]]' ), ) ) bad_ds = (TestDS('foo'),) @skipUnless(HAS_GDAL, "GDAL is required") class DataSourceTest(unittest.TestCase): def test01_valid_shp(self): "Testing valid SHP Data Source files." for source in ds_list: # Loading up the data source ds = DataSource(source.ds) # Making sure the layer count is what's expected (only 1 layer in a SHP file) self.assertEqual(1, len(ds)) # Making sure GetName works self.assertEqual(source.ds, ds.name) # Making sure the driver name matches up self.assertEqual(source.driver, str(ds.driver)) # Making sure indexing works with self.assertRaises(OGRIndexError): ds[len(ds)] def test02_invalid_shp(self): "Testing invalid SHP files for the Data Source." for source in bad_ds: with self.assertRaises(GDALException): DataSource(source.ds) def test03a_layers(self): "Testing Data Source Layers." for source in ds_list: ds = DataSource(source.ds) # Incrementing through each layer, this tests DataSource.__iter__ for layer in ds: # Making sure we get the number of features we expect self.assertEqual(len(layer), source.nfeat) # Making sure we get the number of fields we expect self.assertEqual(source.nfld, layer.num_fields) self.assertEqual(source.nfld, len(layer.fields)) # Testing the layer's extent (an Envelope), and its properties self.assertIsInstance(layer.extent, Envelope) self.assertAlmostEqual(source.extent[0], layer.extent.min_x, 5) self.assertAlmostEqual(source.extent[1], layer.extent.min_y, 5) self.assertAlmostEqual(source.extent[2], layer.extent.max_x, 5) self.assertAlmostEqual(source.extent[3], layer.extent.max_y, 5) # Now checking the field names. flds = layer.fields for f in flds: self.assertIn(f, source.fields) # Negative FIDs are not allowed. 
                with self.assertRaises(OGRIndexError):
                    layer.__getitem__(-1)
                with self.assertRaises(OGRIndexError):
                    layer.__getitem__(50000)
                if hasattr(source, 'field_values'):
                    fld_names = source.field_values.keys()
                    # Testing `Layer.get_fields` (which uses Layer.__iter__)
                    for fld_name in fld_names:
                        self.assertEqual(source.field_values[fld_name], layer.get_fields(fld_name))
                    # Testing `Layer.__getitem__`.
                    for i, fid in enumerate(source.fids):
                        feat = layer[fid]
                        self.assertEqual(fid, feat.fid)
                        # Maybe this should be in the test below, but we might as well
                        # test the feature values here while in this loop.
                        for fld_name in fld_names:
                            self.assertEqual(source.field_values[fld_name][i], feat.get(fld_name))

    def test03b_layer_slice(self):
        "Test indexing and slicing on Layers."
        # Using the first data source because the same slice can be used for
        # both the layer and the control values.
        source = ds_list[0]
        ds = DataSource(source.ds)

        sl = slice(1, 3)
        feats = ds[0][sl]

        for fld_name in ds[0].fields:
            test_vals = [feat.get(fld_name) for feat in feats]
            control_vals = source.field_values[fld_name][sl]
            self.assertEqual(control_vals, test_vals)

    def test03c_layer_references(self):
        """
        Ensure OGR objects keep references to the objects they belong to.
        """
        source = ds_list[0]

        # See ticket #9448.
        def get_layer():
            # This DataSource object is not accessible outside this
            # scope.  However, a reference should still be kept alive
            # on the `Layer` returned.
            ds = DataSource(source.ds)
            return ds[0]

        # Making sure we can call OGR routines on the Layer returned.
        lyr = get_layer()
        self.assertEqual(source.nfeat, len(lyr))
        self.assertEqual(source.gtype, lyr.geom_type.num)

        # Same issue for Feature/Field objects, see #18640
        self.assertEqual(str(lyr[0]['str']), "1")

    def test04_features(self):
        "Testing Data Source Features."
        for source in ds_list:
            ds = DataSource(source.ds)

            # Incrementing through each layer
            for layer in ds:
                # Incrementing through each feature in the layer
                for feat in layer:
                    # Making sure the number of fields and the geometry type
                    # are what's expected.
                    self.assertEqual(source.nfld, len(list(feat)))
                    self.assertEqual(source.gtype, feat.geom_type)

                    # Making sure the fields match an appropriate OFT type.
                    for k, v in source.fields.items():
                        # Making sure we get the proper OGR Field instance,
                        # using a string value index for the feature.
                        self.assertIsInstance(feat[k], v)

                    # Testing Feature.__iter__
                    for fld in feat:
                        self.assertIn(fld.name, source.fields.keys())

    def test05_geometries(self):
        "Testing Geometries from Data Source Features."
        for source in ds_list:
            ds = DataSource(source.ds)

            # Incrementing through each layer and feature.
            for layer in ds:
                for feat in layer:
                    g = feat.geom

                    # Making sure we get the right Geometry name & type
                    self.assertEqual(source.geom, g.geom_name)
                    self.assertEqual(source.gtype, g.geom_type)

                    # Making sure the SpatialReference is as expected.
                    if hasattr(source, 'srs_wkt'):
                        self.assertEqual(
                            source.srs_wkt,
                            # Depending on lib versions, WGS_84 might be WGS_1984
                            g.srs.wkt.replace('SPHEROID["WGS_84"', 'SPHEROID["WGS_1984"')
                        )

    def test06_spatial_filter(self):
        "Testing the Layer.spatial_filter property."
        ds = DataSource(get_ds_file('cities', 'shp'))
        lyr = ds[0]

        # When not set, it should be None.
        self.assertIsNone(lyr.spatial_filter)

        # Must be set to an OGRGeometry or a 4-tuple.
        with self.assertRaises(TypeError):
            lyr._set_spatial_filter('foo')

        # Setting the spatial filter with a tuple/list holding the extent of
        # a buffer centered around Pueblo.
with self.assertRaises(ValueError): lyr._set_spatial_filter(list(range(5))) filter_extent = (-105.609252, 37.255001, -103.609252, 39.255001) lyr.spatial_filter = (-105.609252, 37.255001, -103.609252, 39.255001) self.assertEqual(OGRGeometry.from_bbox(filter_extent), lyr.spatial_filter) feats = [feat for feat in lyr] self.assertEqual(1, len(feats)) self.assertEqual('Pueblo', feats[0].get('Name')) # Setting the spatial filter with an OGRGeometry for buffer centering # around Houston. filter_geom = OGRGeometry( 'POLYGON((-96.363151 28.763374,-94.363151 28.763374,' '-94.363151 30.763374,-96.363151 30.763374,-96.363151 28.763374))' ) lyr.spatial_filter = filter_geom self.assertEqual(filter_geom, lyr.spatial_filter) feats = [feat for feat in lyr] self.assertEqual(1, len(feats)) self.assertEqual('Houston', feats[0].get('Name')) # Clearing the spatial filter by setting it to None. Now # should indicate that there are 3 features in the Layer. lyr.spatial_filter = None self.assertEqual(3, len(lyr)) def test07_integer_overflow(self): "Testing that OFTReal fields, treated as OFTInteger, do not overflow." # Using *.dbf from Census 2010 TIGER Shapefile for Texas, # which has land area ('ALAND10') stored in a Real field # with no precision. ds = DataSource(os.path.join(TEST_DATA, 'texas.dbf')) feat = ds[0][0] # Reference value obtained using `ogrinfo`. self.assertEqual(676586997978, feat.get('ALAND10'))
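

# A minimal sketch of the DataSource traversal pattern these tests exercise,
# assuming GDAL is available; 'cities.shp' is a placeholder for any
# OGR-readable file.
def _datasource_example(path='cities.shp'):
    from django.contrib.gis.gdal import DataSource
    ds = DataSource(path)
    for layer in ds:            # A DataSource iterates over its layers.
        for feat in layer:      # A Layer iterates over its features.
            values = {fld: feat.get(fld) for fld in layer.fields}
            geom = feat.geom    # An OGRGeometry for the feature.
    return ds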
ab8554c3a4cfd3a67c35ec89b12c248c86630e8043be5a47b5602cc941275b44
import json
import unittest
from binascii import b2a_hex
from unittest import skipUnless

from django.contrib.gis.gdal import HAS_GDAL
from django.utils.six.moves import range

from ..test_data import TestDataMixin

try:
    from django.utils.six.moves import cPickle as pickle
except ImportError:
    import pickle

if HAS_GDAL:
    from django.contrib.gis.gdal import (
        OGRGeometry, OGRGeomType, GDALException, OGRIndexError,
        SpatialReference, CoordTransform, GDAL_VERSION,
    )


@skipUnless(HAS_GDAL, "GDAL is required")
class OGRGeomTest(unittest.TestCase, TestDataMixin):
    "This tests the OGR Geometry."

    def test_geomtype(self):
        "Testing OGRGeomType object."
        # OGRGeomType should initialize on all these inputs.
        OGRGeomType(1)
        OGRGeomType(7)
        OGRGeomType('point')
        OGRGeomType('GeometrycollectioN')
        OGRGeomType('LINearrING')
        OGRGeomType('Unknown')

        # Should raise GDALException on these invalid inputs.
        with self.assertRaises(GDALException):
            OGRGeomType(23)
        with self.assertRaises(GDALException):
            OGRGeomType('fooD')
        with self.assertRaises(GDALException):
            OGRGeomType(9)

        # Equivalence can take strings, ints, and other OGRGeomTypes
        self.assertEqual(OGRGeomType(1), OGRGeomType(1))
        self.assertEqual(OGRGeomType(7), 'GeometryCollection')
        self.assertEqual(OGRGeomType('point'), 'POINT')
        self.assertNotEqual(OGRGeomType('point'), 2)
        self.assertEqual(OGRGeomType('unknown'), 0)
        self.assertEqual(OGRGeomType(6), 'MULtiPolyGON')
        self.assertEqual(OGRGeomType(1), OGRGeomType('point'))
        self.assertNotEqual(OGRGeomType('POINT'), OGRGeomType(6))

        # Testing the Django field name equivalent property.
        self.assertEqual('PointField', OGRGeomType('Point').django)
        self.assertEqual('GeometryField', OGRGeomType('Geometry').django)
        self.assertEqual('GeometryField', OGRGeomType('Unknown').django)
        self.assertIsNone(OGRGeomType('none').django)

        # 'Geometry' initialization implies an unknown geometry type.
        gt = OGRGeomType('Geometry')
        self.assertEqual(0, gt.num)
        self.assertEqual('Unknown', gt.name)

    def test_geomtype_25d(self):
        "Testing OGRGeomType object with 25D types."
        wkb25bit = OGRGeomType.wkb25bit
        self.assertEqual(OGRGeomType(wkb25bit + 1), 'Point25D')
        self.assertEqual(OGRGeomType('MultiLineString25D'), (5 + wkb25bit))
        self.assertEqual('GeometryCollectionField', OGRGeomType('GeometryCollection25D').django)

    def test_wkt(self):
        "Testing WKT output."
        for g in self.geometries.wkt_out:
            geom = OGRGeometry(g.wkt)
            self.assertEqual(g.wkt, geom.wkt)

    def test_ewkt(self):
        "Testing EWKT input/output."
        for ewkt_val in ('POINT (1 2 3)', 'LINEARRING (0 0,1 1,2 1,0 0)'):
            # First with ewkt output when no SRID in EWKT
            self.assertEqual(ewkt_val, OGRGeometry(ewkt_val).ewkt)
            # Now test consumption with an SRID specified.
            ewkt_val = 'SRID=4326;%s' % ewkt_val
            geom = OGRGeometry(ewkt_val)
            self.assertEqual(ewkt_val, geom.ewkt)
            self.assertEqual(4326, geom.srs.srid)

    def test_gml(self):
        "Testing GML output."
        for g in self.geometries.wkt_out:
            geom = OGRGeometry(g.wkt)
            exp_gml = g.gml
            if GDAL_VERSION >= (1, 8):
                # In GDAL 1.8, the non-conformant GML tag <gml:GeometryCollection> was
                # replaced with <gml:MultiGeometry>.
                exp_gml = exp_gml.replace('GeometryCollection', 'MultiGeometry')
            self.assertEqual(exp_gml, geom.gml)

    def test_hex(self):
        "Testing HEX input/output."
        for g in self.geometries.hex_wkt:
            geom1 = OGRGeometry(g.wkt)
            self.assertEqual(g.hex.encode(), geom1.hex)
            # Constructing w/HEX
            geom2 = OGRGeometry(g.hex)
            self.assertEqual(geom1, geom2)

    def test_wkb(self):
        "Testing WKB input/output."
for g in self.geometries.hex_wkt: geom1 = OGRGeometry(g.wkt) wkb = geom1.wkb self.assertEqual(b2a_hex(wkb).upper(), g.hex.encode()) # Constructing w/WKB. geom2 = OGRGeometry(wkb) self.assertEqual(geom1, geom2) def test_json(self): "Testing GeoJSON input/output." for g in self.geometries.json_geoms: geom = OGRGeometry(g.wkt) if not hasattr(g, 'not_equal'): # Loading jsons to prevent decimal differences self.assertEqual(json.loads(g.json), json.loads(geom.json)) self.assertEqual(json.loads(g.json), json.loads(geom.geojson)) self.assertEqual(OGRGeometry(g.wkt), OGRGeometry(geom.json)) # Test input with some garbage content (but valid json) (#15529) geom = OGRGeometry('{"type": "Point", "coordinates": [ 100.0, 0.0 ], "other": "<test>"}') self.assertIsInstance(geom, OGRGeometry) def test_points(self): "Testing Point objects." OGRGeometry('POINT(0 0)') for p in self.geometries.points: if not hasattr(p, 'z'): # No 3D pnt = OGRGeometry(p.wkt) self.assertEqual(1, pnt.geom_type) self.assertEqual('POINT', pnt.geom_name) self.assertEqual(p.x, pnt.x) self.assertEqual(p.y, pnt.y) self.assertEqual((p.x, p.y), pnt.tuple) def test_multipoints(self): "Testing MultiPoint objects." for mp in self.geometries.multipoints: mgeom1 = OGRGeometry(mp.wkt) # First one from WKT self.assertEqual(4, mgeom1.geom_type) self.assertEqual('MULTIPOINT', mgeom1.geom_name) mgeom2 = OGRGeometry('MULTIPOINT') # Creating empty multipoint mgeom3 = OGRGeometry('MULTIPOINT') for g in mgeom1: mgeom2.add(g) # adding each point from the multipoints mgeom3.add(g.wkt) # should take WKT as well self.assertEqual(mgeom1, mgeom2) # they should equal self.assertEqual(mgeom1, mgeom3) self.assertEqual(mp.coords, mgeom2.coords) self.assertEqual(mp.n_p, mgeom2.point_count) def test_linestring(self): "Testing LineString objects." prev = OGRGeometry('POINT(0 0)') for ls in self.geometries.linestrings: linestr = OGRGeometry(ls.wkt) self.assertEqual(2, linestr.geom_type) self.assertEqual('LINESTRING', linestr.geom_name) self.assertEqual(ls.n_p, linestr.point_count) self.assertEqual(ls.coords, linestr.tuple) self.assertEqual(linestr, OGRGeometry(ls.wkt)) self.assertNotEqual(linestr, prev) with self.assertRaises(OGRIndexError): linestr.__getitem__(len(linestr)) prev = linestr # Testing the x, y properties. x = [tmpx for tmpx, tmpy in ls.coords] y = [tmpy for tmpx, tmpy in ls.coords] self.assertEqual(x, linestr.x) self.assertEqual(y, linestr.y) def test_multilinestring(self): "Testing MultiLineString objects." prev = OGRGeometry('POINT(0 0)') for mls in self.geometries.multilinestrings: mlinestr = OGRGeometry(mls.wkt) self.assertEqual(5, mlinestr.geom_type) self.assertEqual('MULTILINESTRING', mlinestr.geom_name) self.assertEqual(mls.n_p, mlinestr.point_count) self.assertEqual(mls.coords, mlinestr.tuple) self.assertEqual(mlinestr, OGRGeometry(mls.wkt)) self.assertNotEqual(mlinestr, prev) prev = mlinestr for ls in mlinestr: self.assertEqual(2, ls.geom_type) self.assertEqual('LINESTRING', ls.geom_name) with self.assertRaises(OGRIndexError): mlinestr.__getitem__(len(mlinestr)) def test_linearring(self): "Testing LinearRing objects." prev = OGRGeometry('POINT(0 0)') for rr in self.geometries.linearrings: lr = OGRGeometry(rr.wkt) # self.assertEqual(101, lr.geom_type.num) self.assertEqual('LINEARRING', lr.geom_name) self.assertEqual(rr.n_p, len(lr)) self.assertEqual(lr, OGRGeometry(rr.wkt)) self.assertNotEqual(lr, prev) prev = lr def test_polygons(self): "Testing Polygon objects." 
# Testing `from_bbox` class method bbox = (-180, -90, 180, 90) p = OGRGeometry.from_bbox(bbox) self.assertEqual(bbox, p.extent) prev = OGRGeometry('POINT(0 0)') for p in self.geometries.polygons: poly = OGRGeometry(p.wkt) self.assertEqual(3, poly.geom_type) self.assertEqual('POLYGON', poly.geom_name) self.assertEqual(p.n_p, poly.point_count) self.assertEqual(p.n_i + 1, len(poly)) # Testing area & centroid. self.assertAlmostEqual(p.area, poly.area, 9) x, y = poly.centroid.tuple self.assertAlmostEqual(p.centroid[0], x, 9) self.assertAlmostEqual(p.centroid[1], y, 9) # Testing equivalence self.assertEqual(poly, OGRGeometry(p.wkt)) self.assertNotEqual(poly, prev) if p.ext_ring_cs: ring = poly[0] self.assertEqual(p.ext_ring_cs, ring.tuple) self.assertEqual(p.ext_ring_cs, poly[0].tuple) self.assertEqual(len(p.ext_ring_cs), ring.point_count) for r in poly: self.assertEqual('LINEARRING', r.geom_name) def test_closepolygons(self): "Testing closing Polygon objects." # Both rings in this geometry are not closed. poly = OGRGeometry('POLYGON((0 0, 5 0, 5 5, 0 5), (1 1, 2 1, 2 2, 2 1))') self.assertEqual(8, poly.point_count) with self.assertRaises(GDALException): poly.centroid poly.close_rings() self.assertEqual(10, poly.point_count) # Two closing points should've been added self.assertEqual(OGRGeometry('POINT(2.5 2.5)'), poly.centroid) def test_multipolygons(self): "Testing MultiPolygon objects." OGRGeometry('POINT(0 0)') for mp in self.geometries.multipolygons: mpoly = OGRGeometry(mp.wkt) self.assertEqual(6, mpoly.geom_type) self.assertEqual('MULTIPOLYGON', mpoly.geom_name) if mp.valid: self.assertEqual(mp.n_p, mpoly.point_count) self.assertEqual(mp.num_geom, len(mpoly)) with self.assertRaises(OGRIndexError): mpoly.__getitem__(len(mpoly)) for p in mpoly: self.assertEqual('POLYGON', p.geom_name) self.assertEqual(3, p.geom_type) self.assertEqual(mpoly.wkt, OGRGeometry(mp.wkt).wkt) def test_srs(self): "Testing OGR Geometries with Spatial Reference objects." for mp in self.geometries.multipolygons: # Creating a geometry w/spatial reference sr = SpatialReference('WGS84') mpoly = OGRGeometry(mp.wkt, sr) self.assertEqual(sr.wkt, mpoly.srs.wkt) # Ensuring that SRS is propagated to clones. klone = mpoly.clone() self.assertEqual(sr.wkt, klone.srs.wkt) # Ensuring all children geometries (polygons and their rings) all # return the assigned spatial reference as well. for poly in mpoly: self.assertEqual(sr.wkt, poly.srs.wkt) for ring in poly: self.assertEqual(sr.wkt, ring.srs.wkt) # Ensuring SRS propagate in topological ops. a = OGRGeometry(self.geometries.topology_geoms[0].wkt_a, sr) b = OGRGeometry(self.geometries.topology_geoms[0].wkt_b, sr) diff = a.difference(b) union = a.union(b) self.assertEqual(sr.wkt, diff.srs.wkt) self.assertEqual(sr.srid, union.srs.srid) # Instantiating w/an integer SRID mpoly = OGRGeometry(mp.wkt, 4326) self.assertEqual(4326, mpoly.srid) mpoly.srs = SpatialReference(4269) self.assertEqual(4269, mpoly.srid) self.assertEqual('NAD83', mpoly.srs.name) # Incrementing through the multipolygon after the spatial reference # has been re-assigned. for poly in mpoly: self.assertEqual(mpoly.srs.wkt, poly.srs.wkt) poly.srs = 32140 for ring in poly: # Changing each ring in the polygon self.assertEqual(32140, ring.srs.srid) self.assertEqual('NAD83 / Texas South Central', ring.srs.name) ring.srs = str(SpatialReference(4326)) # back to WGS84 self.assertEqual(4326, ring.srs.srid) # Using the `srid` property. 
                    ring.srid = 4322
                    self.assertEqual('WGS 72', ring.srs.name)
                    self.assertEqual(4322, ring.srid)

            # srs/srid may be assigned their own values, even when srs is None.
            mpoly = OGRGeometry(mp.wkt, srs=None)
            mpoly.srs = mpoly.srs
            mpoly.srid = mpoly.srid

    def test_srs_transform(self):
        "Testing transform()."
        orig = OGRGeometry('POINT (-104.609 38.255)', 4326)
        trans = OGRGeometry('POINT (992385.4472045 481455.4944650)', 2774)

        # Using an srid, a SpatialReference object, and a CoordTransform object
        # for transformations.
        t1, t2, t3 = orig.clone(), orig.clone(), orig.clone()
        t1.transform(trans.srid)
        t2.transform(SpatialReference('EPSG:2774'))
        ct = CoordTransform(SpatialReference('WGS84'), SpatialReference(2774))
        t3.transform(ct)

        # Testing use of the `clone` keyword.
        k1 = orig.clone()
        k2 = k1.transform(trans.srid, clone=True)
        self.assertEqual(k1, orig)
        self.assertNotEqual(k1, k2)

        prec = 3
        for p in (t1, t2, t3, k2):
            self.assertAlmostEqual(trans.x, p.x, prec)
            self.assertAlmostEqual(trans.y, p.y, prec)

    def test_transform_dim(self):
        "Testing that the coordinate dimension is the same on transformed geometries."
        ls_orig = OGRGeometry('LINESTRING(-104.609 38.255)', 4326)
        ls_trans = OGRGeometry('LINESTRING(992385.4472045 481455.4944650)', 2774)

        prec = 3
        ls_orig.transform(ls_trans.srs)
        # Making sure the coordinate dimension is still 2D.
        self.assertEqual(2, ls_orig.coord_dim)
        self.assertAlmostEqual(ls_trans.x[0], ls_orig.x[0], prec)
        self.assertAlmostEqual(ls_trans.y[0], ls_orig.y[0], prec)

    def test_difference(self):
        "Testing difference()."
        for i in range(len(self.geometries.topology_geoms)):
            a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
            b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
            d1 = OGRGeometry(self.geometries.diff_geoms[i].wkt)
            d2 = a.difference(b)
            self.assertEqual(d1, d2)
            self.assertEqual(d1, a - b)  # __sub__ is the difference operator
            a -= b  # testing __isub__
            self.assertEqual(d1, a)

    def test_intersection(self):
        "Testing intersects() and intersection()."
        for i in range(len(self.geometries.topology_geoms)):
            a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
            b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
            i1 = OGRGeometry(self.geometries.intersect_geoms[i].wkt)
            self.assertTrue(a.intersects(b))
            i2 = a.intersection(b)
            self.assertEqual(i1, i2)
            self.assertEqual(i1, a & b)  # __and__ is the intersection operator
            a &= b  # testing __iand__
            self.assertEqual(i1, a)

    def test_symdifference(self):
        "Testing sym_difference()."
        for i in range(len(self.geometries.topology_geoms)):
            a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
            b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
            d1 = OGRGeometry(self.geometries.sdiff_geoms[i].wkt)
            d2 = a.sym_difference(b)
            self.assertEqual(d1, d2)
            self.assertEqual(d1, a ^ b)  # __xor__ is the symmetric difference operator
            a ^= b  # testing __ixor__
            self.assertEqual(d1, a)

    def test_union(self):
        "Testing union()."
        for i in range(len(self.geometries.topology_geoms)):
            a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
            b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
            u1 = OGRGeometry(self.geometries.union_geoms[i].wkt)
            u2 = a.union(b)
            self.assertEqual(u1, u2)
            self.assertEqual(u1, a | b)  # __or__ is the union operator
            a |= b  # testing __ior__
            self.assertEqual(u1, a)

    def test_add(self):
        "Testing GeometryCollection.add()."
        # Can't insert a Point into a MultiPolygon.
mp = OGRGeometry('MultiPolygon') pnt = OGRGeometry('POINT(5 23)') with self.assertRaises(GDALException): mp.add(pnt) # GeometryCollection.add may take an OGRGeometry (if another collection # of the same type all child geoms will be added individually) or WKT. for mp in self.geometries.multipolygons: mpoly = OGRGeometry(mp.wkt) mp1 = OGRGeometry('MultiPolygon') mp2 = OGRGeometry('MultiPolygon') mp3 = OGRGeometry('MultiPolygon') for poly in mpoly: mp1.add(poly) # Adding a geometry at a time mp2.add(poly.wkt) # Adding WKT mp3.add(mpoly) # Adding a MultiPolygon's entire contents at once. for tmp in (mp1, mp2, mp3): self.assertEqual(mpoly, tmp) def test_extent(self): "Testing `extent` property." # The xmin, ymin, xmax, ymax of the MultiPoint should be returned. mp = OGRGeometry('MULTIPOINT(5 23, 0 0, 10 50)') self.assertEqual((0.0, 0.0, 10.0, 50.0), mp.extent) # Testing on the 'real world' Polygon. poly = OGRGeometry(self.geometries.polygons[3].wkt) ring = poly.shell x, y = ring.x, ring.y xmin, ymin = min(x), min(y) xmax, ymax = max(x), max(y) self.assertEqual((xmin, ymin, xmax, ymax), poly.extent) def test_25D(self): "Testing 2.5D geometries." pnt_25d = OGRGeometry('POINT(1 2 3)') self.assertEqual('Point25D', pnt_25d.geom_type.name) self.assertEqual(3.0, pnt_25d.z) self.assertEqual(3, pnt_25d.coord_dim) ls_25d = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)') self.assertEqual('LineString25D', ls_25d.geom_type.name) self.assertEqual([1.0, 2.0, 3.0], ls_25d.z) self.assertEqual(3, ls_25d.coord_dim) def test_pickle(self): "Testing pickle support." g1 = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)', 'WGS84') g2 = pickle.loads(pickle.dumps(g1)) self.assertEqual(g1, g2) self.assertEqual(4326, g2.srs.srid) self.assertEqual(g1.srs.wkt, g2.srs.wkt) def test_ogrgeometry_transform_workaround(self): "Testing coordinate dimensions on geometries after transformation." # A bug in GDAL versions prior to 1.7 changes the coordinate # dimension of a geometry after it has been transformed. # This test ensures that the bug workarounds employed within # `OGRGeometry.transform` indeed work. wkt_2d = "MULTILINESTRING ((0 0,1 1,2 2))" wkt_3d = "MULTILINESTRING ((0 0 0,1 1 1,2 2 2))" srid = 4326 # For both the 2D and 3D MultiLineString, ensure _both_ the dimension # of the collection and the component LineString have the expected # coordinate dimension after transform. geom = OGRGeometry(wkt_2d, srid) geom.transform(srid) self.assertEqual(2, geom.coord_dim) self.assertEqual(2, geom[0].coord_dim) self.assertEqual(wkt_2d, geom.wkt) geom = OGRGeometry(wkt_3d, srid) geom.transform(srid) self.assertEqual(3, geom.coord_dim) self.assertEqual(3, geom[0].coord_dim) self.assertEqual(wkt_3d, geom.wkt) # Testing binary predicates, `assertIs` is used to check that bool is returned. def test_equivalence_regression(self): "Testing equivalence methods with non-OGRGeometry instances." 
        self.assertIsNotNone(OGRGeometry('POINT(0 0)'))
        self.assertNotEqual(OGRGeometry('LINESTRING(0 0, 1 1)'), 3)

    def test_contains(self):
        self.assertIs(OGRGeometry('POINT(0 0)').contains(OGRGeometry('POINT(0 0)')), True)
        self.assertIs(OGRGeometry('POINT(0 0)').contains(OGRGeometry('POINT(0 1)')), False)

    def test_crosses(self):
        self.assertIs(OGRGeometry('LINESTRING(0 0, 1 1)').crosses(OGRGeometry('LINESTRING(0 1, 1 0)')), True)
        self.assertIs(OGRGeometry('LINESTRING(0 0, 0 1)').crosses(OGRGeometry('LINESTRING(1 0, 1 1)')), False)

    def test_disjoint(self):
        self.assertIs(OGRGeometry('LINESTRING(0 0, 1 1)').disjoint(OGRGeometry('LINESTRING(0 1, 1 0)')), False)
        self.assertIs(OGRGeometry('LINESTRING(0 0, 0 1)').disjoint(OGRGeometry('LINESTRING(1 0, 1 1)')), True)

    def test_equals(self):
        self.assertIs(OGRGeometry('POINT(0 0)').equals(OGRGeometry('POINT(0 0)')), True)
        self.assertIs(OGRGeometry('POINT(0 0)').equals(OGRGeometry('POINT(0 1)')), False)

    def test_intersects(self):
        self.assertIs(OGRGeometry('LINESTRING(0 0, 1 1)').intersects(OGRGeometry('LINESTRING(0 1, 1 0)')), True)
        self.assertIs(OGRGeometry('LINESTRING(0 0, 0 1)').intersects(OGRGeometry('LINESTRING(1 0, 1 1)')), False)

    def test_overlaps(self):
        self.assertIs(
            OGRGeometry('POLYGON ((0 0, 0 2, 2 2, 2 0, 0 0))').overlaps(
                OGRGeometry('POLYGON ((1 1, 1 5, 5 5, 5 1, 1 1))')
            ), True
        )
        self.assertIs(OGRGeometry('POINT(0 0)').overlaps(OGRGeometry('POINT(0 1)')), False)

    def test_touches(self):
        self.assertIs(
            OGRGeometry('POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))').touches(OGRGeometry('LINESTRING(0 2, 2 0)')), True
        )
        self.assertIs(OGRGeometry('POINT(0 0)').touches(OGRGeometry('POINT(0 1)')), False)

    def test_within(self):
        self.assertIs(
            OGRGeometry('POINT(0.5 0.5)').within(OGRGeometry('POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))')), True
        )
        self.assertIs(OGRGeometry('POINT(0 0)').within(OGRGeometry('POINT(0 1)')), False)

    def test_from_gml(self):
        self.assertEqual(
            OGRGeometry('POINT(0 0)'),
            OGRGeometry.from_gml(
                '<gml:Point gml:id="p21" srsName="http://www.opengis.net/def/crs/EPSG/0/4326">'
                '    <gml:pos srsDimension="2">0 0</gml:pos>'
                '</gml:Point>'
            ),
        )
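

# A minimal sketch of the OGRGeometry operations covered above, assuming GDAL
# is available: construction from WKT with an SRID, the set-operation
# operators, and reprojection with the `clone` keyword.
def _ogrgeometry_example():
    from django.contrib.gis.gdal import OGRGeometry
    a = OGRGeometry('POLYGON((0 0, 0 2, 2 2, 2 0, 0 0))', 4326)
    b = OGRGeometry('POLYGON((1 1, 1 3, 3 3, 3 1, 1 1))', 4326)
    overlap = a & b                             # Same as a.intersection(b).
    merged = a | b                              # Same as a.union(b).
    projected = a.transform(3857, clone=True)   # Leaves `a` untouched.
    return overlap, merged, projected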
86f061d576f8f57f68ceeaebb00934c743343ca3ca20bd56b8ed4baef6224e0f
import unittest from unittest import skipUnless from django.contrib.gis.gdal import HAS_GDAL if HAS_GDAL: from django.contrib.gis.gdal import Envelope, GDALException class TestPoint(object): def __init__(self, x, y): self.x = x self.y = y @skipUnless(HAS_GDAL, "GDAL is required") class EnvelopeTest(unittest.TestCase): def setUp(self): self.e = Envelope(0, 0, 5, 5) def test01_init(self): "Testing Envelope initialization." e1 = Envelope((0, 0, 5, 5)) Envelope(0, 0, 5, 5) Envelope(0, '0', '5', 5) # Thanks to ww for this Envelope(e1._envelope) with self.assertRaises(GDALException): Envelope((5, 5, 0, 0)) with self.assertRaises(GDALException): Envelope(5, 5, 0, 0) with self.assertRaises(GDALException): Envelope((0, 0, 5, 5, 3)) with self.assertRaises(GDALException): Envelope(()) with self.assertRaises(ValueError): Envelope(0, 'a', 5, 5) with self.assertRaises(TypeError): Envelope('foo') with self.assertRaises(GDALException): Envelope((1, 1, 0, 0)) # Shouldn't raise an exception for min_x == max_x or min_y == max_y Envelope(0, 0, 0, 0) def test02_properties(self): "Testing Envelope properties." e = Envelope(0, 0, 2, 3) self.assertEqual(0, e.min_x) self.assertEqual(0, e.min_y) self.assertEqual(2, e.max_x) self.assertEqual(3, e.max_y) self.assertEqual((0, 0), e.ll) self.assertEqual((2, 3), e.ur) self.assertEqual((0, 0, 2, 3), e.tuple) self.assertEqual('POLYGON((0.0 0.0,0.0 3.0,2.0 3.0,2.0 0.0,0.0 0.0))', e.wkt) self.assertEqual('(0.0, 0.0, 2.0, 3.0)', str(e)) def test03_equivalence(self): "Testing Envelope equivalence." e1 = Envelope(0.523, 0.217, 253.23, 523.69) e2 = Envelope((0.523, 0.217, 253.23, 523.69)) self.assertEqual(e1, e2) self.assertEqual((0.523, 0.217, 253.23, 523.69), e1) def test04_expand_to_include_pt_2_params(self): "Testing Envelope expand_to_include -- point as two parameters." self.e.expand_to_include(2, 6) self.assertEqual((0, 0, 5, 6), self.e) self.e.expand_to_include(-1, -1) self.assertEqual((-1, -1, 5, 6), self.e) def test05_expand_to_include_pt_2_tuple(self): "Testing Envelope expand_to_include -- point as a single 2-tuple parameter." self.e.expand_to_include((10, 10)) self.assertEqual((0, 0, 10, 10), self.e) self.e.expand_to_include((-10, -10)) self.assertEqual((-10, -10, 10, 10), self.e) def test06_expand_to_include_extent_4_params(self): "Testing Envelope expand_to_include -- extent as 4 parameters." self.e.expand_to_include(-1, 1, 3, 7) self.assertEqual((-1, 0, 5, 7), self.e) def test06_expand_to_include_extent_4_tuple(self): "Testing Envelope expand_to_include -- extent as a single 4-tuple parameter." self.e.expand_to_include((-1, 1, 3, 7)) self.assertEqual((-1, 0, 5, 7), self.e) def test07_expand_to_include_envelope(self): "Testing Envelope expand_to_include with Envelope as parameter." self.e.expand_to_include(Envelope(-1, 1, 3, 7)) self.assertEqual((-1, 0, 5, 7), self.e) def test08_expand_to_include_point(self): "Testing Envelope expand_to_include with Point as parameter." self.e.expand_to_include(TestPoint(-1, 1)) self.assertEqual((-1, 0, 5, 5), self.e) self.e.expand_to_include(TestPoint(10, 10)) self.assertEqual((-1, 0, 10, 10), self.e)
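

# A minimal sketch of how an Envelope grows via expand_to_include(), assuming
# GDAL is available; as the tests above show, it accepts a point (as two
# parameters or a 2-tuple), a 4-tuple extent, another Envelope, or any object
# with x/y attributes.
def _envelope_example():
    from django.contrib.gis.gdal import Envelope
    env = Envelope(0, 0, 5, 5)
    env.expand_to_include(10, 10)           # Point as two parameters.
    env.expand_to_include((-2, -2, 0, 0))   # Extent as a 4-tuple.
    return env.tuple                        # (-2.0, -2.0, 10.0, 10.0)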
d7f78e9de0789bda04f073b5777314f47b01894791046ed65154a609a9dcbe65
# -*- coding: utf-8 -*- from __future__ import unicode_literals import unittest from unittest import skipUnless from django.contrib.gis.gdal import HAS_GDAL if HAS_GDAL: from django.contrib.gis.gdal import SpatialReference, CoordTransform, GDALException, SRSException class TestSRS: def __init__(self, wkt, **kwargs): self.wkt = wkt for key, value in kwargs.items(): setattr(self, key, value) WGS84_proj = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs ' # Some Spatial Reference examples srlist = ( TestSRS( 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,' 'AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],' 'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",' '0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]', epsg=4326, projected=False, geographic=True, local=False, lin_name='unknown', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199, auth={'GEOGCS': ('EPSG', '4326'), 'spheroid': ('EPSG', '7030')}, attr=(('DATUM', 'WGS_1984'), (('SPHEROID', 1), '6378137'), ('primem|authority', 'EPSG'),), ), TestSRS( 'PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",' 'SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],' 'AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],' 'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],' 'AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],' 'PARAMETER["standard_parallel_1",30.28333333333333],' 'PARAMETER["standard_parallel_2",28.38333333333333],' 'PARAMETER["latitude_of_origin",27.83333333333333],' 'PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],' 'PARAMETER["false_northing",4000000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],' 'AUTHORITY["EPSG","32140"]]', epsg=32140, projected=True, geographic=False, local=False, lin_name='metre', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199, auth={'PROJCS': ('EPSG', '32140'), 'spheroid': ('EPSG', '7019'), 'unit': ('EPSG', '9001')}, attr=( ('DATUM', 'North_American_Datum_1983'), (('SPHEROID', 2), '298.257222101'), ('PROJECTION', 'Lambert_Conformal_Conic_2SP'), ), ), TestSRS( 'PROJCS["NAD_1983_StatePlane_Texas_South_Central_FIPS_4204_Feet",' 'GEOGCS["GCS_North_American_1983",DATUM["North_American_Datum_1983",' 'SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],' 'UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],' 'PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",13123333.33333333],' 'PARAMETER["Central_Meridian",-99.0],PARAMETER["Standard_Parallel_1",28.38333333333333],' 'PARAMETER["Standard_Parallel_2",30.28333333333334],PARAMETER["Latitude_Of_Origin",27.83333333333333],' 'UNIT["Foot_US",0.3048006096012192]]', epsg=None, projected=True, geographic=False, local=False, lin_name='Foot_US', ang_name='Degree', lin_units=0.3048006096012192, ang_units=0.0174532925199, auth={'PROJCS': (None, None)}, attr=(('PROJCS|GeOgCs|spheroid', 'GRS_1980'), (('projcs', 9), 'UNIT'), (('projcs', 11), None),), ), # This is really ESRI format, not WKT -- but the import should work the same TestSRS( 'LOCAL_CS["Non-Earth (Meter)",LOCAL_DATUM["Local Datum",0],UNIT["Meter",1.0],AXIS["X",EAST],AXIS["Y",NORTH]]', esri=True, epsg=None, projected=False, geographic=False, local=True, lin_name='Meter', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199, attr=(('LOCAL_DATUM', 'Local Datum'), ('unit', 'Meter')), ), ) # Well-Known Names well_known = ( TestSRS( 
'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,' 'AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],' 'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,' 'AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]', wk='WGS84', name='WGS 84', attrs=(('GEOGCS|AUTHORITY', 1, '4326'), ('SPHEROID', 'WGS 84')), ), TestSRS( 'GEOGCS["WGS 72",DATUM["WGS_1972",SPHEROID["WGS 72",6378135,298.26,' 'AUTHORITY["EPSG","7043"]],AUTHORITY["EPSG","6322"]],' 'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],' 'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],' 'AUTHORITY["EPSG","4322"]]', wk='WGS72', name='WGS 72', attrs=(('GEOGCS|AUTHORITY', 1, '4322'), ('SPHEROID', 'WGS 72')), ), TestSRS( 'GEOGCS["NAD27",DATUM["North_American_Datum_1927",' 'SPHEROID["Clarke 1866",6378206.4,294.9786982138982,' 'AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],' 'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],' 'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],' 'AUTHORITY["EPSG","4267"]]', wk='NAD27', name='NAD27', attrs=(('GEOGCS|AUTHORITY', 1, '4267'), ('SPHEROID', 'Clarke 1866')) ), TestSRS( 'GEOGCS["NAD83",DATUM["North_American_Datum_1983",' 'SPHEROID["GRS 1980",6378137,298.257222101,' 'AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],' 'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],' 'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],' 'AUTHORITY["EPSG","4269"]]', wk='NAD83', name='NAD83', attrs=(('GEOGCS|AUTHORITY', 1, '4269'), ('SPHEROID', 'GRS 1980')), ), TestSRS( 'PROJCS["NZGD49 / Karamea Circuit",GEOGCS["NZGD49",' 'DATUM["New_Zealand_Geodetic_Datum_1949",' 'SPHEROID["International 1924",6378388,297,' 'AUTHORITY["EPSG","7022"]],' 'TOWGS84[59.47,-5.04,187.44,0.47,-0.1,1.024,-4.5993],' 'AUTHORITY["EPSG","6272"]],PRIMEM["Greenwich",0,' 'AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,' 'AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4272"]],' 'PROJECTION["Transverse_Mercator"],' 'PARAMETER["latitude_of_origin",-41.28991152777778],' 'PARAMETER["central_meridian",172.1090281944444],' 'PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],' 'PARAMETER["false_northing",700000],' 'UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","27216"]]', wk='EPSG:27216', name='NZGD49 / Karamea Circuit', attrs=(('PROJECTION', 'Transverse_Mercator'), ('SPHEROID', 'International 1924')), ), ) bad_srlist = ( 'Foobar', 'OOJCS["NAD83 / Texas South Central",GEOGCS["NAD83",' 'DATUM["North_American_Datum_1983",' 'SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],' 'AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],' 'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],' 'AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],' 'PARAMETER["standard_parallel_1",30.28333333333333],' 'PARAMETER["standard_parallel_2",28.38333333333333],' 'PARAMETER["latitude_of_origin",27.83333333333333],' 'PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],' 'PARAMETER["false_northing",4000000],UNIT["metre",1,' 'AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32140"]]', ) @skipUnless(HAS_GDAL, "GDAL is required") class SpatialRefTest(unittest.TestCase): def test01_wkt(self): "Testing initialization on valid OGC WKT." for s in srlist: SpatialReference(s.wkt) def test02_bad_wkt(self): "Testing initialization on invalid WKT." 
        for bad in bad_srlist:
            try:
                srs = SpatialReference(bad)
                srs.validate()
            except (SRSException, GDALException):
                pass
            else:
                self.fail('Should not have initialized on bad WKT "%s"!' % bad)

    def test03_get_wkt(self):
        "Testing getting the WKT."
        for s in srlist:
            srs = SpatialReference(s.wkt)
            self.assertEqual(s.wkt, srs.wkt)

    def test04_proj(self):
        "Test PROJ.4 import and export."
        proj_parts = [
            '+proj=longlat', '+ellps=WGS84', '+towgs84=0,0,0,0,0,0,0', '+datum=WGS84', '+no_defs'
        ]
        srs1 = SpatialReference(srlist[0].wkt)
        srs2 = SpatialReference(WGS84_proj)
        self.assertTrue(all(part in proj_parts for part in srs1.proj.split()))
        self.assertTrue(all(part in proj_parts for part in srs2.proj.split()))

    def test05_epsg(self):
        "Test EPSG import."
        for s in srlist:
            if s.epsg:
                srs1 = SpatialReference(s.wkt)
                srs2 = SpatialReference(s.epsg)
                srs3 = SpatialReference(str(s.epsg))
                srs4 = SpatialReference('EPSG:%d' % s.epsg)
                for srs in (srs1, srs2, srs3, srs4):
                    for attr, expected in s.attr:
                        self.assertEqual(expected, srs[attr])

    def test07_boolean_props(self):
        "Testing the boolean properties."
        for s in srlist:
            srs = SpatialReference(s.wkt)
            self.assertEqual(s.projected, srs.projected)
            self.assertEqual(s.geographic, srs.geographic)

    def test08_angular_linear(self):
        "Testing the linear and angular units routines."
        for s in srlist:
            srs = SpatialReference(s.wkt)
            self.assertEqual(s.ang_name, srs.angular_name)
            self.assertEqual(s.lin_name, srs.linear_name)
            self.assertAlmostEqual(s.ang_units, srs.angular_units, 9)
            self.assertAlmostEqual(s.lin_units, srs.linear_units, 9)

    def test09_authority(self):
        "Testing the authority name & code routines."
        for s in srlist:
            if hasattr(s, 'auth'):
                srs = SpatialReference(s.wkt)
                for target, tup in s.auth.items():
                    self.assertEqual(tup[0], srs.auth_name(target))
                    self.assertEqual(tup[1], srs.auth_code(target))

    def test10_attributes(self):
        "Testing the attribute retrieval routines."
        for s in srlist:
            srs = SpatialReference(s.wkt)
            for tup in s.attr:
                att = tup[0]  # Attribute to test
                exp = tup[1]  # Expected result
                self.assertEqual(exp, srs[att])

    def test11_wellknown(self):
        "Testing Well Known Names of Spatial References."
        for s in well_known:
            srs = SpatialReference(s.wk)
            self.assertEqual(s.name, srs.name)
            for tup in s.attrs:
                if len(tup) == 2:
                    key = tup[0]
                    exp = tup[1]
                elif len(tup) == 3:
                    key = tup[:2]
                    exp = tup[2]
                self.assertEqual(srs[key], exp)

    def test12_coordtransform(self):
        "Testing initialization of a CoordTransform."
        target = SpatialReference('WGS84')
        CoordTransform(SpatialReference(srlist[0].wkt), target)

    def test13_attr_value(self):
        "Testing the attr_value() method."
s1 = SpatialReference('WGS84') with self.assertRaises(TypeError): s1.__getitem__(0) with self.assertRaises(TypeError): s1.__getitem__(('GEOGCS', 'foo')) self.assertEqual('WGS 84', s1['GEOGCS']) self.assertEqual('WGS_1984', s1['DATUM']) self.assertEqual('EPSG', s1['AUTHORITY']) self.assertEqual(4326, int(s1['AUTHORITY', 1])) self.assertIsNone(s1['FOOBAR']) def test_unicode(self): wkt = ( 'PROJCS["DHDN / Soldner 39 Langschoß",' 'GEOGCS["DHDN",DATUM["Deutsches_Hauptdreiecksnetz",' 'SPHEROID["Bessel 1841",6377397.155,299.1528128,AUTHORITY["EPSG","7004"]],AUTHORITY["EPSG","6314"]],' 'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],' 'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],' 'AUTHORITY["EPSG","4314"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],' 'PROJECTION["Cassini_Soldner"],PARAMETER["latitude_of_origin",50.66738711],' 'PARAMETER["central_meridian",6.28935703],PARAMETER["false_easting",0],' 'PARAMETER["false_northing",0],AUTHORITY["mj10777.de","187939"],AXIS["x",NORTH],AXIS["y",EAST]]' ) srs = SpatialReference(wkt) srs_list = [srs, srs.clone()] srs.import_wkt(wkt) for srs in srs_list: self.assertEqual(srs.name, 'DHDN / Soldner 39 Langschoß') self.assertEqual(srs.wkt, wkt) self.assertIn('Langschoß', srs.pretty_wkt) self.assertIn('Langschoß', srs.xml)
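

# A minimal sketch of the SpatialReference lookup patterns tested above,
# assuming GDAL is available: construction from a well-known name, an EPSG
# code, or WKT, plus attribute access by node path or (node, index) tuple.
def _srs_example():
    from django.contrib.gis.gdal import SpatialReference, CoordTransform
    wgs84 = SpatialReference('WGS84')   # Well-known name.
    texas = SpatialReference(32140)     # EPSG code.
    datum = wgs84['DATUM']              # Attribute lookup -> 'WGS_1984'.
    code = wgs84['AUTHORITY', 1]        # Indexed lookup -> '4326'.
    ct = CoordTransform(wgs84, texas)   # For use with OGRGeometry.transform().
    return datum, code, ct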
46db2bbdee9d710daa7fecc1441ae80b4ad551bbc648736a95018c9167f6a1f9
import unittest from django.contrib.gis.gdal import HAS_GDAL from django.test import mock if HAS_GDAL: from django.contrib.gis.gdal import Driver, GDALException valid_drivers = ( # vector 'ESRI Shapefile', 'MapInfo File', 'TIGER', 'S57', 'DGN', 'Memory', 'CSV', 'GML', 'KML', # raster 'GTiff', 'JPEG', 'MEM', 'PNG', ) invalid_drivers = ('Foo baz', 'clucka', 'ESRI Shp', 'ESRI rast') aliases = { 'eSrI': 'ESRI Shapefile', 'TigER/linE': 'TIGER', 'SHAPE': 'ESRI Shapefile', 'sHp': 'ESRI Shapefile', 'tiFf': 'GTiff', 'tIf': 'GTiff', 'jPEg': 'JPEG', 'jpG': 'JPEG', } @unittest.skipUnless(HAS_GDAL, "GDAL is required") class DriverTest(unittest.TestCase): def test01_valid_driver(self): "Testing valid GDAL/OGR Data Source Drivers." for d in valid_drivers: dr = Driver(d) self.assertEqual(d, str(dr)) def test02_invalid_driver(self): "Testing invalid GDAL/OGR Data Source Drivers." for i in invalid_drivers: with self.assertRaises(GDALException): Driver(i) def test03_aliases(self): "Testing driver aliases." for alias, full_name in aliases.items(): dr = Driver(alias) self.assertEqual(full_name, str(dr)) @mock.patch('django.contrib.gis.gdal.driver.vcapi.get_driver_count') @mock.patch('django.contrib.gis.gdal.driver.rcapi.get_driver_count') @mock.patch('django.contrib.gis.gdal.driver.vcapi.register_all') @mock.patch('django.contrib.gis.gdal.driver.rcapi.register_all') def test_registered(self, rreg, vreg, rcount, vcount): """ Prototypes are registered only if their respective driver counts are zero. """ def check(rcount_val, vcount_val): vreg.reset_mock() rreg.reset_mock() rcount.return_value = rcount_val vcount.return_value = vcount_val Driver.ensure_registered() if rcount_val: self.assertFalse(rreg.called) else: rreg.assert_called_once_with() if vcount_val: self.assertFalse(vreg.called) else: vreg.assert_called_once_with() check(0, 0) check(120, 0) check(0, 120) check(120, 120)
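

# A minimal sketch of the Driver alias resolution checked above, assuming GDAL
# is available: short, case-insensitive aliases normalize to the full GDAL/OGR
# driver name.
def _driver_example():
    from django.contrib.gis.gdal import Driver
    assert str(Driver('shp')) == 'ESRI Shapefile'
    assert str(Driver('tiff')) == 'GTiff'
    return Driver('shp')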
5a9ac4a93d760b8688e8b89a54c537c3e48b896409fb3ba2c85f3876cb111972
from __future__ import unicode_literals from django.contrib.gis import admin from django.contrib.gis.geos import Point from django.test import TestCase, override_settings, skipUnlessDBFeature from django.test.utils import patch_logger from .admin import UnmodifiableAdmin from .models import City, site @skipUnlessDBFeature("gis_enabled") @override_settings(ROOT_URLCONF='django.contrib.gis.tests.geoadmin.urls') class GeoAdminTest(TestCase): def test_ensure_geographic_media(self): geoadmin = site._registry[City] admin_js = geoadmin.media.render_js() self.assertTrue(any(geoadmin.openlayers_url in js for js in admin_js)) def test_olmap_OSM_rendering(self): delete_all_btn = """<a href="javascript:geodjango_point.clearFeatures()">Delete all Features</a>""" original_geoadmin = site._registry[City] params = original_geoadmin.get_map_widget(City._meta.get_field('point')).params result = original_geoadmin.get_map_widget(City._meta.get_field('point'))( ).render('point', Point(-79.460734, 40.18476), params) self.assertIn( """geodjango_point.layers.base = new OpenLayers.Layer.OSM("OpenStreetMap (Mapnik)");""", result) self.assertIn(delete_all_btn, result) site.unregister(City) site.register(City, UnmodifiableAdmin) try: geoadmin = site._registry[City] params = geoadmin.get_map_widget(City._meta.get_field('point')).params result = geoadmin.get_map_widget(City._meta.get_field('point'))( ).render('point', Point(-79.460734, 40.18476), params) self.assertNotIn(delete_all_btn, result) finally: site.unregister(City) site.register(City, original_geoadmin.__class__) def test_olmap_WMS_rendering(self): geoadmin = admin.GeoModelAdmin(City, site) result = geoadmin.get_map_widget(City._meta.get_field('point'))( ).render('point', Point(-79.460734, 40.18476)) self.assertIn( """geodjango_point.layers.base = new OpenLayers.Layer.WMS("OpenLayers WMS", """ """"http://vmap0.tiles.osgeo.org/wms/vmap0", {layers: 'basic', format: 'image/jpeg'});""", result) def test_olwidget_has_changed(self): """ Check that changes are accurately noticed by OpenLayersWidget. 
""" geoadmin = site._registry[City] form = geoadmin.get_changelist_form(None)() has_changed = form.fields['point'].has_changed initial = Point(13.4197458572965953, 52.5194108501149799, srid=4326) data_same = "SRID=3857;POINT(1493879.2754093995 6894592.019687599)" data_almost_same = "SRID=3857;POINT(1493879.2754093990 6894592.019687590)" data_changed = "SRID=3857;POINT(1493884.0527237 6894593.8111804)" self.assertTrue(has_changed(None, data_changed)) self.assertTrue(has_changed(initial, "")) self.assertFalse(has_changed(None, "")) self.assertFalse(has_changed(initial, data_same)) self.assertFalse(has_changed(initial, data_almost_same)) self.assertTrue(has_changed(initial, data_changed)) def test_olwidget_empty_string(self): geoadmin = site._registry[City] form = geoadmin.get_changelist_form(None)({'point': ''}) with patch_logger('django.contrib.gis', 'error') as logger_calls: output = str(form['point']) self.assertInHTML( '<textarea id="id_point" class="vWKTField required" cols="150"' ' rows="10" name="point"></textarea>', output ) self.assertEqual(logger_calls, []) def test_olwidget_invalid_string(self): geoadmin = site._registry[City] form = geoadmin.get_changelist_form(None)({'point': 'INVALID()'}) with patch_logger('django.contrib.gis', 'error') as logger_calls: output = str(form['point']) self.assertInHTML( '<textarea id="id_point" class="vWKTField required" cols="150"' ' rows="10" name="point"></textarea>', output ) self.assertEqual(len(logger_calls), 1) self.assertEqual( logger_calls[0], "Error creating geometry from value 'INVALID()' (String or unicode input " "unrecognized as WKT EWKT, and HEXEWKB.)" )
1a85c024f7d44bc6d682af857c408b61ff16e29d2215037a52800d25cea8a070
from django.utils.encoding import python_2_unicode_compatible from ..admin import admin from ..models import models @python_2_unicode_compatible class City(models.Model): name = models.CharField(max_length=30) point = models.PointField() class Meta: app_label = 'geoadmin' required_db_features = ['gis_enabled'] def __str__(self): return self.name site = admin.AdminSite(name='admin_gis') site.register(City, admin.OSMGeoAdmin)
843e251c079dbe7f20a104237da82238665e8aeace053496b15dcb716e3637e0
from django.db import connection, migrations, models from ...models import models as gis_models ops = [ migrations.CreateModel( name='Neighborhood', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=100, unique=True)), ('geom', gis_models.MultiPolygonField(srid=4326)), ], options={ 'required_db_features': ['gis_enabled'], }, bases=(models.Model,), ), migrations.CreateModel( name='Household', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('neighborhood', models.ForeignKey( 'gis_migrations.Neighborhood', models.SET_NULL, to_field='id', null=True, )), ('address', models.CharField(max_length=100)), ('zip_code', models.IntegerField(null=True, blank=True)), ('geom', gis_models.PointField(srid=4326, geography=True)), ], options={ 'required_db_features': ['gis_enabled'], }, bases=(models.Model,), ), migrations.CreateModel( name='Family', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=100, unique=True)), ], options={ }, bases=(models.Model,), ), migrations.AddField( model_name='household', name='family', field=models.ForeignKey('gis_migrations.Family', models.SET_NULL, blank=True, null=True), preserve_default=True, ) ] if connection.features.gis_enabled and connection.features.supports_raster: ops += [ migrations.CreateModel( name='Heatmap', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=100, unique=True)), ('rast', gis_models.fields.RasterField(srid=4326)), ], options={ }, bases=(models.Model,), ), ] class Migration(migrations.Migration): """ Used for gis-specific migration tests. """ operations = ops
e86a4f113633fe9d20b0f3461784297d36484631a667ea0283353f80fdbae7c6
from django.db import models from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class Article(models.Model): headline = models.CharField(max_length=100, default='Default headline') pub_date = models.DateTimeField() def __str__(self): return self.headline class Meta: app_label = 'fixtures_model_package' ordering = ('-pub_date', 'headline')
9455327b13117e03f274b656aeb012e95b74ac65cf9a3d8155ec9954543b761f
from django.db import models from django.db.models.fields.related import ( ForeignObjectRel, ReverseManyToOneDescriptor, ) from django.db.models.lookups import StartsWith from django.db.models.query_utils import PathInfo from django.utils.encoding import python_2_unicode_compatible class CustomForeignObjectRel(ForeignObjectRel): """ Define some extra Field methods so this Rel acts more like a Field, which lets us use ReverseManyToOneDescriptor in both directions. """ @property def foreign_related_fields(self): return tuple(lhs_field for lhs_field, rhs_field in self.field.related_fields) def get_attname(self): return self.name class StartsWithRelation(models.ForeignObject): """ A ForeignObject that uses StartsWith operator in its joins instead of the default equality operator. This is logically a many-to-many relation and creates a ReverseManyToOneDescriptor in both directions. """ auto_created = False many_to_many = False many_to_one = True one_to_many = False one_to_one = False rel_class = CustomForeignObjectRel def __init__(self, *args, **kwargs): kwargs['on_delete'] = models.DO_NOTHING super(StartsWithRelation, self).__init__(*args, **kwargs) @property def field(self): """ Makes ReverseManyToOneDescriptor work in both directions. """ return self.remote_field def get_extra_restriction(self, where_class, alias, related_alias): to_field = self.remote_field.model._meta.get_field(self.to_fields[0]) from_field = self.model._meta.get_field(self.from_fields[0]) return StartsWith(to_field.get_col(alias), from_field.get_col(related_alias)) def get_joining_columns(self, reverse_join=False): return tuple() def get_path_info(self): to_opts = self.remote_field.model._meta from_opts = self.model._meta return [PathInfo(from_opts, to_opts, (to_opts.pk,), self, False, False)] def get_reverse_path_info(self): to_opts = self.model._meta from_opts = self.remote_field.model._meta return [PathInfo(from_opts, to_opts, (to_opts.pk,), self.remote_field, False, False)] def contribute_to_class(self, cls, name, private_only=False): super(StartsWithRelation, self).contribute_to_class(cls, name, private_only) setattr(cls, self.name, ReverseManyToOneDescriptor(self)) class BrokenContainsRelation(StartsWithRelation): """ This model is designed to yield no join conditions and raise an exception in ``Join.as_sql()``. """ def get_extra_restriction(self, where_class, alias, related_alias): return None @python_2_unicode_compatible class SlugPage(models.Model): slug = models.CharField(max_length=20, unique=True) descendants = StartsWithRelation( 'self', from_fields=['slug'], to_fields=['slug'], related_name='ascendants', ) containers = BrokenContainsRelation( 'self', from_fields=['slug'], to_fields=['slug'], ) class Meta: ordering = ['slug'] def __str__(self): return 'SlugPage %s' % self.slug
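

# A minimal sketch of how the StartsWithRelation above can be queried,
# assuming a configured Django project with this model migrated; the slugs are
# hypothetical. Because the join condition is a StartsWith lookup rather than
# equality, filtering on the reverse 'ascendants' relation matches every page
# whose slug begins with the given prefix.
def _slugpage_example():
    SlugPage.objects.bulk_create([SlugPage(slug=s) for s in ['a', 'a/a', 'a/b', 'x']])
    # Returns the pages 'a', 'a/a', and 'a/b'.
    return SlugPage.objects.filter(ascendants__slug='a')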
fbf929d45b0cebcca642e96faf66811da5bb90349efc7a3ce669188f09e2ea54
import datetime from django.db import models from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class Country(models.Model): # Table Column Fields name = models.CharField(max_length=50) def __str__(self): return self.name @python_2_unicode_compatible class Person(models.Model): # Table Column Fields name = models.CharField(max_length=128) person_country_id = models.IntegerField() # Relation Fields person_country = models.ForeignObject( Country, from_fields=['person_country_id'], to_fields=['id'], on_delete=models.CASCADE, ) friends = models.ManyToManyField('self', through='Friendship', symmetrical=False) class Meta: ordering = ('name',) def __str__(self): return self.name @python_2_unicode_compatible class Group(models.Model): # Table Column Fields name = models.CharField(max_length=128) group_country = models.ForeignKey(Country, models.CASCADE) members = models.ManyToManyField(Person, related_name='groups', through='Membership') class Meta: ordering = ('name',) def __str__(self): return self.name @python_2_unicode_compatible class Membership(models.Model): # Table Column Fields membership_country = models.ForeignKey(Country, models.CASCADE) date_joined = models.DateTimeField(default=datetime.datetime.now) invite_reason = models.CharField(max_length=64, null=True) person_id = models.IntegerField() group_id = models.IntegerField(blank=True, null=True) # Relation Fields person = models.ForeignObject( Person, from_fields=['person_id', 'membership_country'], to_fields=['id', 'person_country_id'], on_delete=models.CASCADE, ) group = models.ForeignObject( Group, from_fields=['group_id', 'membership_country'], to_fields=['id', 'group_country'], on_delete=models.CASCADE, ) class Meta: ordering = ('date_joined', 'invite_reason') def __str__(self): group_name = self.group.name if self.group_id else 'NULL' return "%s is a member of %s" % (self.person.name, group_name) class Friendship(models.Model): # Table Column Fields from_friend_country = models.ForeignKey(Country, models.CASCADE, related_name="from_friend_country") from_friend_id = models.IntegerField() to_friend_country_id = models.IntegerField() to_friend_id = models.IntegerField() # Relation Fields from_friend = models.ForeignObject( Person, on_delete=models.CASCADE, from_fields=['from_friend_country', 'from_friend_id'], to_fields=['person_country_id', 'id'], related_name='from_friend') to_friend_country = models.ForeignObject( Country, from_fields=['to_friend_country_id'], to_fields=['id'], related_name='to_friend_country', on_delete=models.CASCADE, ) to_friend = models.ForeignObject( Person, from_fields=['to_friend_country_id', 'to_friend_id'], to_fields=['person_country_id', 'id'], related_name='to_friend', on_delete=models.CASCADE, )
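

# A minimal sketch of querying across the composite-key ForeignObjects above,
# assuming a configured Django project with these models migrated; the names
# are hypothetical. Membership.person joins Person on both (person_id,
# membership_country), so a single lookup traverses multiple columns at once.
def _membership_example():
    usa = Country.objects.create(name='United States')
    bob = Person.objects.create(name='Bob', person_country=usa)
    group = Group.objects.create(name='Readers', group_country=usa)
    Membership.objects.create(membership_country=usa, person_id=bob.pk, group_id=group.pk)
    # The generated JOIN matches on id AND person_country_id.
    return Membership.objects.filter(person__name='Bob')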
3497f633e42d8116a582ffcdc1c3b93bfda8c00fa463fe396b3a3ae4bf57c11f
from django.db import models
from django.db.models.fields.related import ForwardManyToOneDescriptor
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import get_language


class ArticleTranslationDescriptor(ForwardManyToOneDescriptor):
    """
    Assigning an article translation should not set any local fields.
    """
    def __set__(self, instance, value):
        if instance is None:
            raise AttributeError("%s must be accessed via instance" % self.field.name)
        setattr(instance, self.cache_name, value)
        if value is not None and not self.field.remote_field.multiple:
            setattr(value, self.field.related.get_cache_name(), instance)


class ColConstraint(object):
    # Anything with an as_sql() method works in get_extra_restriction().
    def __init__(self, alias, col, value):
        self.alias, self.col, self.value = alias, col, value

    def as_sql(self, compiler, connection):
        qn = compiler.quote_name_unless_alias
        return '%s.%s = %%s' % (qn(self.alias), qn(self.col)), [self.value]


class ActiveTranslationField(models.ForeignObject):
    """
    This field allows querying and fetching the currently active translation
    for Article from ArticleTranslation.
    """
    requires_unique_target = False

    def get_extra_restriction(self, where_class, alias, related_alias):
        return ColConstraint(alias, 'lang', get_language())

    def get_extra_descriptor_filter(self, instance):
        return {'lang': get_language()}

    def contribute_to_class(self, cls, name):
        super(ActiveTranslationField, self).contribute_to_class(cls, name)
        setattr(cls, self.name, ArticleTranslationDescriptor(self))


class ActiveTranslationFieldWithQ(ActiveTranslationField):
    def get_extra_descriptor_filter(self, instance):
        return models.Q(lang=get_language())


@python_2_unicode_compatible
class Article(models.Model):
    active_translation = ActiveTranslationField(
        'ArticleTranslation',
        from_fields=['id'],
        to_fields=['article'],
        related_name='+',
        on_delete=models.CASCADE,
        null=True,
    )
    active_translation_q = ActiveTranslationFieldWithQ(
        'ArticleTranslation',
        from_fields=['id'],
        to_fields=['article'],
        related_name='+',
        on_delete=models.CASCADE,
        null=True,
    )
    pub_date = models.DateField()

    def __str__(self):
        try:
            return self.active_translation.title
        except ArticleTranslation.DoesNotExist:
            return '[No translation found]'


class NewsArticle(Article):
    pass


class ArticleTranslation(models.Model):
    article = models.ForeignKey(Article, models.CASCADE)
    lang = models.CharField(max_length=2)
    title = models.CharField(max_length=100)
    body = models.TextField()
    abstract = models.CharField(max_length=400, null=True)

    class Meta:
        unique_together = ('article', 'lang')
        ordering = ('active_translation__title',)


class ArticleTag(models.Model):
    article = models.ForeignKey(
        Article,
        models.CASCADE,
        related_name='tags',
        related_query_name='tag',
    )
    name = models.CharField(max_length=255)


class ArticleIdea(models.Model):
    articles = models.ManyToManyField(
        Article,
        related_name='ideas',
        related_query_name='idea_things',
    )
    name = models.CharField(max_length=255)
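
# Added usage sketch (not part of the original module): shows how the extra
# descriptor filter above ties the fetched translation to the active
# language. Hypothetical helper; it assumes a configured test database and is
# never called at import time.
def _active_translation_usage_sketch():
    import datetime
    from django.utils import translation
    article = Article.objects.create(pub_date=datetime.date(2016, 1, 1))
    ArticleTranslation.objects.create(
        article=article, lang='fr', title='Bonjour', body='...')
    with translation.override('fr'):
        # get_extra_descriptor_filter() injects lang=get_language(), so only
        # the French row is considered when the relation is traversed.
        assert Article.objects.get(pk=article.pk).active_translation.title == 'Bonjour'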
b32bf612dc9bbb67b49f5ec7671e150dd752d0a0a706c3140d42f18d375acfc3
from django.db import models # Since the test database doesn't have tablespaces, it's impossible for Django # to create the tables for models where db_tablespace is set. To avoid this # problem, we mark the models as unmanaged, and temporarily revert them to # managed during each test. We also set them to use the same tables as the # "reference" models to avoid errors when other tests run 'migrate' # (proxy_models_inheritance does). class ScientistRef(models.Model): name = models.CharField(max_length=50) class ArticleRef(models.Model): title = models.CharField(max_length=50, unique=True) code = models.CharField(max_length=50, unique=True) authors = models.ManyToManyField(ScientistRef, related_name='articles_written_set') reviewers = models.ManyToManyField(ScientistRef, related_name='articles_reviewed_set') class Scientist(models.Model): name = models.CharField(max_length=50) class Meta: db_table = 'model_options_scientistref' db_tablespace = 'tbl_tbsp' managed = False class Article(models.Model): title = models.CharField(max_length=50, unique=True) code = models.CharField(max_length=50, unique=True, db_tablespace='idx_tbsp') authors = models.ManyToManyField(Scientist, related_name='articles_written_set') reviewers = models.ManyToManyField(Scientist, related_name='articles_reviewed_set', db_tablespace='idx_tbsp') class Meta: db_table = 'model_options_articleref' db_tablespace = 'tbl_tbsp' managed = False # Also set the tables for automatically created models Authors = Article._meta.get_field('authors').remote_field.through Authors._meta.db_table = 'model_options_articleref_authors' Reviewers = Article._meta.get_field('reviewers').remote_field.through Reviewers._meta.db_table = 'model_options_articleref_reviewers'
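
# Added sketch (not part of the original module): one way a test could
# temporarily revert these models to managed, as described in the comment at
# the top of this file. The helper is hypothetical; the real test harness may
# do this differently.
def _with_managed_models_sketch():
    toggled = (Scientist, Article, Authors, Reviewers)
    for model in toggled:
        model._meta.managed = True
    try:
        pass  # create tables / run assertions while the models are managed
    finally:
        for model in toggled:
            model._meta.managed = False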
369b7d0cf556ddb6dda5a837ec39d072defcd98ce517325f33a0de946ffd0d4f
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('sites', '0001_initial'), ] operations = [ migrations.CreateModel( name='CustomArticle', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('title', models.CharField(max_length=50)), ('places_this_article_should_appear', models.ForeignKey('sites.Site', models.CASCADE)), ], options={ 'abstract': False, }, bases=(models.Model,), ), migrations.CreateModel( name='ExclusiveArticle', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('title', models.CharField(max_length=50)), ('site', models.ForeignKey('sites.Site', models.CASCADE)), ], options={ 'abstract': False, }, bases=(models.Model,), ), migrations.CreateModel( name='SyndicatedArticle', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('title', models.CharField(max_length=50)), ('sites', models.ManyToManyField('sites.Site')), ], options={ 'abstract': False, }, bases=(models.Model,), ), ]
2d027cd2adc2252ecdb7142b4c43138ff1a42a56692b991628e9353319493ce4
# This package is used to test the --exclude option of # the makemessages and compilemessages management commands. # The locale directory for this app is generated automatically # by the test cases. from django.utils.translation import ugettext as _ # Translators: This comment should be extracted dummy1 = _("This is a translatable string.") # This comment should not be extracted dummy2 = _("This is another translatable string.")
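
# Added illustrative note (not part of the original module): the option this
# package exists to test is passed on the command line, e.g.:
#
#   django-admin makemessages --exclude=pt_BR --exclude=fr
#   django-admin compilemessages --exclude=pt_BR
#
# Locales named by --exclude are skipped during extraction and compilation.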
a32e7178c0227a17dd43d9fbea9c52edc5ae246028783c38534f21464a3465a1
from django.utils.translation import ugettext as _, ungettext # Translators: This comment should be extracted dummy1 = _("This is a translatable string.") # This comment should not be extracted dummy2 = _("This is another translatable string.") # This file has a literal with plural forms. When processed first, makemessages # shouldn't create a .po file with duplicate `Plural-Forms` headers number = 3 dummy3 = ungettext("%(number)s Foo", "%(number)s Foos", number) % {'number': number} dummy4 = _('Size') # This string is intentionally duplicated in test.html dummy5 = _('This literal should be included.')
02133171b3c1b28e9b5fb9e79dc923a3bc17aac5ea81d7b33b1de2f89a1f2648
from __future__ import unicode_literals import os from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.http import HttpResponsePermanentRedirect from django.middleware.locale import LocaleMiddleware from django.template import Context, Template from django.test import SimpleTestCase, override_settings from django.test.client import RequestFactory from django.test.utils import override_script_prefix from django.urls import clear_url_caches, reverse, translate_url from django.utils import translation from django.utils._os import upath class PermanentRedirectLocaleMiddleWare(LocaleMiddleware): response_redirect_class = HttpResponsePermanentRedirect @override_settings( USE_I18N=True, LOCALE_PATHS=[ os.path.join(os.path.dirname(upath(__file__)), 'locale'), ], LANGUAGE_CODE='en-us', LANGUAGES=[ ('nl', 'Dutch'), ('en', 'English'), ('pt-br', 'Brazilian Portuguese'), ], MIDDLEWARE=[ 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ROOT_URLCONF='i18n.patterns.urls.default', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(os.path.dirname(upath(__file__)), 'templates')], 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.i18n', ], }, }], ) class URLTestCaseBase(SimpleTestCase): """ TestCase base-class for the URL tests. """ def setUp(self): # Make sure the cache is empty before we are doing our tests. clear_url_caches() def tearDown(self): # Make sure we will leave an empty cache for other testcases. clear_url_caches() class URLPrefixTests(URLTestCaseBase): """ Tests if the `i18n_patterns` is adding the prefix correctly. """ def test_not_prefixed(self): with translation.override('en'): self.assertEqual(reverse('not-prefixed'), '/not-prefixed/') self.assertEqual(reverse('not-prefixed-included-url'), '/not-prefixed-include/foo/') with translation.override('nl'): self.assertEqual(reverse('not-prefixed'), '/not-prefixed/') self.assertEqual(reverse('not-prefixed-included-url'), '/not-prefixed-include/foo/') def test_prefixed(self): with translation.override('en'): self.assertEqual(reverse('prefixed'), '/en/prefixed/') with translation.override('nl'): self.assertEqual(reverse('prefixed'), '/nl/prefixed/') with translation.override(None): self.assertEqual(reverse('prefixed'), '/%s/prefixed/' % settings.LANGUAGE_CODE) @override_settings(ROOT_URLCONF='i18n.patterns.urls.wrong') def test_invalid_prefix_use(self): with self.assertRaises(ImproperlyConfigured): reverse('account:register') @override_settings(ROOT_URLCONF='i18n.patterns.urls.disabled') class URLDisabledTests(URLTestCaseBase): @override_settings(USE_I18N=False) def test_prefixed_i18n_disabled(self): with translation.override('en'): self.assertEqual(reverse('prefixed'), '/prefixed/') with translation.override('nl'): self.assertEqual(reverse('prefixed'), '/prefixed/') class RequestURLConfTests(SimpleTestCase): @override_settings(ROOT_URLCONF='i18n.patterns.urls.path_unused') def test_request_urlconf_considered(self): request = RequestFactory().get('/nl/') request.urlconf = 'i18n.patterns.urls.default' middleware = LocaleMiddleware() with translation.override('nl'): middleware.process_request(request) self.assertEqual(request.LANGUAGE_CODE, 'nl') @override_settings(ROOT_URLCONF='i18n.patterns.urls.path_unused') class PathUnusedTests(URLTestCaseBase): """ Check that if no i18n_patterns is used in root URLconfs, then no language activation happens based on url prefix. 
""" def test_no_lang_activate(self): response = self.client.get('/nl/foo/') self.assertEqual(response.status_code, 200) self.assertEqual(response['content-language'], 'en') self.assertEqual(response.context['LANGUAGE_CODE'], 'en') class URLTranslationTests(URLTestCaseBase): """ Tests if the pattern-strings are translated correctly (within the `i18n_patterns` and the normal `patterns` function). """ def test_no_prefix_translated(self): with translation.override('en'): self.assertEqual(reverse('no-prefix-translated'), '/translated/') self.assertEqual(reverse('no-prefix-translated-slug', kwargs={'slug': 'yeah'}), '/translated/yeah/') with translation.override('nl'): self.assertEqual(reverse('no-prefix-translated'), '/vertaald/') self.assertEqual(reverse('no-prefix-translated-slug', kwargs={'slug': 'yeah'}), '/vertaald/yeah/') with translation.override('pt-br'): self.assertEqual(reverse('no-prefix-translated'), '/traduzidos/') self.assertEqual(reverse('no-prefix-translated-slug', kwargs={'slug': 'yeah'}), '/traduzidos/yeah/') def test_users_url(self): with translation.override('en'): self.assertEqual(reverse('users'), '/en/users/') with translation.override('nl'): self.assertEqual(reverse('users'), '/nl/gebruikers/') self.assertEqual(reverse('prefixed_xml'), '/nl/prefixed.xml') with translation.override('pt-br'): self.assertEqual(reverse('users'), '/pt-br/usuarios/') def test_translate_url_utility(self): with translation.override('en'): self.assertEqual(translate_url('/en/non-existent/', 'nl'), '/en/non-existent/') self.assertEqual(translate_url('/en/users/', 'nl'), '/nl/gebruikers/') # Namespaced URL self.assertEqual(translate_url('/en/account/register/', 'nl'), '/nl/profiel/registeren/') self.assertEqual(translation.get_language(), 'en') with translation.override('nl'): self.assertEqual(translate_url('/nl/gebruikers/', 'en'), '/en/users/') self.assertEqual(translation.get_language(), 'nl') class URLNamespaceTests(URLTestCaseBase): """ Tests if the translations are still working within namespaces. """ def test_account_register(self): with translation.override('en'): self.assertEqual(reverse('account:register'), '/en/account/register/') with translation.override('nl'): self.assertEqual(reverse('account:register'), '/nl/profiel/registeren/') class URLRedirectTests(URLTestCaseBase): """ Tests if the user gets redirected to the right URL when there is no language-prefix in the request URL. 
""" def test_no_prefix_response(self): response = self.client.get('/not-prefixed/') self.assertEqual(response.status_code, 200) def test_en_redirect(self): response = self.client.get('/account/register/', HTTP_ACCEPT_LANGUAGE='en') self.assertRedirects(response, '/en/account/register/') response = self.client.get(response['location']) self.assertEqual(response.status_code, 200) def test_en_redirect_wrong_url(self): response = self.client.get('/profiel/registeren/', HTTP_ACCEPT_LANGUAGE='en') self.assertEqual(response.status_code, 404) def test_nl_redirect(self): response = self.client.get('/profiel/registeren/', HTTP_ACCEPT_LANGUAGE='nl') self.assertRedirects(response, '/nl/profiel/registeren/') response = self.client.get(response['location']) self.assertEqual(response.status_code, 200) def test_nl_redirect_wrong_url(self): response = self.client.get('/account/register/', HTTP_ACCEPT_LANGUAGE='nl') self.assertEqual(response.status_code, 404) def test_pt_br_redirect(self): response = self.client.get('/conta/registre-se/', HTTP_ACCEPT_LANGUAGE='pt-br') self.assertRedirects(response, '/pt-br/conta/registre-se/') response = self.client.get(response['location']) self.assertEqual(response.status_code, 200) def test_pl_pl_redirect(self): # language from outside of the supported LANGUAGES list response = self.client.get('/account/register/', HTTP_ACCEPT_LANGUAGE='pl-pl') self.assertRedirects(response, '/en/account/register/') response = self.client.get(response['location']) self.assertEqual(response.status_code, 200) @override_settings( MIDDLEWARE=[ 'i18n.patterns.tests.PermanentRedirectLocaleMiddleWare', 'django.middleware.common.CommonMiddleware', ], ) def test_custom_redirect_class(self): response = self.client.get('/account/register/', HTTP_ACCEPT_LANGUAGE='en') self.assertRedirects(response, '/en/account/register/', 301) class URLVaryAcceptLanguageTests(URLTestCaseBase): """ Tests that 'Accept-Language' is not added to the Vary header when using prefixed URLs. """ def test_no_prefix_response(self): response = self.client.get('/not-prefixed/') self.assertEqual(response.status_code, 200) self.assertEqual(response.get('Vary'), 'Accept-Language') def test_en_redirect(self): response = self.client.get('/account/register/', HTTP_ACCEPT_LANGUAGE='en') self.assertRedirects(response, '/en/account/register/') self.assertFalse(response.get('Vary')) response = self.client.get(response['location']) self.assertEqual(response.status_code, 200) self.assertFalse(response.get('Vary')) class URLRedirectWithoutTrailingSlashTests(URLTestCaseBase): """ Tests the redirect when the requested URL doesn't end with a slash (`settings.APPEND_SLASH=True`). """ def test_not_prefixed_redirect(self): response = self.client.get('/not-prefixed', HTTP_ACCEPT_LANGUAGE='en') self.assertRedirects(response, '/not-prefixed/', 301) def test_en_redirect(self): response = self.client.get('/account/register', HTTP_ACCEPT_LANGUAGE='en', follow=True) # We only want one redirect, bypassing CommonMiddleware self.assertListEqual(response.redirect_chain, [('/en/account/register/', 302)]) self.assertRedirects(response, '/en/account/register/', 302) response = self.client.get('/prefixed.xml', HTTP_ACCEPT_LANGUAGE='en', follow=True) self.assertRedirects(response, '/en/prefixed.xml', 302) class URLRedirectWithoutTrailingSlashSettingTests(URLTestCaseBase): """ Tests the redirect when the requested URL doesn't end with a slash (`settings.APPEND_SLASH=False`). 
""" @override_settings(APPEND_SLASH=False) def test_not_prefixed_redirect(self): response = self.client.get('/not-prefixed', HTTP_ACCEPT_LANGUAGE='en') self.assertEqual(response.status_code, 404) @override_settings(APPEND_SLASH=False) def test_en_redirect(self): response = self.client.get('/account/register-without-slash', HTTP_ACCEPT_LANGUAGE='en') self.assertRedirects(response, '/en/account/register-without-slash', 302) response = self.client.get(response['location']) self.assertEqual(response.status_code, 200) class URLResponseTests(URLTestCaseBase): """ Tests if the response has the right language-code. """ def test_not_prefixed_with_prefix(self): response = self.client.get('/en/not-prefixed/') self.assertEqual(response.status_code, 404) def test_en_url(self): response = self.client.get('/en/account/register/') self.assertEqual(response.status_code, 200) self.assertEqual(response['content-language'], 'en') self.assertEqual(response.context['LANGUAGE_CODE'], 'en') def test_nl_url(self): response = self.client.get('/nl/profiel/registeren/') self.assertEqual(response.status_code, 200) self.assertEqual(response['content-language'], 'nl') self.assertEqual(response.context['LANGUAGE_CODE'], 'nl') def test_wrong_en_prefix(self): response = self.client.get('/en/profiel/registeren/') self.assertEqual(response.status_code, 404) def test_wrong_nl_prefix(self): response = self.client.get('/nl/account/register/') self.assertEqual(response.status_code, 404) def test_pt_br_url(self): response = self.client.get('/pt-br/conta/registre-se/') self.assertEqual(response.status_code, 200) self.assertEqual(response['content-language'], 'pt-br') self.assertEqual(response.context['LANGUAGE_CODE'], 'pt-br') class URLRedirectWithScriptAliasTests(URLTestCaseBase): """ #21579 - LocaleMiddleware should respect the script prefix. """ def test_language_prefix_with_script_prefix(self): prefix = '/script_prefix' with override_script_prefix(prefix): response = self.client.get('/prefixed/', HTTP_ACCEPT_LANGUAGE='en', SCRIPT_NAME=prefix) self.assertRedirects(response, '%s/en/prefixed/' % prefix, target_status_code=404) class URLTagTests(URLTestCaseBase): """ Test if the language tag works. """ def test_strings_only(self): t = Template("""{% load i18n %} {% language 'nl' %}{% url 'no-prefix-translated' %}{% endlanguage %} {% language 'pt-br' %}{% url 'no-prefix-translated' %}{% endlanguage %}""") self.assertEqual(t.render(Context({})).strip().split(), ['/vertaald/', '/traduzidos/']) def test_context(self): ctx = Context({'lang1': 'nl', 'lang2': 'pt-br'}) tpl = Template("""{% load i18n %} {% language lang1 %}{% url 'no-prefix-translated' %}{% endlanguage %} {% language lang2 %}{% url 'no-prefix-translated' %}{% endlanguage %}""") self.assertEqual(tpl.render(ctx).strip().split(), ['/vertaald/', '/traduzidos/']) def test_args(self): tpl = Template("""{% load i18n %} {% language 'nl' %}{% url 'no-prefix-translated-slug' 'apo' %}{% endlanguage %} {% language 'pt-br' %}{% url 'no-prefix-translated-slug' 'apo' %}{% endlanguage %}""") self.assertEqual(tpl.render(Context({})).strip().split(), ['/vertaald/apo/', '/traduzidos/apo/']) def test_kwargs(self): tpl = Template("""{% load i18n %} {% language 'nl' %}{% url 'no-prefix-translated-slug' slug='apo' %}{% endlanguage %} {% language 'pt-br' %}{% url 'no-prefix-translated-slug' slug='apo' %}{% endlanguage %}""") self.assertEqual(tpl.render(Context({})).strip().split(), ['/vertaald/apo/', '/traduzidos/apo/'])
fbd6480f2feb8095c553d5c2515e62af94a56f6a00785121bd94968107b0af9a
# Sample project used by test_extraction.CustomLayoutExtractionTests from django.utils.translation import ugettext as _ string = _("This is a project-level string")
c6a5e47f920b510d724fe738aef219ac2561dfc8e0d0848ae16094147de75fe3
# -*- coding: utf-8 -*- from __future__ import unicode_literals import os from django.contrib.contenttypes.models import ContentType from django.test import TestCase, override_settings from django.utils import six, translation from django.utils._os import upath @override_settings( USE_I18N=True, LOCALE_PATHS=[ os.path.join(os.path.dirname(upath(__file__)), 'locale'), ], LANGUAGE_CODE='en', LANGUAGES=[ ('en', 'English'), ('fr', 'French'), ], ) class ContentTypeTests(TestCase): def test_verbose_name(self): company_type = ContentType.objects.get(app_label='i18n', model='company') with translation.override('en'): self.assertEqual(six.text_type(company_type), 'Company') with translation.override('fr'): self.assertEqual(six.text_type(company_type), 'Société')
5501d0f82447bae5261ac4ff5a6fe8fad244784bbbf9a6e0c39725d8ba3e5c28
from django.conf.urls import include, url from django.conf.urls.i18n import i18n_patterns from django.utils.translation import ugettext_lazy as _ urlpatterns = i18n_patterns( url(_(r'^account/'), include('i18n.patterns.urls.wrong_namespace', namespace='account')), )
736c2a58d0af199f71e0a7ec37700cda46968e741cd2b718898aef4d66c4f222
from django.conf.urls import include, url from django.conf.urls.i18n import i18n_patterns from django.utils.translation import ugettext_lazy as _ from django.views.generic import TemplateView view = TemplateView.as_view(template_name='dummy.html') urlpatterns = [ url(r'^not-prefixed/$', view, name='not-prefixed'), url(r'^not-prefixed-include/', include('i18n.patterns.urls.included')), url(_(r'^translated/$'), view, name='no-prefix-translated'), url(_(r'^translated/(?P<slug>[\w-]+)/$'), view, name='no-prefix-translated-slug'), ] urlpatterns += i18n_patterns( url(r'^prefixed/$', view, name='prefixed'), url(r'^prefixed\.xml$', view, name='prefixed_xml'), url(_(r'^users/$'), view, name='users'), url(_(r'^account/'), include('i18n.patterns.urls.namespace', namespace='account')), )
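
# Added sketch (not part of the original module): with the patterns above, the
# language prefix comes from i18n_patterns while the path itself comes from
# the translated pattern string. Illustrative only; assumes the i18n test
# settings (LANGUAGES, LOCALE_PATHS) are active, and is never called at
# import time.
def _reversing_sketch():
    from django.urls import reverse
    from django.utils import translation
    with translation.override('nl'):
        assert reverse('users') == '/nl/gebruikers/'        # prefixed and translated
        assert reverse('not-prefixed') == '/not-prefixed/'  # outside i18n_patterns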
f24aef7b4efb5c4ddcdf3bbbe6492b1c0404fa25e68785b53a0d0af69301f69b
from django.conf.urls import url from django.conf.urls.i18n import i18n_patterns from django.utils.translation import ugettext_lazy as _ from django.views.generic import TemplateView view = TemplateView.as_view(template_name='dummy.html') app_name = 'account' urlpatterns = i18n_patterns( url(_(r'^register/$'), view, name='register'), )
4d873f900487e54d9304edc6d9f690d4774b44501f7343f531f28e4095306fb3
from django.conf.urls import url from django.utils.translation import ugettext_lazy as _ from django.views.generic import TemplateView view = TemplateView.as_view(template_name='dummy.html') app_name = 'account' urlpatterns = [ url(_(r'^register/$'), view, name='register'), url(_(r'^register-without-slash$'), view, name='register-without-slash'), ]
e1e46f70479686a2327fe947cb3138b3c1f92c81a490e16ccccaa910d456b5b5
from django.utils.translation import ugettext as _ string = _("This app has a locale directory")
14f102b66008d485e38a948c8c9cd9a149671b24d7dc7eb38b6c30b28407fa04
from django.utils.translation import ugettext as _ string = _("This app has no locale directory")
96b4000a62417836788152c7eaabc7682c88d56a756c9f76cc30bd21e99ef5c0
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): operations = [ migrations.CreateModel( "Signal", [ ("id", models.AutoField(primary_key=True)), ], ), ]
9304f8987ab73adc8740fa187598a715f5d6dda010ad63724e131697605eaa2d
""" Sphinx plugins for Django documentation. """ import json import os import re from docutils import nodes from docutils.parsers.rst import directives from sphinx import addnodes from sphinx.builders.html import StandaloneHTMLBuilder from sphinx.domains.std import Cmdoption from sphinx.util.compat import Directive from sphinx.util.console import bold from sphinx.util.nodes import set_source_info from sphinx.writers.html import SmartyPantsHTMLTranslator # RE for option descriptions without a '--' prefix simple_option_desc_re = re.compile( r'([-_a-zA-Z0-9]+)(\s*.*?)(?=,\s+(?:/|-|--)|$)') def setup(app): app.add_crossref_type( directivename="setting", rolename="setting", indextemplate="pair: %s; setting", ) app.add_crossref_type( directivename="templatetag", rolename="ttag", indextemplate="pair: %s; template tag" ) app.add_crossref_type( directivename="templatefilter", rolename="tfilter", indextemplate="pair: %s; template filter" ) app.add_crossref_type( directivename="fieldlookup", rolename="lookup", indextemplate="pair: %s; field lookup type", ) app.add_description_unit( directivename="django-admin", rolename="djadmin", indextemplate="pair: %s; django-admin command", parse_node=parse_django_admin_node, ) app.add_directive('django-admin-option', Cmdoption) app.add_config_value('django_next_version', '0.0', True) app.add_directive('versionadded', VersionDirective) app.add_directive('versionchanged', VersionDirective) app.add_builder(DjangoStandaloneHTMLBuilder) # register the snippet directive app.add_directive('snippet', SnippetWithFilename) # register a node for snippet directive so that the xml parser # knows how to handle the enter/exit parsing event app.add_node(snippet_with_filename, html=(visit_snippet, depart_snippet_literal), latex=(visit_snippet_latex, depart_snippet_latex), man=(visit_snippet_literal, depart_snippet_literal), text=(visit_snippet_literal, depart_snippet_literal), texinfo=(visit_snippet_literal, depart_snippet_literal)) return {'parallel_read_safe': True} class snippet_with_filename(nodes.literal_block): """ Subclass the literal_block to override the visit/depart event handlers """ pass def visit_snippet_literal(self, node): """ default literal block handler """ self.visit_literal_block(node) def depart_snippet_literal(self, node): """ default literal block handler """ self.depart_literal_block(node) def visit_snippet(self, node): """ HTML document generator visit handler """ lang = self.highlightlang linenos = node.rawsource.count('\n') >= self.highlightlinenothreshold - 1 fname = node['filename'] highlight_args = node.get('highlight_args', {}) if 'language' in node: # code-block directives lang = node['language'] highlight_args['force'] = True if 'linenos' in node: linenos = node['linenos'] def warner(msg): self.builder.warn(msg, (self.builder.current_docname, node.line)) highlighted = self.highlighter.highlight_block(node.rawsource, lang, warn=warner, linenos=linenos, **highlight_args) starttag = self.starttag(node, 'div', suffix='', CLASS='highlight-%s snippet' % lang) self.body.append(starttag) self.body.append('<div class="snippet-filename">%s</div>\n''' % (fname,)) self.body.append(highlighted) self.body.append('</div>\n') raise nodes.SkipNode def visit_snippet_latex(self, node): """ Latex document generator visit handler """ code = node.rawsource.rstrip('\n') lang = self.hlsettingstack[-1][0] linenos = code.count('\n') >= self.hlsettingstack[-1][1] - 1 fname = node['filename'] highlight_args = node.get('highlight_args', {}) if 'language' in node: # 
code-block directives
        lang = node['language']
        highlight_args['force'] = True
    if 'linenos' in node:
        linenos = node['linenos']

    def warner(msg):
        self.builder.warn(msg, (self.curfilestack[-1], node.line))

    hlcode = self.highlighter.highlight_block(code, lang, warn=warner,
                                              linenos=linenos, **highlight_args)

    self.body.append(
        '\n{\\colorbox[rgb]{0.9,0.9,0.9}'
        '{\\makebox[\\textwidth][l]'
        '{\\small\\texttt{%s}}}}\n' % (
            # Some filenames have '_', which is special in latex.
            fname.replace('_', r'\_'),
        )
    )

    if self.table:
        hlcode = hlcode.replace('\\begin{Verbatim}',
                                '\\begin{OriginalVerbatim}')
        self.table.has_problematic = True
        self.table.has_verbatim = True

    hlcode = hlcode.rstrip()[:-14]  # strip \end{Verbatim}
    hlcode = hlcode.rstrip() + '\n'
    self.body.append('\n' + hlcode + '\\end{%sVerbatim}\n' %
                     (self.table and 'Original' or ''))

    # Prevent rawsource from appearing in output a second time.
    raise nodes.SkipNode


def depart_snippet_latex(self, node):
    """
    Latex document generator depart handler.
    """
    pass


class SnippetWithFilename(Directive):
    """
    The 'snippet' directive that allows adding the (optional) filename of a
    code snippet to the document. This is modeled after CodeBlock.
    """
    has_content = True
    optional_arguments = 1
    option_spec = {'filename': directives.unchanged_required}

    def run(self):
        code = '\n'.join(self.content)

        literal = snippet_with_filename(code, code)
        if self.arguments:
            literal['language'] = self.arguments[0]
        literal['filename'] = self.options['filename']
        set_source_info(self, literal)
        return [literal]


class VersionDirective(Directive):
    has_content = True
    required_arguments = 1
    optional_arguments = 1
    final_argument_whitespace = True
    option_spec = {}

    def run(self):
        if len(self.arguments) > 1:
            msg = """Only one argument accepted for directive '{directive_name}::'.
        Comments should be provided as content,
        not as an extra argument.""".format(directive_name=self.name)
            raise self.error(msg)

        env = self.state.document.settings.env
        ret = []
        node = addnodes.versionmodified()
        ret.append(node)

        if self.arguments[0] == env.config.django_next_version:
            node['version'] = "Development version"
        else:
            node['version'] = self.arguments[0]

        node['type'] = self.name
        if self.content:
            self.state.nested_parse(self.content, self.content_offset, node)
        env.note_versionchange(node['type'], node['version'], node, self.lineno)
        return ret


class DjangoHTMLTranslator(SmartyPantsHTMLTranslator):
    """
    Django-specific reST to HTML tweaks.
    """

    # Don't use border=1, which docutils does by default.
    def visit_table(self, node):
        self.context.append(self.compact_p)
        self.compact_p = True
        self._table_row_index = 0  # Needed by Sphinx
        self.body.append(self.starttag(node, 'table', CLASS='docutils'))

    def depart_table(self, node):
        self.compact_p = self.context.pop()
        self.body.append('</table>\n')

    def visit_desc_parameterlist(self, node):
        self.body.append('(')  # by default sphinx puts <big> around the "("
        self.first_param = 1
        self.optional_param_level = 0
        self.param_separator = node.child_text_separator
        self.required_params_left = sum([isinstance(c, addnodes.desc_parameter)
                                         for c in node.children])

    def depart_desc_parameterlist(self, node):
        self.body.append(')')

    #
    # Turn the "new in version" stuff (versionadded/versionchanged) into a
    # better callout -- the Sphinx default is just a little span,
    # which is a bit less obvious than I'd like.
    #
    # FIXME: these messages are all hardcoded in English. We need to change
    # that to accommodate other language docs, but I can't work out how to make
    # that work.
# version_text = { 'versionchanged': 'Changed in Django %s', 'versionadded': 'New in Django %s', } def visit_versionmodified(self, node): self.body.append( self.starttag(node, 'div', CLASS=node['type']) ) version_text = self.version_text.get(node['type']) if version_text: title = "%s%s" % ( version_text % node['version'], ":" if len(node) else "." ) self.body.append('<span class="title">%s</span> ' % title) def depart_versionmodified(self, node): self.body.append("</div>\n") # Give each section a unique ID -- nice for custom CSS hooks def visit_section(self, node): old_ids = node.get('ids', []) node['ids'] = ['s-' + i for i in old_ids] node['ids'].extend(old_ids) SmartyPantsHTMLTranslator.visit_section(self, node) node['ids'] = old_ids def parse_django_admin_node(env, sig, signode): command = sig.split(' ')[0] env.ref_context['std:program'] = command title = "django-admin %s" % sig signode += addnodes.desc_name(title, title) return command class DjangoStandaloneHTMLBuilder(StandaloneHTMLBuilder): """ Subclass to add some extra things we need. """ name = 'djangohtml' def finish(self): super(DjangoStandaloneHTMLBuilder, self).finish() self.info(bold("writing templatebuiltins.js...")) xrefs = self.env.domaindata["std"]["objects"] templatebuiltins = { "ttags": [n for ((t, n), (l, a)) in xrefs.items() if t == "templatetag" and l == "ref/templates/builtins"], "tfilters": [n for ((t, n), (l, a)) in xrefs.items() if t == "templatefilter" and l == "ref/templates/builtins"], } outfilename = os.path.join(self.outdir, "templatebuiltins.js") with open(outfilename, 'w') as fp: fp.write('var django_template_builtins = ') json.dump(templatebuiltins, fp) fp.write(';\n')
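
# Added usage note (not part of the original module): a Sphinx project enables
# this extension from conf.py (illustrative; the real Django docs
# configuration may list more extensions):
#
#   extensions = ["djangodocs"]
#
# after which the directives registered in setup() become available in the
# reST sources, e.g.:
#
#   .. snippet:: python
#      :filename: myapp/models.py
#
#      from django.db import models
#
#   .. versionadded:: 1.9
#
# Per SnippetWithFilename.run() above, the ``:filename:`` option is
# effectively required, while the language argument is optional.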
6504c9dd4f061b8357ab2c381808037be913f53e117eff1307ea47f4461da37c
import datetime import pickle import unittest import uuid from copy import deepcopy from unittest import mock from django.core.exceptions import FieldError from django.db import DatabaseError, connection, models from django.db.models import CharField, Q, TimeField, UUIDField from django.db.models.aggregates import ( Avg, Count, Max, Min, StdDev, Sum, Variance, ) from django.db.models.expressions import ( Case, Col, Combinable, Exists, Expression, ExpressionList, ExpressionWrapper, F, Func, OrderBy, OuterRef, Random, RawSQL, Ref, Subquery, Value, When, ) from django.db.models.functions import ( Coalesce, Concat, Length, Lower, Substr, Upper, ) from django.db.models.sql import constants from django.db.models.sql.datastructures import Join from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature from django.test.utils import Approximate, isolate_apps from .models import ( UUID, UUIDPK, Company, Employee, Experiment, Number, RemoteEmployee, Result, SimulationRun, Time, ) class BasicExpressionsTests(TestCase): @classmethod def setUpTestData(cls): cls.example_inc = Company.objects.create( name="Example Inc.", num_employees=2300, num_chairs=5, ceo=Employee.objects.create(firstname="Joe", lastname="Smith", salary=10) ) cls.foobar_ltd = Company.objects.create( name="Foobar Ltd.", num_employees=3, num_chairs=4, based_in_eu=True, ceo=Employee.objects.create(firstname="Frank", lastname="Meyer", salary=20) ) cls.max = Employee.objects.create(firstname='Max', lastname='Mustermann', salary=30) cls.gmbh = Company.objects.create(name='Test GmbH', num_employees=32, num_chairs=1, ceo=cls.max) def setUp(self): self.company_query = Company.objects.values( "name", "num_employees", "num_chairs" ).order_by( "name", "num_employees", "num_chairs" ) def test_annotate_values_aggregate(self): companies = Company.objects.annotate( salaries=F('ceo__salary'), ).values('num_employees', 'salaries').aggregate( result=Sum( F('salaries') + F('num_employees'), output_field=models.IntegerField() ), ) self.assertEqual(companies['result'], 2395) def test_annotate_values_filter(self): companies = Company.objects.annotate( foo=RawSQL('%s', ['value']), ).filter(foo='value').order_by('name') self.assertQuerysetEqual( companies, ['<Company: Example Inc.>', '<Company: Foobar Ltd.>', '<Company: Test GmbH>'], ) def test_annotate_values_count(self): companies = Company.objects.annotate(foo=RawSQL('%s', ['value'])) self.assertEqual(companies.count(), 3) @skipUnlessDBFeature('supports_boolean_expr_in_select_clause') def test_filtering_on_annotate_that_uses_q(self): self.assertEqual( Company.objects.annotate( num_employees_check=ExpressionWrapper(Q(num_employees__gt=3), output_field=models.BooleanField()) ).filter(num_employees_check=True).count(), 2, ) def test_filtering_on_q_that_is_boolean(self): self.assertEqual( Company.objects.filter( ExpressionWrapper(Q(num_employees__gt=3), output_field=models.BooleanField()) ).count(), 2, ) def test_filter_inter_attribute(self): # We can filter on attribute relationships on same model obj, e.g. # find companies where the number of employees is greater # than the number of chairs. 
self.assertSequenceEqual( self.company_query.filter(num_employees__gt=F("num_chairs")), [ { "num_chairs": 5, "name": "Example Inc.", "num_employees": 2300, }, { "num_chairs": 1, "name": "Test GmbH", "num_employees": 32 }, ], ) def test_update(self): # We can set one field to have the value of another field # Make sure we have enough chairs self.company_query.update(num_chairs=F("num_employees")) self.assertSequenceEqual( self.company_query, [ { "num_chairs": 2300, "name": "Example Inc.", "num_employees": 2300 }, { "num_chairs": 3, "name": "Foobar Ltd.", "num_employees": 3 }, { "num_chairs": 32, "name": "Test GmbH", "num_employees": 32 } ], ) def test_arithmetic(self): # We can perform arithmetic operations in expressions # Make sure we have 2 spare chairs self.company_query.update(num_chairs=F("num_employees") + 2) self.assertSequenceEqual( self.company_query, [ { 'num_chairs': 2302, 'name': 'Example Inc.', 'num_employees': 2300 }, { 'num_chairs': 5, 'name': 'Foobar Ltd.', 'num_employees': 3 }, { 'num_chairs': 34, 'name': 'Test GmbH', 'num_employees': 32 } ], ) def test_order_of_operations(self): # Law of order of operations is followed self.company_query.update(num_chairs=F('num_employees') + 2 * F('num_employees')) self.assertSequenceEqual( self.company_query, [ { 'num_chairs': 6900, 'name': 'Example Inc.', 'num_employees': 2300 }, { 'num_chairs': 9, 'name': 'Foobar Ltd.', 'num_employees': 3 }, { 'num_chairs': 96, 'name': 'Test GmbH', 'num_employees': 32 } ], ) def test_parenthesis_priority(self): # Law of order of operations can be overridden by parentheses self.company_query.update(num_chairs=(F('num_employees') + 2) * F('num_employees')) self.assertSequenceEqual( self.company_query, [ { 'num_chairs': 5294600, 'name': 'Example Inc.', 'num_employees': 2300 }, { 'num_chairs': 15, 'name': 'Foobar Ltd.', 'num_employees': 3 }, { 'num_chairs': 1088, 'name': 'Test GmbH', 'num_employees': 32 } ], ) def test_update_with_fk(self): # ForeignKey can become updated with the value of another ForeignKey. 
self.assertEqual(Company.objects.update(point_of_contact=F('ceo')), 3) self.assertQuerysetEqual( Company.objects.all(), ['Joe Smith', 'Frank Meyer', 'Max Mustermann'], lambda c: str(c.point_of_contact), ordered=False ) def test_update_with_none(self): Number.objects.create(integer=1, float=1.0) Number.objects.create(integer=2) Number.objects.filter(float__isnull=False).update(float=Value(None)) self.assertQuerysetEqual( Number.objects.all(), [None, None], lambda n: n.float, ordered=False ) def test_filter_with_join(self): # F Expressions can also span joins Company.objects.update(point_of_contact=F('ceo')) c = Company.objects.first() c.point_of_contact = Employee.objects.create(firstname="Guido", lastname="van Rossum") c.save() self.assertQuerysetEqual( Company.objects.filter(ceo__firstname=F('point_of_contact__firstname')), ['Foobar Ltd.', 'Test GmbH'], lambda c: c.name, ordered=False ) Company.objects.exclude( ceo__firstname=F("point_of_contact__firstname") ).update(name="foo") self.assertEqual( Company.objects.exclude( ceo__firstname=F('point_of_contact__firstname') ).get().name, "foo", ) msg = "Joined field references are not permitted in this query" with self.assertRaisesMessage(FieldError, msg): Company.objects.exclude( ceo__firstname=F('point_of_contact__firstname') ).update(name=F('point_of_contact__lastname')) def test_object_update(self): # F expressions can be used to update attributes on single objects self.gmbh.num_employees = F('num_employees') + 4 self.gmbh.save() self.gmbh.refresh_from_db() self.assertEqual(self.gmbh.num_employees, 36) def test_new_object_save(self): # We should be able to use Funcs when inserting new data test_co = Company(name=Lower(Value('UPPER')), num_employees=32, num_chairs=1, ceo=self.max) test_co.save() test_co.refresh_from_db() self.assertEqual(test_co.name, "upper") def test_new_object_create(self): test_co = Company.objects.create(name=Lower(Value('UPPER')), num_employees=32, num_chairs=1, ceo=self.max) test_co.refresh_from_db() self.assertEqual(test_co.name, "upper") def test_object_create_with_aggregate(self): # Aggregates are not allowed when inserting new data msg = 'Aggregate functions are not allowed in this query (num_employees=Max(Value(1))).' with self.assertRaisesMessage(FieldError, msg): Company.objects.create( name='Company', num_employees=Max(Value(1)), num_chairs=1, ceo=Employee.objects.create(firstname="Just", lastname="Doit", salary=30), ) def test_object_update_fk(self): # F expressions cannot be used to update attributes which are foreign # keys, or attributes which involve joins. test_gmbh = Company.objects.get(pk=self.gmbh.pk) msg = 'F(ceo)": "Company.point_of_contact" must be a "Employee" instance.' 
with self.assertRaisesMessage(ValueError, msg): test_gmbh.point_of_contact = F('ceo') test_gmbh.point_of_contact = self.gmbh.ceo test_gmbh.save() test_gmbh.name = F('ceo__lastname') msg = 'Joined field references are not permitted in this query' with self.assertRaisesMessage(FieldError, msg): test_gmbh.save() def test_update_inherited_field_value(self): msg = 'Joined field references are not permitted in this query' with self.assertRaisesMessage(FieldError, msg): RemoteEmployee.objects.update(adjusted_salary=F('salary') * 5) def test_object_update_unsaved_objects(self): # F expressions cannot be used to update attributes on objects which do # not yet exist in the database acme = Company(name='The Acme Widget Co.', num_employees=12, num_chairs=5, ceo=self.max) acme.num_employees = F("num_employees") + 16 msg = ( 'Failed to insert expression "Col(expressions_company, ' 'expressions.Company.num_employees) + Value(16)" on ' 'expressions.Company.num_employees. F() expressions can only be ' 'used to update, not to insert.' ) with self.assertRaisesMessage(ValueError, msg): acme.save() acme.num_employees = 12 acme.name = Lower(F('name')) msg = ( 'Failed to insert expression "Lower(Col(expressions_company, ' 'expressions.Company.name))" on expressions.Company.name. F() ' 'expressions can only be used to update, not to insert.' ) with self.assertRaisesMessage(ValueError, msg): acme.save() def test_ticket_11722_iexact_lookup(self): Employee.objects.create(firstname="John", lastname="Doe") Employee.objects.create(firstname="Test", lastname="test") queryset = Employee.objects.filter(firstname__iexact=F('lastname')) self.assertQuerysetEqual(queryset, ["<Employee: Test test>"]) def test_ticket_16731_startswith_lookup(self): Employee.objects.create(firstname="John", lastname="Doe") e2 = Employee.objects.create(firstname="Jack", lastname="Jackson") e3 = Employee.objects.create(firstname="Jack", lastname="jackson") self.assertSequenceEqual( Employee.objects.filter(lastname__startswith=F('firstname')), [e2, e3] if connection.features.has_case_insensitive_like else [e2] ) qs = Employee.objects.filter(lastname__istartswith=F('firstname')).order_by('pk') self.assertSequenceEqual(qs, [e2, e3]) def test_ticket_18375_join_reuse(self): # Reverse multijoin F() references and the lookup target the same join. # Pre #18375 the F() join was generated first and the lookup couldn't # reuse that join. qs = Employee.objects.filter(company_ceo_set__num_chairs=F('company_ceo_set__num_employees')) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_ticket_18375_kwarg_ordering(self): # The next query was dict-randomization dependent - if the "gte=1" # was seen first, then the F() will reuse the join generated by the # gte lookup, if F() was seen first, then it generated a join the # other lookups could not reuse. qs = Employee.objects.filter( company_ceo_set__num_chairs=F('company_ceo_set__num_employees'), company_ceo_set__num_chairs__gte=1, ) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_ticket_18375_kwarg_ordering_2(self): # Another similar case for F() than above. Now we have the same join # in two filter kwargs, one in the lhs lookup, one in F. Here pre # #18375 the amount of joins generated was random if dict # randomization was enabled, that is the generated query dependent # on which clause was seen first. 
qs = Employee.objects.filter( company_ceo_set__num_employees=F('pk'), pk=F('company_ceo_set__num_employees') ) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_ticket_18375_chained_filters(self): # F() expressions do not reuse joins from previous filter. qs = Employee.objects.filter( company_ceo_set__num_employees=F('pk') ).filter( company_ceo_set__num_employees=F('company_ceo_set__num_employees') ) self.assertEqual(str(qs.query).count('JOIN'), 2) def test_order_by_exists(self): mary = Employee.objects.create(firstname='Mary', lastname='Mustermann', salary=20) mustermanns_by_seniority = Employee.objects.filter(lastname='Mustermann').order_by( # Order by whether the employee is the CEO of a company Exists(Company.objects.filter(ceo=OuterRef('pk'))).desc() ) self.assertSequenceEqual(mustermanns_by_seniority, [self.max, mary]) def test_order_by_multiline_sql(self): raw_order_by = ( RawSQL(''' CASE WHEN num_employees > 1000 THEN num_chairs ELSE 0 END ''', []).desc(), RawSQL(''' CASE WHEN num_chairs > 1 THEN 1 ELSE 0 END ''', []).asc() ) for qs in ( Company.objects.all(), Company.objects.distinct(), ): with self.subTest(qs=qs): self.assertSequenceEqual( qs.order_by(*raw_order_by), [self.example_inc, self.gmbh, self.foobar_ltd], ) def test_outerref(self): inner = Company.objects.filter(point_of_contact=OuterRef('pk')) msg = ( 'This queryset contains a reference to an outer query and may only ' 'be used in a subquery.' ) with self.assertRaisesMessage(ValueError, msg): inner.exists() outer = Employee.objects.annotate(is_point_of_contact=Exists(inner)) self.assertIs(outer.exists(), True) def test_exist_single_field_output_field(self): queryset = Company.objects.values('pk') self.assertIsInstance(Exists(queryset).output_field, models.BooleanField) def test_subquery(self): Company.objects.filter(name='Example Inc.').update( point_of_contact=Employee.objects.get(firstname='Joe', lastname='Smith'), ceo=self.max, ) Employee.objects.create(firstname='Bob', lastname='Brown', salary=40) qs = Employee.objects.annotate( is_point_of_contact=Exists(Company.objects.filter(point_of_contact=OuterRef('pk'))), is_not_point_of_contact=~Exists(Company.objects.filter(point_of_contact=OuterRef('pk'))), is_ceo_of_small_company=Exists(Company.objects.filter(num_employees__lt=200, ceo=OuterRef('pk'))), is_ceo_small_2=~~Exists(Company.objects.filter(num_employees__lt=200, ceo=OuterRef('pk'))), largest_company=Subquery(Company.objects.order_by('-num_employees').filter( models.Q(ceo=OuterRef('pk')) | models.Q(point_of_contact=OuterRef('pk')) ).values('name')[:1], output_field=models.CharField()) ).values( 'firstname', 'is_point_of_contact', 'is_not_point_of_contact', 'is_ceo_of_small_company', 'is_ceo_small_2', 'largest_company', ).order_by('firstname') results = list(qs) # Could use Coalesce(subq, Value('')) instead except for the bug in # cx_Oracle mentioned in #23843. 
bob = results[0] if bob['largest_company'] == '' and connection.features.interprets_empty_strings_as_nulls: bob['largest_company'] = None self.assertEqual(results, [ { 'firstname': 'Bob', 'is_point_of_contact': False, 'is_not_point_of_contact': True, 'is_ceo_of_small_company': False, 'is_ceo_small_2': False, 'largest_company': None, }, { 'firstname': 'Frank', 'is_point_of_contact': False, 'is_not_point_of_contact': True, 'is_ceo_of_small_company': True, 'is_ceo_small_2': True, 'largest_company': 'Foobar Ltd.', }, { 'firstname': 'Joe', 'is_point_of_contact': True, 'is_not_point_of_contact': False, 'is_ceo_of_small_company': False, 'is_ceo_small_2': False, 'largest_company': 'Example Inc.', }, { 'firstname': 'Max', 'is_point_of_contact': False, 'is_not_point_of_contact': True, 'is_ceo_of_small_company': True, 'is_ceo_small_2': True, 'largest_company': 'Example Inc.' } ]) # A less elegant way to write the same query: this uses a LEFT OUTER # JOIN and an IS NULL, inside a WHERE NOT IN which is probably less # efficient than EXISTS. self.assertCountEqual( qs.filter(is_point_of_contact=True).values('pk'), Employee.objects.exclude(company_point_of_contact_set=None).values('pk') ) def test_in_subquery(self): # This is a contrived test (and you really wouldn't write this query), # but it is a succinct way to test the __in=Subquery() construct. small_companies = Company.objects.filter(num_employees__lt=200).values('pk') subquery_test = Company.objects.filter(pk__in=Subquery(small_companies)) self.assertCountEqual(subquery_test, [self.foobar_ltd, self.gmbh]) subquery_test2 = Company.objects.filter(pk=Subquery(small_companies.filter(num_employees=3))) self.assertCountEqual(subquery_test2, [self.foobar_ltd]) def test_uuid_pk_subquery(self): u = UUIDPK.objects.create() UUID.objects.create(uuid_fk=u) qs = UUIDPK.objects.filter(id__in=Subquery(UUID.objects.values('uuid_fk__id'))) self.assertCountEqual(qs, [u]) def test_nested_subquery(self): inner = Company.objects.filter(point_of_contact=OuterRef('pk')) outer = Employee.objects.annotate(is_point_of_contact=Exists(inner)) contrived = Employee.objects.annotate( is_point_of_contact=Subquery( outer.filter(pk=OuterRef('pk')).values('is_point_of_contact'), output_field=models.BooleanField(), ), ) self.assertCountEqual(contrived.values_list(), outer.values_list()) def test_nested_subquery_outer_ref_2(self): first = Time.objects.create(time='09:00') second = Time.objects.create(time='17:00') third = Time.objects.create(time='21:00') SimulationRun.objects.bulk_create([ SimulationRun(start=first, end=second, midpoint='12:00'), SimulationRun(start=first, end=third, midpoint='15:00'), SimulationRun(start=second, end=first, midpoint='00:00'), ]) inner = Time.objects.filter(time=OuterRef(OuterRef('time')), pk=OuterRef('start')).values('time') middle = SimulationRun.objects.annotate(other=Subquery(inner)).values('other')[:1] outer = Time.objects.annotate(other=Subquery(middle, output_field=models.TimeField())) # This is a contrived example. It exercises the double OuterRef form. 
self.assertCountEqual(outer, [first, second, third]) def test_nested_subquery_outer_ref_with_autofield(self): first = Time.objects.create(time='09:00') second = Time.objects.create(time='17:00') SimulationRun.objects.create(start=first, end=second, midpoint='12:00') inner = SimulationRun.objects.filter(start=OuterRef(OuterRef('pk'))).values('start') middle = Time.objects.annotate(other=Subquery(inner)).values('other')[:1] outer = Time.objects.annotate(other=Subquery(middle, output_field=models.IntegerField())) # This exercises the double OuterRef form with AutoField as pk. self.assertCountEqual(outer, [first, second]) def test_annotations_within_subquery(self): Company.objects.filter(num_employees__lt=50).update(ceo=Employee.objects.get(firstname='Frank')) inner = Company.objects.filter( ceo=OuterRef('pk') ).values('ceo').annotate(total_employees=models.Sum('num_employees')).values('total_employees') outer = Employee.objects.annotate(total_employees=Subquery(inner)).filter(salary__lte=Subquery(inner)) self.assertSequenceEqual( outer.order_by('-total_employees').values('salary', 'total_employees'), [{'salary': 10, 'total_employees': 2300}, {'salary': 20, 'total_employees': 35}], ) def test_subquery_references_joined_table_twice(self): inner = Company.objects.filter( num_chairs__gte=OuterRef('ceo__salary'), num_employees__gte=OuterRef('point_of_contact__salary'), ) # Another contrived example (there is no need to have a subquery here) outer = Company.objects.filter(pk__in=Subquery(inner.values('pk'))) self.assertFalse(outer.exists()) def test_subquery_filter_by_aggregate(self): Number.objects.create(integer=1000, float=1.2) Employee.objects.create(salary=1000) qs = Number.objects.annotate( min_valuable_count=Subquery( Employee.objects.filter( salary=OuterRef('integer'), ).annotate(cnt=Count('salary')).filter(cnt__gt=0).values('cnt')[:1] ), ) self.assertEqual(qs.get().float, 1.2) def test_aggregate_subquery_annotation(self): with self.assertNumQueries(1) as ctx: aggregate = Company.objects.annotate( ceo_salary=Subquery( Employee.objects.filter( id=OuterRef('ceo_id'), ).values('salary') ), ).aggregate( ceo_salary_gt_20=Count('pk', filter=Q(ceo_salary__gt=20)), ) self.assertEqual(aggregate, {'ceo_salary_gt_20': 1}) # Aggregation over a subquery annotation doesn't annotate the subquery # twice in the inner query. sql = ctx.captured_queries[0]['sql'] self.assertLessEqual(sql.count('SELECT'), 3) # GROUP BY isn't required to aggregate over a query that doesn't # contain nested aggregates. 
self.assertNotIn('GROUP BY', sql) def test_explicit_output_field(self): class FuncA(Func): output_field = models.CharField() class FuncB(Func): pass expr = FuncB(FuncA()) self.assertEqual(expr.output_field, FuncA.output_field) def test_outerref_mixed_case_table_name(self): inner = Result.objects.filter(result_time__gte=OuterRef('experiment__assigned')) outer = Result.objects.filter(pk__in=Subquery(inner.values('pk'))) self.assertFalse(outer.exists()) def test_outerref_with_operator(self): inner = Company.objects.filter(num_employees=OuterRef('ceo__salary') + 2) outer = Company.objects.filter(pk__in=Subquery(inner.values('pk'))) self.assertEqual(outer.get().name, 'Test GmbH') def test_annotation_with_outerref(self): gmbh_salary = Company.objects.annotate( max_ceo_salary_raise=Subquery( Company.objects.annotate( salary_raise=OuterRef('num_employees') + F('num_employees'), ).order_by('-salary_raise').values('salary_raise')[:1], output_field=models.IntegerField(), ), ).get(pk=self.gmbh.pk) self.assertEqual(gmbh_salary.max_ceo_salary_raise, 2332) def test_pickle_expression(self): expr = Value(1, output_field=models.IntegerField()) expr.convert_value # populate cached property self.assertEqual(pickle.loads(pickle.dumps(expr)), expr) def test_incorrect_field_in_F_expression(self): with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."): list(Employee.objects.filter(firstname=F('nope'))) def test_incorrect_joined_field_in_F_expression(self): with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."): list(Company.objects.filter(ceo__pk=F('point_of_contact__nope'))) def test_exists_in_filter(self): inner = Company.objects.filter(ceo=OuterRef('pk')).values('pk') qs1 = Employee.objects.filter(Exists(inner)) qs2 = Employee.objects.annotate(found=Exists(inner)).filter(found=True) self.assertCountEqual(qs1, qs2) self.assertFalse(Employee.objects.exclude(Exists(inner)).exists()) self.assertCountEqual(qs2, Employee.objects.exclude(~Exists(inner))) def test_subquery_in_filter(self): inner = Company.objects.filter(ceo=OuterRef('pk')).values('based_in_eu') self.assertSequenceEqual( Employee.objects.filter(Subquery(inner)), [self.foobar_ltd.ceo], ) def test_case_in_filter_if_boolean_output_field(self): is_ceo = Company.objects.filter(ceo=OuterRef('pk')) is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) qs = Employee.objects.filter( Case( When(Exists(is_ceo), then=True), When(Exists(is_poc), then=True), default=False, output_field=models.BooleanField(), ), ) self.assertSequenceEqual(qs, [self.example_inc.ceo, self.foobar_ltd.ceo, self.max]) def test_boolean_expression_combined(self): is_ceo = Company.objects.filter(ceo=OuterRef('pk')) is_poc = Company.objects.filter(point_of_contact=OuterRef('pk')) self.gmbh.point_of_contact = self.max self.gmbh.save() self.assertSequenceEqual( Employee.objects.filter(Exists(is_ceo) | Exists(is_poc)), [self.example_inc.ceo, self.foobar_ltd.ceo, self.max], ) self.assertSequenceEqual( Employee.objects.filter(Exists(is_ceo) & Exists(is_poc)), [self.max], ) self.assertSequenceEqual( Employee.objects.filter(Exists(is_ceo) & Q(salary__gte=30)), [self.max], ) self.assertSequenceEqual( Employee.objects.filter(Exists(is_poc) | Q(salary__lt=15)), [self.example_inc.ceo, self.max], ) class IterableLookupInnerExpressionsTests(TestCase): @classmethod def setUpTestData(cls): ceo = Employee.objects.create(firstname='Just', lastname='Doit', salary=30) # MySQL requires that the values calculated for expressions don't pass 
# outside of the field's range, so it's inconvenient to use the values # in the more general tests. Company.objects.create(name='5020 Ltd', num_employees=50, num_chairs=20, ceo=ceo) Company.objects.create(name='5040 Ltd', num_employees=50, num_chairs=40, ceo=ceo) Company.objects.create(name='5050 Ltd', num_employees=50, num_chairs=50, ceo=ceo) Company.objects.create(name='5060 Ltd', num_employees=50, num_chairs=60, ceo=ceo) Company.objects.create(name='99300 Ltd', num_employees=99, num_chairs=300, ceo=ceo) def test_in_lookup_allows_F_expressions_and_expressions_for_integers(self): # __in lookups can use F() expressions for integers. queryset = Company.objects.filter(num_employees__in=([F('num_chairs') - 10])) self.assertQuerysetEqual(queryset, ['<Company: 5060 Ltd>'], ordered=False) self.assertQuerysetEqual( Company.objects.filter(num_employees__in=([F('num_chairs') - 10, F('num_chairs') + 10])), ['<Company: 5040 Ltd>', '<Company: 5060 Ltd>'], ordered=False ) self.assertQuerysetEqual( Company.objects.filter( num_employees__in=([F('num_chairs') - 10, F('num_chairs'), F('num_chairs') + 10]) ), ['<Company: 5040 Ltd>', '<Company: 5050 Ltd>', '<Company: 5060 Ltd>'], ordered=False ) def test_expressions_in_lookups_join_choice(self): midpoint = datetime.time(13, 0) t1 = Time.objects.create(time=datetime.time(12, 0)) t2 = Time.objects.create(time=datetime.time(14, 0)) SimulationRun.objects.create(start=t1, end=t2, midpoint=midpoint) SimulationRun.objects.create(start=t1, end=None, midpoint=midpoint) SimulationRun.objects.create(start=None, end=t2, midpoint=midpoint) SimulationRun.objects.create(start=None, end=None, midpoint=midpoint) queryset = SimulationRun.objects.filter(midpoint__range=[F('start__time'), F('end__time')]) self.assertQuerysetEqual( queryset, ['<SimulationRun: 13:00:00 (12:00:00 to 14:00:00)>'], ordered=False ) for alias in queryset.query.alias_map.values(): if isinstance(alias, Join): self.assertEqual(alias.join_type, constants.INNER) queryset = SimulationRun.objects.exclude(midpoint__range=[F('start__time'), F('end__time')]) self.assertQuerysetEqual(queryset, [], ordered=False) for alias in queryset.query.alias_map.values(): if isinstance(alias, Join): self.assertEqual(alias.join_type, constants.LOUTER) def test_range_lookup_allows_F_expressions_and_expressions_for_integers(self): # Range lookups can use F() expressions for integers. 
Company.objects.filter(num_employees__exact=F("num_chairs")) self.assertQuerysetEqual( Company.objects.filter(num_employees__range=(F('num_chairs'), 100)), ['<Company: 5020 Ltd>', '<Company: 5040 Ltd>', '<Company: 5050 Ltd>'], ordered=False ) self.assertQuerysetEqual( Company.objects.filter(num_employees__range=(F('num_chairs') - 10, F('num_chairs') + 10)), ['<Company: 5040 Ltd>', '<Company: 5050 Ltd>', '<Company: 5060 Ltd>'], ordered=False ) self.assertQuerysetEqual( Company.objects.filter(num_employees__range=(F('num_chairs') - 10, 100)), ['<Company: 5020 Ltd>', '<Company: 5040 Ltd>', '<Company: 5050 Ltd>', '<Company: 5060 Ltd>'], ordered=False ) self.assertQuerysetEqual( Company.objects.filter(num_employees__range=(1, 100)), [ '<Company: 5020 Ltd>', '<Company: 5040 Ltd>', '<Company: 5050 Ltd>', '<Company: 5060 Ltd>', '<Company: 99300 Ltd>', ], ordered=False ) @unittest.skipUnless(connection.vendor == 'sqlite', "This defensive test only works on databases that don't validate parameter types") def test_complex_expressions_do_not_introduce_sql_injection_via_untrusted_string_inclusion(self): """ This tests that SQL injection isn't possible using compilation of expressions in iterable filters, as their compilation happens before the main query compilation. It's limited to SQLite, as PostgreSQL, Oracle and other vendors have defense in depth against this by type checking. Testing against SQLite (the most permissive of the built-in databases) demonstrates that the problem doesn't exist while keeping the test simple. """ queryset = Company.objects.filter(name__in=[F('num_chairs') + '1)) OR ((1==1']) self.assertQuerysetEqual(queryset, [], ordered=False) def test_in_lookup_allows_F_expressions_and_expressions_for_datetimes(self): start = datetime.datetime(2016, 2, 3, 15, 0, 0) end = datetime.datetime(2016, 2, 5, 15, 0, 0) experiment_1 = Experiment.objects.create( name='Integrity testing', assigned=start.date(), start=start, end=end, completed=end.date(), estimated_time=end - start, ) experiment_2 = Experiment.objects.create( name='Taste testing', assigned=start.date(), start=start, end=end, completed=end.date(), estimated_time=end - start, ) Result.objects.create( experiment=experiment_1, result_time=datetime.datetime(2016, 2, 4, 15, 0, 0), ) Result.objects.create( experiment=experiment_1, result_time=datetime.datetime(2016, 3, 10, 2, 0, 0), ) Result.objects.create( experiment=experiment_2, result_time=datetime.datetime(2016, 1, 8, 5, 0, 0), ) within_experiment_time = [F('experiment__start'), F('experiment__end')] queryset = Result.objects.filter(result_time__range=within_experiment_time) self.assertQuerysetEqual(queryset, ["<Result: Result at 2016-02-04 15:00:00>"]) within_experiment_time = [F('experiment__start'), F('experiment__end')] queryset = Result.objects.filter(result_time__range=within_experiment_time) self.assertQuerysetEqual(queryset, ["<Result: Result at 2016-02-04 15:00:00>"]) class FTests(SimpleTestCase): def test_deepcopy(self): f = F("foo") g = deepcopy(f) self.assertEqual(f.name, g.name) def test_deconstruct(self): f = F('name') path, args, kwargs = f.deconstruct() self.assertEqual(path, 'django.db.models.expressions.F') self.assertEqual(args, (f.name,)) self.assertEqual(kwargs, {}) def test_equal(self): f = F('name') same_f = F('name') other_f = F('username') self.assertEqual(f, same_f) self.assertNotEqual(f, other_f) def test_hash(self): d = {F('name'): 'Bob'} self.assertIn(F('name'), d) self.assertEqual(d[F('name')], 'Bob') def test_not_equal_Value(self): f = F('name') value 
= Value('name') self.assertNotEqual(f, value) self.assertNotEqual(value, f) class ExpressionsTests(TestCase): def test_F_reuse(self): f = F('id') n = Number.objects.create(integer=-1) c = Company.objects.create( name="Example Inc.", num_employees=2300, num_chairs=5, ceo=Employee.objects.create(firstname="Joe", lastname="Smith") ) c_qs = Company.objects.filter(id=f) self.assertEqual(c_qs.get(), c) # Reuse the same F-object for another queryset n_qs = Number.objects.filter(id=f) self.assertEqual(n_qs.get(), n) # The original query still works correctly self.assertEqual(c_qs.get(), c) def test_patterns_escape(self): r""" Special characters (e.g. %, _ and \) stored in database are properly escaped when using a pattern lookup with an expression refs #16731 """ Employee.objects.bulk_create([ Employee(firstname="%Joh\\nny", lastname="%Joh\\n"), Employee(firstname="Johnny", lastname="%John"), Employee(firstname="Jean-Claude", lastname="Claud_"), Employee(firstname="Jean-Claude", lastname="Claude"), Employee(firstname="Jean-Claude", lastname="Claude%"), Employee(firstname="Johnny", lastname="Joh\\n"), Employee(firstname="Johnny", lastname="John"), Employee(firstname="Johnny", lastname="_ohn"), ]) self.assertQuerysetEqual( Employee.objects.filter(firstname__contains=F('lastname')), ["<Employee: %Joh\\nny %Joh\\n>", "<Employee: Jean-Claude Claude>", "<Employee: Johnny John>"], ordered=False, ) self.assertQuerysetEqual( Employee.objects.filter(firstname__startswith=F('lastname')), ["<Employee: %Joh\\nny %Joh\\n>", "<Employee: Johnny John>"], ordered=False, ) self.assertQuerysetEqual( Employee.objects.filter(firstname__endswith=F('lastname')), ["<Employee: Jean-Claude Claude>"], ordered=False, ) def test_insensitive_patterns_escape(self): r""" Special characters (e.g. 
%, _ and \) stored in database are properly escaped when using a case insensitive pattern lookup with an expression -- refs #16731 """ Employee.objects.bulk_create([ Employee(firstname="%Joh\\nny", lastname="%joh\\n"), Employee(firstname="Johnny", lastname="%john"), Employee(firstname="Jean-Claude", lastname="claud_"), Employee(firstname="Jean-Claude", lastname="claude"), Employee(firstname="Jean-Claude", lastname="claude%"), Employee(firstname="Johnny", lastname="joh\\n"), Employee(firstname="Johnny", lastname="john"), Employee(firstname="Johnny", lastname="_ohn"), ]) self.assertQuerysetEqual( Employee.objects.filter(firstname__icontains=F('lastname')), ["<Employee: %Joh\\nny %joh\\n>", "<Employee: Jean-Claude claude>", "<Employee: Johnny john>"], ordered=False, ) self.assertQuerysetEqual( Employee.objects.filter(firstname__istartswith=F('lastname')), ["<Employee: %Joh\\nny %joh\\n>", "<Employee: Johnny john>"], ordered=False, ) self.assertQuerysetEqual( Employee.objects.filter(firstname__iendswith=F('lastname')), ["<Employee: Jean-Claude claude>"], ordered=False, ) @isolate_apps('expressions') class SimpleExpressionTests(SimpleTestCase): def test_equal(self): self.assertEqual(Expression(), Expression()) self.assertEqual( Expression(models.IntegerField()), Expression(output_field=models.IntegerField()) ) self.assertEqual(Expression(models.IntegerField()), mock.ANY) self.assertNotEqual( Expression(models.IntegerField()), Expression(models.CharField()) ) class TestModel(models.Model): field = models.IntegerField() other_field = models.IntegerField() self.assertNotEqual( Expression(TestModel._meta.get_field('field')), Expression(TestModel._meta.get_field('other_field')), ) def test_hash(self): self.assertEqual(hash(Expression()), hash(Expression())) self.assertEqual( hash(Expression(models.IntegerField())), hash(Expression(output_field=models.IntegerField())) ) self.assertNotEqual( hash(Expression(models.IntegerField())), hash(Expression(models.CharField())), ) class TestModel(models.Model): field = models.IntegerField() other_field = models.IntegerField() self.assertNotEqual( hash(Expression(TestModel._meta.get_field('field'))), hash(Expression(TestModel._meta.get_field('other_field'))), ) class ExpressionsNumericTests(TestCase): @classmethod def setUpTestData(cls): Number(integer=-1).save() Number(integer=42).save() Number(integer=1337).save() Number.objects.update(float=F('integer')) def test_fill_with_value_from_same_object(self): """ We can fill a value in all objects with an other value of the same object. """ self.assertQuerysetEqual( Number.objects.all(), ['<Number: -1, -1.000>', '<Number: 42, 42.000>', '<Number: 1337, 1337.000>'], ordered=False ) def test_increment_value(self): """ We can increment a value of all objects in a query set. """ self.assertEqual(Number.objects.filter(integer__gt=0).update(integer=F('integer') + 1), 2) self.assertQuerysetEqual( Number.objects.all(), ['<Number: -1, -1.000>', '<Number: 43, 42.000>', '<Number: 1338, 1337.000>'], ordered=False ) def test_filter_not_equals_other_field(self): """ We can filter for objects, where a value is not equals the value of an other field. """ self.assertEqual(Number.objects.filter(integer__gt=0).update(integer=F('integer') + 1), 2) self.assertQuerysetEqual( Number.objects.exclude(float=F('integer')), ['<Number: 43, 42.000>', '<Number: 1338, 1337.000>'], ordered=False ) def test_complex_expressions(self): """ Complex expressions of different connection types are possible. 
""" n = Number.objects.create(integer=10, float=123.45) self.assertEqual(Number.objects.filter(pk=n.pk).update( float=F('integer') + F('float') * 2), 1) self.assertEqual(Number.objects.get(pk=n.pk).integer, 10) self.assertEqual(Number.objects.get(pk=n.pk).float, Approximate(256.900, places=3)) class ExpressionOperatorTests(TestCase): @classmethod def setUpTestData(cls): cls.n = Number.objects.create(integer=42, float=15.5) cls.n1 = Number.objects.create(integer=-42, float=-15.5) def test_lefthand_addition(self): # LH Addition of floats and integers Number.objects.filter(pk=self.n.pk).update( integer=F('integer') + 15, float=F('float') + 42.7 ) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3)) def test_lefthand_subtraction(self): # LH Subtraction of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') - 15, float=F('float') - 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(-27.200, places=3)) def test_lefthand_multiplication(self): # Multiplication of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') * 15, float=F('float') * 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3)) def test_lefthand_division(self): # LH Division of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') / 2, float=F('float') / 42.7) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 21) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(0.363, places=3)) def test_lefthand_modulo(self): # LH Modulo arithmetic on integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') % 20) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 2) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3)) def test_lefthand_bitwise_and(self): # LH Bitwise ands on integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer').bitand(56)) Number.objects.filter(pk=self.n1.pk).update(integer=F('integer').bitand(-56)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 40) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -64) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3)) def test_lefthand_bitwise_left_shift_operator(self): Number.objects.update(integer=F('integer').bitleftshift(2)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 168) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -168) def test_lefthand_bitwise_right_shift_operator(self): Number.objects.update(integer=F('integer').bitrightshift(2)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 10) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -11) def test_lefthand_bitwise_or(self): # LH Bitwise or on integers Number.objects.update(integer=F('integer').bitor(48)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 58) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -10) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3)) def test_lefthand_power(self): # LH Powert arithmetic operation on floats and integers Number.objects.filter(pk=self.n.pk).update(integer=F('integer') ** 2, float=F('float') ** 1.5) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 
1764) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(61.02, places=2)) def test_right_hand_addition(self): # Right hand operators Number.objects.filter(pk=self.n.pk).update(integer=15 + F('integer'), float=42.7 + F('float')) # RH Addition of floats and integers self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3)) def test_right_hand_subtraction(self): Number.objects.filter(pk=self.n.pk).update(integer=15 - F('integer'), float=42.7 - F('float')) # RH Subtraction of floats and integers self.assertEqual(Number.objects.get(pk=self.n.pk).integer, -27) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(27.200, places=3)) def test_right_hand_multiplication(self): # RH Multiplication of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=15 * F('integer'), float=42.7 * F('float')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3)) def test_right_hand_division(self): # RH Division of floats and integers Number.objects.filter(pk=self.n.pk).update(integer=640 / F('integer'), float=42.7 / F('float')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 15) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(2.755, places=3)) def test_right_hand_modulo(self): # RH Modulo arithmetic on integers Number.objects.filter(pk=self.n.pk).update(integer=69 % F('integer')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3)) def test_righthand_power(self): # RH Powert arithmetic operation on floats and integers Number.objects.filter(pk=self.n.pk).update(integer=2 ** F('integer'), float=1.5 ** F('float')) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 4398046511104) self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(536.308, places=3)) class FTimeDeltaTests(TestCase): @classmethod def setUpTestData(cls): cls.sday = sday = datetime.date(2010, 6, 25) cls.stime = stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000) midnight = datetime.time(0) delta0 = datetime.timedelta(0) delta1 = datetime.timedelta(microseconds=253000) delta2 = datetime.timedelta(seconds=44) delta3 = datetime.timedelta(hours=21, minutes=8) delta4 = datetime.timedelta(days=10) delta5 = datetime.timedelta(days=90) # Test data is set so that deltas and delays will be # strictly increasing. cls.deltas = [] cls.delays = [] cls.days_long = [] # e0: started same day as assigned, zero duration end = stime + delta0 e0 = Experiment.objects.create( name='e0', assigned=sday, start=stime, end=end, completed=end.date(), estimated_time=delta0, ) cls.deltas.append(delta0) cls.delays.append(e0.start - datetime.datetime.combine(e0.assigned, midnight)) cls.days_long.append(e0.completed - e0.assigned) # e1: started one day after assigned, tiny duration, data # set so that end time has no fractional seconds, which # tests an edge case on sqlite. 
delay = datetime.timedelta(1) end = stime + delay + delta1 e1 = Experiment.objects.create( name='e1', assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta1, ) cls.deltas.append(delta1) cls.delays.append(e1.start - datetime.datetime.combine(e1.assigned, midnight)) cls.days_long.append(e1.completed - e1.assigned) # e2: started three days after assigned, small duration end = stime + delta2 e2 = Experiment.objects.create( name='e2', assigned=sday - datetime.timedelta(3), start=stime, end=end, completed=end.date(), estimated_time=datetime.timedelta(hours=1), ) cls.deltas.append(delta2) cls.delays.append(e2.start - datetime.datetime.combine(e2.assigned, midnight)) cls.days_long.append(e2.completed - e2.assigned) # e3: started four days after assigned, medium duration delay = datetime.timedelta(4) end = stime + delay + delta3 e3 = Experiment.objects.create( name='e3', assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta3, ) cls.deltas.append(delta3) cls.delays.append(e3.start - datetime.datetime.combine(e3.assigned, midnight)) cls.days_long.append(e3.completed - e3.assigned) # e4: started 10 days after assignment, long duration end = stime + delta4 e4 = Experiment.objects.create( name='e4', assigned=sday - datetime.timedelta(10), start=stime, end=end, completed=end.date(), estimated_time=delta4 - datetime.timedelta(1), ) cls.deltas.append(delta4) cls.delays.append(e4.start - datetime.datetime.combine(e4.assigned, midnight)) cls.days_long.append(e4.completed - e4.assigned) # e5: started a month after assignment, very long duration delay = datetime.timedelta(30) end = stime + delay + delta5 e5 = Experiment.objects.create( name='e5', assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta5, ) cls.deltas.append(delta5) cls.delays.append(e5.start - datetime.datetime.combine(e5.assigned, midnight)) cls.days_long.append(e5.completed - e5.assigned) cls.expnames = [e.name for e in Experiment.objects.all()] def test_multiple_query_compilation(self): # Ticket #21643 queryset = Experiment.objects.filter(end__lt=F('start') + datetime.timedelta(hours=1)) q1 = str(queryset.query) q2 = str(queryset.query) self.assertEqual(q1, q2) def test_query_clone(self): # Ticket #21643 - Crash when compiling query more than once qs = Experiment.objects.filter(end__lt=F('start') + datetime.timedelta(hours=1)) qs2 = qs.all() list(qs) list(qs2) # Intentionally no assert def test_delta_add(self): for i, delta in enumerate(self.deltas): test_set = [e.name for e in Experiment.objects.filter(end__lt=F('start') + delta)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(end__lt=delta + F('start'))] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(end__lte=F('start') + delta)] self.assertEqual(test_set, self.expnames[:i + 1]) def test_delta_subtract(self): for i, delta in enumerate(self.deltas): test_set = [e.name for e in Experiment.objects.filter(start__gt=F('end') - delta)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(start__gte=F('end') - delta)] self.assertEqual(test_set, self.expnames[:i + 1]) def test_exclude(self): for i, delta in enumerate(self.deltas): test_set = [e.name for e in Experiment.objects.exclude(end__lt=F('start') + delta)] self.assertEqual(test_set, self.expnames[i:]) test_set = [e.name for e in Experiment.objects.exclude(end__lte=F('start') + delta)] 
self.assertEqual(test_set, self.expnames[i + 1:]) def test_date_comparison(self): for i, days in enumerate(self.days_long): test_set = [e.name for e in Experiment.objects.filter(completed__lt=F('assigned') + days)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(completed__lte=F('assigned') + days)] self.assertEqual(test_set, self.expnames[:i + 1]) @skipUnlessDBFeature("supports_mixed_date_datetime_comparisons") def test_mixed_comparisons1(self): for i, delay in enumerate(self.delays): test_set = [e.name for e in Experiment.objects.filter(assigned__gt=F('start') - delay)] self.assertEqual(test_set, self.expnames[:i]) test_set = [e.name for e in Experiment.objects.filter(assigned__gte=F('start') - delay)] self.assertEqual(test_set, self.expnames[:i + 1]) def test_mixed_comparisons2(self): for i, delay in enumerate(self.delays): delay = datetime.timedelta(delay.days) test_set = [e.name for e in Experiment.objects.filter(start__lt=F('assigned') + delay)] self.assertEqual(test_set, self.expnames[:i]) test_set = [ e.name for e in Experiment.objects.filter(start__lte=F('assigned') + delay + datetime.timedelta(1)) ] self.assertEqual(test_set, self.expnames[:i + 1]) def test_delta_update(self): for delta in self.deltas: exps = Experiment.objects.all() expected_durations = [e.duration() for e in exps] expected_starts = [e.start + delta for e in exps] expected_ends = [e.end + delta for e in exps] Experiment.objects.update(start=F('start') + delta, end=F('end') + delta) exps = Experiment.objects.all() new_starts = [e.start for e in exps] new_ends = [e.end for e in exps] new_durations = [e.duration() for e in exps] self.assertEqual(expected_starts, new_starts) self.assertEqual(expected_ends, new_ends) self.assertEqual(expected_durations, new_durations) def test_invalid_operator(self): with self.assertRaises(DatabaseError): list(Experiment.objects.filter(start=F('start') * datetime.timedelta(0))) def test_durationfield_add(self): zeros = [e.name for e in Experiment.objects.filter(start=F('start') + F('estimated_time'))] self.assertEqual(zeros, ['e0']) end_less = [e.name for e in Experiment.objects.filter(end__lt=F('start') + F('estimated_time'))] self.assertEqual(end_less, ['e2']) delta_math = [ e.name for e in Experiment.objects.filter(end__gte=F('start') + F('estimated_time') + datetime.timedelta(hours=1)) ] self.assertEqual(delta_math, ['e4']) queryset = Experiment.objects.annotate(shifted=ExpressionWrapper( F('start') + Value(None, output_field=models.DurationField()), output_field=models.DateTimeField(), )) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_date_subtraction(self): queryset = Experiment.objects.annotate( completion_duration=ExpressionWrapper( F('completed') - F('assigned'), output_field=models.DurationField() ) ) at_least_5_days = {e.name for e in queryset.filter(completion_duration__gte=datetime.timedelta(days=5))} self.assertEqual(at_least_5_days, {'e3', 'e4', 'e5'}) at_least_120_days = {e.name for e in queryset.filter(completion_duration__gte=datetime.timedelta(days=120))} self.assertEqual(at_least_120_days, {'e5'}) less_than_5_days = {e.name for e in queryset.filter(completion_duration__lt=datetime.timedelta(days=5))} self.assertEqual(less_than_5_days, {'e0', 'e1', 'e2'}) queryset = Experiment.objects.annotate(difference=ExpressionWrapper( F('completed') - Value(None, output_field=models.DateField()), output_field=models.DurationField(), )) 
self.assertIsNone(queryset.first().difference) queryset = Experiment.objects.annotate(shifted=ExpressionWrapper( F('completed') - Value(None, output_field=models.DurationField()), output_field=models.DateField(), )) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_time_subtraction(self): Time.objects.create(time=datetime.time(12, 30, 15, 2345)) queryset = Time.objects.annotate( difference=ExpressionWrapper( F('time') - Value(datetime.time(11, 15, 0), output_field=models.TimeField()), output_field=models.DurationField(), ) ) self.assertEqual( queryset.get().difference, datetime.timedelta(hours=1, minutes=15, seconds=15, microseconds=2345) ) queryset = Time.objects.annotate(difference=ExpressionWrapper( F('time') - Value(None, output_field=models.TimeField()), output_field=models.DurationField(), )) self.assertIsNone(queryset.first().difference) queryset = Time.objects.annotate(shifted=ExpressionWrapper( F('time') - Value(None, output_field=models.DurationField()), output_field=models.TimeField(), )) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_datetime_subtraction(self): under_estimate = [ e.name for e in Experiment.objects.filter(estimated_time__gt=F('end') - F('start')) ] self.assertEqual(under_estimate, ['e2']) over_estimate = [ e.name for e in Experiment.objects.filter(estimated_time__lt=F('end') - F('start')) ] self.assertEqual(over_estimate, ['e4']) queryset = Experiment.objects.annotate(difference=ExpressionWrapper( F('start') - Value(None, output_field=models.DateTimeField()), output_field=models.DurationField(), )) self.assertIsNone(queryset.first().difference) queryset = Experiment.objects.annotate(shifted=ExpressionWrapper( F('start') - Value(None, output_field=models.DurationField()), output_field=models.DateTimeField(), )) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature('supports_temporal_subtraction') def test_datetime_subtraction_microseconds(self): delta = datetime.timedelta(microseconds=8999999999999999) Experiment.objects.update(end=F('start') + delta) qs = Experiment.objects.annotate( delta=ExpressionWrapper(F('end') - F('start'), output_field=models.DurationField()) ) for e in qs: self.assertEqual(e.delta, delta) def test_duration_with_datetime(self): # Exclude e1 which has very high precision so we can test this on all # backends regardless of whether or not it supports # microsecond_precision. 
over_estimate = Experiment.objects.exclude(name='e1').filter( completed__gt=self.stime + F('estimated_time'), ).order_by('name') self.assertQuerysetEqual(over_estimate, ['e3', 'e4', 'e5'], lambda e: e.name) def test_duration_with_datetime_microseconds(self): delta = datetime.timedelta(microseconds=8999999999999999) qs = Experiment.objects.annotate(dt=ExpressionWrapper( F('start') + delta, output_field=models.DateTimeField(), )) for e in qs: self.assertEqual(e.dt, e.start + delta) def test_date_minus_duration(self): more_than_4_days = Experiment.objects.filter( assigned__lt=F('completed') - Value(datetime.timedelta(days=4), output_field=models.DurationField()) ) self.assertQuerysetEqual(more_than_4_days, ['e3', 'e4', 'e5'], lambda e: e.name) def test_negative_timedelta_update(self): # subtract 30 seconds, 30 minutes, 2 hours and 2 days experiments = Experiment.objects.filter(name='e0').annotate( start_sub_seconds=F('start') + datetime.timedelta(seconds=-30), ).annotate( start_sub_minutes=F('start_sub_seconds') + datetime.timedelta(minutes=-30), ).annotate( start_sub_hours=F('start_sub_minutes') + datetime.timedelta(hours=-2), ).annotate( new_start=F('start_sub_hours') + datetime.timedelta(days=-2), ) expected_start = datetime.datetime(2010, 6, 23, 9, 45, 0) # subtract 30 microseconds experiments = experiments.annotate(new_start=F('new_start') + datetime.timedelta(microseconds=-30)) expected_start += datetime.timedelta(microseconds=+746970) experiments.update(start=F('new_start')) e0 = Experiment.objects.get(name='e0') self.assertEqual(e0.start, expected_start) class ValueTests(TestCase): def test_update_TimeField_using_Value(self): Time.objects.create() Time.objects.update(time=Value(datetime.time(1), output_field=TimeField())) self.assertEqual(Time.objects.get().time, datetime.time(1)) def test_update_UUIDField_using_Value(self): UUID.objects.create() UUID.objects.update(uuid=Value(uuid.UUID('12345678901234567890123456789012'), output_field=UUIDField())) self.assertEqual(UUID.objects.get().uuid, uuid.UUID('12345678901234567890123456789012')) def test_deconstruct(self): value = Value('name') path, args, kwargs = value.deconstruct() self.assertEqual(path, 'django.db.models.expressions.Value') self.assertEqual(args, (value.value,)) self.assertEqual(kwargs, {}) def test_deconstruct_output_field(self): value = Value('name', output_field=CharField()) path, args, kwargs = value.deconstruct() self.assertEqual(path, 'django.db.models.expressions.Value') self.assertEqual(args, (value.value,)) self.assertEqual(len(kwargs), 1) self.assertEqual(kwargs['output_field'].deconstruct(), CharField().deconstruct()) def test_equal(self): value = Value('name') self.assertEqual(value, Value('name')) self.assertNotEqual(value, Value('username')) def test_hash(self): d = {Value('name'): 'Bob'} self.assertIn(Value('name'), d) self.assertEqual(d[Value('name')], 'Bob') def test_equal_output_field(self): value = Value('name', output_field=CharField()) same_value = Value('name', output_field=CharField()) other_value = Value('name', output_field=TimeField()) no_output_field = Value('name') self.assertEqual(value, same_value) self.assertNotEqual(value, other_value) self.assertNotEqual(value, no_output_field) def test_raise_empty_expressionlist(self): msg = 'ExpressionList requires at least one expression' with self.assertRaisesMessage(ValueError, msg): ExpressionList() class FieldTransformTests(TestCase): @classmethod def setUpTestData(cls): cls.sday = sday = datetime.date(2010, 6, 25) cls.stime = stime = 
datetime.datetime(2010, 6, 25, 12, 15, 30, 747000) cls.ex1 = Experiment.objects.create( name='Experiment 1', assigned=sday, completed=sday + datetime.timedelta(2), estimated_time=datetime.timedelta(2), start=stime, end=stime + datetime.timedelta(2), ) def test_month_aggregation(self): self.assertEqual( Experiment.objects.aggregate(month_count=Count('assigned__month')), {'month_count': 1} ) def test_transform_in_values(self): self.assertQuerysetEqual( Experiment.objects.values('assigned__month'), ["{'assigned__month': 6}"] ) def test_multiple_transforms_in_values(self): self.assertQuerysetEqual( Experiment.objects.values('end__date__month'), ["{'end__date__month': 6}"] ) class ReprTests(SimpleTestCase): def test_expressions(self): self.assertEqual( repr(Case(When(a=1))), "<Case: CASE WHEN <Q: (AND: ('a', 1))> THEN Value(None), ELSE Value(None)>" ) self.assertEqual( repr(When(Q(age__gte=18), then=Value('legal'))), "<When: WHEN <Q: (AND: ('age__gte', 18))> THEN Value(legal)>" ) self.assertEqual(repr(Col('alias', 'field')), "Col(alias, field)") self.assertEqual(repr(F('published')), "F(published)") self.assertEqual(repr(F('cost') + F('tax')), "<CombinedExpression: F(cost) + F(tax)>") self.assertEqual( repr(ExpressionWrapper(F('cost') + F('tax'), models.IntegerField())), "ExpressionWrapper(F(cost) + F(tax))" ) self.assertEqual(repr(Func('published', function='TO_CHAR')), "Func(F(published), function=TO_CHAR)") self.assertEqual(repr(OrderBy(Value(1))), 'OrderBy(Value(1), descending=False)') self.assertEqual(repr(Random()), "Random()") self.assertEqual(repr(RawSQL('table.col', [])), "RawSQL(table.col, [])") self.assertEqual(repr(Ref('sum_cost', Sum('cost'))), "Ref(sum_cost, Sum(F(cost)))") self.assertEqual(repr(Value(1)), "Value(1)") self.assertEqual( repr(ExpressionList(F('col'), F('anothercol'))), 'ExpressionList(F(col), F(anothercol))' ) self.assertEqual( repr(ExpressionList(OrderBy(F('col'), descending=False))), 'ExpressionList(OrderBy(F(col), descending=False))' ) def test_functions(self): self.assertEqual(repr(Coalesce('a', 'b')), "Coalesce(F(a), F(b))") self.assertEqual(repr(Concat('a', 'b')), "Concat(ConcatPair(F(a), F(b)))") self.assertEqual(repr(Length('a')), "Length(F(a))") self.assertEqual(repr(Lower('a')), "Lower(F(a))") self.assertEqual(repr(Substr('a', 1, 3)), "Substr(F(a), Value(1), Value(3))") self.assertEqual(repr(Upper('a')), "Upper(F(a))") def test_aggregates(self): self.assertEqual(repr(Avg('a')), "Avg(F(a))") self.assertEqual(repr(Count('a')), "Count(F(a))") self.assertEqual(repr(Count('*')), "Count('*')") self.assertEqual(repr(Max('a')), "Max(F(a))") self.assertEqual(repr(Min('a')), "Min(F(a))") self.assertEqual(repr(StdDev('a')), "StdDev(F(a), sample=False)") self.assertEqual(repr(Sum('a')), "Sum(F(a))") self.assertEqual(repr(Variance('a', sample=True)), "Variance(F(a), sample=True)") def test_distinct_aggregates(self): self.assertEqual(repr(Count('a', distinct=True)), "Count(F(a), distinct=True)") self.assertEqual(repr(Count('*', distinct=True)), "Count('*', distinct=True)") def test_filtered_aggregates(self): filter = Q(a=1) self.assertEqual(repr(Avg('a', filter=filter)), "Avg(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(Count('a', filter=filter)), "Count(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(Max('a', filter=filter)), "Max(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(Min('a', filter=filter)), "Min(F(a), filter=(AND: ('a', 1)))") self.assertEqual(repr(StdDev('a', filter=filter)), "StdDev(F(a), filter=(AND: ('a', 1)), sample=False)") 
self.assertEqual(repr(Sum('a', filter=filter)), "Sum(F(a), filter=(AND: ('a', 1)))") self.assertEqual( repr(Variance('a', sample=True, filter=filter)), "Variance(F(a), filter=(AND: ('a', 1)), sample=True)" ) self.assertEqual( repr(Count('a', filter=filter, distinct=True)), "Count(F(a), distinct=True, filter=(AND: ('a', 1)))" ) class CombinableTests(SimpleTestCase): bitwise_msg = 'Use .bitand() and .bitor() for bitwise logical operations.' def test_negation(self): c = Combinable() self.assertEqual(-c, c * -1) def test_and(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): Combinable() & Combinable() def test_or(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): Combinable() | Combinable() def test_reversed_and(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): object() & Combinable() def test_reversed_or(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): object() | Combinable()
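

# ---------------------------------------------------------------------------
# Editorial sketch (not part of the original test suite): the tests above
# exercise F() arithmetic and correlated Exists() subqueries. A minimal usage
# pattern looks like the helper below. It reuses the Company/Employee models
# and the F/OuterRef/Exists names already imported for these tests; the
# helper itself is illustrative only and is never called.
def _example_f_and_exists_usage():
    # Push the arithmetic into the database: every company gains one chair.
    Company.objects.update(num_chairs=F('num_chairs') + 1)
    # Correlated subquery: employees who are the CEO of at least one company.
    is_ceo = Company.objects.filter(ceo=OuterRef('pk'))
    return Employee.objects.filter(Exists(is_ceo))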
dbf32dde931c32e73b971f3ae2559a43cf1359121eb7a4ca69d4d3fc0e4d97a2
""" The matplotlib build options can be modified with a setup.cfg file. See setup.cfg.template for more information. """ # NOTE: This file must remain Python 2 compatible for the foreseeable future, # to ensure that we error out properly for people with outdated setuptools # and/or pip. import sys min_version = (3, 6) if sys.version_info < min_version: error = """ Beginning with Matplotlib 3.1, Python {0} or above is required. This may be due to an out of date pip. Make sure you have pip >= 9.0.1. """.format('.'.join(str(n) for n in min_version)), sys.exit(error) from io import BytesIO import os from string import Template import urllib.request from zipfile import ZipFile from setuptools import setup, Extension from setuptools.command.build_ext import build_ext as BuildExtCommand from setuptools.command.develop import develop as DevelopCommand from setuptools.command.install_lib import install_lib as InstallLibCommand from setuptools.command.test import test as TestCommand # The setuptools version of sdist adds a setup.cfg file to the tree. # We don't want that, so we simply remove it, and it will fall back to # vanilla distutils. try: from setuptools.command import sdist except ImportError: pass else: del sdist.sdist.make_release_tree from distutils.dist import Distribution import setupext from setupext import (print_line, print_raw, print_message, print_status, download_or_cache) # Get the version from versioneer import versioneer __version__ = versioneer.get_version() # These are the packages in the order we want to display them. This # list may contain strings to create section headers for the display. mpl_packages = [ 'Building Matplotlib', setupext.Matplotlib(), setupext.Python(), setupext.Platform(), setupext.Numpy(), setupext.LibAgg(), setupext.FreeType(), setupext.FT2Font(), setupext.Png(), setupext.Qhull(), setupext.Image(), setupext.TTConv(), setupext.Path(), setupext.Contour(), setupext.QhullWrap(), setupext.Tri(), 'Optional subpackages', setupext.SampleData(), setupext.Tests(), 'Optional backend extensions', setupext.BackendAgg(), setupext.BackendTkAgg(), setupext.BackendMacOSX(), 'Optional package data', setupext.Dlls(), ] classifiers = [ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: Python Software Foundation License', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Topic :: Scientific/Engineering :: Visualization', ] class NoopTestCommand(TestCommand): def __init__(self, dist): print("Matplotlib does not support running tests with " "'python setup.py test'. Please run 'pytest'.") class BuildExtraLibraries(BuildExtCommand): def finalize_options(self): self.distribution.ext_modules[:] = filter( None, (package.get_extension() for package in good_packages)) super().finalize_options() def build_extensions(self): # Remove the -Wstrict-prototypes option, it's not valid for C++. Fixed # in Py3.7 as bpo-5755. try: self.compiler.compiler_so.remove('-Wstrict-prototypes') except (ValueError, AttributeError): pass for package in good_packages: package.do_custom_build() return super().build_extensions() cmdclass = versioneer.get_cmdclass() cmdclass['test'] = NoopTestCommand cmdclass['build_ext'] = BuildExtraLibraries def _download_jquery_to(dest): # Note: When bumping the jquery-ui version, also update the versions in # single_figure.html and all_figures.html. 
url = "https://jqueryui.com/resources/download/jquery-ui-1.12.1.zip" sha = 'f8233674366ab36b2c34c577ec77a3d70cac75d2e387d8587f3836345c0f624d' if not os.path.exists(os.path.join(dest, "jquery-ui-1.12.1")): os.makedirs(dest, exist_ok=True) try: buff = download_or_cache(url, sha) except Exception: raise IOError("Failed to download jquery-ui. Please download " + "{url} and extract it to {dest}.".format( url=url, dest=dest)) with ZipFile(buff) as zf: zf.extractall(dest) # Relying on versioneer's implementation detail. class sdist_with_jquery(cmdclass['sdist']): def make_release_tree(self, base_dir, files): super(sdist_with_jquery, self).make_release_tree(base_dir, files) _download_jquery_to( os.path.join(base_dir, "lib/matplotlib/backends/web_backend/")) # Affects install and bdist_wheel. class install_lib_with_jquery(InstallLibCommand): def run(self): super(install_lib_with_jquery, self).run() _download_jquery_to( os.path.join(self.install_dir, "matplotlib/backends/web_backend/")) class develop_with_jquery(DevelopCommand): def run(self): super(develop_with_jquery, self).run() _download_jquery_to("lib/matplotlib/backends/web_backend/") cmdclass['sdist'] = sdist_with_jquery cmdclass['install_lib'] = install_lib_with_jquery cmdclass['develop'] = develop_with_jquery # One doesn't normally see `if __name__ == '__main__'` blocks in a setup.py, # however, this is needed on Windows to avoid creating infinite subprocesses # when using multiprocessing. if __name__ == '__main__': # These are distutils.setup parameters that the various packages add # things to. packages = [] namespace_packages = [] py_modules = [] # Dummy extension to trigger build_ext, which will swap it out with real # extensions that can depend on numpy for the build. ext_modules = [Extension('', [])] package_data = {} package_dir = {'': 'lib'} install_requires = [] setup_requires = [] # If the user just queries for information, don't bother figuring out which # packages to build or install. if (any('--' + opt in sys.argv for opt in Distribution.display_option_names + ['help']) or 'clean' in sys.argv): setup_requires = [] else: # Go through all of the packages and figure out which ones we are # going to build/install. print_line() print_raw("Edit setup.cfg to change the build options") required_failed = [] good_packages = [] for package in mpl_packages: if isinstance(package, str): print_raw('') print_raw(package.upper()) else: try: result = package.check() if result is not None: message = 'yes [%s]' % result print_status(package.name, message) except setupext.CheckFailed as e: msg = str(e).strip() if len(msg): print_status(package.name, 'no [%s]' % msg) else: print_status(package.name, 'no') if not package.optional: required_failed.append(package) else: good_packages.append(package) print_raw('') # Abort if any of the required packages can not be built. if required_failed: print_line() print_message("The following required packages can not be built: " "%s" % ", ".join(x.name for x in required_failed)) for pkg in required_failed: msg = pkg.install_help_msg() if msg: print_message(msg) sys.exit(1) # Now collect all of the information we need to build all of the # packages. for package in good_packages: packages.extend(package.get_packages()) namespace_packages.extend(package.get_namespace_packages()) py_modules.extend(package.get_py_modules()) # Extension modules only get added in build_ext, as numpy will have # been installed (as setup_requires) at that point. 
data = package.get_package_data() for key, val in data.items(): package_data.setdefault(key, []) package_data[key] = list(set(val + package_data[key])) install_requires.extend(package.get_install_requires()) setup_requires.extend(package.get_setup_requires()) # Write the default matplotlibrc file with open('matplotlibrc.template') as fd: template_lines = fd.read().splitlines(True) backend_line_idx, = [ # Also asserts that there is a single such line. idx for idx, line in enumerate(template_lines) if line.startswith('#backend ')] if setupext.options['backend']: template_lines[backend_line_idx] = ( 'backend: {}'.format(setupext.options['backend'])) with open('lib/matplotlib/mpl-data/matplotlibrc', 'w') as fd: fd.write(''.join(template_lines)) # Finally, pass this all along to distutils to do the heavy lifting. setup( name="matplotlib", version=__version__, description="Python plotting package", author="John D. Hunter, Michael Droettboom", author_email="[email protected]", url="https://matplotlib.org", long_description=""" Matplotlib strives to produce publication quality 2D graphics for interactive graphing, scientific publishing, user interface development and web application servers targeting multiple user interfaces and hardcopy output formats. """, license="PSF", packages=packages, namespace_packages=namespace_packages, platforms='any', py_modules=py_modules, ext_modules=ext_modules, package_dir=package_dir, package_data=package_data, classifiers=classifiers, download_url="https://matplotlib.org/users/installing.html", project_urls={ 'Bug Tracker': 'https://github.com/matplotlib/matplotlib/issues', 'Documentation': 'https://matplotlib.org/contents.html', 'Source Code': 'https://github.com/matplotlib/matplotlib' }, python_requires='>={}'.format('.'.join(str(n) for n in min_version)), # List third-party Python packages that we require install_requires=install_requires, setup_requires=setup_requires, # matplotlib has C/C++ extensions, so it's not zip safe. # Telling setuptools this prevents it from doing an automatic # check for zip safety. zip_safe=False, cmdclass=cmdclass, )
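

# ---------------------------------------------------------------------------
# Editorial sketch (not part of the original build script): the module
# docstring says the build options come from setup.cfg. The helper below
# shows, using the same section and option names that setupext consults, how
# such a file could be read back. The function name and default path are
# illustrative assumptions; nothing in the build calls this.
def _example_read_setup_cfg(path='setup.cfg'):
    """
    Return the build options that a file like the following would produce::

        [status]
        suppress = False

        [rc_options]
        backend = Agg
    """
    import configparser
    opts = {'display_status': True, 'backend': None}
    if os.path.exists(path):
        cfg = configparser.ConfigParser()
        cfg.read(path)
        if cfg.has_option('status', 'suppress'):
            opts['display_status'] = not cfg.getboolean('status', 'suppress')
        if cfg.has_option('rc_options', 'backend'):
            opts['backend'] = cfg.get('rc_options', 'backend')
    return opts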
1df80385146c87dc2a3c887ecf7ed89b219e741a7c56dee01ef7f93ae10d6de6
#!/usr/bin/env python
#
# This allows running the matplotlib tests from the command line: e.g.
#
#   $ python tests.py -v -d
#
# The arguments are identical to the arguments accepted by pytest.
#
# See http://doc.pytest.org/ for a detailed description of these options.

import sys
import argparse


if __name__ == '__main__':

    import dateutil.parser
    try:
        import setuptools
    except ImportError:
        pass

    # The warnings need to be before any of the matplotlib imports, but after
    # dateutil.parser and setuptools (if present), which have syntax errors
    # with the warnings enabled. Filtering by module does not work as this
    # will be raised by Python itself, so `module=matplotlib.*` is out of the
    # question.
    import warnings

    # Python 3.6 deprecates invalid character-pairs \A, \* ... in non-raw
    # strings and other things. Let's not re-introduce them.
    warnings.filterwarnings('error', '.*invalid escape sequence.*',
                            category=DeprecationWarning)
    warnings.filterwarnings(
        'default',
        r'.*inspect.getargspec\(\) is deprecated.*',
        category=DeprecationWarning)

    from matplotlib import test

    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('--recursionlimit', type=int, default=0,
                        help='Specify recursionlimit for test run')
    args, extra_args = parser.parse_known_args()

    print('Python byte-compilation optimization level:', sys.flags.optimize)

    retcode = test(argv=extra_args, recursionlimit=args.recursionlimit)
    sys.exit(retcode)
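
# Editorial note (not part of the original script): parse_known_args() is what
# lets this wrapper accept its own --recursionlimit flag while forwarding every
# other argument untouched to pytest via matplotlib.test(). With hypothetical
# arguments:
#
#     args, extra_args = parser.parse_known_args(['--recursionlimit', '1000', '-v'])
#     # args.recursionlimit == 1000; extra_args == ['-v'] is handed to test().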
ac652709af3cfd81417f7076c182aae4e7f57b08562931231d1642d2b2c172e1
import builtins import configparser from distutils import sysconfig from distutils.core import Extension from io import BytesIO import glob import hashlib import importlib import logging import os import pathlib import platform import setuptools import shlex import shutil import subprocess import sys import tarfile import textwrap import urllib.request from urllib.request import Request import versioneer import warnings _log = logging.getLogger(__name__) def _get_xdg_cache_dir(): """ Return the XDG cache directory. See https://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html """ cache_dir = os.environ.get('XDG_CACHE_HOME') if not cache_dir: cache_dir = os.path.expanduser('~/.cache') if cache_dir.startswith('~/'): # Expansion failed. return None return os.path.join(cache_dir, 'matplotlib') def get_fd_hash(fd): """ Compute the sha256 hash of the bytes in a file-like """ BLOCKSIZE = 1 << 16 hasher = hashlib.sha256() old_pos = fd.tell() fd.seek(0) buf = fd.read(BLOCKSIZE) while buf: hasher.update(buf) buf = fd.read(BLOCKSIZE) fd.seek(old_pos) return hasher.hexdigest() def download_or_cache(url, sha): """ Get bytes from the given url or local cache. Parameters ---------- url : str The url to download sha : str The sha256 of the file Returns ------- BytesIO The file loaded into memory. """ cache_dir = _get_xdg_cache_dir() def get_from_cache(local_fn): if cache_dir is None: raise Exception("no cache dir") buf = BytesIO(pathlib.Path(cache_dir, local_fn).read_bytes()) if get_fd_hash(buf) != sha: return None buf.seek(0) return buf def write_cache(local_fn, data): if cache_dir is None: raise Exception("no cache dir") cache_filename = os.path.join(cache_dir, local_fn) os.makedirs(cache_dir, exist_ok=True) old_pos = data.tell() data.seek(0) with open(cache_filename, "xb") as fout: fout.write(data.read()) data.seek(old_pos) try: return get_from_cache(sha) except Exception: pass # jQueryUI's website blocks direct downloads from urllib.request's # default User-Agent, but not (for example) wget; so I don't feel too # bad passing in an empty User-Agent. with urllib.request.urlopen( Request(url, headers={"User-Agent": ""})) as req: file_contents = BytesIO(req.read()) file_contents.seek(0) file_sha = get_fd_hash(file_contents) if file_sha != sha: raise Exception(("The download file does not match the " "expected sha. {url} was expected to have " "{sha} but it had {file_sha}").format( sha=sha, file_sha=file_sha, url=url)) try: write_cache(sha, file_contents) except Exception: pass file_contents.seek(0) return file_contents # SHA256 hashes of the FreeType tarballs _freetype_hashes = { '2.6.1': '0a3c7dfbda6da1e8fce29232e8e96d987ababbbf71ebc8c75659e4132c367014', '2.6.2': '8da42fc4904e600be4b692555ae1dcbf532897da9c5b9fb5ebd3758c77e5c2d4', '2.6.3': '7942096c40ee6fea882bd4207667ad3f24bff568b96b10fd3885e11a7baad9a3', '2.6.4': '27f0e38347a1850ad57f84fc4dfed68ba0bc30c96a6fa6138ef84d485dd9a8d7', '2.6.5': '3bb24add9b9ec53636a63ea8e867ed978c4f8fdd8f1fa5ccfd41171163d4249a', '2.7': '7b657d5f872b0ab56461f3bd310bd1c5ec64619bd15f0d8e08282d494d9cfea4', '2.7.1': '162ef25aa64480b1189cdb261228e6c5c44f212aac4b4621e28cf2157efb59f5', '2.8': '33a28fabac471891d0523033e99c0005b95e5618dc8ffa7fa47f9dadcacb1c9b', '2.8.1': '876711d064a6a1bd74beb18dd37f219af26100f72daaebd2d86cb493d7cd7ec6', } # This is the version of FreeType to use when building a local # version. 
It must match the value in # lib/matplotlib.__init__.py and also needs to be changed below in the # embedded windows build script (grep for "REMINDER" in this file) LOCAL_FREETYPE_VERSION = '2.6.1' LOCAL_FREETYPE_HASH = _freetype_hashes.get(LOCAL_FREETYPE_VERSION, 'unknown') # matplotlib build options, which can be altered using setup.cfg options = { 'display_status': True, 'backend': None, } setup_cfg = os.environ.get('MPLSETUPCFG', 'setup.cfg') if os.path.exists(setup_cfg): config = configparser.ConfigParser() config.read(setup_cfg) if config.has_option('status', 'suppress'): options['display_status'] = not config.getboolean("status", "suppress") if config.has_option('rc_options', 'backend'): options['backend'] = config.get("rc_options", "backend") if config.has_option('test', 'local_freetype'): options['local_freetype'] = config.getboolean("test", "local_freetype") else: config = None lft = bool(os.environ.get('MPLLOCALFREETYPE', False)) options['local_freetype'] = lft or options.get('local_freetype', False) # Define the display functions only if display_status is True. if options['display_status']: def print_line(char='='): print(char * 80) def print_status(package, status): initial_indent = "%12s: " % package indent = ' ' * 18 print(textwrap.fill(str(status), width=80, initial_indent=initial_indent, subsequent_indent=indent)) def print_message(message): indent = ' ' * 18 + "* " print(textwrap.fill(str(message), width=80, initial_indent=indent, subsequent_indent=indent)) def print_raw(section): print(section) else: def print_line(*args, **kwargs): pass print_status = print_message = print_raw = print_line def get_buffer_hash(fd): BLOCKSIZE = 1 << 16 hasher = hashlib.sha256() buf = fd.read(BLOCKSIZE) while buf: hasher.update(buf) buf = fd.read(BLOCKSIZE) return hasher.hexdigest() class PkgConfig(object): """This is a class for communicating with pkg-config.""" def __init__(self): """Determines whether pkg-config exists on this machine.""" self.pkg_config = None if sys.platform != 'win32': pkg_config = os.environ.get('PKG_CONFIG', 'pkg-config') if shutil.which(pkg_config) is not None: self.pkg_config = pkg_config self.set_pkgconfig_path() else: print("IMPORTANT WARNING:\n" " pkg-config is not installed.\n" " matplotlib may not be able to find some of its dependencies") def set_pkgconfig_path(self): pkgconfig_path = sysconfig.get_config_var('LIBDIR') if pkgconfig_path is None: return pkgconfig_path = os.path.join(pkgconfig_path, 'pkgconfig') if not os.path.isdir(pkgconfig_path): return try: os.environ['PKG_CONFIG_PATH'] += ':' + pkgconfig_path except KeyError: os.environ['PKG_CONFIG_PATH'] = pkgconfig_path def setup_extension( self, ext, package, atleast_version=None, alt_exec=None, default_libraries=()): """Add parameters to the given *ext* for the given *package*.""" # First, try to get the flags from pkg-config. cmd = ([self.pkg_config, package] if self.pkg_config else alt_exec) if cmd is not None: try: if self.pkg_config and atleast_version: subprocess.check_call( [*cmd, f"--atleast-version={atleast_version}"]) # Use sys.getfilesystemencoding() to allow round-tripping # when passed back to later subprocess calls; do not use # locale.getpreferredencoding() which universal_newlines=True # would do. 
cflags = shlex.split( os.fsdecode(subprocess.check_output([*cmd, "--cflags"]))) libs = shlex.split( os.fsdecode(subprocess.check_output([*cmd, "--libs"]))) except (OSError, subprocess.CalledProcessError): pass else: ext.extra_compile_args.extend(cflags) ext.extra_link_args.extend(libs) return # If that fails, fall back on the defaults. # conda Windows header and library paths. # https://github.com/conda/conda/issues/2312 re: getting the env dir. if sys.platform == 'win32': conda_env_path = (os.getenv('CONDA_PREFIX') # conda >= 4.1 or os.getenv('CONDA_DEFAULT_ENV')) # conda < 4.1 if conda_env_path and os.path.isdir(conda_env_path): ext.include_dirs.append(os.fspath( pathlib.Path(conda_env_path, "Library/include"))) ext.library_dirs.append(os.fspath( pathlib.Path(conda_env_path, "Library/lib"))) # Default linked libs. ext.libraries.extend(default_libraries) # The PkgConfig class should be used through this singleton pkg_config = PkgConfig() class CheckFailed(Exception): """ Exception thrown when a `SetupPackage.check` method fails. """ pass class SetupPackage(object): optional = False pkg_names = { "apt-get": None, "yum": None, "dnf": None, "brew": None, "port": None, "windows_url": None } def check(self): """ Checks whether the build dependencies are met. Should raise a `CheckFailed` exception if the dependency could not be met, otherwise return a string indicating a version number or some other message indicating what was found. """ pass def get_packages(self): """ Get a list of package names to add to the configuration. These are added to the `packages` list passed to `distutils.setup`. """ return [] def get_namespace_packages(self): """ Get a list of namespace package names to add to the configuration. These are added to the `namespace_packages` list passed to `distutils.setup`. """ return [] def get_py_modules(self): """ Get a list of top-level modules to add to the configuration. These are added to the `py_modules` list passed to `distutils.setup`. """ return [] def get_package_data(self): """ Get a package data dictionary to add to the configuration. These are merged into to the `package_data` list passed to `distutils.setup`. """ return {} def get_extension(self): """ Get a list of C extensions (`distutils.core.Extension` objects) to add to the configuration. These are added to the `extensions` list passed to `distutils.setup`. """ return None def get_install_requires(self): """ Get a list of Python packages that we require. pip/easy_install will attempt to download and install this package if it is not installed. """ return [] def get_setup_requires(self): """ Get a list of Python packages that we require at build time. pip/easy_install will attempt to download and install this package if it is not installed. """ return [] def do_custom_build(self): """ If a package needs to do extra custom things, such as building a third-party library, before building an extension, it should override this method. """ pass def install_help_msg(self): """ Do not override this method ! Generate the help message to show if the package is not installed. To use this in subclasses, simply add the dictionary `pkg_names` as a class variable: pkg_names = { "apt-get": <Name of the apt-get package>, "yum": <Name of the yum package>, "dnf": <Name of the dnf package>, "brew": <Name of the brew package>, "port": <Name of the port package>, "windows_url": <The url which has installation instructions> } All the dictionary keys are optional. 
If a key is not present or has the value `None` no message is provided for that platform. """ def _try_managers(*managers): for manager in managers: pkg_name = self.pkg_names.get(manager, None) if pkg_name: if shutil.which(manager) is not None: if manager == 'port': pkgconfig = 'pkgconfig' else: pkgconfig = 'pkg-config' return ('Try installing {0} with `{1} install {2}` ' 'and pkg-config with `{1} install {3}`' .format(self.name, manager, pkg_name, pkgconfig)) message = None if sys.platform == "win32": url = self.pkg_names.get("windows_url", None) if url: message = ('Please check {0} for instructions to install {1}' .format(url, self.name)) elif sys.platform == "darwin": message = _try_managers("brew", "port") elif sys.platform == "linux": release = platform.linux_distribution()[0].lower() if release in ('debian', 'ubuntu'): message = _try_managers('apt-get') elif release in ('centos', 'redhat', 'fedora'): message = _try_managers('dnf', 'yum') return message class OptionalPackage(SetupPackage): optional = True force = False config_category = "packages" default_config = "auto" @classmethod def get_config(cls): """ Look at `setup.cfg` and return one of ["auto", True, False] indicating if the package is at default state ("auto"), forced by the user (case insensitively defined as 1, true, yes, on for True) or opted-out (case insensitively defined as 0, false, no, off for False). """ conf = cls.default_config if config is not None and config.has_option(cls.config_category, cls.name): try: conf = config.getboolean(cls.config_category, cls.name) except ValueError: conf = config.get(cls.config_category, cls.name) return conf def check(self): """ Do not override this method! For custom dependency checks override self.check_requirements(). Two things are checked: Configuration file and requirements. """ # Check configuration file conf = self.get_config() # Default "auto" state or install forced by user if conf in [True, 'auto']: message = "installing" # Set non-optional if user sets `True` in config if conf is True: self.optional = False # Configuration opt-out by user else: # Some backend extensions (e.g. Agg) need to be built for certain # other GUI backends (e.g. TkAgg) even when manually disabled if self.force is True: message = "installing forced (config override)" else: raise CheckFailed("skipping due to configuration") # Check requirements and add extra information (if any) to message. # If requirements are not met a CheckFailed should be raised in there. additional_info = self.check_requirements() if additional_info: message += ", " + additional_info # No CheckFailed raised until now, return install message. return message def check_requirements(self): """ Override this method to do custom dependency checks. - Raise CheckFailed() if requirements are not met. - Return message with additional information, or an empty string (or None) for no additional information. 
""" return "" class OptionalBackendPackage(OptionalPackage): config_category = "gui_support" class Platform(SetupPackage): name = "platform" def check(self): return sys.platform class Python(SetupPackage): name = "python" def check(self): return sys.version def _pkg_data_helper(pkg, subdir): """Glob "lib/$pkg/$subdir/**/*", returning paths relative to "lib/$pkg".""" base = pathlib.Path("lib", pkg) return [str(path.relative_to(base)) for path in (base / subdir).rglob("*")] class Matplotlib(SetupPackage): name = "matplotlib" def check(self): return versioneer.get_version() def get_packages(self): return setuptools.find_packages("lib", exclude=["*.tests"]) def get_namespace_packages(self): return ['mpl_toolkits'] def get_py_modules(self): return ['pylab'] def get_package_data(self): return { 'matplotlib': [ 'mpl-data/matplotlibrc', *_pkg_data_helper('matplotlib', 'mpl-data/fonts'), *_pkg_data_helper('matplotlib', 'mpl-data/images'), *_pkg_data_helper('matplotlib', 'mpl-data/stylelib'), *_pkg_data_helper('matplotlib', 'backends/web_backend'), ], } def get_install_requires(self): return [ "cycler>=0.10", "kiwisolver>=1.0.1", "pyparsing>=2.0.1,!=2.0.4,!=2.1.2,!=2.1.6", "python-dateutil>=2.1", ] class SampleData(OptionalPackage): """ This handles the sample data that ships with matplotlib. It is technically optional, though most often will be desired. """ name = "sample_data" def get_package_data(self): return { 'matplotlib': [ *_pkg_data_helper('matplotlib', 'mpl-data/sample_data'), ], } class Tests(OptionalPackage): name = "tests" default_config = True def get_packages(self): return setuptools.find_packages("lib", include=["*.tests"]) def get_package_data(self): return { 'matplotlib': [ *_pkg_data_helper('matplotlib', 'tests/baseline_images'), *_pkg_data_helper('matplotlib', 'tests/tinypages'), 'tests/cmr10.pfb', 'tests/mpltest.ttf', 'tests/test_rcparams.rc', 'tests/test_utf32_be_rcparams.rc', ], 'mpl_toolkits': [ *_pkg_data_helper('mpl_toolkits', 'tests/baseline_images'), ] } class Numpy(SetupPackage): name = "numpy" def add_flags(self, ext): import numpy as np ext.include_dirs.append(np.get_include()) ext.define_macros.extend([ # Ensure that PY_ARRAY_UNIQUE_SYMBOL is uniquely defined for each # extension. ('PY_ARRAY_UNIQUE_SYMBOL', 'MPL_' + ext.name.replace('.', '_') + '_ARRAY_API'), ('NPY_NO_DEPRECATED_API', 'NPY_1_7_API_VERSION'), # Allow NumPy's printf format specifiers in C++. ('__STDC_FORMAT_MACROS', 1), ]) def get_setup_requires(self): return ['numpy>=1.11'] def get_install_requires(self): return ['numpy>=1.11'] class LibAgg(SetupPackage): name = 'libagg' def add_flags(self, ext, add_sources=True): # We need a patched Agg not available elsewhere, so always use the # vendored version. ext.include_dirs.insert(0, 'extern/agg24-svn/include') if add_sources: agg_sources = [ 'agg_bezier_arc.cpp', 'agg_curves.cpp', 'agg_image_filters.cpp', 'agg_trans_affine.cpp', 'agg_vcgen_contour.cpp', 'agg_vcgen_dash.cpp', 'agg_vcgen_stroke.cpp', 'agg_vpgen_segmentator.cpp' ] ext.sources.extend(os.path.join('extern', 'agg24-svn', 'src', x) for x in agg_sources) # For FreeType2 and libpng, we add a separate checkdep_foo.c source to at the # top of the extension sources. This file is compiled first and immediately # aborts the compilation either with "foo.h: No such file or directory" if the # header is not found, or an appropriate error message if the header indicates # a too-old version. 
class FreeType(SetupPackage): name = "freetype" pkg_names = { "apt-get": "libfreetype6-dev", "yum": "freetype-devel", "dnf": "freetype-devel", "brew": "freetype", "port": "freetype", "windows_url": "http://gnuwin32.sourceforge.net/packages/freetype.htm" } def add_flags(self, ext): ext.sources.insert(0, 'src/checkdep_freetype2.c') if options.get('local_freetype'): src_path = os.path.join( 'build', 'freetype-{0}'.format(LOCAL_FREETYPE_VERSION)) # Statically link to the locally-built freetype. # This is certainly broken on Windows. ext.include_dirs.insert(0, os.path.join(src_path, 'include')) if sys.platform == 'win32': libfreetype = 'libfreetype.lib' else: libfreetype = 'libfreetype.a' ext.extra_objects.insert( 0, os.path.join(src_path, 'objs', '.libs', libfreetype)) ext.define_macros.append(('FREETYPE_BUILD_TYPE', 'local')) else: pkg_config.setup_extension( # FreeType 2.3 has libtool version 9.11.3 as can be checked # from the tarball. For FreeType>=2.4, there is a conversion # table in docs/VERSIONS.txt in the FreeType source tree. ext, 'freetype2', atleast_version='9.11.3', alt_exec=['freetype-config'], default_libraries=['freetype', 'z']) ext.define_macros.append(('FREETYPE_BUILD_TYPE', 'system')) def do_custom_build(self): # We're using a system freetype if not options.get('local_freetype'): return src_path = os.path.join( 'build', 'freetype-{0}'.format(LOCAL_FREETYPE_VERSION)) # We've already built freetype if sys.platform == 'win32': libfreetype = 'libfreetype.lib' else: libfreetype = 'libfreetype.a' # bailing because it is already built if os.path.isfile(os.path.join( src_path, 'objs', '.libs', libfreetype)): return # do we need to download / load the source from cache? if not os.path.exists(src_path): os.makedirs('build', exist_ok=True) url_fmts = [ ('https://downloads.sourceforge.net/project/freetype' '/freetype2/{version}/{tarball}'), ('https://download.savannah.gnu.org/releases/freetype' '/{tarball}') ] tarball = 'freetype-{0}.tar.gz'.format(LOCAL_FREETYPE_VERSION) target_urls = [ url_fmt.format(version=LOCAL_FREETYPE_VERSION, tarball=tarball) for url_fmt in url_fmts] for tarball_url in target_urls: try: tar_contents = download_or_cache(tarball_url, LOCAL_FREETYPE_HASH) break except Exception: pass else: raise IOError("Failed to download FreeType. 
Please download " "one of {target_urls} and extract it into " "{src_path} at the top-level of the source " "repository".format( target_urls=target_urls, src_path=src_path)) print("Extracting {}".format(tarball)) # just to be sure tar_contents.seek(0) with tarfile.open(tarball, mode="r:gz", fileobj=tar_contents) as tgz: tgz.extractall("build") print("Building freetype in {}".format(src_path)) if sys.platform != 'win32': # compilation on all other platforms than windows env = {**os.environ, "CFLAGS": "{} -fPIC".format(os.environ.get("CFLAGS", ""))} subprocess.check_call( ["./configure", "--with-zlib=no", "--with-bzip2=no", "--with-png=no", "--with-harfbuzz=no"], env=env, cwd=src_path) subprocess.check_call(["make"], env=env, cwd=src_path) else: # compilation on windows shutil.rmtree(str(pathlib.Path(src_path, "objs")), ignore_errors=True) FREETYPE_BUILD_CMD = r""" call "%ProgramFiles%\Microsoft SDKs\Windows\v7.0\Bin\SetEnv.Cmd" ^ /Release /{xXX} /xp call "{vcvarsall}" {xXX} set MSBUILD=C:\Windows\Microsoft.NET\Framework\v4.0.30319\MSBuild.exe %MSBUILD% "builds\windows\{vc20xx}\freetype.sln" ^ /t:Clean;Build /p:Configuration="Release";Platform={WinXX} """ import distutils.msvc9compiler as msvc # Note: freetype has no build profile for 2014, so we don't bother... vc = 'vc2010' WinXX = 'x64' if platform.architecture()[0] == '64bit' else 'Win32' xXX = 'x64' if platform.architecture()[0] == '64bit' else 'x86' vcvarsall = msvc.find_vcvarsall(10.0) if vcvarsall is None: raise RuntimeError('Microsoft VS 2010 required') cmdfile = pathlib.Path("build/build_freetype.cmd") cmdfile.write_text(FREETYPE_BUILD_CMD.format( vc20xx=vc, WinXX=WinXX, xXX=xXX, vcvarsall=vcvarsall)) subprocess.check_call([str(cmdfile.resolve())], shell=True, cwd=src_path) # Move to the corresponding Unix build path. pathlib.Path(src_path, "objs/.libs").mkdir() # Be robust against change of FreeType version. lib_path, = (pathlib.Path(src_path, "objs", vc, xXX) .glob("freetype*.lib")) shutil.copy2( str(lib_path), str(pathlib.Path(src_path, "objs/.libs/libfreetype.lib"))) class FT2Font(SetupPackage): name = 'ft2font' def get_extension(self): sources = [ 'src/ft2font.cpp', 'src/ft2font_wrapper.cpp', 'src/mplutils.cpp', 'src/py_converters.cpp', ] ext = Extension('matplotlib.ft2font', sources) FreeType().add_flags(ext) Numpy().add_flags(ext) LibAgg().add_flags(ext, add_sources=False) return ext class Png(SetupPackage): name = "png" pkg_names = { "apt-get": "libpng12-dev", "yum": "libpng-devel", "dnf": "libpng-devel", "brew": "libpng", "port": "libpng", "windows_url": "http://gnuwin32.sourceforge.net/packages/libpng.htm" } def get_extension(self): sources = [ 'src/checkdep_libpng.c', 'src/_png.cpp', 'src/mplutils.cpp', ] ext = Extension('matplotlib._png', sources) pkg_config.setup_extension( ext, 'libpng', atleast_version='1.2', alt_exec=['libpng-config', '--ldflags'], default_libraries=['png', 'z']) Numpy().add_flags(ext) return ext class Qhull(SetupPackage): name = "qhull" def add_flags(self, ext): # Qhull doesn't distribute pkg-config info, so we have no way of # knowing whether a system install is recent enough. Thus, always use # the vendored version. 
        ext.include_dirs.insert(0, 'extern')
        ext.sources.extend(sorted(glob.glob('extern/libqhull/*.c')))
        if sysconfig.get_config_var('LIBM') == '-lm':
            ext.libraries.append('m')


class TTConv(SetupPackage):
    name = "ttconv"

    def get_extension(self):
        sources = [
            'src/_ttconv.cpp',
            'extern/ttconv/pprdrv_tt.cpp',
            'extern/ttconv/pprdrv_tt2.cpp',
            'extern/ttconv/ttutil.cpp'
        ]
        ext = Extension('matplotlib.ttconv', sources)
        Numpy().add_flags(ext)
        ext.include_dirs.insert(0, 'extern')
        return ext


class Path(SetupPackage):
    name = "path"

    def get_extension(self):
        sources = [
            'src/py_converters.cpp',
            'src/_path_wrapper.cpp'
        ]
        ext = Extension('matplotlib._path', sources)
        Numpy().add_flags(ext)
        LibAgg().add_flags(ext)
        return ext


class Image(SetupPackage):
    name = "image"

    def get_extension(self):
        sources = [
            'src/_image.cpp',
            'src/mplutils.cpp',
            'src/_image_wrapper.cpp',
            'src/py_converters.cpp'
        ]
        ext = Extension('matplotlib._image', sources)
        Numpy().add_flags(ext)
        LibAgg().add_flags(ext)
        return ext


class Contour(SetupPackage):
    name = "contour"

    def get_extension(self):
        sources = [
            "src/_contour.cpp",
            "src/_contour_wrapper.cpp",
            'src/py_converters.cpp',
        ]
        ext = Extension('matplotlib._contour', sources)
        Numpy().add_flags(ext)
        LibAgg().add_flags(ext, add_sources=False)
        return ext


class QhullWrap(SetupPackage):
    name = "qhull_wrap"

    def get_extension(self):
        sources = ['src/qhull_wrap.c']
        ext = Extension('matplotlib._qhull', sources,
                        define_macros=[('MPL_DEVNULL', os.devnull)])
        Numpy().add_flags(ext)
        Qhull().add_flags(ext)
        return ext


class Tri(SetupPackage):
    name = "tri"

    def get_extension(self):
        sources = [
            "src/tri/_tri.cpp",
            "src/tri/_tri_wrapper.cpp",
            "src/mplutils.cpp"
        ]
        ext = Extension('matplotlib._tri', sources)
        Numpy().add_flags(ext)
        return ext


class BackendAgg(OptionalBackendPackage):
    name = "agg"
    force = True

    def get_extension(self):
        sources = [
            "src/mplutils.cpp",
            "src/py_converters.cpp",
            "src/_backend_agg.cpp",
            "src/_backend_agg_wrapper.cpp"
        ]
        ext = Extension('matplotlib.backends._backend_agg', sources)
        Numpy().add_flags(ext)
        LibAgg().add_flags(ext)
        FreeType().add_flags(ext)
        return ext


class BackendTkAgg(OptionalBackendPackage):
    name = "tkagg"
    force = True

    def check(self):
        return "installing; run-time loading from Python Tcl/Tk"

    def get_extension(self):
        sources = [
            'src/_tkagg.cpp',
            'src/py_converters.cpp',
        ]
        ext = Extension('matplotlib.backends._tkagg', sources)
        self.add_flags(ext)
        Numpy().add_flags(ext)
        LibAgg().add_flags(ext, add_sources=False)
        return ext

    def add_flags(self, ext):
        ext.include_dirs.insert(0, 'src')
        if sys.platform == 'win32':
            # psapi library needed for finding Tcl/Tk at run time.
            # user32 library needed for window manipulation functions.
            ext.libraries.extend(['psapi', 'user32'])
            ext.extra_link_args.extend(["-mwindows"])
        elif sys.platform == 'linux':
            ext.libraries.extend(['dl'])


class BackendMacOSX(OptionalBackendPackage):
    name = 'macosx'

    def check_requirements(self):
        if sys.platform != 'darwin':
            raise CheckFailed("Mac OS-X only")
        return 'darwin'

    def get_extension(self):
        sources = [
            'src/_macosx.m'
        ]
        ext = Extension('matplotlib.backends._macosx', sources)
        ext.extra_link_args.extend(['-framework', 'Cocoa'])
        if platform.python_implementation().lower() == 'pypy':
            ext.extra_compile_args.append('-DPYPY=1')
        return ext


class OptionalPackageData(OptionalPackage):
    config_category = "package_data"


class Dlls(OptionalPackageData):
    """
    On Windows, this packages any DLL files that can be found in the
    lib/matplotlib/* directories.
""" name = "dlls" def check_requirements(self): if sys.platform != 'win32': raise CheckFailed("Microsoft Windows only") def get_package_data(self): return {'': ['*.dll']} @classmethod def get_config(cls): """ Look at `setup.cfg` and return one of ["auto", True, False] indicating if the package is at default state ("auto"), forced by the user (True) or opted-out (False). """ try: return config.getboolean(cls.config_category, cls.name) except Exception: return False # <-- default
80b23547bd70ebea6f5c5d64d751ca0ced31b9b2c738ebe03d037f8919187449
# Version: 0.15 +matplotlib modifications to avoid the need for setup.cfg
# to exist.

"""
The Versioneer
==============

* like a rocketeer, but for versions!
* https://github.com/warner/python-versioneer
* Brian Warner
* License: Public Domain
* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, and pypy
* [![Latest Version]
(https://pypip.in/version/versioneer/badge.svg?style=flat)
](https://pypi.python.org/pypi/versioneer/)
* [![Build Status]
(https://travis-ci.org/warner/python-versioneer.png?branch=master)
](https://travis-ci.org/warner/python-versioneer)

This is a tool for managing a recorded version number in distutils-based
python projects. The goal is to remove the tedious and error-prone "update
the embedded version string" step from your release process. Making a new
release should be as easy as recording a new tag in your version-control
system, and maybe making new tarballs.

## Quick Install

* `pip install versioneer` to somewhere on your $PATH
* add a `[versioneer]` section to your setup.cfg (see below)
* run `versioneer install` in your source tree, commit the results

## Version Identifiers

Source trees come from a variety of places:

* a version-control system checkout (mostly used by developers)
* a nightly tarball, produced by build automation
* a snapshot tarball, produced by a web-based VCS browser, like github's
  "tarball from tag" feature
* a release tarball, produced by "setup.py sdist", distributed through PyPI

Within each source tree, the version identifier (either a string or a number,
this tool is format-agnostic) can come from a variety of places:

* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
  about recent "tags" and an absolute revision-id
* the name of the directory into which the tarball was unpacked
* an expanded VCS keyword ($Id$, etc)
* a `_version.py` created by some earlier build step

For released software, the version identifier is closely related to a VCS
tag. Some projects use tag names that include more than just the version
string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
needs to strip the tag prefix to extract the version identifier. For
unreleased software (between tags), the version identifier should provide
enough information to help developers recreate the same tree, while also
giving them an idea of roughly how old the tree is (after version 1.2,
before version 1.3). Many VCS systems can report a description that captures
this, for example `git describe --tags --dirty --always` reports things like
"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past
the 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
uncommitted changes).

The version identifier is used for multiple purposes:

* to allow the module to self-identify its version:
  `myproject.__version__`
* to choose a name and prefix for a 'setup.py sdist' tarball

## Theory of Operation

Versioneer works by adding a special `_version.py` file into your source
tree, where your `__init__.py` can import it. This `_version.py` knows how to
dynamically ask the VCS tool for version information at import time.

`_version.py` also contains `$Revision$` markers, and the installation
process marks `_version.py` to have this marker rewritten with a tag name
during the `git archive` command. As a result, generated tarballs will
contain enough information to get the proper version.

To allow `setup.py` to compute a version too, a `versioneer.py` is added to
the top level of your source tree, next to `setup.py` and the `setup.cfg`
that configures it. This overrides several distutils/setuptools commands to
compute the version when invoked, and changes `setup.py build` and
`setup.py sdist` to replace `_version.py` with a small static file that
contains just the generated version data.

## Installation

First, decide on values for the following configuration variables:

* `VCS`: the version control system you use. Currently accepts "git".

* `style`: the style of version string to be produced. See "Styles" below
  for details. Defaults to "pep440", which looks like
  `TAG[+DISTANCE.gSHORTHASH[.dirty]]`.

* `versionfile_source`:

  A project-relative pathname into which the generated version strings
  should be written. This is usually a `_version.py` next to your project's
  main `__init__.py` file, so it can be imported at runtime. If your project
  uses `src/myproject/__init__.py`, this should be
  `src/myproject/_version.py`. This file should be checked in to your VCS as
  usual: the copy created below by `setup.py setup_versioneer` will include
  code that parses expanded VCS keywords in generated tarballs. The 'build'
  and 'sdist' commands will replace it with a copy that has just the
  calculated version string.

  This must be set even if your project does not have any modules (and will
  therefore never import `_version.py`), since "setup.py sdist" -based trees
  still need somewhere to record the pre-calculated version strings.
  Anywhere in the source tree should do. If there is an `__init__.py` next
  to your `_version.py`, the `setup.py setup_versioneer` command (described
  below) will append some `__version__`-setting assignments, if they aren't
  already present.

* `versionfile_build`:

  Like `versionfile_source`, but relative to the build directory instead of
  the source directory. These will differ when your setup.py uses
  'package_dir='. If you have `package_dir={'myproject': 'src/myproject'}`,
  then you will probably have
  `versionfile_build='myproject/_version.py'` and
  `versionfile_source='src/myproject/_version.py'`.

  If this is set to None, then `setup.py build` will not attempt to rewrite
  any `_version.py` in the built tree. If your project does not have any
  libraries (e.g. if it only builds a script), then you should use
  `versionfile_build = None` and override
  `distutils.command.build_scripts` to explicitly insert a copy of
  `versioneer.get_version()` into your generated script.

* `tag_prefix`:

  a string, like 'PROJECTNAME-', which appears at the start of all VCS tags.
  If your tags look like 'myproject-1.2.0', then you should use
  tag_prefix='myproject-'. If you use unprefixed tags like '1.2.0', this
  should be an empty string.

* `parentdir_prefix`:

  an optional string, frequently the same as tag_prefix, which appears at
  the start of all unpacked tarball filenames. If your tarball unpacks into
  'myproject-1.2.0', this should be 'myproject-'. To disable this feature,
  just omit the field from your `setup.cfg`.

This tool provides one script, named `versioneer`. That script has one mode,
"install", which writes a copy of `versioneer.py` into the current directory
and runs `versioneer.py setup` to finish the installation.

To versioneer-enable your project:

* 1: Modify your `setup.cfg`, adding a section named `[versioneer]` and
  populating it with the configuration values you decided earlier (note that
  the option names are not case-sensitive):

  ````
  [versioneer]
  VCS = git
  style = pep440
  versionfile_source = src/myproject/_version.py
  versionfile_build = myproject/_version.py
  tag_prefix = ""
  parentdir_prefix = myproject-
  ````

* 2: Run `versioneer install`. This will do the following:

  * copy `versioneer.py` into the top of your source tree
  * create `_version.py` in the right place (`versionfile_source`)
  * modify your `__init__.py` (if one exists next to `_version.py`) to
    define `__version__` (by calling a function from `_version.py`)
  * modify your `MANIFEST.in` to include both `versioneer.py` and the
    generated `_version.py` in sdist tarballs

  `versioneer install` will complain about any problems it finds with your
  `setup.py` or `setup.cfg`. Run it multiple times until you have fixed all
  the problems.

* 3: add an `import versioneer` to your setup.py, and add the following
  arguments to the setup() call:

        version=versioneer.get_version(),
        cmdclass=versioneer.get_cmdclass(),

* 4: commit these changes to your VCS. To make sure you won't forget,
  `versioneer install` will mark everything it touched for addition using
  `git add`. Don't forget to add `setup.py` and `setup.cfg` too.

## Post-Installation Usage

Once established, all uses of your tree from a VCS checkout should get the
current version string. All generated tarballs should include an embedded
version string (so users who unpack them will not need a VCS tool installed).

If you distribute your project through PyPI, then the release process should
boil down to two steps:

* 1: git tag 1.0
* 2: python setup.py register sdist upload

If you distribute it through github (i.e. users use github to generate
tarballs with `git archive`), the process is:

* 1: git tag 1.0
* 2: git push; git push --tags

Versioneer will report "0+untagged.NUMCOMMITS.gHASH" until your tree has at
least one tag in its history.

## Version-String Flavors

Code which uses Versioneer can learn about its version string at runtime by
importing `_version` from your main `__init__.py` file and running the
`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
import the top-level `versioneer.py` and run `get_versions()`.

Both functions return a dictionary with different flavors of version
information:

* `['version']`: A condensed version string, rendered using the selected
  style. This is the most commonly used value for the project's version
  string. The default "pep440" style yields strings like `0.11`,
  `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
  below for alternative styles.

* `['full-revisionid']`: detailed revision identifier. For Git, this is the
  full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".

* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note
  that this is only accurate if run in a VCS checkout, otherwise it is
  likely to be False or None.

* `['error']`: if the version string could not be computed, this will be set
  to a string describing the problem, otherwise it will be None. It may be
  useful to throw an exception in setup.py if this is set, to avoid e.g.
  creating tarballs with a version string of "unknown"; a short sketch of
  such a check appears just before "Future Directions" below.

Some variants are more useful than others.
Including `full-revisionid` in a bug report should allow developers to reconstruct the exact code being tested (or indicate the presence of local changes that should be shared with the developers). `version` is suitable for display in an "about" box or a CLI `--version` output: it can be easily compared against release notes and lists of bugs fixed in various releases. The installer adds the following text to your `__init__.py` to place a basic version in `YOURPROJECT.__version__`: from ._version import get_versions __version__ = get_versions()['version'] del get_versions ## Styles The setup.cfg `style=` configuration controls how the VCS information is rendered into a version string. The default style, "pep440", produces a PEP440-compliant string, equal to the un-prefixed tag name for actual releases, and containing an additional "local version" section with more detail for in-between builds. For Git, this is TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and that this commit is two revisions ("+2") beyond the "0.11" tag. For released software (exactly equal to a known tag), the identifier will only contain the stripped tag, e.g. "0.11". Other styles are available. See details.md in the Versioneer source tree for descriptions. ## Debugging Versioneer tries to avoid fatal errors: if something goes wrong, it will tend to return a version of "0+unknown". To investigate the problem, run `setup.py version`, which will run the version-lookup code in a verbose mode, and will display the full contents of `get_versions()` (including the `error` string, which may help identify what went wrong). ## Updating Versioneer To upgrade your project to a new release of Versioneer, do the following: * install the new Versioneer (`pip install -U versioneer` or equivalent) * edit `setup.cfg`, if necessary, to include any new configuration settings indicated by the release notes * re-run `versioneer install` in your source tree, to replace `SRC/_version.py` * commit any changed files ### Upgrading to 0.15 Starting with this version, Versioneer is configured with a `[versioneer]` section in your `setup.cfg` file. Earlier versions required the `setup.py` to set attributes on the `versioneer` module immediately after import. The new version will refuse to run (raising an exception during import) until you have provided the necessary `setup.cfg` section. In addition, the Versioneer package provides an executable named `versioneer`, and the installation process is driven by running `versioneer install`. In 0.14 and earlier, the executable was named `versioneer-installer` and was run without an argument. ### Upgrading to 0.14 0.14 changes the format of the version string. 0.13 and earlier used hyphen-separated strings like "0.11-2-g1076c97-dirty". 0.14 and beyond use a plus-separated "local version" section strings, with dot-separated components, like "0.11+2.g1076c97". PEP440-strict tools did not like the old format, but should be ok with the new one. ### Upgrading from 0.11 to 0.12 Nothing special. ### Upgrading from 0.10 to 0.11 You must add a `versioneer.VCS = "git"` to your `setup.py` before re-running `setup.py setup_versioneer`. This will enable the use of additional version-control systems (SVN, etc) in the future. 
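
As a concrete illustration of the `error` flavor described under
"Version-String Flavors" above (a sketch only; the exact policy is up to the
project), a `setup.py` can refuse to build anything when no version can be
computed:

````
import versioneer

info = versioneer.get_versions()
if info['error']:
    raise SystemExit("cannot compute version: %s" % info['error'])
````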
## Future Directions

This tool is designed to make it easily extended to other version-control
systems: all VCS-specific components are in separate directories like
src/git/ . The top-level `versioneer.py` script is assembled from these
components by running make-versioneer.py . In the future, make-versioneer.py
will take a VCS name as an argument, and will construct a version of
`versioneer.py` that is specific to the given VCS. It might also take the
configuration arguments that are currently provided manually during
installation by editing setup.py . Alternatively, it might go the other
direction and include code from all supported VCS systems, reducing the
number of intermediate scripts.

## License

To make Versioneer easier to embed, all its code is hereby released into the
public domain. The `_version.py` that it creates is also in the public
domain.

"""

from __future__ import print_function
try:
    import configparser
except ImportError:
    import ConfigParser as configparser
import errno
import json
import os
import re
import subprocess
import sys


class VersioneerConfig:
    pass


def get_root():
    # we require that all commands are run from the project root, i.e. the
    # directory that contains setup.py, setup.cfg, and versioneer.py .
    root = os.path.realpath(os.path.abspath(os.getcwd()))
    setup_py = os.path.join(root, "setup.py")
    versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        # allow 'python path/to/setup.py COMMAND'
        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
        setup_py = os.path.join(root, "setup.py")
        versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        err = ("Versioneer was unable to find the project root directory. "
               "Versioneer requires setup.py to be executed from "
               "its immediate directory (like 'python setup.py COMMAND'), "
               "or in a way that lets it use sys.argv[0] to find the root "
               "(like 'python path/to/setup.py COMMAND').")
        raise VersioneerBadRootError(err)
    try:
        # Certain runtime workflows (setup.py install/develop in a setuptools
        # tree) execute all dependencies in a single python process, so
        # "versioneer" may be imported multiple times, and python's shared
        # module-import table will cache the first one. So we can't use
        # os.path.dirname(__file__), as that will find whichever
        # versioneer.py was first imported, even in later projects.
        me = os.path.realpath(os.path.abspath(__file__))
        if os.path.splitext(me)[0] != os.path.splitext(versioneer_py)[0]:
            print("Warning: build in %s is using versioneer.py from %s"
                  % (os.path.dirname(me), versioneer_py))
    except NameError:
        pass
    return root


def get_config_from_root(root):
    # This might raise EnvironmentError (if setup.cfg is missing), or
    # configparser.NoSectionError (if it lacks a [versioneer] section), or
    # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
    # the top of versioneer.py for instructions on writing your setup.cfg .
setup_cfg = os.path.join(root, "setup.cfg") parser = configparser.SafeConfigParser() if os.path.exists(setup_cfg): with open(setup_cfg, "r") as f: parser.readfp(f) def get(parser, name): if parser.has_option("versioneer", name): return parser.get("versioneer", name) return None cfg = VersioneerConfig() VCS = get(parser, "VCS") or 'git' cfg.VCS = VCS cfg.style = get(parser, "style") or "pep440" version_file = os.path.join('lib', 'matplotlib', '_version.py') cfg.versionfile_source = get(parser, "versionfile_source") or version_file cfg.versionfile_build = get(parser, "versionfile_build") or os.path.join('matplotlib', '_version.py') cfg.tag_prefix = get(parser, "tag_prefix") or 'v' cfg.parentdir_prefix = get(parser, "parentdir_prefix") or 'matplotlib-' cfg.verbose = get(parser, "verbose") return cfg class NotThisMethod(Exception): pass # these dictionaries contain VCS-specific tools LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator def decorate(f): if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) return None return stdout LONG_VERSION_PY['git'] = r''' # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. Generated by # versioneer-0.15 (https://github.com/warner/python-versioneer) import errno import os import re import subprocess import sys def get_keywords(): # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). 
git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" keywords = {"refnames": git_refnames, "full": git_full} return keywords class VersioneerConfig: pass def get_config(): # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "%(STYLE)s" cfg.tag_prefix = "%(TAG_PREFIX)s" cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" cfg.verbose = False return cfg class NotThisMethod(Exception): pass LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator def decorate(f): if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %%s" %% dispcmd) print(e) return None else: if verbose: print("unable to find command, tried %%s" %% (commands,)) return None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %%s (error)" %% dispcmd) return None return stdout def versions_from_parentdir(parentdir_prefix, root, verbose): # Source tarballs conventionally unpack into a directory that includes # both the project name and a version string. dirname = os.path.basename(root) if not dirname.startswith(parentdir_prefix): if verbose: print("guessing rootdir is '%%s', but '%%s' doesn't start with " "prefix '%%s'" %% (root, dirname, parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None} @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): if not keywords: raise NotThisMethod("no keywords at all, weird") refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. 
We use # a heuristic: assume all version tags have a digit. The old git %%d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%%s', no digits" %% ",".join(refs-tags)) if verbose: print("likely tags: %%s" %% ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %%s" %% r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None } # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags"} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # this runs 'git' from the root of the source tree. This only gets called # if the git-archive 'subst' keywords were *not* expanded, and # _version.py hasn't already been rewritten with a short version string, # meaning we're inside a checked out source tree. if not os.path.exists(os.path.join(root, ".git")): if verbose: print("no .git in %%s" %% root) raise NotThisMethod("no .git directory") GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] # if there is a tag, this yields TAG-NUM-gHEX[-dirty] # if there are no tags, this yields HEX[-dirty] (no NUM) describe_out = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long"], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? 
pieces["error"] = ("unable to parse git-describe output: '%%s'" %% describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%%s' doesn't start with prefix '%%s'" print(fmt %% (full_tag, tag_prefix)) pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" %% (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits return pieces def plus_or_dot(pieces): if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): # now build up version string, with post-release "local version # identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty # exceptions: # 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): # TAG[.post.devDISTANCE] . No -dirty # exceptions: # 1: no tags. 0.post.devDISTANCE if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%%d" %% pieces["distance"] else: # exception #1 rendered = "0.post.dev%%d" %% pieces["distance"] return rendered def render_pep440_post(pieces): # TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that # .dev0 sorts backwards (a dirty tree will appear "older" than the # corresponding clean one), but you shouldn't be releasing software with # -dirty anyways. # exceptions: # 1: no tags. 0.postDISTANCE[.dev0] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%%s" %% pieces["short"] else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%%s" %% pieces["short"] return rendered def render_pep440_old(pieces): # TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. # exceptions: # 1: no tags. 0.postDISTANCE[.dev0] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty # --always' # exceptions: # 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty # --always -long'. The distance/hash is unconditional. # exceptions: # 1: no tags. HEX[-dirty] (note: no 'g' prefix) if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"]} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%%s'" %% style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None} def get_versions(): # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree"} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version"} ''' @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): if not keywords: raise NotThisMethod("no keywords at all, weird") refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs-tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None } # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags"} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # this runs 'git' from the root of the source tree. This only gets called # if the git-archive 'subst' keywords were *not* expanded, and # _version.py hasn't already been rewritten with a short version string, # meaning we're inside a checked out source tree. if not os.path.exists(os.path.join(root, ".git")): if verbose: print("no .git in %s" % root) raise NotThisMethod("no .git directory") GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] # if there is a tag, this yields TAG-NUM-gHEX[-dirty] # if there are no tags, this yields HEX[-dirty] (no NUM) describe_out = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long"], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. 
git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits return pieces def do_vcs_install(manifest_in, versionfile_source, ipy): GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] files = [manifest_in, versionfile_source] if ipy: files.append(ipy) try: me = __file__ if me.endswith(".pyc") or me.endswith(".pyo"): me = os.path.splitext(me)[0] + ".py" versioneer_file = os.path.relpath(me) except NameError: versioneer_file = "versioneer.py" files.append(versioneer_file) present = False try: f = open(".gitattributes", "r") for line in f.readlines(): if line.strip().startswith(versionfile_source): if "export-subst" in line.strip().split()[1:]: present = True f.close() except EnvironmentError: pass if not present: f = open(".gitattributes", "a+") f.write("%s export-subst\n" % versionfile_source) f.close() files.append(".gitattributes") run_command(GITS, ["add", "--"] + files) def versions_from_parentdir(parentdir_prefix, root, verbose): # Source tarballs conventionally unpack into a directory that includes # both the project name and a version string. dirname = os.path.basename(root) if not dirname.startswith(parentdir_prefix): if verbose: print("guessing rootdir is '%s', but '%s' doesn't start with " "prefix '%s'" % (root, dirname, parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None} SHORT_VERSION_PY = """ # This file was generated by 'versioneer.py' (0.15) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. 
import json import sys version_json = ''' %s ''' # END VERSION_JSON def get_versions(): return json.loads(version_json) """ def versions_from_file(filename): try: with open(filename) as f: contents = f.read() except EnvironmentError: raise NotThisMethod("unable to read _version.py") mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: raise NotThisMethod("no version_json in _version.py") return json.loads(mo.group(1)) def write_to_version_file(filename, versions): os.unlink(filename) contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) with open(filename, "w") as f: f.write(SHORT_VERSION_PY % contents) print("set %s to '%s'" % (filename, versions["version"])) def plus_or_dot(pieces): if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): # now build up version string, with post-release "local version # identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty # exceptions: # 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): # TAG[.post.devDISTANCE] . No -dirty # exceptions: # 1: no tags. 0.post.devDISTANCE if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): # TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that # .dev0 sorts backwards (a dirty tree will appear "older" than the # corresponding clean one), but you shouldn't be releasing software with # -dirty anyways. # exceptions: # 1: no tags. 0.postDISTANCE[.dev0] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_old(pieces): # TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. # exceptions: # 1: no tags. 0.postDISTANCE[.dev0] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty # --always' # exceptions: # 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty # --always -long'. The distance/hash is unconditional. # exceptions: # 1: no tags. HEX[-dirty] (note: no 'g' prefix) if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"]} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None} class VersioneerBadRootError(Exception): pass def get_versions(verbose=False): # returns dict with two keys: 'version' and 'full' if "versioneer" in sys.modules: # see the discussion in cmdclass.py:get_cmdclass() del sys.modules["versioneer"] root = get_root() cfg = get_config_from_root(root) assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" handlers = HANDLERS.get(cfg.VCS) assert handlers, "unrecognized VCS '%s'" % cfg.VCS verbose = verbose or cfg.verbose assert cfg.versionfile_source is not None, \ "please set versioneer.versionfile_source" assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" versionfile_abs = os.path.join(root, cfg.versionfile_source) # extract version from first of: _version.py, VCS command (e.g. 'git # describe'), parentdir. This is meant to work for developers using a # source checkout, for users of a tarball created by 'setup.py sdist', # and for users of a tarball/zipball created by 'git archive' or github's # download-from-tag feature or the equivalent in other VCSes. 
    get_keywords_f = handlers.get("get_keywords")
    from_keywords_f = handlers.get("keywords")
    if get_keywords_f and from_keywords_f:
        try:
            keywords = get_keywords_f(versionfile_abs)
            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
            if verbose:
                print("got version from expanded keyword %s" % ver)
            return ver
        except NotThisMethod:
            pass

    try:
        ver = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, ver))
        return ver
    except NotThisMethod:
        pass

    from_vcs_f = handlers.get("pieces_from_vcs")
    if from_vcs_f:
        try:
            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
            ver = render(pieces, cfg.style)
            if verbose:
                print("got version from VCS %s" % ver)
            return ver
        except NotThisMethod:
            pass

    try:
        if cfg.parentdir_prefix:
            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
            if verbose:
                print("got version from parentdir %s" % ver)
            return ver
    except NotThisMethod:
        pass

    if verbose:
        print("unable to compute version")

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None, "error": "unable to compute version"}


def get_version():
    return get_versions()["version"]


def get_cmdclass():
    if "versioneer" in sys.modules:
        del sys.modules["versioneer"]
        # this fixes the "python setup.py develop" case (also 'install' and
        # 'easy_install .'), in which subdependencies of the main project are
        # built (using setup.py bdist_egg) in the same python process. Assume
        # a main project A and a dependency B, which use different versions
        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
        # sys.modules by the time B's setup.py is executed, causing B to run
        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
        # sandbox that restores sys.modules to its pre-build state, so the
        # parent is protected against the child's "import versioneer". By
        # removing ourselves from sys.modules here, before the child build
        # happens, we protect the child from the parent's versioneer too.
        # Also see https://github.com/warner/python-versioneer/issues/52

    cmds = {}

    # we add "version" to both distutils and setuptools
    from distutils.core import Command

    class cmd_version(Command):
        description = "report generated version string"
        user_options = []
        boolean_options = []

        def initialize_options(self):
            pass

        def finalize_options(self):
            pass

        def run(self):
            vers = get_versions(verbose=True)
            print("Version: %s" % vers["version"])
            print(" full-revisionid: %s" % vers.get("full-revisionid"))
            print(" dirty: %s" % vers.get("dirty"))
            if vers["error"]:
                print(" error: %s" % vers["error"])
    cmds["version"] = cmd_version

    # we override "build_py" in both distutils and setuptools
    #
    # most invocation pathways end up running build_py:
    #  distutils/build -> build_py
    #  distutils/install -> distutils/build ->..
    #  setuptools/bdist_wheel -> distutils/install ->..
    #  setuptools/bdist_egg -> distutils/install_lib -> build_py
    #  setuptools/install -> bdist_egg ->..
    #  setuptools/develop -> ?

    from distutils.command.build_py import build_py as _build_py

    class cmd_build_py(_build_py):
        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_py.run(self)
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if cfg.versionfile_build:
                target_versionfile = os.path.join(self.build_lib,
                                                  cfg.versionfile_build)
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
    cmds["build_py"] = cmd_build_py

    if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
from cx_Freeze.dist import build_exe as _build_exe class cmd_build_exe(_build_exe): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() target_versionfile = cfg.versionfile_source print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) _build_exe.run(self) os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write(LONG % {"DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, }) cmds["build_exe"] = cmd_build_exe del cmds["build_py"] # we override different "sdist" commands for both environments if "setuptools" in sys.modules: from setuptools.command.sdist import sdist as _sdist else: from distutils.command.sdist import sdist as _sdist class cmd_sdist(_sdist): def run(self): versions = get_versions() self._versioneer_generated_versions = versions # unless we update this, the command will keep using the old # version self.distribution.metadata.version = versions["version"] return _sdist.run(self) def make_release_tree(self, base_dir, files): root = get_root() cfg = get_config_from_root(root) _sdist.make_release_tree(self, base_dir, files) # now locate _version.py in the new base_dir directory # (remembering that it may be a hardlink) and replace it with an # updated value target_versionfile = os.path.join(base_dir, cfg.versionfile_source) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, self._versioneer_generated_versions) cmds["sdist"] = cmd_sdist return cmds CONFIG_ERROR = """ setup.cfg is missing the necessary Versioneer configuration. You need a section like: [versioneer] VCS = git style = pep440 versionfile_source = src/myproject/_version.py versionfile_build = myproject/_version.py tag_prefix = "" parentdir_prefix = myproject- You will also need to edit your setup.py to use the results: import versioneer setup(version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), ...) Please read the docstring in ./versioneer.py for configuration instructions, edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. """ SAMPLE_CONFIG = """ # See the docstring in versioneer.py for instructions. Note that you must # re-run 'versioneer.py setup' after changing this section, and commit the # resulting files. 
[versioneer] #VCS = git #style = pep440 #versionfile_source = #versionfile_build = #tag_prefix = #parentdir_prefix = """ INIT_PY_SNIPPET = """ from ._version import get_versions __version__ = get_versions()['version'] del get_versions """ def do_setup(): root = get_root() try: cfg = get_config_from_root(root) except (EnvironmentError, configparser.NoSectionError, configparser.NoOptionError) as e: if isinstance(e, (EnvironmentError, configparser.NoSectionError)): print("Adding sample versioneer config to setup.cfg", file=sys.stderr) with open(os.path.join(root, "setup.cfg"), "a") as f: f.write(SAMPLE_CONFIG) print(CONFIG_ERROR, file=sys.stderr) return 1 print(" creating %s" % cfg.versionfile_source) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write(LONG % {"DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, }) ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") if os.path.exists(ipy): try: with open(ipy, "r") as f: old = f.read() except EnvironmentError: old = "" if INIT_PY_SNIPPET not in old: print(" appending to %s" % ipy) with open(ipy, "a") as f: f.write(INIT_PY_SNIPPET) else: print(" %s unmodified" % ipy) else: print(" %s doesn't exist, ok" % ipy) ipy = None # Make sure both the top-level "versioneer.py" and versionfile_source # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so # they'll be copied into source distributions. Pip won't be able to # install the package without this. manifest_in = os.path.join(root, "MANIFEST.in") simple_includes = set() try: with open(manifest_in, "r") as f: for line in f: if line.startswith("include "): for include in line.split()[1:]: simple_includes.add(include) except EnvironmentError: pass # That doesn't cover everything MANIFEST.in can do # (http://docs.python.org/2/distutils/sourcedist.html#commands), so # it might give some false negatives. Appending redundant 'include' # lines is safe, though. if "versioneer.py" not in simple_includes: print(" appending 'versioneer.py' to MANIFEST.in") with open(manifest_in, "a") as f: f.write("include versioneer.py\n") else: print(" 'versioneer.py' already in MANIFEST.in") if cfg.versionfile_source not in simple_includes: print(" appending versionfile_source ('%s') to MANIFEST.in" % cfg.versionfile_source) with open(manifest_in, "a") as f: f.write("include %s\n" % cfg.versionfile_source) else: print(" versionfile_source already in MANIFEST.in") # Make VCS-specific changes. For git, this means creating/changing # .gitattributes to mark _version.py for export-time keyword # substitution. do_vcs_install(manifest_in, cfg.versionfile_source, ipy) return 0 def scan_setup_py(): found = set() setters = False errors = 0 with open("setup.py", "r") as f: for line in f.readlines(): if "import versioneer" in line: found.add("import") if "versioneer.get_cmdclass()" in line: found.add("cmdclass") if "versioneer.get_version()" in line: found.add("get_version") if "versioneer.VCS" in line: setters = True if "versioneer.versionfile_source" in line: setters = True if len(found) != 3: print("") print("Your setup.py appears to be missing some important items") print("(but I might be wrong). 
Please make sure it has something") print("roughly like the following:") print("") print(" import versioneer") print(" setup( version=versioneer.get_version(),") print(" cmdclass=versioneer.get_cmdclass(), ...)") print("") errors += 1 if setters: print("You should remove lines like 'versioneer.VCS = ' and") print("'versioneer.versionfile_source = ' . This configuration") print("now lives in setup.cfg, and should be removed from setup.py") print("") errors += 1 return errors if __name__ == "__main__": cmd = sys.argv[1] if cmd == "setup": errors = do_setup() errors += scan_setup_py() if errors: sys.exit(1)
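
# --- Editorial illustration (not part of versioneer) ---
# A minimal sketch of what render() above produces for a hypothetical
# repository state: 3 commits past tag "1.2", short hash abc1234, with local
# changes.  The expected strings follow by inspection of the render_*
# functions above.
def _example_render_styles():
    pieces = {"closest-tag": "1.2", "distance": 3, "short": "abc1234",
              "long": "abc1234" + "0" * 33, "dirty": True, "error": None}
    for style in ("pep440", "git-describe", "git-describe-long"):
        # pep440            -> 1.2+3.gabc1234.dirty
        # git-describe      -> 1.2-3-gabc1234-dirty
        # git-describe-long -> 1.2-3-gabc1234-dirty
        print("%-17s -> %s" % (style, render(pieces, style)["version"]))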
8f063fdc8ad46d052ea56ae5d57f9e39e2b26b0d6a8161fd7efdbd552dfae1f3
# Matplotlib documentation build configuration file, created by # sphinx-quickstart on Fri May 2 12:33:25 2008. # # This file is execfile()d with the current directory set to its containing dir. # # The contents of this file are pickled, so don't put values in the namespace # that aren't pickleable (module imports are okay, they're removed automatically). # # All configuration values have a default value; values that are commented out # serve to show the default value. import os import shutil import subprocess import sys import matplotlib import sphinx # If your extensions are in another directory, add it here. If the directory # is relative to the documentation root, use os.path.abspath to make it # absolute, like shown here. sys.path.append(os.path.abspath('.')) sys.path.append('.') # General configuration # --------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.doctest', 'sphinx.ext.inheritance_diagram', 'sphinx.ext.intersphinx', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'IPython.sphinxext.ipython_console_highlighting', 'IPython.sphinxext.ipython_directive', 'numpydoc', # Needs to be loaded *after* autodoc. 'sphinx_gallery.gen_gallery', 'matplotlib.sphinxext.mathmpl', 'matplotlib.sphinxext.plot_directive', 'sphinxext.custom_roles', 'sphinxext.github', 'sphinxext.math_symbol_table', 'sphinxext.mock_gui_toolkits', 'sphinxext.skip_deprecated', 'sphinx_copybutton', ] exclude_patterns = ['api/api_changes/*', 'users/whats_new/*'] def _check_dependencies(): names = { "colorspacious": 'colorspacious', "IPython.sphinxext.ipython_console_highlighting": 'ipython', "matplotlib": 'matplotlib', "numpydoc": 'numpydoc', "PIL.Image": 'pillow', "sphinx_copybutton": 'sphinx_copybutton', "sphinx_gallery": 'sphinx_gallery', } missing = [] for name in names: try: __import__(name) except ImportError: missing.append(names[name]) if missing: raise ImportError( "The following dependencies are missing to build the " "documentation: {}".format(", ".join(missing))) if shutil.which('dot') is None: raise OSError( "No binary named dot - graphviz must be installed to build the " "documentation") _check_dependencies() # Import only after checking for dependencies. # gallery_order.py from the sphinxext folder provides the classes that # allow custom ordering of sections and subsections of the gallery import sphinxext.gallery_order as gallery_order # The following import is only necessary to monkey patch the signature later on from sphinx_gallery import gen_rst # On Linux, prevent plt.show() from emitting a non-GUI backend warning. 
os.environ.pop("DISPLAY", None) autosummary_generate = True autodoc_docstring_signature = True if sphinx.version_info < (1, 8): autodoc_default_flags = ['members', 'undoc-members'] else: autodoc_default_options = {'members': None, 'undoc-members': None} intersphinx_mapping = { 'python': ('https://docs.python.org/3', None), 'numpy': ('https://docs.scipy.org/doc/numpy/', None), 'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None), 'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None), 'Pillow': ('https://pillow.readthedocs.io/en/stable/', None), 'cycler': ('https://matplotlib.org/cycler', None), 'dateutil': ('https://dateutil.readthedocs.io/en/stable/', None), } # Sphinx gallery configuration sphinx_gallery_conf = { 'examples_dirs': ['../examples', '../tutorials'], 'filename_pattern': '^((?!sgskip).)*$', 'gallery_dirs': ['gallery', 'tutorials'], 'doc_module': ('matplotlib', 'mpl_toolkits'), 'reference_url': { 'matplotlib': None, 'numpy': 'https://docs.scipy.org/doc/numpy', 'scipy': 'https://docs.scipy.org/doc/scipy/reference', }, 'backreferences_dir': 'api/_as_gen', 'subsection_order': gallery_order.sectionorder, 'within_subsection_order': gallery_order.subsectionorder, 'min_reported_time': 1, } plot_gallery = 'True' # Monkey-patching gallery signature to include search keywords gen_rst.SPHX_GLR_SIG = """\n .. only:: html .. rst-class:: sphx-glr-signature Keywords: matplotlib code example, codex, python plot, pyplot `Gallery generated by Sphinx-Gallery <https://sphinx-gallery.readthedocs.io>`_\n""" # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # This is the default encoding, but it doesn't hurt to be explicit source_encoding = "utf-8" # The master toctree document. master_doc = 'contents' # General substitutions. try: SHA = subprocess.check_output( ['git', 'describe', '--dirty']).decode('utf-8').strip() # Catch the case where git is not installed locally, and use the versioneer # version number instead except (subprocess.CalledProcessError, FileNotFoundError): SHA = matplotlib.__version__ html_context = {'sha': SHA} project = 'Matplotlib' copyright = ('2002 - 2012 John Hunter, Darren Dale, Eric Firing, ' 'Michael Droettboom and the Matplotlib development ' 'team; 2012 - 2018 The Matplotlib development team') # The default replacements for |version| and |release|, also used in various # other places throughout the built documents. # # The short X.Y version. version = matplotlib.__version__ # The full version, including alpha/beta/rc tags. release = version # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. unused_docs = [] # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. 
pygments_style = 'sphinx' default_role = 'obj' # Plot directive configuration # ---------------------------- plot_formats = [('png', 100), ('pdf', 100)] # Github extension github_project_url = "https://github.com/matplotlib/matplotlib/" # Options for HTML output # ----------------------- # The style sheet to use for HTML and HTML Help pages. A file of that name # must exist either in Sphinx' static/ path, or in one of the custom paths # given in html_static_path. #html_style = 'matplotlib.css' html_style = 'mpl.css' # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # The name of an image file (within the static path) to place at the top of # the sidebar. #html_logo = 'logo.png' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If nonempty, this is the file name suffix for generated HTML files. The # default is ``".html"``. html_file_suffix = '.html' # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. html_last_updated_fmt = '%b %d, %Y' # Content template for the index page. html_index = 'index.html' # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Custom sidebar templates, maps page names to templates. html_sidebars = { 'index': ['searchbox.html', 'sidebar_announcement.html', 'donate_sidebar.html'], '**': ['searchbox.html', 'localtoc.html', 'relations.html', 'pagesource.html'] } # If false, no module index is generated. #html_use_modindex = True html_domain_indices = ["py-modindex"] # If true, the reST sources are included in the HTML build as _sources/<name>. #html_copy_source = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. html_use_opensearch = 'False' # Output file base name for HTML help builder. htmlhelp_basename = 'Matplotlibdoc' # Use typographic quote characters. smartquotes = False # Path to favicon html_favicon = '_static/favicon.ico' # Options for LaTeX output # ------------------------ # The paper size ('letter' or 'a4'). latex_paper_size = 'letter' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, document class [howto/manual]). latex_documents = [ ('contents', 'Matplotlib.tex', 'Matplotlib', 'John Hunter, Darren Dale, Eric Firing, Michael Droettboom and the ' 'matplotlib development team', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. latex_logo = None latex_elements = {} # Additional stuff for the LaTeX preamble. latex_elements['preamble'] = r""" % In the parameters section, place a newline after the Parameters % header. (This is stolen directly from Numpy's conf.py, since it % affects Numpy-style docstrings). \usepackage{expdlist} \let\latexdescription=\description \def\description{\latexdescription{}{} \breaklabel} \usepackage{amsmath} \usepackage{amsfonts} \usepackage{amssymb} \usepackage{txfonts} % The enumitem package provides unlimited nesting of lists and % enums. Sphinx may use this in the future, in which case this can % be removed. 
See
% https://bitbucket.org/birkenfeld/sphinx/issue/777/latex-output-too-deeply-nested
\usepackage{enumitem}
\setlistdepth{2048}
"""

latex_elements['pointsize'] = '11pt'

# Documents to append as an appendix to all manuals.
latex_appendices = []

# If false, no module index is generated.
latex_use_modindex = True

if hasattr(sphinx, 'version_info') and sphinx.version_info[:2] >= (1, 4):
    latex_toplevel_sectioning = 'part'
else:
    latex_use_parts = True

# Show both class-level docstring and __init__ docstring in class
# documentation
autoclass_content = 'both'

texinfo_documents = [
    ("contents", 'matplotlib', 'Matplotlib Documentation',
     'John Hunter@*Darren Dale@*Eric Firing@*Michael Droettboom@*'
     'The matplotlib development team',
     'Matplotlib', "Python plotting package", 'Programming',
     1),
]

# numpydoc config

numpydoc_show_class_members = False

latex_engine = 'xelatex'  # or 'lualatex'

# Use update() rather than reassignment so the 'preamble' and 'pointsize'
# entries set above are preserved.
latex_elements.update({
    'babel': r'\usepackage{babel}',
    'fontpkg': r'\setmainfont{DejaVu Serif}',
})


def setup(app):
    if any(st in version for st in ('post', 'alpha', 'beta')):
        bld_type = 'dev'
    else:
        bld_type = 'rel'
    app.add_config_value('releaselevel', bld_type, 'env')
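
# --- Editorial sketch (not part of the Sphinx config) ---
# The release-level classification in setup() can be checked in isolation;
# the version strings below are hypothetical examples.
def _example_releaselevel(v):
    # Mirrors the logic in setup() above.
    return 'dev' if any(st in v for st in ('post', 'alpha', 'beta')) else 'rel'

# _example_releaselevel('3.1.0')              -> 'rel'
# _example_releaselevel('3.1.0.post12+gabc')  -> 'dev'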
bbf072dc21e56beac1320e7b7a01572316773ff27acc4912bbf74da41707e69c
from matplotlib.pylab import * import matplotlib.pylab __doc__ = matplotlib.pylab.__doc__
b849cd4cd3a3a6fc7e2abb4d6a255bd7dff8225c6a1422b35c3dbd542ef952ff
#!/usr/bin/env python import gc from io import BytesIO import tracemalloc try: import psutil except ImportError: raise ImportError("This script requires psutil") import numpy as np def run_memleak_test(bench, iterations, report): tracemalloc.start() starti = min(50, iterations // 2) endi = iterations malloc_arr = np.empty(endi, dtype=np.int64) rss_arr = np.empty(endi, dtype=np.int64) rss_peaks = np.empty(endi, dtype=np.int64) nobjs_arr = np.empty(endi, dtype=np.int64) garbage_arr = np.empty(endi, dtype=np.int64) open_files_arr = np.empty(endi, dtype=np.int64) rss_peak = 0 p = psutil.Process() for i in range(endi): bench() gc.collect() rss = p.memory_info().rss malloc, peak = tracemalloc.get_traced_memory() nobjs = len(gc.get_objects()) garbage = len(gc.garbage) open_files = len(p.open_files()) print("{0: 4d}: pymalloc {1: 10d}, rss {2: 10d}, nobjs {3: 10d}, " "garbage {4: 4d}, files: {5: 4d}".format( i, malloc, rss, nobjs, garbage, open_files)) malloc_arr[i] = malloc rss_arr[i] = rss if rss > rss_peak: rss_peak = rss rss_peaks[i] = rss_peak nobjs_arr[i] = nobjs garbage_arr[i] = garbage open_files_arr[i] = open_files print('Average memory consumed per loop: {:1.4f} bytes\n'.format( np.sum(rss_peaks[starti+1:] - rss_peaks[starti:-1]) / (endi - starti))) from matplotlib import pyplot as plt fig, (ax1, ax2, ax3) = plt.subplots(3) ax1b = ax1.twinx() ax1.plot(malloc_arr, 'r') ax1b.plot(rss_arr, 'b') ax1.set_ylabel('pymalloc', color='r') ax1b.set_ylabel('rss', color='b') ax2b = ax2.twinx() ax2.plot(nobjs_arr, 'r') ax2b.plot(garbage_arr, 'b') ax2.set_ylabel('total objects', color='r') ax2b.set_ylabel('garbage objects', color='b') ax3.plot(open_files_arr) ax3.set_ylabel('open file handles') if not report.endswith('.pdf'): report = report + '.pdf' fig.tight_layout() fig.savefig(report, format='pdf') class MemleakTest(object): def __init__(self, empty): self.empty = empty def __call__(self): import matplotlib.pyplot as plt fig = plt.figure(1) if not self.empty: t1 = np.arange(0.0, 2.0, 0.01) y1 = np.sin(2 * np.pi * t1) y2 = np.random.rand(len(t1)) X = np.random.rand(50, 50) ax = fig.add_subplot(221) ax.plot(t1, y1, '-') ax.plot(t1, y2, 's') ax = fig.add_subplot(222) ax.imshow(X) ax = fig.add_subplot(223) ax.scatter(np.random.rand(50), np.random.rand(50), s=100 * np.random.rand(50), c=np.random.rand(50)) ax = fig.add_subplot(224) ax.pcolor(10 * np.random.rand(50, 50)) fig.savefig(BytesIO(), dpi=75) plt.close(1) if __name__ == '__main__': import argparse parser = argparse.ArgumentParser('Run memory leak tests') parser.add_argument('backend', type=str, nargs=1, help='backend to test') parser.add_argument('iterations', type=int, nargs=1, help='number of iterations') parser.add_argument('report', type=str, nargs=1, help='filename to save report') parser.add_argument('--empty', action='store_true', help="Don't plot any content, just test creating " "and destroying figures") parser.add_argument('--interactive', action='store_true', help="Turn on interactive mode to actually open " "windows. Only works with some GUI backends.") args = parser.parse_args() import matplotlib matplotlib.use(args.backend[0]) if args.interactive: import matplotlib.pyplot as plt plt.ion() run_memleak_test( MemleakTest(args.empty), args.iterations[0], args.report[0])
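
# --- Editorial usage note (not part of the script) ---
# Example invocations, assuming this file is saved as tools/memleak.py; the
# report filenames are illustrative and ".pdf" is appended automatically if
# missing:
#
#     python tools/memleak.py agg 500 agg-report
#     python tools/memleak.py --empty agg 200 agg-empty-report
#
# The first runs 500 full draw/save cycles on the Agg backend; the second
# only creates and destroys empty figures, which isolates figure-lifecycle
# leaks from artist/renderer leaks.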
24ff9d4badc8e295488c267bd95efab6d1066aea9059e1c17b1855966f2dfc71
"""Functions for Github API requests.""" import getpass import json import os import re import sys import requests try: import requests_cache except ImportError: print("no cache", file=sys.stderr) else: requests_cache.install_cache("gh_api", expire_after=3600) # Keyring stores passwords by a 'username', but we're not storing a username and # password fake_username = 'ipython_tools' class Obj(dict): """Dictionary with attribute access to names.""" def __getattr__(self, name): try: return self[name] except KeyError: raise AttributeError(name) def __setattr__(self, name, val): self[name] = val token = None def get_auth_token(): global token if token is not None: return token try: with open(os.path.join(os.path.expanduser('~'), '.ghoauth')) as f: token, = f return token except Exception: pass import keyring token = keyring.get_password('github', fake_username) if token is not None: return token print("Please enter your github username and password. These are not " "stored, only used to get an oAuth token. You can revoke this at " "any time on Github.") user = input("Username: ") pw = getpass.getpass("Password: ") auth_request = { "scopes": [ "public_repo", "gist" ], "note": "IPython tools", "note_url": "https://github.com/ipython/ipython/tree/master/tools", } response = requests.post('https://api.github.com/authorizations', auth=(user, pw), data=json.dumps(auth_request)) response.raise_for_status() token = json.loads(response.text)['token'] keyring.set_password('github', fake_username, token) return token def make_auth_header(): return {'Authorization': 'token ' + get_auth_token()} def post_issue_comment(project, num, body): url = 'https://api.github.com/repos/{project}/issues/{num}/comments'.format(project=project, num=num) payload = json.dumps({'body': body}) requests.post(url, data=payload, headers=make_auth_header()) def post_gist(content, description='', filename='file', auth=False): """Post some text to a Gist, and return the URL.""" post_data = json.dumps({ "description": description, "public": True, "files": { filename: { "content": content } } }).encode('utf-8') headers = make_auth_header() if auth else {} response = requests.post("https://api.github.com/gists", data=post_data, headers=headers) response.raise_for_status() response_data = json.loads(response.text) return response_data['html_url'] def get_pull_request(project, num, auth=False): """get pull request info by number """ url = "https://api.github.com/repos/{project}/pulls/{num}".format(project=project, num=num) if auth: header = make_auth_header() else: header = None print("fetching %s" % url, file=sys.stderr) response = requests.get(url, headers=header) response.raise_for_status() return json.loads(response.text, object_hook=Obj) def get_pull_request_files(project, num, auth=False): """get list of files in a pull request""" url = "https://api.github.com/repos/{project}/pulls/{num}/files".format(project=project, num=num) if auth: header = make_auth_header() else: header = None return get_paged_request(url, headers=header) element_pat = re.compile(r'<(.+?)>') rel_pat = re.compile(r'rel=[\'"](\w+)[\'"]') def get_paged_request(url, headers=None, **params): """get a full list, handling APIv3's paging""" results = [] params.setdefault("per_page", 100) while True: if '?' 
in url: params = None print("fetching %s" % url, file=sys.stderr) else: print("fetching %s with %s" % (url, params), file=sys.stderr) response = requests.get(url, headers=headers, params=params) response.raise_for_status() results.extend(response.json()) if 'next' in response.links: url = response.links['next']['url'] else: break return results def get_pulls_list(project, auth=False, **params): """get pull request list""" params.setdefault("state", "closed") url = "https://api.github.com/repos/{project}/pulls".format(project=project) if auth: headers = make_auth_header() else: headers = None pages = get_paged_request(url, headers=headers, **params) return pages def get_issues_list(project, auth=False, **params): """get issues list""" params.setdefault("state", "closed") url = "https://api.github.com/repos/{project}/issues".format(project=project) if auth: headers = make_auth_header() else: headers = None pages = get_paged_request(url, headers=headers, **params) return pages def get_milestones(project, auth=False, **params): params.setdefault('state', 'all') url = "https://api.github.com/repos/{project}/milestones".format(project=project) if auth: headers = make_auth_header() else: headers = None milestones = get_paged_request(url, headers=headers, **params) return milestones def get_milestone_id(project, milestone, auth=False, **params): milestones = get_milestones(project, auth=auth, **params) for mstone in milestones: if mstone['title'] == milestone: return mstone['number'] raise ValueError("milestone %s not found" % milestone) def is_pull_request(issue): """Return True if the given issue is a pull request.""" return bool(issue.get('pull_request', {}).get('html_url', None)) def get_authors(pr): print("getting authors for #%i" % pr['number'], file=sys.stderr) h = make_auth_header() r = requests.get(pr['commits_url'], headers=h) r.raise_for_status() commits = r.json() authors = [] for commit in commits: author = commit['commit']['author'] authors.append("%s <%s>" % (author['name'], author['email'])) return authors # encode_multipart_formdata is from urllib3.filepost # The only change is to iter_fields, to enforce S3's required key ordering def iter_fields(fields): fields = fields.copy() for key in [ 'key', 'acl', 'Filename', 'success_action_status', 'AWSAccessKeyId', 'Policy', 'Signature', 'Content-Type', 'file']: yield key, fields.pop(key) yield from fields.items() def encode_multipart_formdata(fields, boundary=None): """ Encode a dictionary of ``fields`` using the multipart/form-data mime format. :param fields: Dictionary of fields or list of (key, value) field tuples. The key is treated as the field name, and the value as the body of the form-data bytes. If the value is a tuple of two elements, then the first element is treated as the filename of the form-data section. Field names and filenames must be unicode. :param boundary: If not specified, then a random boundary will be generated using :func:`mimetools.choose_boundary`. 
""" # copy requests imports in here: from io import BytesIO from requests.packages.urllib3.filepost import ( choose_boundary, writer, b, get_content_type ) body = BytesIO() if boundary is None: boundary = choose_boundary() for fieldname, value in iter_fields(fields): body.write(b('--%s\r\n' % (boundary))) if isinstance(value, tuple): filename, data = value writer(body).write('Content-Disposition: form-data; name="%s"; ' 'filename="%s"\r\n' % (fieldname, filename)) body.write(b('Content-Type: %s\r\n\r\n' % (get_content_type(filename)))) else: data = value writer(body).write('Content-Disposition: form-data; name="%s"\r\n' % (fieldname)) body.write(b'Content-Type: text/plain\r\n\r\n') if isinstance(data, int): data = str(data) # Backwards compatibility if isinstance(data, str): writer(body).write(data) else: body.write(data) body.write(b'\r\n') body.write(b('--%s--\r\n' % (boundary))) content_type = b('multipart/form-data; boundary=%s' % boundary) return body.getvalue(), content_type def post_download(project, filename, name=None, description=""): """Upload a file to the GitHub downloads area""" if name is None: name = os.path.basename(filename) with open(filename, 'rb') as f: filedata = f.read() url = "https://api.github.com/repos/{project}/downloads".format(project=project) payload = json.dumps(dict(name=name, size=len(filedata), description=description)) response = requests.post(url, data=payload, headers=make_auth_header()) response.raise_for_status() reply = json.loads(response.content) s3_url = reply['s3_url'] fields = dict( key=reply['path'], acl=reply['acl'], success_action_status=201, Filename=reply['name'], AWSAccessKeyId=reply['accesskeyid'], Policy=reply['policy'], Signature=reply['signature'], file=(reply['name'], filedata), ) fields['Content-Type'] = reply['mime_type'] data, content_type = encode_multipart_formdata(fields) s3r = requests.post(s3_url, data=data, headers={'Content-Type': content_type}) return s3r
53bb7b2fc84628256b8e0c19c734aa22a2e070864223f833abcfde061f545a95
#!/usr/bin/env python # # This builds a html page of all images from the image comparison tests # and opens that page in the browser. # # $ python tools/visualize_tests.py # import argparse import os from collections import defaultdict html_template = """<html><head><style media="screen" type="text/css"> img{{ width:100%; max-width:800px; }} </style> </head><body> {failed} {body} </body></html> """ subdir_template = """<h2>{subdir}</h2><table> <thead><td>name</td><td>actual</td><td>expected</td><td>diff</td></thead> {rows} </table> """ failed_template = """<h2>Only Failed</h2><table> <thead><td>name</td><td>actual</td><td>expected</td><td>diff</td></thead> {rows} </table> """ row_template = ('<tr>' '<td>{0}{1}</td>' '<td>{2}</td>' '<td><a href="{3}"><img src="{3}"></a></td>' '<td>{4}</td>' '</tr>') linked_image_template = '<a href="{0}"><img src="{0}"></a>' def run(show_browser=True): """ Build a website for visual comparison """ image_dir = "result_images" _subdirs = (name for name in os.listdir(image_dir) if os.path.isdir(os.path.join(image_dir, name))) failed_rows = [] body_sections = [] for subdir in sorted(_subdirs): if subdir == "test_compare_images": # These are the images which test the image comparison functions. continue pictures = defaultdict(dict) for file in os.listdir(os.path.join(image_dir, subdir)): if os.path.isdir(os.path.join(image_dir, subdir, file)): continue fn, fext = os.path.splitext(file) if fext != ".png": continue # Always use / for URLs. if "-failed-diff" in fn: pictures[fn[:-12]]["f"] = "/".join((subdir, file)) elif "-expected" in fn: pictures[fn[:-9]]["e"] = "/".join((subdir, file)) else: pictures[fn]["c"] = "/".join((subdir, file)) subdir_rows = [] for name, test in sorted(pictures.items()): expected_image = test.get('e', '') actual_image = test.get('c', '') if 'f' in test: # A real failure in the image generation, resulting in # different images. status = " (failed)" failed = '<a href="{0}">diff</a>'.format(test['f']) current = linked_image_template.format(actual_image) failed_rows.append(row_template.format(name, "", current, expected_image, failed)) elif 'c' not in test: # A failure in the test, resulting in no current image status = " (failed)" failed = '--' current = '(Failure in test, no image produced)' failed_rows.append(row_template.format(name, "", current, expected_image, failed)) else: status = " (passed)" failed = '--' current = linked_image_template.format(actual_image) subdir_rows.append(row_template.format(name, status, current, expected_image, failed)) body_sections.append( subdir_template.format(subdir=subdir, rows='\n'.join(subdir_rows))) if failed_rows: failed = failed_template.format(rows='\n'.join(failed_rows)) else: failed = '' body = ''.join(body_sections) html = html_template.format(failed=failed, body=body) index = os.path.join(image_dir, "index.html") with open(index, "w") as f: f.write(html) show_message = not show_browser if show_browser: try: import webbrowser webbrowser.open(index) except Exception: show_message = True if show_message: print("Open {} in a browser for a visual comparison.".format(index)) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--no-browser', action='store_true', help="Don't show browser after creating index page.") args = parser.parse_args() run(show_browser=not args.no_browser)
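
# --- Editorial usage note (not part of the script) ---
# Typical workflow (the pytest invocation is illustrative): run the image
# tests so result_images/ is populated, then build the page without opening
# a browser:
#
#     python -m pytest lib/matplotlib/tests
#     python tools/visualize_tests.py --no-browser
#
# The page is written to result_images/index.html.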
96b312491dc29d478699205cbfc55156cb3d74013ba3e44a8e65be801953c8c8
""" Script to autogenerate pyplot wrappers. When this script is run, the current contents of pyplot are split into generatable and non-generatable content (via the magic header :attr:`PYPLOT_MAGIC_HEADER`) and the generatable content is overwritten. Hence, the non-generatable content should be edited in the pyplot.py file itself, whereas the generatable content must be edited via templates in this file. """ # Although it is possible to dynamically generate the pyplot functions at # runtime with the proper signatures, a static pyplot.py is simpler for static # analysis tools to parse. import inspect from inspect import Parameter from pathlib import Path import textwrap # This line imports the installed copy of matplotlib, and not the local copy. import numpy as np from matplotlib import cbook, mlab from matplotlib.axes import Axes from matplotlib.figure import Figure # This is the magic line that must exist in pyplot, after which the boilerplate # content will be appended. PYPLOT_MAGIC_HEADER = ( "################# REMAINING CONTENT GENERATED BY boilerplate.py " "##############\n") AUTOGEN_MSG = """ # Autogenerated by boilerplate.py. Do not edit as changes will be lost.""" AXES_CMAPPABLE_METHOD_TEMPLATE = AUTOGEN_MSG + """ @docstring.copy(Axes.{called_name}) def {name}{signature}: __ret = gca().{called_name}{call} {sci_command} return __ret """ AXES_METHOD_TEMPLATE = AUTOGEN_MSG + """ @docstring.copy(Axes.{called_name}) def {name}{signature}: return gca().{called_name}{call} """ FIGURE_METHOD_TEMPLATE = AUTOGEN_MSG + """ @docstring.copy(Figure.{called_name}) def {name}{signature}: return gcf().{called_name}{call} """ # Used for colormap functions CMAP_TEMPLATE = AUTOGEN_MSG + ''' def {name}(): """ Set the colormap to "{name}". This changes the default colormap as well as the colormap of the current image if there is one. See ``help(colormaps)`` for more information. """ set_cmap("{name}") ''' class value_formatter: """ Format function default values as needed for inspect.formatargspec. The interesting part is a hard-coded list of functions used as defaults in pyplot methods. """ def __init__(self, value): if value is mlab.detrend_none: self._repr = "mlab.detrend_none" elif value is mlab.window_hanning: self._repr = "mlab.window_hanning" elif value is np.mean: self._repr = "np.mean" elif value is cbook.deprecation._deprecated_parameter: self._repr = "cbook.deprecation._deprecated_parameter" else: self._repr = repr(value) def __repr__(self): return self._repr def generate_function(name, called_fullname, template, **kwargs): """ Create a wrapper function *pyplot_name* calling *call_name*. Parameters ---------- name : str The function to be created. called_fullname : str The method to be wrapped in the format ``"Class.method"``. template : str The template to be used. The template must contain {}-style format placeholders. The following placeholders are filled in: - name: The function name. - signature: The function signature (including parentheses). - called_name: The name of the called function. - call: Parameters passed to *called_name* (including parentheses). **kwargs Additional parameters are passed to ``template.format()``. """ text_wrapper = textwrap.TextWrapper( break_long_words=False, width=70, initial_indent=' ' * 8, subsequent_indent=' ' * 8) # Get signature of wrapped function. class_name, called_name = called_fullname.split('.') class_ = {'Axes': Axes, 'Figure': Figure}[class_name] signature = inspect.signature(getattr(class_, called_name)) # Replace self argument. 
params = list(signature.parameters.values())[1:] signature = str(signature.replace(parameters=[ param.replace(default=value_formatter(param.default)) if param.default is not param.empty else param for param in params])) if len('def ' + name + signature) >= 80: # Move opening parenthesis before newline. signature = '(\n' + text_wrapper.fill(signature).replace('(', '', 1) # How to call the wrapped function. call = '(' + ', '.join(( # Pass "intended-as-positional" parameters positionally to avoid # forcing third-party subclasses to reproduce the parameter names. '{0}' if param.kind in [ Parameter.POSITIONAL_OR_KEYWORD] and param.default is Parameter.empty else # Only pass the data kwarg if it is actually set, to avoid forcing # third-party subclasses to support it. '**({{"data": data}} if data is not None else {{}})' if param.name == "data" else '{0}={0}' if param.kind in [ Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY] else '*{0}' if param.kind is Parameter.VAR_POSITIONAL else '**{0}' if param.kind is Parameter.VAR_KEYWORD else # Intentionally crash for Parameter.POSITIONAL_ONLY. None).format(param.name) for param in params) + ')' MAX_CALL_PREFIX = 18 # len(' __ret = gca().') if MAX_CALL_PREFIX + max(len(name), len(called_name)) + len(call) >= 80: call = '(\n' + text_wrapper.fill(call[1:]) # Bail out in case of name collision. for reserved in ('gca', 'gci', 'gcf', '__ret'): if reserved in params: raise ValueError( f'Method {called_fullname} has kwarg named {reserved}') return template.format( name=name, called_name=called_name, signature=signature, call=call, **kwargs) def boilerplate_gen(): """Generator of lines for the automated part of pyplot.""" _figure_commands = ( 'figimage', 'figtext:text', 'ginput', 'suptitle', 'waitforbuttonpress', ) # These methods are all simple wrappers of Axes methods by the same name. 
    _axes_commands = (
        'acorr', 'angle_spectrum', 'annotate', 'arrow', 'autoscale',
        'axhline', 'axhspan', 'axis', 'axvline', 'axvspan',
        'bar', 'barbs', 'barh', 'boxplot', 'broken_barh',
        'cla', 'clabel', 'cohere', 'contour', 'contourf', 'csd',
        'errorbar', 'eventplot',
        'fill', 'fill_between', 'fill_betweenx',
        'grid',
        'hexbin', 'hist', 'hist2d', 'hlines',
        'imshow',
        'legend', 'locator_params', 'loglog',
        'magnitude_spectrum', 'margins', 'minorticks_off', 'minorticks_on',
        'pcolor', 'pcolormesh', 'phase_spectrum', 'pie', 'plot', 'plot_date',
        'psd',
        'quiver', 'quiverkey',
        'scatter', 'semilogx', 'semilogy', 'specgram', 'spy', 'stackplot',
        'stem', 'step', 'streamplot',
        'table', 'text', 'tick_params', 'ticklabel_format',
        'tricontour', 'tricontourf', 'tripcolor', 'triplot',
        'violinplot', 'vlines',
        'xcorr',
        # pyplot name : real name
        'sci:_sci',
        'title:set_title',
        'xlabel:set_xlabel',
        'ylabel:set_ylabel',
        'xscale:set_xscale',
        'yscale:set_yscale',
    )

    cmappable = {
        'contour': 'if __ret._A is not None: sci(__ret)  # noqa',
        'contourf': 'if __ret._A is not None: sci(__ret)  # noqa',
        'hexbin': 'sci(__ret)',
        'scatter': 'sci(__ret)',
        'pcolor': 'sci(__ret)',
        'pcolormesh': 'sci(__ret)',
        'hist2d': 'sci(__ret[-1])',
        'imshow': 'sci(__ret)',
        'spy': 'if isinstance(__ret, cm.ScalarMappable): sci(__ret)  # noqa',
        'quiver': 'sci(__ret)',
        'specgram': 'sci(__ret[-1])',
        'streamplot': 'sci(__ret.lines)',
        'tricontour': 'if __ret._A is not None: sci(__ret)  # noqa',
        'tricontourf': 'if __ret._A is not None: sci(__ret)  # noqa',
        'tripcolor': 'sci(__ret)',
    }

    for spec in _figure_commands:
        if ':' in spec:
            name, called_name = spec.split(':')
        else:
            name = called_name = spec
        yield generate_function(name, f'Figure.{called_name}',
                                FIGURE_METHOD_TEMPLATE)

    for spec in _axes_commands:
        if ':' in spec:
            name, called_name = spec.split(':')
        else:
            name = called_name = spec
        template = (AXES_CMAPPABLE_METHOD_TEMPLATE if name in cmappable
                    else AXES_METHOD_TEMPLATE)
        yield generate_function(name, f'Axes.{called_name}', template,
                                sci_command=cmappable.get(name))

    cmaps = (
        'autumn', 'bone', 'cool', 'copper', 'flag', 'gray', 'hot', 'hsv',
        'jet', 'pink', 'prism', 'spring', 'summer', 'winter', 'magma',
        'inferno', 'plasma', 'viridis', "nipy_spectral"
    )
    # add all the colormaps (autumn, hsv, ....)
    for name in cmaps:
        yield CMAP_TEMPLATE.format(name=name)

    yield '\n'
    yield '_setup_pyplot_info_docstrings()'


def build_pyplot():
    pyplot_path = Path(__file__).parent / "../lib/matplotlib/pyplot.py"

    pyplot_orig = pyplot_path.read_text().splitlines(keepends=True)

    try:
        pyplot_orig = pyplot_orig[:pyplot_orig.index(PYPLOT_MAGIC_HEADER) + 1]
    except ValueError:
        # list.index() raises ValueError, not IndexError, when the magic
        # header is missing, so that is the exception to convert here.
        raise ValueError('The pyplot.py file *must* have the exact line: %s'
                         % PYPLOT_MAGIC_HEADER)

    with pyplot_path.open('w') as pyplot:
        pyplot.writelines(pyplot_orig)
        pyplot.writelines(boilerplate_gen())
        pyplot.write('\n')


if __name__ == '__main__':
    # Write the matplotlib.pyplot file.
    build_pyplot()
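
# --- Editorial illustration (not emitted by this script) ---
# For orientation, a cmappable Axes wrapper generated from
# AXES_CMAPPABLE_METHOD_TEMPLATE looks roughly like this; the real signature
# is copied from the installed matplotlib at generation time, so the
# parameters shown here are abbreviated with "...":
#
#     @docstring.copy(Axes.hexbin)
#     def hexbin(x, y, C=None, gridsize=100, ..., *, data=None, **kwargs):
#         __ret = gca().hexbin(
#             x, y, C=C, gridsize=gridsize, ...,
#             **({"data": data} if data is not None else {}), **kwargs)
#         sci(__ret)
#         return __ret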
75fa0b9bdbc812aeec86bfcce116d0bc2b9d8dc4f6785ae33f336e06f0f935e0
#!/usr/bin/env python
"""Simple tools to query github.com and gather stats about issues.

To generate a report for IPython 2.0, run:

    python github_stats.py --milestone 2.0 --since-tag rel-1.0.0
"""

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

import sys

from argparse import ArgumentParser
from datetime import datetime, timedelta
from subprocess import check_output

from gh_api import (
    get_paged_request, make_auth_header, get_pull_request, is_pull_request,
    get_milestone_id, get_issues_list, get_authors,
)

#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------

ISO8601 = "%Y-%m-%dT%H:%M:%SZ"
PER_PAGE = 100

#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------


def round_hour(dt):
    return dt.replace(minute=0, second=0, microsecond=0)


def _parse_datetime(s):
    """Parse dates in the format returned by the Github API."""
    if s:
        return datetime.strptime(s, ISO8601)
    else:
        return datetime.fromtimestamp(0)


def issues2dict(issues):
    """Convert a list of issues to a dict, keyed by issue number."""
    idict = {}
    for i in issues:
        idict[i['number']] = i
    return idict


def split_pulls(all_issues, project="matplotlib/matplotlib"):
    """split a list of closed issues into non-PR Issues and Pull Requests"""
    pulls = []
    issues = []
    for i in all_issues:
        if is_pull_request(i):
            pull = get_pull_request(project, i['number'], auth=True)
            pulls.append(pull)
        else:
            issues.append(i)
    return issues, pulls


def issues_closed_since(period=timedelta(days=365),
                        project="matplotlib/matplotlib", pulls=False):
    """Get all issues closed since a particular point in time.

    period can either be a datetime object, or a timedelta object. In the
    latter case, it is used as a time before the present.
    """

    which = 'pulls' if pulls else 'issues'

    if isinstance(period, timedelta):
        since = round_hour(datetime.utcnow() - period)
    else:
        since = period
    url = "https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i" % (project, which, since.strftime(ISO8601), PER_PAGE)
    allclosed = get_paged_request(url, headers=make_auth_header())

    filtered = [i for i in allclosed
                if _parse_datetime(i['closed_at']) > since]
    if pulls:
        filtered = [i for i in filtered
                    if _parse_datetime(i['merged_at']) > since]
        # filter out PRs not against master (backports)
        filtered = [i for i in filtered if i['base']['ref'] == 'master']
    else:
        filtered = [i for i in filtered if not is_pull_request(i)]

    return filtered


def sorted_by_field(issues, field='closed_at', reverse=False):
    """Return a list of issues sorted by closing date."""
    return sorted(issues, key=lambda i: i[field], reverse=reverse)


def report(issues, show_urls=False):
    """Summary report about a list of issues, printing number and title."""
    if show_urls:
        for i in issues:
            role = 'ghpull' if 'merged_at' in i else 'ghissue'
            print('* :%s:`%d`: %s' % (role, i['number'],
                                      i['title'].replace('`', '``')))
    else:
        for i in issues:
            print('* %d: %s' % (i['number'], i['title'].replace('`', '``')))

#-----------------------------------------------------------------------------
# Main script
#-----------------------------------------------------------------------------

if __name__ == "__main__":
    # Whether to add reST urls for all issues in printout.
show_urls = True parser = ArgumentParser() parser.add_argument('--since-tag', type=str, help="The git tag to use for the starting point (typically the last major release)." ) parser.add_argument('--milestone', type=str, help="The GitHub milestone to use for filtering issues [optional]." ) parser.add_argument('--days', type=int, help="The number of days of data to summarize (use this or --since-tag)." ) parser.add_argument('--project', type=str, default="matplotlib/matplotlib", help="The project to summarize." ) parser.add_argument('--links', action='store_true', default=False, help="Include links to all closed Issues and PRs in the output." ) opts = parser.parse_args() tag = opts.since_tag # set `since` from days or git tag if opts.days: since = datetime.utcnow() - timedelta(days=opts.days) else: if not tag: tag = check_output(['git', 'describe', '--abbrev=0']).strip().decode('utf8') cmd = ['git', 'log', '-1', '--format=%ai', tag] tagday, tz = check_output(cmd).strip().decode('utf8').rsplit(' ', 1) since = datetime.strptime(tagday, "%Y-%m-%d %H:%M:%S") h = int(tz[1:3]) m = int(tz[3:]) td = timedelta(hours=h, minutes=m) if tz[0] == '-': since += td else: since -= td since = round_hour(since) milestone = opts.milestone project = opts.project print("fetching GitHub stats since %s (tag: %s, milestone: %s)" % (since, tag, milestone), file=sys.stderr) if milestone: milestone_id = get_milestone_id(project=project, milestone=milestone, auth=True) issues_and_pulls = get_issues_list(project=project, milestone=milestone_id, state='closed', auth=True, ) issues, pulls = split_pulls(issues_and_pulls, project=project) else: issues = issues_closed_since(since, project=project, pulls=False) pulls = issues_closed_since(since, project=project, pulls=True) # For regular reports, it's nice to show them in reverse chronological order issues = sorted_by_field(issues, reverse=True) pulls = sorted_by_field(pulls, reverse=True) n_issues, n_pulls = map(len, (issues, pulls)) n_total = n_issues + n_pulls # Print summary report we can directly include into release notes. print('.. _github-stats:') print() print('GitHub Stats') print('============') print() since_day = since.strftime("%Y/%m/%d") today = datetime.today().strftime("%Y/%m/%d") print("GitHub stats for %s - %s (tag: %s)" % (since_day, today, tag)) print() print("These lists are automatically generated, and may be incomplete or contain duplicates.") print() ncommits = 0 all_authors = [] if tag: # print git info, in addition to GitHub info: since_tag = tag+'..' cmd = ['git', 'log', '--oneline', since_tag] ncommits += len(check_output(cmd).splitlines()) author_cmd = ['git', 'log', '--use-mailmap', "--format=* %aN", since_tag] all_authors.extend(check_output(author_cmd).decode('utf-8', 'replace').splitlines()) pr_authors = [] for pr in pulls: pr_authors.extend(get_authors(pr)) ncommits = len(pr_authors) + ncommits - len(pulls) author_cmd = ['git', 'check-mailmap'] + pr_authors with_email = check_output(author_cmd).decode('utf-8', 'replace').splitlines() all_authors.extend(['* ' + a.split(' <')[0] for a in with_email]) unique_authors = sorted(set(all_authors), key=lambda s: s.lower()) print("We closed %d issues and merged %d pull requests." % (n_issues, n_pulls)) if milestone: print("The full list can be seen `on GitHub <https://github.com/%s/milestone/%s>`__" % (project, milestone) ) print() print("The following %i authors contributed %i commits." 
% (len(unique_authors), ncommits)) print() print('\n'.join(unique_authors)) if opts.links: print() print("GitHub issues and pull requests:") print() print('Pull Requests (%d):\n' % n_pulls) report(pulls, show_urls) print() print('Issues (%d):\n' % n_issues) report(issues, show_urls) print() print() print("""Previous GitHub Stats --------------------- .. toctree:: :maxdepth: 1 :glob: github_stats_* """)
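
# --- Editorial usage note (not part of the script) ---
# Example invocations (the tag and milestone names are illustrative):
#
#     python tools/github_stats.py --since-tag v3.0.0 --links > stats.rst
#     python tools/github_stats.py --milestone v3.1.0
#     python tools/github_stats.py --days 30 --project matplotlib/matplotlib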
e1ff0647c0bfd790cd1eae3ebcdb977fa5c905072760522d4aa2854a8c7fdb95
#!/usr/bin/env python
"""
Generates the toolbar icon images from the FontAwesome font.

First download and extract FontAwesome from http://fontawesome.io/.
Place the FontAwesome.otf file in the tools directory (same directory
as this script).

Generates SVG, PDF in one size (since they are vectors) and PNG, PPM and GIF
in 24x24 and 48x48.
"""

import matplotlib
matplotlib.use('agg')  # noqa

import os
import sys

from PIL import Image

import numpy as np

import matplotlib.pyplot as plt
from matplotlib.font_manager import FontProperties
from matplotlib import cm
from matplotlib import patheffects
matplotlib.rcdefaults()

matplotlib.rcParams['svg.fonttype'] = 'path'
matplotlib.rcParams['pdf.fonttype'] = 3
matplotlib.rcParams['pdf.compression'] = 9

IMAGES_ROOT = os.path.join(
    os.path.dirname(__file__), '..', 'lib', 'matplotlib', 'mpl-data', 'images')
FONT_PATH = os.path.join(
    os.path.dirname(__file__), 'FontAwesome.otf')


def save_icon(fig, name):
    fig.savefig(os.path.join(IMAGES_ROOT, name + '.svg'))
    fig.savefig(os.path.join(IMAGES_ROOT, name + '.pdf'))
    for dpi, suffix in [(24, ''), (48, '_large')]:
        fig.savefig(os.path.join(IMAGES_ROOT, name + suffix + '.png'),
                    dpi=dpi)
        img = Image.open(os.path.join(IMAGES_ROOT, name + suffix + '.png'))
        img.save(os.path.join(IMAGES_ROOT, name + suffix + '.ppm'))


def make_icon(fontfile, ccode):
    prop = FontProperties(fname=fontfile, size=68)

    fig = plt.figure(figsize=(1, 1))
    fig.patch.set_alpha(0.0)
    text = fig.text(0.5, 0.48, chr(ccode), ha='center', va='center',
                    fontproperties=prop)
    text.set_path_effects([patheffects.Normal()])

    return fig


def make_matplotlib_icon():
    fig = plt.figure(figsize=(1, 1))
    fig.patch.set_alpha(0.0)
    ax = fig.add_axes([0.025, 0.025, 0.95, 0.95], projection='polar')
    ax.set_axisbelow(True)

    N = 7
    arc = 2 * np.pi
    theta = np.arange(0, arc, arc / N)
    radii = 10 * np.array([0.2, 0.6, 0.8, 0.7, 0.4, 0.5, 0.8])
    width = np.pi / 4 * np.array([0.4, 0.4, 0.6, 0.8, 0.2, 0.5, 0.3])
    bars = ax.bar(theta, radii, width=width, bottom=0.0, linewidth=1,
                  edgecolor='k')

    for r, bar in zip(radii, bars):
        bar.set_facecolor(cm.jet(r / 10.))

    ax.tick_params(labelleft=False, labelright=False,
                   labelbottom=False, labeltop=False)
    ax.grid(lw=0.0)

    ax.set_yticks(np.arange(1, 9, 2))
    ax.set_rmax(9)

    return fig


icon_defs = [
    ('home', 0xf015),
    ('back', 0xf060),
    ('forward', 0xf061),
    ('zoom_to_rect', 0xf002),
    ('move', 0xf047),
    ('filesave', 0xf0c7),
    ('subplots', 0xf1de),
    ('qt4_editor_options', 0xf201),
    ('help', 0xf128)]


def make_icons():
    for name, ccode in icon_defs:
        fig = make_icon(FONT_PATH, ccode)
        save_icon(fig, name)
    fig = make_matplotlib_icon()
    save_icon(fig, 'matplotlib')


if __name__ == '__main__':
    if not os.path.exists(FONT_PATH):
        # Bail out early with a clear message instead of crashing later in
        # FontProperties when the font file cannot be found.
        sys.exit("Download the FontAwesome.otf file and place it in the "
                 "tools directory")
    make_icons()
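
# --- Editorial note (not part of the script) ---
# The 24x24 and 48x48 pixel outputs fall out of saving a 1x1 inch figure
# (figsize=(1, 1)) at dpi=24 and dpi=48 in save_icon.  Adding another
# FontAwesome-based icon is a one-line change (the glyph code here is
# hypothetical):
#
#     icon_defs.append(('pan', 0xf0b2))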
b0bf01805f7c3601eb8b61135ad6dcd542b8831a4b57a45942dc97e1bb192a88
""" This is a developer utility to help analyze and triage image comparison failures. It allows the failures to be quickly compared against the expected results, and the new results to be either accepted (by copying the new results to the source tree) or rejected (by copying the original expected result to the source tree). To start: If you ran the tests from the top-level of a source checkout, simply run: python tools/triage_tests.py Otherwise, you can manually select the location of `result_images` on the commandline. Keys: left/right: Move between test, expected and diff images up/down: Move between tests A: Accept test. Copy the test result to the source tree. R: Reject test. Copy the expected result to the source tree. """ import os from pathlib import Path import shutil import sys from matplotlib.backends.qt_compat import QtCore, QtGui, QtWidgets # matplotlib stores the baseline images under two separate subtrees, # but these are all flattened in the result_images directory. In # order to find the source, we need to search for a match in one of # these two places. BASELINE_IMAGES = [ Path('lib/matplotlib/tests/baseline_images'), Path('lib/mpl_toolkits/tests/baseline_images'), ] # Non-png image extensions exts = ['pdf', 'svg'] class Thumbnail(QtWidgets.QFrame): """ Represents one of the three thumbnails at the top of the window. """ def __init__(self, parent, index, name): super().__init__() self.parent = parent self.index = index layout = QtWidgets.QVBoxLayout() label = QtWidgets.QLabel(name) label.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter) layout.addWidget(label, 0) self.image = QtWidgets.QLabel() self.image.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter) self.image.setMinimumSize(800/3, 500/3) layout.addWidget(self.image) self.setLayout(layout) def mousePressEvent(self, ev): self.parent.set_large_image(self.index) class ListWidget(QtWidgets.QListWidget): """ The list of files on the left-hand side """ def __init__(self, parent): super().__init__() self.parent = parent self.currentRowChanged.connect(self.change_row) def change_row(self, i): self.parent.set_entry(i) class EventFilter(QtCore.QObject): # A hack keypresses can be handled globally and aren't swallowed # by the individual widgets def __init__(self, window): super().__init__() self.window = window def eventFilter(self, receiver, event): if event.type() == QtCore.QEvent.KeyPress: self.window.keyPressEvent(event) return True else: return super().eventFilter(receiver, event) class Dialog(QtWidgets.QDialog): """ The main dialog window. 
""" def __init__(self, entries): super().__init__() self.entries = entries self.current_entry = -1 self.current_thumbnail = -1 event_filter = EventFilter(self) self.installEventFilter(event_filter) self.filelist = ListWidget(self) self.filelist.setMinimumWidth(400) for entry in entries: self.filelist.addItem(entry.display) images_box = QtWidgets.QWidget() images_layout = QtWidgets.QVBoxLayout() thumbnails_box = QtWidgets.QWidget() thumbnails_layout = QtWidgets.QHBoxLayout() self.thumbnails = [] for i, name in enumerate(('test', 'expected', 'diff')): thumbnail = Thumbnail(self, i, name) thumbnails_layout.addWidget(thumbnail) self.thumbnails.append(thumbnail) thumbnails_box.setLayout(thumbnails_layout) self.image_display = QtWidgets.QLabel() self.image_display.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter) self.image_display.setMinimumSize(800, 500) images_layout.addWidget(thumbnails_box, 3) images_layout.addWidget(self.image_display, 6) images_box.setLayout(images_layout) buttons_box = QtWidgets.QWidget() buttons_layout = QtWidgets.QHBoxLayout() accept_button = QtWidgets.QPushButton("Accept (A)") accept_button.clicked.connect(self.accept_test) buttons_layout.addWidget(accept_button) reject_button = QtWidgets.QPushButton("Reject (R)") reject_button.clicked.connect(self.reject_test) buttons_layout.addWidget(reject_button) buttons_box.setLayout(buttons_layout) images_layout.addWidget(buttons_box) main_layout = QtWidgets.QHBoxLayout() main_layout.addWidget(self.filelist, 3) main_layout.addWidget(images_box, 6) self.setLayout(main_layout) self.setWindowTitle("matplotlib test triager") self.set_entry(0) def set_entry(self, index): if self.current_entry == index: return self.current_entry = index entry = self.entries[index] self.pixmaps = [] for fname, thumbnail in zip(entry.thumbnails, self.thumbnails): pixmap = QtGui.QPixmap(os.fspath(fname)) scaled_pixmap = pixmap.scaled( thumbnail.size(), QtCore.Qt.KeepAspectRatio, QtCore.Qt.SmoothTransformation) thumbnail.image.setPixmap(scaled_pixmap) self.pixmaps.append(scaled_pixmap) self.set_large_image(0) self.filelist.setCurrentRow(self.current_entry) def set_large_image(self, index): self.thumbnails[self.current_thumbnail].setFrameShape(0) self.current_thumbnail = index pixmap = QtGui.QPixmap(os.fspath( self.entries[self.current_entry] .thumbnails[self.current_thumbnail])) self.image_display.setPixmap(pixmap) self.thumbnails[self.current_thumbnail].setFrameShape(1) def accept_test(self): entry = self.entries[self.current_entry] if entry.status == 'autogen': print('Cannot accept autogenerated test cases.') return entry.accept() self.filelist.currentItem().setText( self.entries[self.current_entry].display) # Auto-move to the next entry self.set_entry(min((self.current_entry + 1), len(self.entries) - 1)) def reject_test(self): entry = self.entries[self.current_entry] if entry.status == 'autogen': print('Cannot reject autogenerated test cases.') return entry.reject() self.filelist.currentItem().setText( self.entries[self.current_entry].display) # Auto-move to the next entry self.set_entry(min((self.current_entry + 1), len(self.entries) - 1)) def keyPressEvent(self, e): if e.key() == QtCore.Qt.Key_Left: self.set_large_image((self.current_thumbnail - 1) % 3) elif e.key() == QtCore.Qt.Key_Right: self.set_large_image((self.current_thumbnail + 1) % 3) elif e.key() == QtCore.Qt.Key_Up: self.set_entry(max(self.current_entry - 1, 0)) elif e.key() == QtCore.Qt.Key_Down: self.set_entry(min(self.current_entry + 1, len(self.entries) - 1)) elif e.key() 
== QtCore.Qt.Key_A: self.accept_test() elif e.key() == QtCore.Qt.Key_R: self.reject_test() else: super().keyPressEvent(e) class Entry(object): """ A model for a single image comparison test. """ def __init__(self, path, root, source): self.source = source self.root = root self.dir = path.parent self.diff = path.name self.reldir = self.dir.relative_to(self.root) basename = self.diff[:-len('-failed-diff.png')] for ext in exts: if basename.endswith('_' + ext): display_extension = '_' + ext extension = ext basename = basename[:-4] break else: display_extension = '' extension = 'png' self.basename = basename self.extension = extension self.generated = basename + '.' + extension self.expected = basename + '-expected.' + extension self.expected_display = (basename + '-expected' + display_extension + '.png') self.generated_display = basename + display_extension + '.png' self.name = self.reldir / self.basename self.destdir = self.get_dest_dir(self.reldir) self.thumbnails = [ self.generated_display, self.expected_display, self.diff ] self.thumbnails = [self.dir / x for x in self.thumbnails] if not Path(self.destdir, self.generated).exists(): # This case arises from a check_figures_equal test. self.status = 'autogen' elif ((self.dir / self.generated).read_bytes() == (self.destdir / self.generated).read_bytes()): self.status = 'accept' else: self.status = 'unknown' def get_dest_dir(self, reldir): """ Find the source tree directory corresponding to the given result_images subdirectory. """ for baseline_dir in BASELINE_IMAGES: path = self.source / baseline_dir / reldir if path.is_dir(): return path raise ValueError("Can't find baseline dir for {}".format(reldir)) @property def display(self): """ Get the display string for this entry. This is the text that appears in the list widget. """ status_map = { 'unknown': '\N{BALLOT BOX}', 'accept': '\N{BALLOT BOX WITH CHECK}', 'reject': '\N{BALLOT BOX WITH X}', 'autogen': '\N{WHITE SQUARE CONTAINING BLACK SMALL SQUARE}', } box = status_map[self.status] return '{} {} [{}]'.format(box, self.name, self.extension) def accept(self): """ Accept this test by copying the generated result to the source tree. """ copy_file(self.dir / self.generated, self.destdir / self.generated) self.status = 'accept' def reject(self): """ Reject this test by copying the expected result to the source tree. """ copy_file(self.dir / self.expected, self.destdir / self.generated) self.status = 'reject' def copy_file(a, b): """Copy file from *a* to *b*.""" print(f'copying: {a} to {b}') shutil.copyfile(a, b) def find_failing_tests(result_images, source): """ Find all of the failing tests by looking for files with `-failed-diff` at the end of the basename. """ return [Entry(path, result_images, source) for path in sorted(Path(result_images).glob("**/*-failed-diff.*"))] def launch(result_images, source): """ Launch the GUI. """ entries = find_failing_tests(result_images, source) if len(entries) == 0: print("No failed tests") sys.exit(0) app = QtWidgets.QApplication(sys.argv) dialog = Dialog(entries) dialog.show() filter = EventFilter(dialog) app.installEventFilter(filter) sys.exit(app.exec_()) if __name__ == '__main__': import argparse source_dir = Path(__file__).parent.parent parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description=""" Triage image comparison test failures. If no arguments are provided, it assumes you ran the tests at the top-level of a source checkout as `pytest .`. 
Keys: left/right: Move between test, expected and diff images up/down: Move between tests A: Accept test. Copy the test result to the source tree. R: Reject test. Copy the expected result to the source tree. """) parser.add_argument("result_images", type=Path, nargs='?', default=source_dir / 'result_images', help="The location of the result_images directory") parser.add_argument("source", type=Path, nargs='?', default=source_dir, help="The location of the matplotlib source tree") args = parser.parse_args() launch(args.result_images, args.source)
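

# A minimal non-GUI sketch (not part of the tool above): the same Entry
# machinery can be scripted directly, e.g. to bulk-accept every pending
# failure once a run has been reviewed by hand.  The paths are assumptions
# and must point at a real result_images directory and source checkout.
#
#     for entry in find_failing_tests(Path('result_images'), Path('.')):
#         if entry.status == 'unknown':
#             entry.accept()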
8c08cb3a24a4c8ee5c23915f51d1b4aa845c00bd8d8e5364c8e992024a303037
#!/usr/bin/env python # # Copyright 2010-2012, Google Inc. # Author: Mikhail Kashkin ([email protected]) # Author: Raph Levien (<firstname.lastname>@gmail.com) # Author: Dave Crossland ([email protected]) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Version 1.01 Released 2012-03-27 # # A script for subsetting a font, using FontForge. See README for details. # TODO 2013-04-08 ensure the menu files are as compact as possible by default, similar to subset.pl # TODO 2013-05-22 in Arimo, the latin subset doesn't include ; but the greek does. why on earth is this happening? import getopt import os import struct import subprocess import sys import fontforge def log_namelist(nam, unicode): if nam and isinstance(unicode, int): print("0x%0.4X" % unicode, fontforge.nameFromUnicode(unicode), file=nam) def select_with_refs(font, unicode, newfont, pe = None, nam = None): newfont.selection.select(('more', 'unicode'), unicode) log_namelist(nam, unicode) if pe: print("SelectMore(%d)" % unicode, file=pe) try: for ref in font[unicode].references: newfont.selection.select(('more',), ref[0]) log_namelist(nam, ref[0]) if pe: print('SelectMore("%s")' % ref[0], file=pe) except Exception: print('Resolving references on u+%04x failed' % unicode) def subset_font_raw(font_in, font_out, unicodes, opts): if '--namelist' in opts: # 2010-12-06 DC To allow setting namelist filenames, # change getopt.gnu_getopt from namelist to namelist= # and invert comments on following 2 lines # nam_fn = opts['--namelist'] nam_fn = font_out + '.nam' nam = open(nam_fn, 'w') else: nam = None if '--script' in opts: pe_fn = "/tmp/script.pe" pe = open(pe_fn, 'w') else: pe = None font = fontforge.open(font_in) if pe: print('Open("' + font_in + '")', file=pe) extract_vert_to_script(font_in, pe) for i in unicodes: select_with_refs(font, i, font, pe, nam) addl_glyphs = [] if '--nmr' in opts: addl_glyphs.append('nonmarkingreturn') if '--null' in opts: addl_glyphs.append('.null') if '--nd' in opts: addl_glyphs.append('.notdef') for glyph in addl_glyphs: font.selection.select(('more',), glyph) if nam: print("0x%0.4X" % fontforge.unicodeFromName(glyph), glyph, file=nam) if pe: print('SelectMore("%s")' % glyph, file=pe) flags = () if '--opentype-features' in opts: flags += ('opentype',) if '--simplify' in opts: font.simplify() font.round() flags += ('omit-instructions',) if '--strip_names' in opts: font.sfnt_names = () if '--new' in opts: font.copy() new = fontforge.font() new.encoding = font.encoding new.em = font.em new.layers['Fore'].is_quadratic = font.layers['Fore'].is_quadratic for i in unicodes: select_with_refs(font, i, new, pe, nam) new.paste() # This is a hack - it should have been taken care of above. 
        font.selection.select('space')
        font.copy()
        new.selection.select('space')
        new.paste()

        new.sfnt_names = font.sfnt_names
        font = new
    else:
        font.selection.invert()
        print("SelectInvert()", file=pe)
        font.cut()
        print("Clear()", file=pe)

    if '--move-display' in opts:
        print("Moving display glyphs into unicode ranges...")
        font.familyname += " Display"
        font.fullname += " Display"
        font.fontname += "Display"
        font.appendSFNTName('English (US)', 'Family', font.familyname)
        font.appendSFNTName('English (US)', 16, font.familyname)
        font.appendSFNTName('English (US)', 17, 'Display')
        font.appendSFNTName('English (US)', 'Fullname', font.fullname)
        for glname in unicodes:
            font.selection.none()
            if isinstance(glname, str):
                if glname.endswith('.display'):
                    font.selection.select(glname)
                    font.copy()
                    font.selection.none()
                    newgl = glname.replace('.display', '')
                    font.selection.select(newgl)
                    font.paste()
                font.selection.select(glname)
                font.cut()

    if nam:
        print("Writing NameList", end="")
        nam.close()

    if pe:
        print('Generate("' + font_out + '")', file=pe)
        pe.close()
        subprocess.call(["fontforge", "-script", pe_fn])
    else:
        font.generate(font_out, flags=flags)
    font.close()

    if '--roundtrip' in opts:
        # FontForge apparently contains a bug where it incorrectly calculates
        # the advanceWidthMax in the hhea table, and a workaround is to open
        # and re-generate
        font2 = fontforge.open(font_out)
        font2.generate(font_out, flags=flags)


def subset_font(font_in, font_out, unicodes, opts):
    font_out_raw = font_out
    if not font_out_raw.endswith('.ttf'):
        font_out_raw += '.ttf'
    subset_font_raw(font_in, font_out_raw, unicodes, opts)
    if font_out != font_out_raw:
        os.rename(font_out_raw, font_out)
    # 2011-02-14 DC this needs to only happen when --namelist is used
    # os.rename(font_out_raw + '.nam', font_out + '.nam')


def getsubset(subset, font_in):
    subsets = subset.split('+')

    quotes = [0x2013]  # endash
    quotes += [0x2014]  # emdash
    quotes += [0x2018]  # quoteleft
    quotes += [0x2019]  # quoteright
    quotes += [0x201A]  # quotesinglbase
    quotes += [0x201C]  # quotedblleft
    quotes += [0x201D]  # quotedblright
    quotes += [0x201E]  # quotedblbase
    quotes += [0x2022]  # bullet
    quotes += [0x2039]  # guilsinglleft
    quotes += [0x203A]  # guilsinglright

    # list() so the += extensions below work under Python 3, where range()
    # is not a list.
    latin = list(range(0x20, 0x7f))  # Basic Latin (A-Z, a-z, numbers)
    latin += range(0xa0, 0x100)  # Western European symbols and diacritics
    latin += [0x20ac]  # Euro
    latin += [0x0152]  # OE
    latin += [0x0153]  # oe
    latin += [0x003b]  # semicolon
    latin += [0x00b7]  # periodcentered
    latin += [0x0131]  # dotlessi
    latin += [0x02c6]  # circumflex
    latin += [0x02da]  # ring
    latin += [0x02dc]  # tilde
    latin += [0x2074]  # foursuperior
    latin += [0x2215]  # division slash
    latin += [0x2044]  # fraction slash
    latin += [0xe0ff]  # PUA: Font logo
    latin += [0xeffd]  # PUA: Font version number
    latin += [0xf000]  # PUA: font ppem size indicator: run `ftview -f 1255 10 Ubuntu-Regular.ttf` to see it in action!
    result = quotes
    if 'menu' in subset:
        font = fontforge.open(font_in)
        # list() so the familyname codepoints can be extended below (map()
        # is a one-shot iterator under Python 3).
        result = list(map(ord, font.familyname))
        result += [0x0020]
    if 'latin' in subset:
        result += latin
    if 'latin-ext' in subset:
        # These ranges include Extended A, B, C, D, and Additional with the
        # exception of Vietnamese, which is a separate range
        result += (list(range(0x100, 0x370)) +
                   list(range(0x1d00, 0x1ea0)) +
                   list(range(0x1ef2, 0x1f00)) +
                   list(range(0x2070, 0x20d0)) +
                   list(range(0x2c60, 0x2c80)) +
                   list(range(0xa700, 0xa800)))
    if 'vietnamese' in subset:
        # 2011-07-16 DC: Charset from
        # http://vietunicode.sourceforge.net/charset/ + U+1ef9 from Fontaine
        result += [0x00c0, 0x00c1, 0x00c2, 0x00c3, 0x00C8, 0x00C9, 0x00CA,
                   0x00CC, 0x00CD, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D9,
                   0x00DA, 0x00DD, 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E8,
                   0x00E9, 0x00EA, 0x00EC, 0x00ED, 0x00F2, 0x00F3, 0x00F4,
                   0x00F5, 0x00F9, 0x00FA, 0x00FD, 0x0102, 0x0103, 0x0110,
                   0x0111, 0x0128, 0x0129, 0x0168, 0x0169, 0x01A0, 0x01A1,
                   0x01AF, 0x01B0, 0x20AB] + list(range(0x1EA0, 0x1EFA))
    if 'greek' in subset:
        # Could probably be more aggressive here and exclude archaic
        # characters, but lack data
        result += range(0x370, 0x400)
    if 'greek-ext' in subset:
        result += list(range(0x370, 0x400)) + list(range(0x1f00, 0x2000))
    if 'cyrillic' in subset:
        # Based on character frequency analysis
        result += (list(range(0x400, 0x460)) +
                   [0x490, 0x491, 0x4b0, 0x4b1, 0x2116])
    if 'cyrillic-ext' in subset:
        result += (list(range(0x400, 0x530)) +
                   # 0x2116 is the russian No, a number abbreviation similar
                   # to the latin #, suggested by Alexei Vanyashin
                   [0x20b4, 0x2116] +
                   list(range(0x2de0, 0x2e00)) +
                   list(range(0xa640, 0xa6a0)))
    if 'arabic' in subset:
        # Based on Droid Arabic Kufi 1.0
        result += [0x000D, 0x0020, 0x0621, 0x0627, 0x062D, 0x062F, 0x0631,
                   0x0633, 0x0635, 0x0637, 0x0639, 0x0643, 0x0644, 0x0645,
                   0x0647, 0x0648, 0x0649, 0x0640, 0x066E, 0x066F, 0x0660,
                   0x0661, 0x0662, 0x0663, 0x0664, 0x0665, 0x0666, 0x0667,
                   0x0668, 0x0669, 0x06F4, 0x06F5, 0x06F6, 0x06BE, 0x06D2,
                   0x06A9, 0x06AF, 0x06BA, 0x066A, 0x061F, 0x060C, 0x061B,
                   0x066B, 0x066C, 0x066D, 0x064B, 0x064D, 0x064E, 0x064F,
                   0x064C, 0x0650, 0x0651, 0x0652, 0x0653, 0x0654, 0x0655,
                   0x0670, 0x0656, 0x0615, 0x0686, 0x0623, 0x0625, 0x0622,
                   0x0671, 0x0628, 0x067E, 0x062A, 0x062B, 0x0679, 0x0629,
                   0x062C, 0x062E, 0x0630, 0x0688, 0x0632, 0x0691, 0x0698,
                   0x0634, 0x0636, 0x0638, 0x063A, 0x0641, 0x0642, 0x0646,
                   0x06D5, 0x06C0, 0x0624, 0x064A, 0x06CC, 0x06D3, 0x0626,
                   0x06C2, 0x06C1, 0x06C3, 0x06F0, 0x06F1, 0x06F2, 0x06F3,
                   0x06F9, 0x06F7, 0x06F8, 0xFC63, 0x0672, 0x0673, 0x0675,
                   0x0676, 0x0677, 0x0678, 0x067A, 0x067B, 0x067C, 0x067D,
                   0x067F, 0x0680, 0x0681, 0x0682, 0x0683, 0x0684, 0x0685,
                   0x0687, 0x0689, 0x068A, 0x068B, 0x068C, 0x068D, 0x068E,
                   0x068F, 0x0690, 0x0692, 0x0693, 0x0694, 0x0695, 0x0696,
                   0x0697, 0x0699, 0x069A, 0x069B, 0x069C, 0x069D, 0x069E,
                   0x069F, 0x06A0, 0x06A1, 0x06A2, 0x06A3, 0x06A5, 0x06A6,
                   0x06A7, 0x06A8, 0x06AA, 0x06AB, 0x06AC, 0x06AD, 0x06AE,
                   0x06B0, 0x06B1, 0x06B2, 0x06B3, 0x06B4, 0x06B5, 0x06B6,
                   0x06B7, 0x06B8, 0x06B9, 0x06BB, 0x06BC, 0x06BD, 0x06BF,
                   0x06C4, 0x06C5, 0x06CD, 0x06D6, 0x06D7, 0x06D8, 0x06D9,
                   0x06DA, 0x06DB, 0x06DC, 0x06DF, 0x06E1, 0x06E2, 0x06E3,
                   0x06E4, 0x06E5, 0x06E6, 0x06E7, 0x06E8, 0x06EA, 0x06EB,
                   0x06ED, 0x06FB, 0x06FC, 0x06FD, 0x06FE, 0x0600, 0x0601,
                   0x0602, 0x0603, 0x060E, 0x060F, 0x0610, 0x0611, 0x0612,
                   0x0613, 0x0614, 0x0657, 0x0658, 0x06EE, 0x06EF, 0x06FF,
                   0x060B, 0x061E, 0x0659, 0x065A, 0x065B, 0x065C, 0x065D,
                   0x065E, 0x0750, 0x0751, 0x0752, 0x0753, 0x0754, 0x0755,
                   0x0756, 0x0757, 0x0758, 0x0759, 0x075A, 0x075B, 0x075C,
                   0x075D, 0x075E, 0x075F, 0x0760, 0x0761, 0x0762, 0x0763,
                   0x0764, 0x0765,
                   0x0766, 0x0767, 0x0768, 0x0769, 0x076A, 0x076B, 0x076C,
                   0x076D, 0x06A4, 0x06C6, 0x06C7, 0x06C8, 0x06C9, 0x06CA,
                   0x06CB, 0x06CF, 0x06CE, 0x06D0, 0x06D1, 0x06D4, 0x06FA,
                   0x06DD, 0x06DE, 0x06E0, 0x06E9, 0x060D, 0xFD3E, 0xFD3F,
                   0x25CC,
                   # Added from
                   # https://groups.google.com/d/topic/googlefontdirectory-discuss/MwlMWMPNCXs/discussion
                   0x063b, 0x063c, 0x063d, 0x063e, 0x063f, 0x0620, 0x0674,
                   0x0674, 0x06EC]
    if 'dejavu-ext' in subset:
        # add all glyphnames ending in .display
        font = fontforge.open(font_in)
        for glyph in font.glyphs():
            if glyph.glyphname.endswith('.display'):
                result.append(glyph.glyphname)

    return result


# code for extracting vertical metrics from a TrueType font
class Sfnt:
    def __init__(self, data):
        version, numTables, _, _, _ = struct.unpack('>IHHHH', data[:12])
        self.tables = {}
        for i in range(numTables):
            tag, checkSum, offset, length = struct.unpack(
                '>4sIII', data[12 + 16 * i: 28 + 16 * i])
            # struct returns a bytes tag for '4s' under Python 3; decode it
            # so the str-keyed lookups below work.
            self.tables[tag.decode('latin-1')] = data[offset: offset + length]

    def hhea(self):
        r = {}
        d = self.tables['hhea']
        r['Ascender'], r['Descender'], r['LineGap'] = struct.unpack(
            '>hhh', d[4:10])
        return r

    def os2(self):
        r = {}
        d = self.tables['OS/2']
        r['fsSelection'], = struct.unpack('>H', d[62:64])
        r['sTypoAscender'], r['sTypoDescender'], r['sTypoLineGap'] = \
            struct.unpack('>hhh', d[68:74])
        r['usWinAscender'], r['usWinDescender'] = struct.unpack(
            '>HH', d[74:78])
        return r


def set_os2(pe, name, val):
    print('SetOS2Value("' + name + '", %d)' % val, file=pe)


def set_os2_vert(pe, name, val):
    set_os2(pe, name + 'IsOffset', 0)
    set_os2(pe, name, val)


# Extract vertical metrics data directly out of font file, and emit
# script code to set the values in the generated font.  This is a (rather
# ugly) workaround for the issue described in:
# http://sourceforge.net/mailarchive/forum.php?thread_name=20100906085718.GB1907%40khaled-laptop&forum_name=fontforge-users
def extract_vert_to_script(font_in, pe):
    with open(font_in, 'rb') as in_file:
        data = in_file.read()
    sfnt = Sfnt(data)
    hhea = sfnt.hhea()
    os2 = sfnt.os2()
    set_os2_vert(pe, "WinAscent", os2['usWinAscender'])
    set_os2_vert(pe, "WinDescent", os2['usWinDescender'])
    set_os2_vert(pe, "TypoAscent", os2['sTypoAscender'])
    set_os2_vert(pe, "TypoDescent", os2['sTypoDescender'])
    set_os2_vert(pe, "HHeadAscent", hhea['Ascender'])
    set_os2_vert(pe, "HHeadDescent", hhea['Descender'])


def main(argv):
    optlist, args = getopt.gnu_getopt(argv, '', ['string=', 'strip_names',
                                                 'opentype-features',
                                                 'simplify', 'new', 'script',
                                                 'nmr', 'roundtrip',
                                                 'subset=', 'namelist',
                                                 'null', 'nd',
                                                 'move-display'])

    font_in, font_out = args
    opts = dict(optlist)
    if '--string' in opts:
        # list() so the subset survives being iterated more than once
        # downstream (map() is exhausted after one pass under Python 3).
        subset = list(map(ord, opts['--string']))
    else:
        subset = getsubset(opts.get('--subset', 'latin'), font_in)
    subset_font(font_in, font_out, subset, opts)


if __name__ == '__main__':
    main(sys.argv[1:])
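

# A hypothetical programmatic sketch (not part of the original script): the
# same subsetting can be driven from Python rather than the command line by
# building the codepoint list first.  The font paths are placeholders.
#
#     unicodes = getsubset('latin+latin-ext', 'Ubuntu-Regular.ttf')
#     subset_font('Ubuntu-Regular.ttf', 'Ubuntu-Subset.ttf', unicodes,
#                 {'--roundtrip': ''})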
7e4be7edb74df52d64808ba2b8c90687895ac2d36cee936305146aac014404e8
import sys


def parse_results(filename):
    results = {}
    section = "???"
    with open(filename, 'r') as file:
        for line in file:
            line = line.strip()
            if line.startswith("testing"):
                section = line.split(" ", 1)[1]
                results.setdefault(section, {})
            elif line.startswith("driving"):
                driving, test, time = [x.strip() for x in line.split()]
                time = float(time)
                results[section][test] = time
    return results


def check_results_are_compatible(results_a, results_b):
    a_minus_b = {*results_a} - {*results_b}
    if a_minus_b:
        raise RuntimeError(
            "Backends {} in first set, but not in second".format(a_minus_b))
    b_minus_a = {*results_b} - {*results_a}
    if b_minus_a:
        raise RuntimeError(
            "Backends {} in second set, but not in first".format(b_minus_a))


def compare_results(results_a, results_b):
    check_results_are_compatible(results_a, results_b)

    for section in sorted(results_a):
        print("backend %s" % section)
        print("  %-40s %6s %6s %6s %6s" %
              ("test", "a", "b", "delta", "% diff"))
        print("  " + '-' * 69)
        deltas = []
        section_a = results_a[section]
        section_b = results_b[section]
        for test in section_a.keys():
            if test not in section_b:
                deltas.append([None, None, section_a[test], None, test])
            else:
                time_a = section_a[test]
                time_b = section_b[test]
                deltas.append([time_b / time_a, time_b - time_a,
                               time_a, time_b, test])
        for test in section_b.keys():
            if test not in section_a:
                deltas.append([None, None, None, section_b[test], test])
        # Rows with a missing test have diff=None; sorting the raw rows
        # would compare None with float and fail under Python 3, so sort
        # missing tests first, then by ratio, then by name.
        deltas.sort(key=lambda row: (row[0] is not None,
                                     row[0] if row[0] is not None else 0.0,
                                     row[4]))

        for diff, delta, time_a, time_b, test in deltas:
            if diff is None:
                if time_a is None:
                    print("  %-40s   ???    % 6.3f    ???     ???" %
                          (test, time_b))
                else:
                    print("  %-40s % 6.3f    ???      ???     ???" %
                          (test, time_a))
            else:
                print("  %-40s % 6.3f % 6.3f % 6.3f %6d%%" %
                      (test, time_a, time_b, delta, diff * 100))


if __name__ == '__main__':
    results_a_filename = sys.argv[-2]
    results_b_filename = sys.argv[-1]

    results_a = parse_results(results_a_filename)
    results_b = parse_results(results_b_filename)

    compare_results(results_a, results_b)
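

# The log format parse_results() expects (inferred from the parser above):
# a "testing <backend>" line opens a section and each "driving <test> <time>"
# line records one timing, e.g.:
#
#     testing agg
#     driving some_test 0.123
#     driving another_test 4.567
#
# Usage, with hypothetical log names:
#
#     python compare_results.py results_before.txt results_after.txt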
1990d0377983f419b35afa8f40f56bcbbb6fd42a47a718ba127b6bf7f1b0e955
""" ================= Embedding in GTK3 ================= Demonstrate adding a FigureCanvasGTK3Agg widget to a Gtk.ScrolledWindow using GTK3 accessed via pygobject. """ import gi gi.require_version('Gtk', '3.0') from gi.repository import Gtk from matplotlib.backends.backend_gtk3agg import ( FigureCanvasGTK3Agg as FigureCanvas) from matplotlib.figure import Figure import numpy as np win = Gtk.Window() win.connect("delete-event", Gtk.main_quit) win.set_default_size(400, 300) win.set_title("Embedding in GTK") f = Figure(figsize=(5, 4), dpi=100) a = f.add_subplot(111) t = np.arange(0.0, 3.0, 0.01) s = np.sin(2*np.pi*t) a.plot(t, s) sw = Gtk.ScrolledWindow() win.add(sw) # A scrolled window border goes outside the scrollbars and viewport sw.set_border_width(10) canvas = FigureCanvas(f) # a Gtk.DrawingArea canvas.set_size_request(800, 600) sw.add_with_viewport(canvas) win.show_all() Gtk.main()
ea6384d68ba026ca4f532fef71d34ae1f44e863051f01e53bd52e89047632235
""" ============ Tool Manager ============ This example demonstrates how to: * Modify the Toolbar * Create tools * Add tools * Remove tools Using `matplotlib.backend_managers.ToolManager` """ import matplotlib.pyplot as plt plt.rcParams['toolbar'] = 'toolmanager' from matplotlib.backend_tools import ToolBase, ToolToggleBase class ListTools(ToolBase): '''List all the tools controlled by the `ToolManager`''' # keyboard shortcut default_keymap = 'm' description = 'List Tools' def trigger(self, *args, **kwargs): print('_' * 80) print("{0:12} {1:45} {2}".format( 'Name (id)', 'Tool description', 'Keymap')) print('-' * 80) tools = self.toolmanager.tools for name in sorted(tools): if not tools[name].description: continue keys = ', '.join(sorted(self.toolmanager.get_tool_keymap(name))) print("{0:12} {1:45} {2}".format( name, tools[name].description, keys)) print('_' * 80) print("Active Toggle tools") print("{0:12} {1:45}".format("Group", "Active")) print('-' * 80) for group, active in self.toolmanager.active_toggle.items(): print("{0:12} {1:45}".format(str(group), str(active))) class GroupHideTool(ToolToggleBase): '''Show lines with a given gid''' default_keymap = 'G' description = 'Show by gid' default_toggled = True def __init__(self, *args, gid, **kwargs): self.gid = gid super().__init__(*args, **kwargs) def enable(self, *args): self.set_lines_visibility(True) def disable(self, *args): self.set_lines_visibility(False) def set_lines_visibility(self, state): for ax in self.figure.get_axes(): for line in ax.get_lines(): if line.get_gid() == self.gid: line.set_visible(state) self.figure.canvas.draw() fig = plt.figure() plt.plot([1, 2, 3], gid='mygroup') plt.plot([2, 3, 4], gid='unknown') plt.plot([3, 2, 1], gid='mygroup') # Add the custom tools that we created fig.canvas.manager.toolmanager.add_tool('List', ListTools) fig.canvas.manager.toolmanager.add_tool('Show', GroupHideTool, gid='mygroup') # Add an existing tool to new group `foo`. # It can be added as many times as we want fig.canvas.manager.toolbar.add_tool('zoom', 'foo') # Remove the forward button fig.canvas.manager.toolmanager.remove_tool('forward') # To add a custom tool to the toolbar at specific location inside # the navigation group fig.canvas.manager.toolbar.add_tool('Show', 'navigation', 1) plt.show()
fd5b90777a82fad1d05e2c9876a9d1b0227eafd0e599b00c79128d6512a8caac
""" =========== MathText WX =========== Demonstrates how to convert mathtext to a wx.Bitmap for display in various controls on wxPython. """ import matplotlib matplotlib.use("WxAgg") from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas from matplotlib.backends.backend_wx import NavigationToolbar2Wx from matplotlib.figure import Figure import numpy as np import wx IS_GTK = 'wxGTK' in wx.PlatformInfo IS_WIN = 'wxMSW' in wx.PlatformInfo ############################################################ # This is where the "magic" happens. from matplotlib.mathtext import MathTextParser mathtext_parser = MathTextParser("Bitmap") def mathtext_to_wxbitmap(s): ftimage, depth = mathtext_parser.parse(s, 150) return wx.Bitmap.FromBufferRGBA( ftimage.get_width(), ftimage.get_height(), ftimage.as_rgba_str()) ############################################################ functions = [ (r'$\sin(2 \pi x)$', lambda x: np.sin(2*np.pi*x)), (r'$\frac{4}{3}\pi x^3$', lambda x: (4.0/3.0)*np.pi*x**3), (r'$\cos(2 \pi x)$', lambda x: np.cos(2*np.pi*x)), (r'$\log(x)$', lambda x: np.log(x)) ] class CanvasFrame(wx.Frame): def __init__(self, parent, title): wx.Frame.__init__(self, parent, -1, title, size=(550, 350)) self.figure = Figure() self.axes = self.figure.add_subplot(111) self.canvas = FigureCanvas(self, -1, self.figure) self.change_plot(0) self.sizer = wx.BoxSizer(wx.VERTICAL) self.add_buttonbar() self.sizer.Add(self.canvas, 1, wx.LEFT | wx.TOP | wx.GROW) self.add_toolbar() # comment this out for no toolbar menuBar = wx.MenuBar() # File Menu menu = wx.Menu() m_exit = menu.Append(wx.ID_EXIT, "E&xit\tAlt-X", "Exit this simple sample") menuBar.Append(menu, "&File") self.Bind(wx.EVT_MENU, self.OnClose, m_exit) if IS_GTK or IS_WIN: # Equation Menu menu = wx.Menu() for i, (mt, func) in enumerate(functions): bm = mathtext_to_wxbitmap(mt) item = wx.MenuItem(menu, 1000 + i, " ") item.SetBitmap(bm) menu.Append(item) self.Bind(wx.EVT_MENU, self.OnChangePlot, item) menuBar.Append(menu, "&Functions") self.SetMenuBar(menuBar) self.SetSizer(self.sizer) self.Fit() def add_buttonbar(self): self.button_bar = wx.Panel(self) self.button_bar_sizer = wx.BoxSizer(wx.HORIZONTAL) self.sizer.Add(self.button_bar, 0, wx.LEFT | wx.TOP | wx.GROW) for i, (mt, func) in enumerate(functions): bm = mathtext_to_wxbitmap(mt) button = wx.BitmapButton(self.button_bar, 1000 + i, bm) self.button_bar_sizer.Add(button, 1, wx.GROW) self.Bind(wx.EVT_BUTTON, self.OnChangePlot, button) self.button_bar.SetSizer(self.button_bar_sizer) def add_toolbar(self): """Copied verbatim from embedding_wx2.py""" self.toolbar = NavigationToolbar2Wx(self.canvas) self.toolbar.Realize() # By adding toolbar in sizer, we are able to put it at the bottom # of the frame - so appearance is closer to GTK version. self.sizer.Add(self.toolbar, 0, wx.LEFT | wx.EXPAND) # update the axes menu on the toolbar self.toolbar.update() def OnChangePlot(self, event): self.change_plot(event.GetId() - 1000) def change_plot(self, plot_number): t = np.arange(1.0, 3.0, 0.01) s = functions[plot_number][1](t) self.axes.clear() self.axes.plot(t, s) self.canvas.draw() def OnClose(self, event): self.Destroy() class MyApp(wx.App): def OnInit(self): frame = CanvasFrame(None, "wxPython mathtext demo app") self.SetTopWindow(frame) frame.Show(True) return True app = MyApp() app.MainLoop()
892e72fb69db750648591a969be848b243e2d39d45ef4c7a608dab967a69ae2c
""" ================== Embedding in wx #3 ================== Copyright (C) 2003-2004 Andrew Straw, Jeremy O'Donoghue and others License: This work is licensed under the PSF. A copy should be included with this source code, and is also available at https://docs.python.org/3/license.html This is yet another example of using matplotlib with wx. Hopefully this is pretty full-featured: - both matplotlib toolbar and WX buttons manipulate plot - full wxApp framework, including widget interaction - XRC (XML wxWidgets resource) file to create GUI (made with XRCed) This was derived from embedding_in_wx and dynamic_image_wxagg. Thanks to matplotlib and wx teams for creating such great software! """ import matplotlib import matplotlib.cm as cm import matplotlib.cbook as cbook from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas from matplotlib.backends.backend_wxagg import NavigationToolbar2WxAgg as NavigationToolbar from matplotlib.figure import Figure import numpy as np import wx import wx.xrc as xrc ERR_TOL = 1e-5 # floating point slop for peak-detection matplotlib.rc('image', origin='lower') class PlotPanel(wx.Panel): def __init__(self, parent): wx.Panel.__init__(self, parent, -1) self.fig = Figure((5, 4), 75) self.canvas = FigureCanvas(self, -1, self.fig) self.toolbar = NavigationToolbar(self.canvas) # matplotlib toolbar self.toolbar.Realize() # self.toolbar.set_active([0,1]) # Now put all into a sizer sizer = wx.BoxSizer(wx.VERTICAL) # This way of adding to sizer allows resizing sizer.Add(self.canvas, 1, wx.LEFT | wx.TOP | wx.GROW) # Best to allow the toolbar to resize! sizer.Add(self.toolbar, 0, wx.GROW) self.SetSizer(sizer) self.Fit() def init_plot_data(self): a = self.fig.add_subplot(111) x = np.arange(120.0) * 2 * np.pi / 60.0 y = np.arange(100.0) * 2 * np.pi / 50.0 self.x, self.y = np.meshgrid(x, y) z = np.sin(self.x) + np.cos(self.y) self.im = a.imshow(z, cmap=cm.RdBu) # , interpolation='nearest') zmax = np.max(z) - ERR_TOL ymax_i, xmax_i = np.nonzero(z >= zmax) if self.im.origin == 'upper': ymax_i = z.shape[0] - ymax_i self.lines = a.plot(xmax_i, ymax_i, 'ko') self.toolbar.update() # Not sure why this is needed - ADS def GetToolBar(self): # You will need to override GetToolBar if you are using an # unmanaged toolbar in your frame return self.toolbar def OnWhiz(self, evt): self.x += np.pi / 15 self.y += np.pi / 20 z = np.sin(self.x) + np.cos(self.y) self.im.set_array(z) zmax = np.max(z) - ERR_TOL ymax_i, xmax_i = np.nonzero(z >= zmax) if self.im.origin == 'upper': ymax_i = z.shape[0] - ymax_i self.lines[0].set_data(xmax_i, ymax_i) self.canvas.draw() class MyApp(wx.App): def OnInit(self): xrcfile = cbook.get_sample_data('embedding_in_wx3.xrc', asfileobj=False) print('loading', xrcfile) self.res = xrc.XmlResource(xrcfile) # main frame and panel --------- self.frame = self.res.LoadFrame(None, "MainFrame") self.panel = xrc.XRCCTRL(self.frame, "MainPanel") # matplotlib panel ------------- # container for matplotlib panel (I like to make a container # panel for our panel so I know where it'll go when in XRCed.) 
plot_container = xrc.XRCCTRL(self.frame, "plot_container_panel") sizer = wx.BoxSizer(wx.VERTICAL) # matplotlib panel itself self.plotpanel = PlotPanel(plot_container) self.plotpanel.init_plot_data() # wx boilerplate sizer.Add(self.plotpanel, 1, wx.EXPAND) plot_container.SetSizer(sizer) # whiz button ------------------ whiz_button = xrc.XRCCTRL(self.frame, "whiz_button") whiz_button.Bind(wx.EVT_BUTTON, self.plotpanel.OnWhiz) # bang button ------------------ bang_button = xrc.XRCCTRL(self.frame, "bang_button") bang_button.Bind(wx.EVT_BUTTON, self.OnBang) # final setup ------------------ self.frame.Show(1) self.SetTopWindow(self.frame) return True def OnBang(self, event): bang_count = xrc.XRCCTRL(self.frame, "bang_count") bangs = bang_count.GetValue() bangs = int(bangs) + 1 bang_count.SetValue(str(bangs)) if __name__ == '__main__': app = MyApp(0) app.MainLoop()
bdcf830f939889aa45e36ae4ccf691110458bda2616ea6415ef0ef0a6bdc3a3b
""" =============== pyplot with GTK =============== An example of how to use pyplot to manage your figure windows, but modify the GUI by accessing the underlying GTK widgets. """ import matplotlib matplotlib.use('GTK3Agg') # or 'GTK3Cairo' import matplotlib.pyplot as plt import gi gi.require_version('Gtk', '3.0') from gi.repository import Gtk fig, ax = plt.subplots() ax.plot([1, 2, 3], 'ro-', label='easy as 1 2 3') ax.plot([1, 4, 9], 'gs--', label='easy as 1 2 3 squared') ax.legend() manager = fig.canvas.manager # you can access the window or vbox attributes this way toolbar = manager.toolbar vbox = manager.vbox # now let's add a button to the toolbar button = Gtk.Button(label='Click me') button.show() button.connect('clicked', lambda button: print('hi mom')) toolitem = Gtk.ToolItem() toolitem.show() toolitem.set_tooltip_text('Click me for fun and profit') toolitem.add(button) pos = 8 # where to insert this in the mpl toolbar toolbar.insert(toolitem, pos) # now let's add a widget to the vbox label = Gtk.Label() label.set_markup('Drag mouse over axes for position') label.show() vbox.pack_start(label, False, False, 0) vbox.reorder_child(toolbar, -1) def update(event): if event.xdata is None: label.set_markup('Drag mouse over axes for position') else: label.set_markup( f'<span color="#ef0000">x,y=({event.xdata}, {event.ydata})</span>') fig.canvas.mpl_connect('motion_notify_event', update) plt.show()
c3dedfaa678e5a738823e6c90d768795cbec62547eb0379c42c89a12aedd7ac3
""" ======================= Matplotlib With Glade 3 ======================= """ import os import gi gi.require_version('Gtk', '3.0') from gi.repository import Gtk from matplotlib.figure import Figure from matplotlib.backends.backend_gtk3agg import ( FigureCanvasGTK3Agg as FigureCanvas) import numpy as np class Window1Signals(object): def on_window1_destroy(self, widget): Gtk.main_quit() def main(): builder = Gtk.Builder() builder.add_objects_from_file(os.path.join(os.path.dirname(__file__), "mpl_with_glade3.glade"), ("window1", "")) builder.connect_signals(Window1Signals()) window = builder.get_object("window1") sw = builder.get_object("scrolledwindow1") # Start of Matplotlib specific code figure = Figure(figsize=(8, 6), dpi=71) axis = figure.add_subplot(111) t = np.arange(0.0, 3.0, 0.01) s = np.sin(2*np.pi*t) axis.plot(t, s) axis.set_xlabel('time [s]') axis.set_ylabel('voltage [V]') canvas = FigureCanvas(figure) # a Gtk.DrawingArea canvas.set_size_request(800, 600) sw.add_with_viewport(canvas) # End of Matplotlib specific code window.show_all() Gtk.main() if __name__ == "__main__": main()
05e63d2be953b30bf8f1198cd22c8d39fa52e3e4ca907ad8945cabb3b389b038
""" ============= SVG Histogram ============= Demonstrate how to create an interactive histogram, in which bars are hidden or shown by clicking on legend markers. The interactivity is encoded in ecmascript (javascript) and inserted in the SVG code in a post-processing step. To render the image, open it in a web browser. SVG is supported in most web browsers used by Linux and OSX users. Windows IE9 supports SVG, but earlier versions do not. Notes ----- The matplotlib backend lets us assign ids to each object. This is the mechanism used here to relate matplotlib objects created in python and the corresponding SVG constructs that are parsed in the second step. While flexible, ids are cumbersome to use for large collection of objects. Two mechanisms could be used to simplify things: * systematic grouping of objects into SVG <g> tags, * assigning classes to each SVG object according to its origin. For example, instead of modifying the properties of each individual bar, the bars from the `hist` function could either be grouped in a PatchCollection, or be assigned a class="hist_##" attribute. CSS could also be used more extensively to replace repetitive markup throughout the generated SVG. Author: [email protected] """ import numpy as np import matplotlib.pyplot as plt import xml.etree.ElementTree as ET from io import BytesIO import json plt.rcParams['svg.fonttype'] = 'none' # Apparently, this `register_namespace` method works only with # python 2.7 and up and is necessary to avoid garbling the XML name # space with ns0. ET.register_namespace("", "http://www.w3.org/2000/svg") # Fixing random state for reproducibility np.random.seed(19680801) # --- Create histogram, legend and title --- plt.figure() r = np.random.randn(100) r1 = r + 1 labels = ['Rabbits', 'Frogs'] H = plt.hist([r, r1], label=labels) containers = H[-1] leg = plt.legend(frameon=False) plt.title("From a web browser, click on the legend\n" "marker to toggle the corresponding histogram.") # --- Add ids to the svg objects we'll modify hist_patches = {} for ic, c in enumerate(containers): hist_patches['hist_%d' % ic] = [] for il, element in enumerate(c): element.set_gid('hist_%d_patch_%d' % (ic, il)) hist_patches['hist_%d' % ic].append('hist_%d_patch_%d' % (ic, il)) # Set ids for the legend patches for i, t in enumerate(leg.get_patches()): t.set_gid('leg_patch_%d' % i) # Set ids for the text patches for i, t in enumerate(leg.get_texts()): t.set_gid('leg_text_%d' % i) # Save SVG in a fake file object. f = BytesIO() plt.savefig(f, format="svg") # Create XML tree from the SVG file. tree, xmlid = ET.XMLID(f.getvalue()) # --- Add interactivity --- # Add attributes to the patch objects. for i, t in enumerate(leg.get_patches()): el = xmlid['leg_patch_%d' % i] el.set('cursor', 'pointer') el.set('onclick', "toggle_hist(this)") # Add attributes to the text objects. for i, t in enumerate(leg.get_texts()): el = xmlid['leg_text_%d' % i] el.set('cursor', 'pointer') el.set('onclick', "toggle_hist(this)") # Create script defining the function `toggle_hist`. # We create a global variable `container` that stores the patches id # belonging to each histogram. Then a function "toggle_element" sets the # visibility attribute of all patches of each histogram and the opacity # of the marker itself. script = """ <script type="text/ecmascript"> <![CDATA[ var container = %s function toggle(oid, attribute, values) { /* Toggle the style attribute of an object between two values. Parameters ---------- oid : str Object identifier. 
attribute : str Name of style attribute. values : [on state, off state] The two values that are switched between. */ var obj = document.getElementById(oid); var a = obj.style[attribute]; a = (a == values[0] || a == "") ? values[1] : values[0]; obj.style[attribute] = a; } function toggle_hist(obj) { var num = obj.id.slice(-1); toggle('leg_patch_' + num, 'opacity', [1, 0.3]); toggle('leg_text_' + num, 'opacity', [1, 0.5]); var names = container['hist_'+num] for (var i=0; i < names.length; i++) { toggle(names[i], 'opacity', [1,0]) }; } ]]> </script> """ % json.dumps(hist_patches) # Add a transition effect css = tree.getchildren()[0][0] css.text = css.text + "g {-webkit-transition:opacity 0.4s ease-out;" + \ "-moz-transition:opacity 0.4s ease-out;}" # Insert the script and save to file. tree.insert(0, ET.XML(script)) ET.ElementTree(tree).write("svg_histogram.svg")
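

# A quick sanity check (not in the original example): the gid set on each
# matplotlib artist becomes the ``id`` attribute of the corresponding SVG
# element, which is what the toggling script above relies on.
#
#     svg = f.getvalue().decode()
#     assert all(pid in svg for pid in hist_patches['hist_0'])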
d7505ecfbc8edf482dfa2f9c1036f768d40ab5fbbb01541ec780bce2b9283e70
""" ================== Embedding in wx #5 ================== """ import wx import wx.lib.agw.aui as aui import wx.lib.mixins.inspection as wit import matplotlib as mpl from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas from matplotlib.backends.backend_wxagg import NavigationToolbar2WxAgg as NavigationToolbar class Plot(wx.Panel): def __init__(self, parent, id=-1, dpi=None, **kwargs): wx.Panel.__init__(self, parent, id=id, **kwargs) self.figure = mpl.figure.Figure(dpi=dpi, figsize=(2, 2)) self.canvas = FigureCanvas(self, -1, self.figure) self.toolbar = NavigationToolbar(self.canvas) self.toolbar.Realize() sizer = wx.BoxSizer(wx.VERTICAL) sizer.Add(self.canvas, 1, wx.EXPAND) sizer.Add(self.toolbar, 0, wx.LEFT | wx.EXPAND) self.SetSizer(sizer) class PlotNotebook(wx.Panel): def __init__(self, parent, id=-1): wx.Panel.__init__(self, parent, id=id) self.nb = aui.AuiNotebook(self) sizer = wx.BoxSizer() sizer.Add(self.nb, 1, wx.EXPAND) self.SetSizer(sizer) def add(self, name="plot"): page = Plot(self.nb) self.nb.AddPage(page, name) return page.figure def demo(): # alternatively you could use #app = wx.App() # InspectableApp is a great debug tool, see: # http://wiki.wxpython.org/Widget%20Inspection%20Tool app = wit.InspectableApp() frame = wx.Frame(None, -1, 'Plotter') plotter = PlotNotebook(frame) axes1 = plotter.add('figure 1').gca() axes1.plot([1, 2, 3], [2, 1, 4]) axes2 = plotter.add('figure 2').gca() axes2.plot([1, 2, 3, 4, 5], [2, 1, 4, 2, 3]) frame.Show() app.MainLoop() if __name__ == "__main__": demo()
1261c91b62293603d877d21872a9f985d3662c27346a222c5e0cf48a2e518f63
""" ================ Embedding WebAgg ================ This example demonstrates how to embed matplotlib WebAgg interactive plotting in your own web application and framework. It is not necessary to do all this if you merely want to display a plot in a browser or use matplotlib's built-in Tornado-based server "on the side". The framework being used must support web sockets. """ import io try: import tornado except ImportError: raise RuntimeError("This example requires tornado.") import tornado.web import tornado.httpserver import tornado.ioloop import tornado.websocket from matplotlib.backends.backend_webagg_core import ( FigureManagerWebAgg, new_figure_manager_given_figure) from matplotlib.figure import Figure import numpy as np import json def create_figure(): """ Creates a simple example figure. """ fig = Figure() a = fig.add_subplot(111) t = np.arange(0.0, 3.0, 0.01) s = np.sin(2 * np.pi * t) a.plot(t, s) return fig # The following is the content of the web page. You would normally # generate this using some sort of template facility in your web # framework, but here we just use Python string formatting. html_content = """ <html> <head> <!-- TODO: There should be a way to include all of the required javascript and CSS so matplotlib can add to the set in the future if it needs to. --> <link rel="stylesheet" href="_static/css/page.css" type="text/css"> <link rel="stylesheet" href="_static/css/boilerplate.css" type="text/css" /> <link rel="stylesheet" href="_static/css/fbm.css" type="text/css" /> <link rel="stylesheet" href="_static/jquery-ui-1.12.1/jquery-ui.min.css" > <script src="_static/jquery-ui-1.12.1/external/jquery/jquery.js"></script> <script src="_static/jquery-ui-1.12.1/jquery-ui.min.js"></script> <script src="mpl.js"></script> <script> /* This is a callback that is called when the user saves (downloads) a file. Its purpose is really to map from a figure and file format to a url in the application. */ function ondownload(figure, format) { window.open('download.' + format, '_blank'); }; $(document).ready( function() { /* It is up to the application to provide a websocket that the figure will use to communicate to the server. This websocket object can also be a "fake" websocket that underneath multiplexes messages from multiple figures, if necessary. */ var websocket_type = mpl.get_websocket_type(); var websocket = new websocket_type("%(ws_uri)sws"); // mpl.figure creates a new figure on the webpage. var fig = new mpl.figure( // A unique numeric identifier for the figure %(fig_id)s, // A websocket object (or something that behaves like one) websocket, // A function called when a file type is selected for download ondownload, // The HTML element in which to place the figure $('div#figure')); } ); </script> <title>matplotlib</title> </head> <body> <div id="figure"> </div> </body> </html> """ class MyApplication(tornado.web.Application): class MainPage(tornado.web.RequestHandler): """ Serves the main HTML page. """ def get(self): manager = self.application.manager ws_uri = "ws://{req.host}/".format(req=self.request) content = html_content % { "ws_uri": ws_uri, "fig_id": manager.num} self.write(content) class MplJs(tornado.web.RequestHandler): """ Serves the generated matplotlib javascript file. The content is dynamically generated based on which toolbar functions the user has defined. Call `FigureManagerWebAgg` to get its content. 
""" def get(self): self.set_header('Content-Type', 'application/javascript') js_content = FigureManagerWebAgg.get_javascript() self.write(js_content) class Download(tornado.web.RequestHandler): """ Handles downloading of the figure in various file formats. """ def get(self, fmt): manager = self.application.manager mimetypes = { 'ps': 'application/postscript', 'eps': 'application/postscript', 'pdf': 'application/pdf', 'svg': 'image/svg+xml', 'png': 'image/png', 'jpeg': 'image/jpeg', 'tif': 'image/tiff', 'emf': 'application/emf' } self.set_header('Content-Type', mimetypes.get(fmt, 'binary')) buff = io.BytesIO() manager.canvas.figure.savefig(buff, format=fmt) self.write(buff.getvalue()) class WebSocket(tornado.websocket.WebSocketHandler): """ A websocket for interactive communication between the plot in the browser and the server. In addition to the methods required by tornado, it is required to have two callback methods: - ``send_json(json_content)`` is called by matplotlib when it needs to send json to the browser. `json_content` is a JSON tree (Python dictionary), and it is the responsibility of this implementation to encode it as a string to send over the socket. - ``send_binary(blob)`` is called to send binary image data to the browser. """ supports_binary = True def open(self): # Register the websocket with the FigureManager. manager = self.application.manager manager.add_web_socket(self) if hasattr(self, 'set_nodelay'): self.set_nodelay(True) def on_close(self): # When the socket is closed, deregister the websocket with # the FigureManager. manager = self.application.manager manager.remove_web_socket(self) def on_message(self, message): # The 'supports_binary' message is relevant to the # websocket itself. The other messages get passed along # to matplotlib as-is. # Every message has a "type" and a "figure_id". message = json.loads(message) if message['type'] == 'supports_binary': self.supports_binary = message['value'] else: manager = self.application.manager manager.handle_json(message) def send_json(self, content): self.write_message(json.dumps(content)) def send_binary(self, blob): if self.supports_binary: self.write_message(blob, binary=True) else: data_uri = "data:image/png;base64,{0}".format( blob.encode('base64').replace('\n', '')) self.write_message(data_uri) def __init__(self, figure): self.figure = figure self.manager = new_figure_manager_given_figure(id(figure), figure) super().__init__([ # Static files for the CSS and JS (r'/_static/(.*)', tornado.web.StaticFileHandler, {'path': FigureManagerWebAgg.get_static_file_path()}), # The page that contains all of the pieces ('/', self.MainPage), ('/mpl.js', self.MplJs), # Sends images and events to the browser, and receives # events from the browser ('/ws', self.WebSocket), # Handles the downloading (i.e., saving) of static images (r'/download.([a-z0-9.]+)', self.Download), ]) if __name__ == "__main__": figure = create_figure() application = MyApplication(figure) http_server = tornado.httpserver.HTTPServer(application) http_server.listen(8080) print("http://127.0.0.1:8080/") print("Press Ctrl+C to quit") tornado.ioloop.IOLoop.instance().start()
078b7d98185218217308777563d994bb8c6f0dc5b7613549c45e587030312cb9
""" ================== Embedding in wx #2 ================== An example of how to use wxagg in an application with the new toolbar - comment out the add_toolbar line for no toolbar """ from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas from matplotlib.backends.backend_wx import NavigationToolbar2Wx as NavigationToolbar from matplotlib.figure import Figure import numpy as np import wx import wx.lib.mixins.inspection as WIT class CanvasFrame(wx.Frame): def __init__(self): wx.Frame.__init__(self, None, -1, 'CanvasFrame', size=(550, 350)) self.figure = Figure() self.axes = self.figure.add_subplot(111) t = np.arange(0.0, 3.0, 0.01) s = np.sin(2 * np.pi * t) self.axes.plot(t, s) self.canvas = FigureCanvas(self, -1, self.figure) self.sizer = wx.BoxSizer(wx.VERTICAL) self.sizer.Add(self.canvas, 1, wx.LEFT | wx.TOP | wx.EXPAND) self.SetSizer(self.sizer) self.Fit() self.add_toolbar() # comment this out for no toolbar def add_toolbar(self): self.toolbar = NavigationToolbar(self.canvas) self.toolbar.Realize() # By adding toolbar in sizer, we are able to put it at the bottom # of the frame - so appearance is closer to GTK version. self.sizer.Add(self.toolbar, 0, wx.LEFT | wx.EXPAND) # update the axes menu on the toolbar self.toolbar.update() # alternatively you could use #class App(wx.App): class App(WIT.InspectableApp): def OnInit(self): 'Create the main window and insert the custom frame' self.Init() frame = CanvasFrame() frame.Show(True) return True app = App(0) app.MainLoop()
b0129e7aa72e562c4d8e9789cd2b8da50d39450696275834639264a822e82f3a
""" ============= WXcursor Demo ============= Example to draw a cursor and report the data coords in wx. """ from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas from matplotlib.backends.backend_wx import NavigationToolbar2Wx from matplotlib.figure import Figure import numpy as np import wx class CanvasFrame(wx.Frame): def __init__(self, ): wx.Frame.__init__(self, None, -1, 'CanvasFrame', size=(550, 350)) self.figure = Figure() self.axes = self.figure.add_subplot(111) t = np.arange(0.0, 3.0, 0.01) s = np.sin(2*np.pi*t) self.axes.plot(t, s) self.axes.set_xlabel('t') self.axes.set_ylabel('sin(t)') self.figure_canvas = FigureCanvas(self, -1, self.figure) # Note that event is a MplEvent self.figure_canvas.mpl_connect( 'motion_notify_event', self.UpdateStatusBar) self.figure_canvas.Bind(wx.EVT_ENTER_WINDOW, self.ChangeCursor) self.sizer = wx.BoxSizer(wx.VERTICAL) self.sizer.Add(self.figure_canvas, 1, wx.LEFT | wx.TOP | wx.GROW) self.SetSizer(self.sizer) self.Fit() self.statusBar = wx.StatusBar(self, -1) self.SetStatusBar(self.statusBar) self.toolbar = NavigationToolbar2Wx(self.figure_canvas) self.sizer.Add(self.toolbar, 0, wx.LEFT | wx.EXPAND) self.toolbar.Show() def ChangeCursor(self, event): self.figure_canvas.SetCursor(wx.Cursor(wx.CURSOR_BULLSEYE)) def UpdateStatusBar(self, event): if event.inaxes: self.statusBar.SetStatusText( "x={} y={}".format(event.xdata, event.ydata)) class App(wx.App): def OnInit(self): 'Create the main window and insert the custom frame' frame = CanvasFrame() self.SetTopWindow(frame) frame.Show(True) return True if __name__ == '__main__': app = App(0) app.MainLoop()
e1e42982551f4b54e591e45f20fa341b823bd4ddd4e900b009520af29f42628d
""" =========================================== Embedding in GTK3 with a navigation toolbar =========================================== Demonstrate NavigationToolbar with GTK3 accessed via pygobject. """ import gi gi.require_version('Gtk', '3.0') from gi.repository import Gtk from matplotlib.backends.backend_gtk3 import ( NavigationToolbar2GTK3 as NavigationToolbar) from matplotlib.backends.backend_gtk3agg import ( FigureCanvasGTK3Agg as FigureCanvas) from matplotlib.figure import Figure import numpy as np win = Gtk.Window() win.connect("delete-event", Gtk.main_quit) win.set_default_size(400, 300) win.set_title("Embedding in GTK") f = Figure(figsize=(5, 4), dpi=100) a = f.add_subplot(1, 1, 1) t = np.arange(0.0, 3.0, 0.01) s = np.sin(2*np.pi*t) a.plot(t, s) vbox = Gtk.VBox() win.add(vbox) # Add canvas to vbox canvas = FigureCanvas(f) # a Gtk.DrawingArea vbox.pack_start(canvas, True, True, 0) # Create toolbar toolbar = NavigationToolbar(canvas, win) vbox.pack_start(toolbar, False, False, 0) win.show_all() Gtk.main()
d238c2397446707e72156bac0dbd2362ee5e9dbfe231aae8fd11cb95cb239b36
""" =========== SVG Tooltip =========== This example shows how to create a tooltip that will show up when hovering over a matplotlib patch. Although it is possible to create the tooltip from CSS or javascript, here we create it in matplotlib and simply toggle its visibility on when hovering over the patch. This approach provides total control over the tooltip placement and appearance, at the expense of more code up front. The alternative approach would be to put the tooltip content in `title` attributes of SVG objects. Then, using an existing js/CSS library, it would be relatively straightforward to create the tooltip in the browser. The content would be dictated by the `title` attribute, and the appearance by the CSS. :author: David Huard """ import matplotlib.pyplot as plt import xml.etree.ElementTree as ET from io import BytesIO ET.register_namespace("", "http://www.w3.org/2000/svg") fig, ax = plt.subplots() # Create patches to which tooltips will be assigned. rect1 = plt.Rectangle((10, -20), 10, 5, fc='blue') rect2 = plt.Rectangle((-20, 15), 10, 5, fc='green') shapes = [rect1, rect2] labels = ['This is a blue rectangle.', 'This is a green rectangle'] for i, (item, label) in enumerate(zip(shapes, labels)): patch = ax.add_patch(item) annotate = ax.annotate(labels[i], xy=item.get_xy(), xytext=(0, 0), textcoords='offset points', color='w', ha='center', fontsize=8, bbox=dict(boxstyle='round, pad=.5', fc=(.1, .1, .1, .92), ec=(1., 1., 1.), lw=1, zorder=1)) ax.add_patch(patch) patch.set_gid('mypatch_{:03d}'.format(i)) annotate.set_gid('mytooltip_{:03d}'.format(i)) # Save the figure in a fake file object ax.set_xlim(-30, 30) ax.set_ylim(-30, 30) ax.set_aspect('equal') f = BytesIO() plt.savefig(f, format="svg") # --- Add interactivity --- # Create XML tree from the SVG file. tree, xmlid = ET.XMLID(f.getvalue()) tree.set('onload', 'init(evt)') for i in shapes: # Get the index of the shape index = shapes.index(i) # Hide the tooltips tooltip = xmlid['mytooltip_{:03d}'.format(index)] tooltip.set('visibility', 'hidden') # Assign onmouseover and onmouseout callbacks to patches. mypatch = xmlid['mypatch_{:03d}'.format(index)] mypatch.set('onmouseover', "ShowTooltip(this)") mypatch.set('onmouseout', "HideTooltip(this)") # This is the script defining the ShowTooltip and HideTooltip functions. script = """ <script type="text/ecmascript"> <![CDATA[ function init(evt) { if ( window.svgDocument == null ) { svgDocument = evt.target.ownerDocument; } } function ShowTooltip(obj) { var cur = obj.id.split("_")[1]; var tip = svgDocument.getElementById('mytooltip_' + cur); tip.setAttribute('visibility',"visible") } function HideTooltip(obj) { var cur = obj.id.split("_")[1]; var tip = svgDocument.getElementById('mytooltip_' + cur); tip.setAttribute('visibility',"hidden") } ]]> </script> """ # Insert the script at the top of the file and save it. tree.insert(0, ET.XML(script)) ET.ElementTree(tree).write('svg_tooltip.svg')
40dd107cb9ed5c5bda9e65ce01c83e4aaa14462a0132c201874f3aeab2099052
""" ================== Embedding in wx #4 ================== An example of how to use wx or wxagg in an application with a custom toolbar. """ from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas from matplotlib.backends.backend_wxagg import NavigationToolbar2WxAgg as NavigationToolbar from matplotlib.backends.backend_wx import _load_bitmap from matplotlib.figure import Figure import numpy as np import wx class MyNavigationToolbar(NavigationToolbar): """Extend the default wx toolbar with your own event handlers.""" def __init__(self, canvas, cankill): NavigationToolbar.__init__(self, canvas) # for simplicity I'm going to reuse a bitmap from wx, you'll # probably want to add your own. tool = self.AddTool(wx.ID_ANY, 'Click me', _load_bitmap('back.png'), 'Activate custom contol') self.Bind(wx.EVT_TOOL, self._on_custom, id=tool.GetId()) def _on_custom(self, evt): # add some text to the axes in a random location in axes (0,1) # coords) with a random color # get the axes ax = self.canvas.figure.axes[0] # generate a random location can color x, y = np.random.rand(2) rgb = np.random.rand(3) # add the text and draw ax.text(x, y, 'You clicked me', transform=ax.transAxes, color=rgb) self.canvas.draw() evt.Skip() class CanvasFrame(wx.Frame): def __init__(self): wx.Frame.__init__(self, None, -1, 'CanvasFrame', size=(550, 350)) self.figure = Figure(figsize=(5, 4), dpi=100) self.axes = self.figure.add_subplot(111) t = np.arange(0.0, 3.0, 0.01) s = np.sin(2 * np.pi * t) self.axes.plot(t, s) self.canvas = FigureCanvas(self, -1, self.figure) self.sizer = wx.BoxSizer(wx.VERTICAL) self.sizer.Add(self.canvas, 1, wx.TOP | wx.LEFT | wx.EXPAND) self.toolbar = MyNavigationToolbar(self.canvas, True) self.toolbar.Realize() # By adding toolbar in sizer, we are able to put it at the bottom # of the frame - so appearance is closer to GTK version. self.sizer.Add(self.toolbar, 0, wx.LEFT | wx.EXPAND) # update the axes menu on the toolbar self.toolbar.update() self.SetSizer(self.sizer) self.Fit() class App(wx.App): def OnInit(self): 'Create the main window and insert the custom frame' frame = CanvasFrame() frame.Show(True) return True app = App(0) app.MainLoop()
6be3aa45f8623d066289e3eeb219f0e5415110969f85cea3b69fabc9ae338b6e
""" =============== Embedding in Qt =============== Simple Qt application embedding Matplotlib canvases. This program will work equally well using Qt4 and Qt5. Either version of Qt can be selected (for example) by setting the ``MPLBACKEND`` environment variable to "Qt4Agg" or "Qt5Agg", or by first importing the desired version of PyQt. """ import sys import time import numpy as np from matplotlib.backends.qt_compat import QtCore, QtWidgets, is_pyqt5 if is_pyqt5(): from matplotlib.backends.backend_qt5agg import ( FigureCanvas, NavigationToolbar2QT as NavigationToolbar) else: from matplotlib.backends.backend_qt4agg import ( FigureCanvas, NavigationToolbar2QT as NavigationToolbar) from matplotlib.figure import Figure class ApplicationWindow(QtWidgets.QMainWindow): def __init__(self): super().__init__() self._main = QtWidgets.QWidget() self.setCentralWidget(self._main) layout = QtWidgets.QVBoxLayout(self._main) static_canvas = FigureCanvas(Figure(figsize=(5, 3))) layout.addWidget(static_canvas) self.addToolBar(NavigationToolbar(static_canvas, self)) dynamic_canvas = FigureCanvas(Figure(figsize=(5, 3))) layout.addWidget(dynamic_canvas) self.addToolBar(QtCore.Qt.BottomToolBarArea, NavigationToolbar(dynamic_canvas, self)) self._static_ax = static_canvas.figure.subplots() t = np.linspace(0, 10, 501) self._static_ax.plot(t, np.tan(t), ".") self._dynamic_ax = dynamic_canvas.figure.subplots() self._timer = dynamic_canvas.new_timer( 100, [(self._update_canvas, (), {})]) self._timer.start() def _update_canvas(self): self._dynamic_ax.clear() t = np.linspace(0, 10, 101) # Shift the sinusoid as a function of time. self._dynamic_ax.plot(t, np.sin(t + time.time())) self._dynamic_ax.figure.canvas.draw() if __name__ == "__main__": # Check whether there is already a running QApplication (e.g., if running # from an IDE). qapp = QtWidgets.QApplication.instance() if not qapp: qapp = QtWidgets.QApplication(sys.argv) app = ApplicationWindow() app.show() qapp.exec_()
4137b53db60867af69d60f18eb9644572c512f7e67c1613ced6411c60902d5ae
""" ============== CanvasAgg demo ============== This example shows how to use the agg backend directly to create images, which may be of use to web application developers who want full control over their code without using the pyplot interface to manage figures, figure closing etc. .. note:: It is not necessary to avoid using the pyplot interface in order to create figures without a graphical front-end - simply setting the backend to "Agg" would be sufficient. In this example, we show how to save the contents of the agg canvas to a file, and how to extract them to a string, which can in turn be passed off to PIL or put in a numpy array. The latter functionality allows e.g. to use Matplotlib inside a cgi-script *without* needing to write a figure to disk. """ from matplotlib.backends.backend_agg import FigureCanvasAgg from matplotlib.figure import Figure import numpy as np fig = Figure(figsize=(5, 4), dpi=100) # A canvas must be manually attached to the figure (pyplot would automatically # do it). This is done by instantiating the canvas with the figure as # argument. canvas = FigureCanvasAgg(fig) # Do some plotting. ax = fig.add_subplot(111) ax.plot([1, 2, 3]) # Option 1: Save the figure to a file; can also be a file-like object (BytesIO, # etc.). fig.savefig("test.png") # Option 2: Save the figure to a string. canvas.draw() s, (width, height) = canvas.print_to_buffer() # Option 2a: Convert to a NumPy array. X = np.frombuffer(s, np.uint8).reshape((height, width, 4)) # Option 2b: Pass off to PIL. from PIL import Image im = Image.frombytes("RGBA", (width, height), s) # Uncomment this line to display the image using ImageMagick's `display` tool. # im.show() ############################################################################# # # ------------ # # References # """""""""" # # The use of the following functions, methods, classes and modules is shown # in this example: import matplotlib matplotlib.backends.backend_agg.FigureCanvasAgg matplotlib.figure.Figure matplotlib.figure.Figure.add_subplot matplotlib.figure.Figure.savefig matplotlib.axes.Axes.plot
d91801eaa43c4af80d57f776160059f817f2dadbc0abbd296ebf512074fd3fc7
""" =============== Fourier Demo WX =============== """ import numpy as np import wx from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas from matplotlib.figure import Figure class Knob(object): """ Knob - simple class with a "setKnob" method. A Knob instance is attached to a Param instance, e.g., param.attach(knob) Base class is for documentation purposes. """ def setKnob(self, value): pass class Param(object): """ The idea of the "Param" class is that some parameter in the GUI may have several knobs that both control it and reflect the parameter's state, e.g. a slider, text, and dragging can all change the value of the frequency in the waveform of this example. The class allows a cleaner way to update/"feedback" to the other knobs when one is being changed. Also, this class handles min/max constraints for all the knobs. Idea - knob list - in "set" method, knob object is passed as well - the other knobs in the knob list have a "set" method which gets called for the others. """ def __init__(self, initialValue=None, minimum=0., maximum=1.): self.minimum = minimum self.maximum = maximum if initialValue != self.constrain(initialValue): raise ValueError('illegal initial value') self.value = initialValue self.knobs = [] def attach(self, knob): self.knobs += [knob] def set(self, value, knob=None): self.value = value self.value = self.constrain(value) for feedbackKnob in self.knobs: if feedbackKnob != knob: feedbackKnob.setKnob(self.value) return self.value def constrain(self, value): if value <= self.minimum: value = self.minimum if value >= self.maximum: value = self.maximum return value class SliderGroup(Knob): def __init__(self, parent, label, param): self.sliderLabel = wx.StaticText(parent, label=label) self.sliderText = wx.TextCtrl(parent, -1, style=wx.TE_PROCESS_ENTER) self.slider = wx.Slider(parent, -1) # self.slider.SetMax(param.maximum*1000) self.slider.SetRange(0, param.maximum * 1000) self.setKnob(param.value) sizer = wx.BoxSizer(wx.HORIZONTAL) sizer.Add(self.sliderLabel, 0, wx.EXPAND | wx.ALIGN_CENTER | wx.ALL, border=2) sizer.Add(self.sliderText, 0, wx.EXPAND | wx.ALIGN_CENTER | wx.ALL, border=2) sizer.Add(self.slider, 1, wx.EXPAND) self.sizer = sizer self.slider.Bind(wx.EVT_SLIDER, self.sliderHandler) self.sliderText.Bind(wx.EVT_TEXT_ENTER, self.sliderTextHandler) self.param = param self.param.attach(self) def sliderHandler(self, evt): value = evt.GetInt() / 1000. 
self.param.set(value) def sliderTextHandler(self, evt): value = float(self.sliderText.GetValue()) self.param.set(value) def setKnob(self, value): self.sliderText.SetValue('%g' % value) self.slider.SetValue(value * 1000) class FourierDemoFrame(wx.Frame): def __init__(self, *args, **kwargs): wx.Frame.__init__(self, *args, **kwargs) panel = wx.Panel(self) # create the GUI elements self.createCanvas(panel) self.createSliders(panel) # place them in a sizer for the Layout sizer = wx.BoxSizer(wx.VERTICAL) sizer.Add(self.canvas, 1, wx.EXPAND) sizer.Add(self.frequencySliderGroup.sizer, 0, wx.EXPAND | wx.ALIGN_CENTER | wx.ALL, border=5) sizer.Add(self.amplitudeSliderGroup.sizer, 0, wx.EXPAND | wx.ALIGN_CENTER | wx.ALL, border=5) panel.SetSizer(sizer) def createCanvas(self, parent): self.lines = [] self.figure = Figure() self.canvas = FigureCanvas(parent, -1, self.figure) self.canvas.callbacks.connect('button_press_event', self.mouseDown) self.canvas.callbacks.connect('motion_notify_event', self.mouseMotion) self.canvas.callbacks.connect('button_release_event', self.mouseUp) self.state = '' self.mouseInfo = (None, None, None, None) self.f0 = Param(2., minimum=0., maximum=6.) self.A = Param(1., minimum=0.01, maximum=2.) self.createPlots() # Not sure I like having two params attached to the same Knob, # but that is what we have here... it works but feels kludgy - # although maybe it's not too bad since the knob changes both params # at the same time (both f0 and A are affected during a drag) self.f0.attach(self) self.A.attach(self) def createSliders(self, panel): self.frequencySliderGroup = SliderGroup( panel, label='Frequency f0:', param=self.f0) self.amplitudeSliderGroup = SliderGroup(panel, label=' Amplitude a:', param=self.A) def mouseDown(self, evt): if self.lines[0].contains(evt)[0]: self.state = 'frequency' elif self.lines[1].contains(evt)[0]: self.state = 'time' else: self.state = '' self.mouseInfo = (evt.xdata, evt.ydata, max(self.f0.value, .1), self.A.value) def mouseMotion(self, evt): if self.state == '': return x, y = evt.xdata, evt.ydata if x is None: # outside the axes return x0, y0, f0Init, AInit = self.mouseInfo self.A.set(AInit + (AInit * (y - y0) / y0), self) if self.state == 'frequency': self.f0.set(f0Init + (f0Init * (x - x0) / x0)) elif self.state == 'time': if (x - x0) / x0 != -1.: self.f0.set(1. / (1. / f0Init + (1. / f0Init * (x - x0) / x0))) def mouseUp(self, evt): self.state = '' def createPlots(self): # This method creates the subplots, waveforms and labels. # Later, when the waveforms or sliders are dragged, only the # waveform data will be updated (not here, but below in setKnob). self.subplot1, self.subplot2 = self.figure.subplots(2) x1, y1, x2, y2 = self.compute(self.f0.value, self.A.value) color = (1., 0., 0.) 
self.lines += self.subplot1.plot(x1, y1, color=color, linewidth=2) self.lines += self.subplot2.plot(x2, y2, color=color, linewidth=2) # Set some plot attributes self.subplot1.set_title( "Click and drag waveforms to change frequency and amplitude", fontsize=12) self.subplot1.set_ylabel("Frequency Domain Waveform X(f)", fontsize=8) self.subplot1.set_xlabel("frequency f", fontsize=8) self.subplot2.set_ylabel("Time Domain Waveform x(t)", fontsize=8) self.subplot2.set_xlabel("time t", fontsize=8) self.subplot1.set_xlim([-6, 6]) self.subplot1.set_ylim([0, 1]) self.subplot2.set_xlim([-2, 2]) self.subplot2.set_ylim([-2, 2]) self.subplot1.text(0.05, .95, r'$X(f) = \mathcal{F}\{x(t)\}$', verticalalignment='top', transform=self.subplot1.transAxes) self.subplot2.text(0.05, .95, r'$x(t) = a \cdot \cos(2\pi f_0 t) e^{-\pi t^2}$', verticalalignment='top', transform=self.subplot2.transAxes) def compute(self, f0, A): f = np.arange(-6., 6., 0.02) t = np.arange(-2., 2., 0.01) x = A * np.cos(2 * np.pi * f0 * t) * np.exp(-np.pi * t ** 2) X = A / 2 * \ (np.exp(-np.pi * (f - f0) ** 2) + np.exp(-np.pi * (f + f0) ** 2)) return f, X, t, x def setKnob(self, value): # Note, we ignore value arg here and just go by state of the params x1, y1, x2, y2 = self.compute(self.f0.value, self.A.value) # update the data of the two waveforms self.lines[0].set(xdata=x1, ydata=y1) self.lines[1].set(xdata=x2, ydata=y2) # make the canvas draw its contents again with the new data self.canvas.draw() class App(wx.App): def OnInit(self): self.frame1 = FourierDemoFrame(parent=None, title="Fourier Demo", size=(640, 480)) self.frame1.Show() return True app = App() app.MainLoop()
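###############################################################################
# The Param/Knob feedback pattern in isolation (an illustrative addition, not
# part of the original demo; this runs only after the GUI window is closed):
# setting a Param pushes the constrained value to every attached knob except
# the one that initiated the change.

class PrintKnob(Knob):
    def setKnob(self, value):
        print("knob updated to", value)

p = Param(0.5, minimum=0., maximum=1.)
p.attach(PrintKnob())
p.set(2.0)  # Constrained to the maximum (1.0) and echoed by PrintKnob.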
8ca310c17ae4c8a81bd41c89d3d4708c0da6964f86ada037b6fcdc30e84ac2d0
""" =============== GTK Spreadsheet =============== Example of embedding Matplotlib in an application and interacting with a treeview to store data. Double click on an entry to update plot data. """ import gi gi.require_version('Gtk', '3.0') gi.require_version('Gdk', '3.0') from gi.repository import Gtk, Gdk from matplotlib.backends.backend_gtk3agg import FigureCanvas # or gtk3cairo. from numpy.random import random from matplotlib.figure import Figure class DataManager(Gtk.Window): num_rows, num_cols = 20, 10 data = random((num_rows, num_cols)) def __init__(self): super().__init__() self.set_default_size(600, 600) self.connect('destroy', lambda win: Gtk.main_quit()) self.set_title('GtkListStore demo') self.set_border_width(8) vbox = Gtk.VBox(homogeneous=False, spacing=8) self.add(vbox) label = Gtk.Label(label='Double click a row to plot the data') vbox.pack_start(label, False, False, 0) sw = Gtk.ScrolledWindow() sw.set_shadow_type(Gtk.ShadowType.ETCHED_IN) sw.set_policy(Gtk.PolicyType.NEVER, Gtk.PolicyType.AUTOMATIC) vbox.pack_start(sw, True, True, 0) model = self.create_model() self.treeview = Gtk.TreeView(model=model) # Matplotlib stuff fig = Figure(figsize=(6, 4)) self.canvas = FigureCanvas(fig) # a Gtk.DrawingArea vbox.pack_start(self.canvas, True, True, 0) ax = fig.add_subplot(111) self.line, = ax.plot(self.data[0, :], 'go') # plot the first row self.treeview.connect('row-activated', self.plot_row) sw.add(self.treeview) self.add_columns() self.add_events(Gdk.EventMask.BUTTON_PRESS_MASK | Gdk.EventMask.KEY_PRESS_MASK | Gdk.EventMask.KEY_RELEASE_MASK) def plot_row(self, treeview, path, view_column): ind, = path # get the index into data points = self.data[ind, :] self.line.set_ydata(points) self.canvas.draw() def add_columns(self): for i in range(self.num_cols): column = Gtk.TreeViewColumn(str(i), Gtk.CellRendererText(), text=i) self.treeview.append_column(column) def create_model(self): types = [float] * self.num_cols store = Gtk.ListStore(*types) for row in self.data: store.append(tuple(row)) return store manager = DataManager() manager.show_all() Gtk.main()
696bf08dcf74acc25803de8d5c7ff81a70bbef2ac38aaec89c37d267be73a630
""" =============== Embedding in Tk =============== """ import tkinter from matplotlib.backends.backend_tkagg import ( FigureCanvasTkAgg, NavigationToolbar2Tk) # Implement the default Matplotlib key bindings. from matplotlib.backend_bases import key_press_handler from matplotlib.figure import Figure import numpy as np root = tkinter.Tk() root.wm_title("Embedding in Tk") fig = Figure(figsize=(5, 4), dpi=100) t = np.arange(0, 3, .01) fig.add_subplot(111).plot(t, 2 * np.sin(2 * np.pi * t)) canvas = FigureCanvasTkAgg(fig, master=root) # A tk.DrawingArea. canvas.draw() canvas.get_tk_widget().pack(side=tkinter.TOP, fill=tkinter.BOTH, expand=1) toolbar = NavigationToolbar2Tk(canvas, root) toolbar.update() canvas.get_tk_widget().pack(side=tkinter.TOP, fill=tkinter.BOTH, expand=1) def on_key_press(event): print("you pressed {}".format(event.key)) key_press_handler(event, canvas, toolbar) canvas.mpl_connect("key_press_event", on_key_press) def _quit(): root.quit() # stops mainloop root.destroy() # this is necessary on Windows to prevent # Fatal Python Error: PyEval_RestoreThread: NULL tstate button = tkinter.Button(master=root, text="Quit", command=_quit) button.pack(side=tkinter.BOTTOM) tkinter.mainloop() # If you put root.destroy() here, it will cause an error if the window is # closed with the window manager.
b1fef259b35f20dae31fcb1511a7a99aabe8b5764bdac89602cbb5e116cef914
""" ======= Firefox ======= This example shows how to create the Firefox logo with path and patches. """ import re import numpy as np import matplotlib.pyplot as plt from matplotlib.path import Path import matplotlib.patches as patches # From: http://raphaeljs.com/icons/#firefox firefox = "M28.4,22.469c0.479-0.964,0.851-1.991,1.095-3.066c0.953-3.661,0.666-6.854,0.666-6.854l-0.327,2.104c0,0-0.469-3.896-1.044-5.353c-0.881-2.231-1.273-2.214-1.274-2.21c0.542,1.379,0.494,2.169,0.483,2.288c-0.01-0.016-0.019-0.032-0.027-0.047c-0.131-0.324-0.797-1.819-2.225-2.878c-2.502-2.481-5.943-4.014-9.745-4.015c-4.056,0-7.705,1.745-10.238,4.525C5.444,6.5,5.183,5.938,5.159,5.317c0,0-0.002,0.002-0.006,0.005c0-0.011-0.003-0.021-0.003-0.031c0,0-1.61,1.247-1.436,4.612c-0.299,0.574-0.56,1.172-0.777,1.791c-0.375,0.817-0.75,2.004-1.059,3.746c0,0,0.133-0.422,0.399-0.988c-0.064,0.482-0.103,0.971-0.116,1.467c-0.09,0.845-0.118,1.865-0.039,3.088c0,0,0.032-0.406,0.136-1.021c0.834,6.854,6.667,12.165,13.743,12.165l0,0c1.86,0,3.636-0.37,5.256-1.036C24.938,27.771,27.116,25.196,28.4,22.469zM16.002,3.356c2.446,0,4.73,0.68,6.68,1.86c-2.274-0.528-3.433-0.261-3.423-0.248c0.013,0.015,3.384,0.589,3.981,1.411c0,0-1.431,0-2.856,0.41c-0.065,0.019,5.242,0.663,6.327,5.966c0,0-0.582-1.213-1.301-1.42c0.473,1.439,0.351,4.17-0.1,5.528c-0.058,0.174-0.118-0.755-1.004-1.155c0.284,2.037-0.018,5.268-1.432,6.158c-0.109,0.07,0.887-3.189,0.201-1.93c-4.093,6.276-8.959,2.539-10.934,1.208c1.585,0.388,3.267,0.108,4.242-0.559c0.982-0.672,1.564-1.162,2.087-1.047c0.522,0.117,0.87-0.407,0.464-0.872c-0.405-0.466-1.392-1.105-2.725-0.757c-0.94,0.247-2.107,1.287-3.886,0.233c-1.518-0.899-1.507-1.63-1.507-2.095c0-0.366,0.257-0.88,0.734-1.028c0.58,0.062,1.044,0.214,1.537,0.466c0.005-0.135,0.006-0.315-0.001-0.519c0.039-0.077,0.015-0.311-0.047-0.596c-0.036-0.287-0.097-0.582-0.19-0.851c0.01-0.002,0.017-0.007,0.021-0.021c0.076-0.344,2.147-1.544,2.299-1.659c0.153-0.114,0.55-0.378,0.506-1.183c-0.015-0.265-0.058-0.294-2.232-0.286c-0.917,0.003-1.425-0.894-1.589-1.245c0.222-1.231,0.863-2.11,1.919-2.704c0.02-0.011,0.015-0.021-0.008-0.027c0.219-0.127-2.524-0.006-3.76,1.604C9.674,8.045,9.219,7.95,8.71,7.95c-0.638,0-1.139,0.07-1.603,0.187c-0.05,0.013-0.122,0.011-0.208-0.001C6.769,8.04,6.575,7.88,6.365,7.672c0.161-0.18,0.324-0.356,0.495-0.526C9.201,4.804,12.43,3.357,16.002,3.356z" def svg_parse(path): commands = {'M': (Path.MOVETO,), 'L': (Path.LINETO,), 'Q': (Path.CURVE3,)*2, 'C': (Path.CURVE4,)*3, 'Z': (Path.CLOSEPOLY,)} path_re = re.compile(r'([MLHVCSQTAZ])([^MLHVCSQTAZ]+)', re.IGNORECASE) float_re = re.compile(r'(?:[\s,]*)([+-]?\d+(?:\.\d+)?)') vertices = [] codes = [] last = (0, 0) for cmd, values in path_re.findall(path): points = [float(v) for v in float_re.findall(values)] points = np.array(points).reshape((len(points)//2, 2)) if cmd.islower(): points += last cmd = cmd.capitalize() last = points[-1] codes.extend(commands[cmd]) vertices.extend(points.tolist()) return codes, vertices # SVG to matplotlib codes, verts = svg_parse(firefox) verts = np.array(verts) path = Path(verts, codes) # Make upside down verts[:, 1] *= -1 xmin, xmax = verts[:, 0].min()-1, verts[:, 0].max()+1 ymin, ymax = verts[:, 1].min()-1, verts[:, 1].max()+1 fig = plt.figure(figsize=(5, 5)) ax = fig.add_axes([0.0, 0.0, 1.0, 1.0], frameon=False, aspect=1) # White outline (width = 6) patch = patches.PathPatch(path, facecolor='None', edgecolor='w', lw=6) ax.add_patch(patch) # Actual shape with black outline patch = patches.PathPatch(path, facecolor='orange', edgecolor='k', lw=2) ax.add_patch(patch) # 
# Centering
ax.set_xlim(xmin, xmax)
ax.set_ylim(ymin, ymax)

# No ticks
ax.set_xticks([])
ax.set_yticks([])

# Display
plt.show()
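###############################################################################
# A quick sanity check of ``svg_parse`` on a tiny hand-written path (an
# illustrative addition, not part of the original example): one move-to
# followed by two absolute line-tos.

demo_codes, demo_verts = svg_parse("M0,0 L10,0 L10,10")
assert demo_codes == [Path.MOVETO, Path.LINETO, Path.LINETO]
assert demo_verts == [[0.0, 0.0], [10.0, 0.0], [10.0, 10.0]]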
0807bfe648d8b0d8b9006c3ad592f263c0687162842ebd35a5471c9275463a87
""" ============================ Bachelor's degrees by gender ============================ A graph of multiple time series which demonstrates extensive custom styling of plot frame, tick lines and labels, and line graph properties. Also demonstrates the custom placement of text labels along the right edge as an alternative to a conventional legend. """ import numpy as np import matplotlib.pyplot as plt from matplotlib.cbook import get_sample_data fname = get_sample_data('percent_bachelors_degrees_women_usa.csv', asfileobj=False) gender_degree_data = np.genfromtxt(fname, delimiter=',', names=True) # You typically want your plot to be ~1.33x wider than tall. This plot # is a rare exception because of the number of lines being plotted on it. # Common sizes: (10, 7.5) and (12, 9) fig, ax = plt.subplots(1, 1, figsize=(12, 14)) # These are the colors that will be used in the plot ax.set_prop_cycle(color=[ '#1f77b4', '#aec7e8', '#ff7f0e', '#ffbb78', '#2ca02c', '#98df8a', '#d62728', '#ff9896', '#9467bd', '#c5b0d5', '#8c564b', '#c49c94', '#e377c2', '#f7b6d2', '#7f7f7f', '#c7c7c7', '#bcbd22', '#dbdb8d', '#17becf', '#9edae5']) # Remove the plot frame lines. They are unnecessary here. ax.spines['top'].set_visible(False) ax.spines['bottom'].set_visible(False) ax.spines['right'].set_visible(False) ax.spines['left'].set_visible(False) # Ensure that the axis ticks only show up on the bottom and left of the plot. # Ticks on the right and top of the plot are generally unnecessary. ax.get_xaxis().tick_bottom() ax.get_yaxis().tick_left() fig.subplots_adjust(left=.06, right=.75, bottom=.02, top=.94) # Limit the range of the plot to only where the data is. # Avoid unnecessary whitespace. ax.set_xlim(1969.5, 2011.1) ax.set_ylim(-0.25, 90) # Set a fixed location and format for ticks. ax.set_xticks(range(1970, 2011, 10)) ax.set_yticks(range(0, 91, 10)) ax.xaxis.set_major_formatter(plt.FuncFormatter('{:.0f}'.format)) ax.yaxis.set_major_formatter(plt.FuncFormatter('{:.0f}%'.format)) # Provide tick lines across the plot to help your viewers trace along # the axis ticks. Make sure that the lines are light and small so they # don't obscure the primary data lines. ax.grid(True, 'major', 'y', ls='--', lw=.5, c='k', alpha=.3) # Remove the tick marks; they are unnecessary with the tick lines we just # plotted. Make sure your axis ticks are large enough to be easily read. # You don't want your viewers squinting to read your plot. ax.tick_params(axis='both', which='both', labelsize=14, bottom=False, top=False, labelbottom=True, left=False, right=False, labelleft=True) # Now that the plot is prepared, it's time to actually plot the data! # Note that I plotted the majors in order of the highest % in the final year. majors = ['Health Professions', 'Public Administration', 'Education', 'Psychology', 'Foreign Languages', 'English', 'Communications\nand Journalism', 'Art and Performance', 'Biology', 'Agriculture', 'Social Sciences and History', 'Business', 'Math and Statistics', 'Architecture', 'Physical Sciences', 'Computer Science', 'Engineering'] y_offsets = {'Foreign Languages': 0.5, 'English': -0.5, 'Communications\nand Journalism': 0.75, 'Art and Performance': -0.25, 'Agriculture': 1.25, 'Social Sciences and History': 0.25, 'Business': -0.75, 'Math and Statistics': 0.75, 'Architecture': -0.75, 'Computer Science': 0.75, 'Engineering': -0.25} for column in majors: # Plot each line separately with its own color. 
column_rec_name = column.replace('\n', '_').replace(' ', '_') line, = ax.plot('Year', column_rec_name, data=gender_degree_data, lw=2.5) # Add a text label to the right end of every line. Most of the code below # is adding specific offsets y position because some labels overlapped. y_pos = gender_degree_data[column_rec_name][-1] - 0.5 if column in y_offsets: y_pos += y_offsets[column] # Again, make sure that all labels are large enough to be easily read # by the viewer. ax.text(2011.5, y_pos, column, fontsize=14, color=line.get_color()) # Make the title big enough so it spans the entire plot, but don't make it # so big that it requires two lines to show. # Note that if the title is descriptive enough, it is unnecessary to include # axis labels; they are self-evident, in this plot's case. fig.suptitle('Percentage of Bachelor\'s degrees conferred to women in ' 'the U.S.A. by major (1970-2011)\n', fontsize=18, ha='center') # Finally, save the figure as a PNG. # You can also save it as a PDF, JPEG, etc. # Just change the file extension in this call. # fig.savefig('percent-bachelors-degrees-women-usa.png', bbox_inches='tight') plt.show()
8d3ffe4cd640380542579b534b5ab3ee5375fa03d8a44a399234e2b7ff6e9ce3
""" =================== Anatomy of a figure =================== This figure shows the name of several matplotlib elements composing a figure """ import numpy as np import matplotlib.pyplot as plt from matplotlib.ticker import AutoMinorLocator, MultipleLocator, FuncFormatter np.random.seed(19680801) X = np.linspace(0.5, 3.5, 100) Y1 = 3+np.cos(X) Y2 = 1+np.cos(1+X/0.75)/2 Y3 = np.random.uniform(Y1, Y2, len(X)) fig = plt.figure(figsize=(8, 8)) ax = fig.add_subplot(1, 1, 1, aspect=1) def minor_tick(x, pos): if not x % 1.0: return "" return "%.2f" % x ax.xaxis.set_major_locator(MultipleLocator(1.000)) ax.xaxis.set_minor_locator(AutoMinorLocator(4)) ax.yaxis.set_major_locator(MultipleLocator(1.000)) ax.yaxis.set_minor_locator(AutoMinorLocator(4)) ax.xaxis.set_minor_formatter(FuncFormatter(minor_tick)) ax.set_xlim(0, 4) ax.set_ylim(0, 4) ax.tick_params(which='major', width=1.0) ax.tick_params(which='major', length=10) ax.tick_params(which='minor', width=1.0, labelsize=10) ax.tick_params(which='minor', length=5, labelsize=10, labelcolor='0.25') ax.grid(linestyle="--", linewidth=0.5, color='.25', zorder=-10) ax.plot(X, Y1, c=(0.25, 0.25, 1.00), lw=2, label="Blue signal", zorder=10) ax.plot(X, Y2, c=(1.00, 0.25, 0.25), lw=2, label="Red signal") ax.plot(X, Y3, linewidth=0, marker='o', markerfacecolor='w', markeredgecolor='k') ax.set_title("Anatomy of a figure", fontsize=20, verticalalignment='bottom') ax.set_xlabel("X axis label") ax.set_ylabel("Y axis label") ax.legend() def circle(x, y, radius=0.15): from matplotlib.patches import Circle from matplotlib.patheffects import withStroke circle = Circle((x, y), radius, clip_on=False, zorder=10, linewidth=1, edgecolor='black', facecolor=(0, 0, 0, .0125), path_effects=[withStroke(linewidth=5, foreground='w')]) ax.add_artist(circle) def text(x, y, text): ax.text(x, y, text, backgroundcolor="white", ha='center', va='top', weight='bold', color='blue') # Minor tick circle(0.50, -0.10) text(0.50, -0.32, "Minor tick label") # Major tick circle(-0.03, 4.00) text(0.03, 3.80, "Major tick") # Minor tick circle(0.00, 3.50) text(0.00, 3.30, "Minor tick") # Major tick label circle(-0.15, 3.00) text(-0.15, 2.80, "Major tick label") # X Label circle(1.80, -0.27) text(1.80, -0.45, "X axis label") # Y Label circle(-0.27, 1.80) text(-0.27, 1.6, "Y axis label") # Title circle(1.60, 4.13) text(1.60, 3.93, "Title") # Blue plot circle(1.75, 2.80) text(1.75, 2.60, "Line\n(line plot)") # Red plot circle(1.20, 0.60) text(1.20, 0.40, "Line\n(line plot)") # Scatter plot circle(3.20, 1.75) text(3.20, 1.55, "Markers\n(scatter plot)") # Grid circle(3.00, 3.00) text(3.00, 2.80, "Grid") # Legend circle(3.70, 3.80) text(3.70, 3.60, "Legend") # Axes circle(0.5, 0.5) text(0.5, 0.3, "Axes") # Figure circle(-0.3, 0.65) text(-0.3, 0.45, "Figure") color = 'blue' ax.annotate('Spines', xy=(4.0, 0.35), xytext=(3.3, 0.5), weight='bold', color=color, arrowprops=dict(arrowstyle='->', connectionstyle="arc3", color=color)) ax.annotate('', xy=(3.15, 0.0), xytext=(3.45, 0.45), weight='bold', color=color, arrowprops=dict(arrowstyle='->', connectionstyle="arc3", color=color)) ax.text(4.0, -0.4, "Made with http://matplotlib.org", fontsize=10, ha="right", color='.5') plt.show()
58490caf414b28d86eaf89a0975c4e81dfdc002223a78fc92424c1671165efa7
""" ==== XKCD ==== Shows how to create an xkcd-like plot. """ import matplotlib.pyplot as plt import numpy as np ############################################################################### with plt.xkcd(): # Based on "Stove Ownership" from XKCD by Randall Munroe # https://xkcd.com/418/ fig = plt.figure() ax = fig.add_axes((0.1, 0.2, 0.8, 0.7)) ax.spines['right'].set_color('none') ax.spines['top'].set_color('none') ax.set_xticks([]) ax.set_yticks([]) ax.set_ylim([-30, 10]) data = np.ones(100) data[70:] -= np.arange(30) ax.annotate( 'THE DAY I REALIZED\nI COULD COOK BACON\nWHENEVER I WANTED', xy=(70, 1), arrowprops=dict(arrowstyle='->'), xytext=(15, -10)) ax.plot(data) ax.set_xlabel('time') ax.set_ylabel('my overall health') fig.text( 0.5, 0.05, '"Stove Ownership" from xkcd by Randall Munroe', ha='center') ############################################################################### with plt.xkcd(): # Based on "The Data So Far" from XKCD by Randall Munroe # https://xkcd.com/373/ fig = plt.figure() ax = fig.add_axes((0.1, 0.2, 0.8, 0.7)) ax.bar([0, 1], [0, 100], 0.25) ax.spines['right'].set_color('none') ax.spines['top'].set_color('none') ax.xaxis.set_ticks_position('bottom') ax.set_xticks([0, 1]) ax.set_xticklabels(['CONFIRMED BY\nEXPERIMENT', 'REFUTED BY\nEXPERIMENT']) ax.set_xlim([-0.5, 1.5]) ax.set_yticks([]) ax.set_ylim([0, 110]) ax.set_title("CLAIMS OF SUPERNATURAL POWERS") fig.text( 0.5, 0.05, '"The Data So Far" from xkcd by Randall Munroe', ha='center') plt.show()
64c92dd2ca8ebb520063d5c02a8c671a2f241829991057334bbcdd1e7fb41407
""" =================================== Shaded & power normalized rendering =================================== The Mandelbrot set rendering can be improved by using a normalized recount associated with a power normalized colormap (gamma=0.3). Rendering can be further enhanced thanks to shading. The `maxiter` gives the precision of the computation. `maxiter=200` should take a few seconds on most modern laptops. """ import numpy as np def mandelbrot_set(xmin, xmax, ymin, ymax, xn, yn, maxiter, horizon=2.0): X = np.linspace(xmin, xmax, xn).astype(np.float32) Y = np.linspace(ymin, ymax, yn).astype(np.float32) C = X + Y[:, None] * 1j N = np.zeros_like(C, dtype=int) Z = np.zeros_like(C) for n in range(maxiter): I = abs(Z) < horizon N[I] = n Z[I] = Z[I]**2 + C[I] N[N == maxiter-1] = 0 return Z, N if __name__ == '__main__': import time import matplotlib from matplotlib import colors import matplotlib.pyplot as plt xmin, xmax, xn = -2.25, +0.75, 3000 // 2 ymin, ymax, yn = -1.25, +1.25, 2500 // 2 maxiter = 200 horizon = 2.0 ** 40 log_horizon = np.log2(np.log(horizon)) Z, N = mandelbrot_set(xmin, xmax, ymin, ymax, xn, yn, maxiter, horizon) # Normalized recount as explained in: # https://linas.org/art-gallery/escape/smooth.html # https://www.ibm.com/developerworks/community/blogs/jfp/entry/My_Christmas_Gift # This line will generate warnings for null values but it is faster to # process them afterwards using the nan_to_num with np.errstate(invalid='ignore'): M = np.nan_to_num(N + 1 - np.log2(np.log(abs(Z))) + log_horizon) dpi = 72 width = 10 height = 10*yn/xn fig = plt.figure(figsize=(width, height), dpi=dpi) ax = fig.add_axes([0, 0, 1, 1], frameon=False, aspect=1) # Shaded rendering light = colors.LightSource(azdeg=315, altdeg=10) M = light.shade(M, cmap=plt.cm.hot, vert_exag=1.5, norm=colors.PowerNorm(0.3), blend_mode='hsv') ax.imshow(M, extent=[xmin, xmax, ymin, ymax], interpolation="bicubic") ax.set_xticks([]) ax.set_yticks([]) # Some advertisement for matplotlib year = time.strftime("%Y") text = ("The Mandelbrot fractal set\n" "Rendered with matplotlib %s, %s - http://matplotlib.org" % (matplotlib.__version__, year)) ax.text(xmin+.025, ymin+.025, text, color="white", fontsize=12, alpha=0.5) plt.show()
e5f0b222dfba3d3fa90e7c8ec511cb21f2d93d69df32868e45ca8bf4d334738e
""" ================================== Integral as the area under a curve ================================== Although this is a simple example, it demonstrates some important tweaks: * A simple line plot with custom color and line width. * A shaded region created using a Polygon patch. * A text label with mathtext rendering. * figtext calls to label the x- and y-axes. * Use of axis spines to hide the top and right spines. * Custom tick placement and labels. """ import numpy as np import matplotlib.pyplot as plt from matplotlib.patches import Polygon def func(x): return (x - 3) * (x - 5) * (x - 7) + 85 a, b = 2, 9 # integral limits x = np.linspace(0, 10) y = func(x) fig, ax = plt.subplots() ax.plot(x, y, 'r', linewidth=2) ax.set_ylim(bottom=0) # Make the shaded region ix = np.linspace(a, b) iy = func(ix) verts = [(a, 0), *zip(ix, iy), (b, 0)] poly = Polygon(verts, facecolor='0.9', edgecolor='0.5') ax.add_patch(poly) ax.text(0.5 * (a + b), 30, r"$\int_a^b f(x)\mathrm{d}x$", horizontalalignment='center', fontsize=20) fig.text(0.9, 0.05, '$x$') fig.text(0.1, 0.9, '$y$') ax.spines['right'].set_visible(False) ax.spines['top'].set_visible(False) ax.xaxis.set_ticks_position('bottom') ax.set_xticks((a, b)) ax.set_xticklabels(('$a$', '$b$')) ax.set_yticks([]) plt.show()
aef0002f97d727a5ba25a0fe36fb9fe402470c1056225adb26716baadbbf9e6d
""" =============== Simple Axisline =============== """ import matplotlib.pyplot as plt from mpl_toolkits.axisartist.axislines import SubplotZero fig = plt.figure() fig.subplots_adjust(right=0.85) ax = SubplotZero(fig, 1, 1, 1) fig.add_subplot(ax) # make right and top axis invisible ax.axis["right"].set_visible(False) ax.axis["top"].set_visible(False) # make xzero axis (horizontal axis line through y=0) visible. ax.axis["xzero"].set_visible(True) ax.axis["xzero"].label.set_text("Axis Zero") ax.set_ylim(-2, 4) ax.set_xlabel("Label X") ax.set_ylabel("Label Y") # or #ax.axis["bottom"].label.set_text("Label X") #ax.axis["left"].label.set_text("Label Y") # make new (right-side) yaxis, but with some offset offset = (20, 0) new_axisline = ax.get_grid_helper().new_fixed_axis ax.axis["right2"] = new_axisline(loc="right", offset=offset, axes=ax) ax.axis["right2"].label.set_text("Label Y2") ax.plot([-2, 3, 2]) plt.show()
1e555beb36334eb91ffe1ee93e79b6f7b47ae2abeaf55eb3c418775663906918
""" ========================== Axis Direction Demo Step03 ========================== """ import matplotlib.pyplot as plt import mpl_toolkits.axisartist as axisartist def setup_axes(fig, rect): ax = axisartist.Subplot(fig, rect) fig.add_axes(ax) ax.set_ylim(-0.1, 1.5) ax.set_yticks([0, 1]) #ax.axis[:].toggle(all=False) #ax.axis[:].line.set_visible(False) ax.axis[:].set_visible(False) ax.axis["x"] = ax.new_floating_axis(1, 0.5) ax.axis["x"].set_axisline_style("->", size=1.5) return ax fig = plt.figure(figsize=(6, 2.5)) fig.subplots_adjust(bottom=0.2, top=0.8) ax1 = setup_axes(fig, "121") ax1.axis["x"].label.set_text("Label") ax1.axis["x"].toggle(ticklabels=False) ax1.axis["x"].set_axislabel_direction("+") ax1.annotate("label direction=$+$", (0.5, 0), xycoords="axes fraction", xytext=(0, -10), textcoords="offset points", va="top", ha="center") ax2 = setup_axes(fig, "122") ax2.axis["x"].label.set_text("Label") ax2.axis["x"].toggle(ticklabels=False) ax2.axis["x"].set_axislabel_direction("-") ax2.annotate("label direction=$-$", (0.5, 0), xycoords="axes fraction", xytext=(0, -10), textcoords="offset points", va="top", ha="center") plt.show()
82f68dd5f4ffdd77d14915e9b68e75804a6914b3affdf6fe1e5d461ccace072f
""" ========================== Axis Direction Demo Step04 ========================== """ import matplotlib.pyplot as plt import mpl_toolkits.axisartist as axisartist def setup_axes(fig, rect): ax = axisartist.Subplot(fig, rect) fig.add_axes(ax) ax.set_ylim(-0.1, 1.5) ax.set_yticks([0, 1]) ax.axis[:].set_visible(False) ax.axis["x1"] = ax.new_floating_axis(1, 0.3) ax.axis["x1"].set_axisline_style("->", size=1.5) ax.axis["x2"] = ax.new_floating_axis(1, 0.7) ax.axis["x2"].set_axisline_style("->", size=1.5) return ax fig = plt.figure(figsize=(6, 2.5)) fig.subplots_adjust(bottom=0.2, top=0.8) ax1 = setup_axes(fig, "121") ax1.axis["x1"].label.set_text("rotation=0") ax1.axis["x1"].toggle(ticklabels=False) ax1.axis["x2"].label.set_text("rotation=10") ax1.axis["x2"].label.set_rotation(10) ax1.axis["x2"].toggle(ticklabels=False) ax1.annotate("label direction=$+$", (0.5, 0), xycoords="axes fraction", xytext=(0, -10), textcoords="offset points", va="top", ha="center") ax2 = setup_axes(fig, "122") ax2.axis["x1"].set_axislabel_direction("-") ax2.axis["x2"].set_axislabel_direction("-") ax2.axis["x1"].label.set_text("rotation=0") ax2.axis["x1"].toggle(ticklabels=False) ax2.axis["x2"].label.set_text("rotation=10") ax2.axis["x2"].label.set_rotation(10) ax2.axis["x2"].toggle(ticklabels=False) ax2.annotate("label direction=$-$", (0.5, 0), xycoords="axes fraction", xytext=(0, -10), textcoords="offset points", va="top", ha="center") plt.show()
d7b3f0c03ad6ae2d1d7ac14c290c8420a3b3577655caf83b4f3c79fa11473bb6
""" ================== Parasite Axes demo ================== Create a parasite axes. Such axes would share the x scale with a host axes, but show a different scale in y direction. This approach uses `mpl_toolkits.axes_grid1.parasite_axes.HostAxes` and `mpl_toolkits.axes_grid1.parasite_axes.ParasiteAxes`. An alternative approach using standard Matplotlib subplots is shown in the :doc:`/gallery/ticks_and_spines/multiple_yaxis_with_spines` example. An alternative approach using the :ref:`toolkit_axesgrid1-index` and :ref:`toolkit_axisartist-index` is found in the :doc:`/gallery/axisartist/demo_parasite_axes2` example. """ from mpl_toolkits.axisartist.parasite_axes import HostAxes, ParasiteAxes import matplotlib.pyplot as plt fig = plt.figure() host = HostAxes(fig, [0.15, 0.1, 0.65, 0.8]) par1 = ParasiteAxes(host, sharex=host) par2 = ParasiteAxes(host, sharex=host) host.parasites.append(par1) host.parasites.append(par2) host.set_ylabel("Density") host.set_xlabel("Distance") host.axis["right"].set_visible(False) par1.axis["right"].set_visible(True) par1.set_ylabel("Temperature") par1.axis["right"].major_ticklabels.set_visible(True) par1.axis["right"].label.set_visible(True) par2.set_ylabel("Velocity") offset = (60, 0) new_axisline = par2.get_grid_helper().new_fixed_axis par2.axis["right2"] = new_axisline(loc="right", axes=par2, offset=offset) fig.add_axes(host) host.set_xlim(0, 2) host.set_ylim(0, 2) host.set_xlabel("Distance") host.set_ylabel("Density") par1.set_ylabel("Temperature") p1, = host.plot([0, 1, 2], [0, 1, 2], label="Density") p2, = par1.plot([0, 1, 2], [0, 3, 2], label="Temperature") p3, = par2.plot([0, 1, 2], [50, 30, 15], label="Velocity") par1.set_ylim(0, 4) par2.set_ylim(1, 65) host.legend() host.axis["left"].label.set_color(p1.get_color()) par1.axis["right"].label.set_color(p2.get_color()) par2.axis["right2"].label.set_color(p3.get_color()) plt.show()
bcd2f8d476bd0f89f881aea2f9c0bbd825f116c56309c2f481a9f495dee044f3
""" ===================== Curvilinear grid demo ===================== Custom grid and ticklines. This example demonstrates how to use `~.grid_helper_curvelinear.GridHelperCurveLinear` to define custom grids and ticklines by applying a transformation on the grid. This can be used, as shown on the second plot, to create polar projections in a rectangular box. """ import numpy as np import matplotlib.pyplot as plt from matplotlib.projections import PolarAxes from matplotlib.transforms import Affine2D from mpl_toolkits.axisartist import ( angle_helper, Subplot, SubplotHost, ParasiteAxesAuxTrans) from mpl_toolkits.axisartist.grid_helper_curvelinear import ( GridHelperCurveLinear) def curvelinear_test1(fig): """ Grid for custom transform. """ def tr(x, y): x, y = np.asarray(x), np.asarray(y) return x, y - x def inv_tr(x, y): x, y = np.asarray(x), np.asarray(y) return x, y + x grid_helper = GridHelperCurveLinear((tr, inv_tr)) ax1 = Subplot(fig, 1, 2, 1, grid_helper=grid_helper) # ax1 will have a ticks and gridlines defined by the given # transform (+ transData of the Axes). Note that the transform of # the Axes itself (i.e., transData) is not affected by the given # transform. fig.add_subplot(ax1) xx, yy = tr([3, 6], [5, 10]) ax1.plot(xx, yy, linewidth=2.0) ax1.set_aspect(1) ax1.set_xlim(0, 10) ax1.set_ylim(0, 10) ax1.axis["t"] = ax1.new_floating_axis(0, 3) ax1.axis["t2"] = ax1.new_floating_axis(1, 7) ax1.grid(True, zorder=0) def curvelinear_test2(fig): """ Polar projection, but in a rectangular box. """ # PolarAxes.PolarTransform takes radian. However, we want our coordinate # system in degree tr = Affine2D().scale(np.pi/180, 1) + PolarAxes.PolarTransform() # Polar projection, which involves cycle, and also has limits in # its coordinates, needs a special method to find the extremes # (min, max of the coordinate within the view). extreme_finder = angle_helper.ExtremeFinderCycle( nx=20, ny=20, # Number of sampling points in each direction. lon_cycle=360, lat_cycle=None, lon_minmax=None, lat_minmax=(0, np.inf), ) # Find grid values appropriate for the coordinate (degree, minute, second). grid_locator1 = angle_helper.LocatorDMS(12) # Use an appropriate formatter. Note that the acceptable Locator and # Formatter classes are a bit different than that of Matplotlib, which # cannot directly be used here (this may be possible in the future). tick_formatter1 = angle_helper.FormatterDMS() grid_helper = GridHelperCurveLinear( tr, extreme_finder=extreme_finder, grid_locator1=grid_locator1, tick_formatter1=tick_formatter1) ax1 = SubplotHost(fig, 1, 2, 2, grid_helper=grid_helper) # make ticklabels of right and top axis visible. ax1.axis["right"].major_ticklabels.set_visible(True) ax1.axis["top"].major_ticklabels.set_visible(True) # let right axis shows ticklabels for 1st coordinate (angle) ax1.axis["right"].get_helper().nth_coord_ticks = 0 # let bottom axis shows ticklabels for 2nd coordinate (radius) ax1.axis["bottom"].get_helper().nth_coord_ticks = 1 fig.add_subplot(ax1) ax1.set_aspect(1) ax1.set_xlim(-5, 12) ax1.set_ylim(-5, 10) ax1.grid(True, zorder=0) # A parasite axes with given transform ax2 = ParasiteAxesAuxTrans(ax1, tr, "equal") # note that ax2.transData == tr + ax1.transData # Anything you draw in ax2 will match the ticks and grids of ax1. ax1.parasites.append(ax2) ax2.plot(np.linspace(0, 30, 51), np.linspace(10, 10, 51), linewidth=2) if __name__ == "__main__": fig = plt.figure(figsize=(7, 4)) curvelinear_test1(fig) curvelinear_test2(fig) plt.show()
68c8e64d3ea9900229de68d1a5d43cdf65a182e50ea602c50e8966f55cf62a22
""" ======================== Demo Ticklabel Alignment ======================== """ import matplotlib.pyplot as plt import mpl_toolkits.axisartist as axisartist def setup_axes(fig, rect): ax = axisartist.Subplot(fig, rect) fig.add_subplot(ax) ax.set_yticks([0.2, 0.8]) ax.set_yticklabels(["short", "loooong"]) ax.set_xticks([0.2, 0.8]) ax.set_xticklabels([r"$\frac{1}{2}\pi$", r"$\pi$"]) return ax fig = plt.figure(figsize=(3, 5)) fig.subplots_adjust(left=0.5, hspace=0.7) ax = setup_axes(fig, 311) ax.set_ylabel("ha=right") ax.set_xlabel("va=baseline") ax = setup_axes(fig, 312) ax.axis["left"].major_ticklabels.set_ha("center") ax.axis["bottom"].major_ticklabels.set_va("top") ax.set_ylabel("ha=center") ax.set_xlabel("va=top") ax = setup_axes(fig, 313) ax.axis["left"].major_ticklabels.set_ha("left") ax.axis["bottom"].major_ticklabels.set_va("bottom") ax.set_ylabel("ha=left") ax.set_xlabel("va=bottom") plt.show()
ed232b1c3163a5d0db3d8324d05b0bf9aa6cbe753b993954435029372281e4ff
""" ================ Axis line styles ================ This example shows some configurations for axis style. """ from mpl_toolkits.axisartist.axislines import SubplotZero import matplotlib.pyplot as plt import numpy as np fig = plt.figure() ax = SubplotZero(fig, 111) fig.add_subplot(ax) for direction in ["xzero", "yzero"]: # adds arrows at the ends of each axis ax.axis[direction].set_axisline_style("-|>") # adds X and Y-axis from the origin ax.axis[direction].set_visible(True) for direction in ["left", "right", "bottom", "top"]: # hides borders ax.axis[direction].set_visible(False) x = np.linspace(-0.5, 1., 100) ax.plot(x, np.sin(x*np.pi)) plt.show()
f960c0c36d672eceb338b752e95b5ab8f943265e34385744704f38cca10ea5e1
""" ======================= Simple Axis Direction03 ======================= """ import matplotlib.pyplot as plt import mpl_toolkits.axisartist as axisartist def setup_axes(fig, rect): ax = axisartist.Subplot(fig, rect) fig.add_subplot(ax) ax.set_yticks([0.2, 0.8]) ax.set_xticks([0.2, 0.8]) return ax fig = plt.figure(figsize=(5, 2)) fig.subplots_adjust(wspace=0.4, bottom=0.3) ax1 = setup_axes(fig, "121") ax1.set_xlabel("X-label") ax1.set_ylabel("Y-label") ax1.axis[:].invert_ticklabel_direction() ax2 = setup_axes(fig, "122") ax2.set_xlabel("X-label") ax2.set_ylabel("Y-label") ax2.axis[:].major_ticks.set_tick_out(True) plt.show()
b74711ecd67c9c10facced0c989bcebf3ea55d7bdbd6a8bd1d4c58139e310329
""" =============== Simple Axis Pad =============== """ import numpy as np import matplotlib.pyplot as plt import mpl_toolkits.axisartist.angle_helper as angle_helper import mpl_toolkits.axisartist.grid_finder as grid_finder from matplotlib.projections import PolarAxes from matplotlib.transforms import Affine2D import mpl_toolkits.axisartist as axisartist from mpl_toolkits.axisartist.grid_helper_curvelinear import \ GridHelperCurveLinear def setup_axes(fig, rect): """ polar projection, but in a rectangular box. """ # see demo_curvelinear_grid.py for details tr = Affine2D().scale(np.pi/180., 1.) + PolarAxes.PolarTransform() extreme_finder = angle_helper.ExtremeFinderCycle(20, 20, lon_cycle=360, lat_cycle=None, lon_minmax=None, lat_minmax=(0, np.inf), ) grid_locator1 = angle_helper.LocatorDMS(12) grid_locator2 = grid_finder.MaxNLocator(5) tick_formatter1 = angle_helper.FormatterDMS() grid_helper = GridHelperCurveLinear(tr, extreme_finder=extreme_finder, grid_locator1=grid_locator1, grid_locator2=grid_locator2, tick_formatter1=tick_formatter1 ) ax1 = axisartist.Subplot(fig, rect, grid_helper=grid_helper) ax1.axis[:].set_visible(False) fig.add_subplot(ax1) ax1.set_aspect(1.) ax1.set_xlim(-5, 12) ax1.set_ylim(-5, 10) return ax1 def add_floating_axis1(ax1): ax1.axis["lat"] = axis = ax1.new_floating_axis(0, 30) axis.label.set_text(r"$\theta = 30^{\circ}$") axis.label.set_visible(True) return axis def add_floating_axis2(ax1): ax1.axis["lon"] = axis = ax1.new_floating_axis(1, 6) axis.label.set_text(r"$r = 6$") axis.label.set_visible(True) return axis fig = plt.figure(figsize=(9, 3.)) fig.subplots_adjust(left=0.01, right=0.99, bottom=0.01, top=0.99, wspace=0.01, hspace=0.01) def ann(ax1, d): if plt.rcParams["text.usetex"]: d = d.replace("_", r"\_") ax1.annotate(d, (0.5, 1), (5, -5), xycoords="axes fraction", textcoords="offset points", va="top", ha="center") ax1 = setup_axes(fig, rect=141) axis = add_floating_axis1(ax1) ann(ax1, r"default") ax1 = setup_axes(fig, rect=142) axis = add_floating_axis1(ax1) axis.major_ticklabels.set_pad(10) ann(ax1, r"ticklabels.set_pad(10)") ax1 = setup_axes(fig, rect=143) axis = add_floating_axis1(ax1) axis.label.set_pad(20) ann(ax1, r"label.set_pad(20)") ax1 = setup_axes(fig, rect=144) axis = add_floating_axis1(ax1) axis.major_ticks.set_tick_out(True) ann(ax1, "ticks.set_tick_out(True)") plt.show()
a30b6f5f1a3ed291266910eee10c610a265a9d1bebc394db0f32110d6d84183d
""" ================== Simple Axisartist1 ================== """ import matplotlib.pyplot as plt import mpl_toolkits.axisartist as AA fig = plt.figure() fig.subplots_adjust(right=0.85) ax = AA.Subplot(fig, 1, 1, 1) fig.add_subplot(ax) # make some axis invisible ax.axis["bottom", "top", "right"].set_visible(False) # make an new axis along the first axis axis (x-axis) which pass # through y=0. ax.axis["y=0"] = ax.new_floating_axis(nth_coord=0, value=0, axis_direction="bottom") ax.axis["y=0"].toggle(all=True) ax.axis["y=0"].label.set_text("y = 0") ax.set_ylim(-2, 4) plt.show()
c45d62ad5fe3184b0b33eb03048efc03fd69d0d9eb576834dc779f54041e3598
""" ======================= Simple Axis Direction01 ======================= """ import matplotlib.pyplot as plt import mpl_toolkits.axisartist as axisartist fig = plt.figure(figsize=(4, 2.5)) ax1 = fig.add_subplot(axisartist.Subplot(fig, "111")) fig.subplots_adjust(right=0.8) ax1.axis["left"].major_ticklabels.set_axis_direction("top") ax1.axis["left"].label.set_text("Label") ax1.axis["right"].label.set_visible(True) ax1.axis["right"].label.set_text("Label") ax1.axis["right"].label.set_axis_direction("left") plt.show()
b4c1de5c663520167cb92d1db33ced66f12c0a2446ffa217f105d1a84db6280c
""" =================== Demo Parasite Axes2 =================== Parasite axis demo The following code is an example of a parasite axis. It aims to show how to plot multiple different values onto one single plot. Notice how in this example, par1 and par2 are both calling twinx meaning both are tied directly to the x-axis. From there, each of those two axis can behave separately from the each other, meaning they can take on separate values from themselves as well as the x-axis. Note that this approach uses the `mpl_toolkits.axes_grid1.parasite_axes`\' `~mpl_toolkits.axes_grid1.parasite_axes.host_subplot` and `mpl_toolkits.axisartist.axislines.Axes`. An alternative approach using the `~mpl_toolkits.axes_grid1.parasite_axes`\'s `~.mpl_toolkits.axes_grid1.parasite_axes.HostAxes` and `~.mpl_toolkits.axes_grid1.parasite_axes.ParasiteAxes` is the :doc:`/gallery/axisartist/demo_parasite_axes` example. An alternative approach using the usual matplotlib subplots is shown in the :doc:`/gallery/ticks_and_spines/multiple_yaxis_with_spines` example. """ from mpl_toolkits.axes_grid1 import host_subplot import mpl_toolkits.axisartist as AA import matplotlib.pyplot as plt host = host_subplot(111, axes_class=AA.Axes) plt.subplots_adjust(right=0.75) par1 = host.twinx() par2 = host.twinx() offset = 60 new_fixed_axis = par2.get_grid_helper().new_fixed_axis par2.axis["right"] = new_fixed_axis(loc="right", axes=par2, offset=(offset, 0)) par1.axis["right"].toggle(all=True) par2.axis["right"].toggle(all=True) host.set_xlim(0, 2) host.set_ylim(0, 2) host.set_xlabel("Distance") host.set_ylabel("Density") par1.set_ylabel("Temperature") par2.set_ylabel("Velocity") p1, = host.plot([0, 1, 2], [0, 1, 2], label="Density") p2, = par1.plot([0, 1, 2], [0, 3, 2], label="Temperature") p3, = par2.plot([0, 1, 2], [50, 30, 15], label="Velocity") par1.set_ylim(0, 4) par2.set_ylim(1, 65) host.legend() host.axis["left"].label.set_color(p1.get_color()) par1.axis["right"].label.set_color(p2.get_color()) par2.axis["right"].label.set_color(p3.get_color()) plt.show()
88d643620f94e2b7fce34561785b631c34809eff65313a453e6c5f590f3be0f7
""" ===================================================== :mod:`mpl_toolkits.axisartist.floating_axes` features ===================================================== Demonstration of features of the :mod:`.floating_axes` module: * Using `scatter` and `bar` with changing the shape of the plot. * Using `GridHelperCurveLinear` to rotate the plot and set the plot boundary. * Using `FloatingSubplot` to create a subplot using the return value from `GridHelperCurveLinear`. * Making a sector plot by adding more features to `GridHelperCurveLinear`. """ from matplotlib.transforms import Affine2D import mpl_toolkits.axisartist.floating_axes as floating_axes import numpy as np import mpl_toolkits.axisartist.angle_helper as angle_helper from matplotlib.projections import PolarAxes from mpl_toolkits.axisartist.grid_finder import (FixedLocator, MaxNLocator, DictFormatter) import matplotlib.pyplot as plt # Fixing random state for reproducibility np.random.seed(19680801) def setup_axes1(fig, rect): """ A simple one. """ tr = Affine2D().scale(2, 1).rotate_deg(30) grid_helper = floating_axes.GridHelperCurveLinear( tr, extremes=(-0.5, 3.5, 0, 4), grid_locator1=MaxNLocator(nbins=4), grid_locator2=MaxNLocator(nbins=4)) ax1 = floating_axes.FloatingSubplot(fig, rect, grid_helper=grid_helper) fig.add_subplot(ax1) aux_ax = ax1.get_aux_axes(tr) return ax1, aux_ax def setup_axes2(fig, rect): """ With custom locator and formatter. Note that the extreme values are swapped. """ tr = PolarAxes.PolarTransform() pi = np.pi angle_ticks = [(0, r"$0$"), (.25*pi, r"$\frac{1}{4}\pi$"), (.5*pi, r"$\frac{1}{2}\pi$")] grid_locator1 = FixedLocator([v for v, s in angle_ticks]) tick_formatter1 = DictFormatter(dict(angle_ticks)) grid_locator2 = MaxNLocator(2) grid_helper = floating_axes.GridHelperCurveLinear( tr, extremes=(.5*pi, 0, 2, 1), grid_locator1=grid_locator1, grid_locator2=grid_locator2, tick_formatter1=tick_formatter1, tick_formatter2=None) ax1 = floating_axes.FloatingSubplot(fig, rect, grid_helper=grid_helper) fig.add_subplot(ax1) # create a parasite axes whose transData in RA, cz aux_ax = ax1.get_aux_axes(tr) aux_ax.patch = ax1.patch # for aux_ax to have a clip path as in ax ax1.patch.zorder = 0.9 # but this has a side effect that the patch is # drawn twice, and possibly over some other # artists. So, we decrease the zorder a bit to # prevent this. return ax1, aux_ax def setup_axes3(fig, rect): """ Sometimes, things like axis_direction need to be adjusted. """ # rotate a bit for better orientation tr_rotate = Affine2D().translate(-95, 0) # scale degree to radians tr_scale = Affine2D().scale(np.pi/180., 1.) 
tr = tr_rotate + tr_scale + PolarAxes.PolarTransform() grid_locator1 = angle_helper.LocatorHMS(4) tick_formatter1 = angle_helper.FormatterHMS() grid_locator2 = MaxNLocator(3) # Specify theta limits in degrees ra0, ra1 = 8.*15, 14.*15 # Specify radial limits cz0, cz1 = 0, 14000 grid_helper = floating_axes.GridHelperCurveLinear( tr, extremes=(ra0, ra1, cz0, cz1), grid_locator1=grid_locator1, grid_locator2=grid_locator2, tick_formatter1=tick_formatter1, tick_formatter2=None) ax1 = floating_axes.FloatingSubplot(fig, rect, grid_helper=grid_helper) fig.add_subplot(ax1) # adjust axis ax1.axis["left"].set_axis_direction("bottom") ax1.axis["right"].set_axis_direction("top") ax1.axis["bottom"].set_visible(False) ax1.axis["top"].set_axis_direction("bottom") ax1.axis["top"].toggle(ticklabels=True, label=True) ax1.axis["top"].major_ticklabels.set_axis_direction("top") ax1.axis["top"].label.set_axis_direction("top") ax1.axis["left"].label.set_text(r"cz [km$^{-1}$]") ax1.axis["top"].label.set_text(r"$\alpha_{1950}$") # create a parasite axes whose transData in RA, cz aux_ax = ax1.get_aux_axes(tr) aux_ax.patch = ax1.patch # for aux_ax to have a clip path as in ax ax1.patch.zorder = 0.9 # but this has a side effect that the patch is # drawn twice, and possibly over some other # artists. So, we decrease the zorder a bit to # prevent this. return ax1, aux_ax ########################################################## fig = plt.figure(figsize=(8, 4)) fig.subplots_adjust(wspace=0.3, left=0.05, right=0.95) ax1, aux_ax1 = setup_axes1(fig, 131) aux_ax1.bar([0, 1, 2, 3], [3, 2, 1, 3]) ax2, aux_ax2 = setup_axes2(fig, 132) theta = np.random.rand(10)*.5*np.pi radius = np.random.rand(10) + 1. aux_ax2.scatter(theta, radius) ax3, aux_ax3 = setup_axes3(fig, 133) theta = (8 + np.random.rand(10)*(14 - 8))*15. # in degrees radius = np.random.rand(10)*14000. aux_ax3.scatter(theta, radius) plt.show()
c4fbe6a4fcdb8826ba3631de4899c196be235a667d19807d296114657fe0455a
""" ================ Simple Axisline2 ================ """ import matplotlib.pyplot as plt from mpl_toolkits.axisartist.axislines import SubplotZero import numpy as np fig = plt.figure(figsize=(4, 3)) # a subplot with two additional axis, "xzero" and "yzero". "xzero" is # y=0 line, and "yzero" is x=0 line. ax = SubplotZero(fig, 1, 1, 1) fig.add_subplot(ax) # make xzero axis (horizontal axis line through y=0) visible. ax.axis["xzero"].set_visible(True) ax.axis["xzero"].label.set_text("Axis Zero") # make other axis (bottom, top, right) invisible. for n in ["bottom", "top", "right"]: ax.axis[n].set_visible(False) xx = np.arange(0, 2*np.pi, 0.01) ax.plot(xx, np.sin(xx)) plt.show()
86b650200993ca36f0735c09f58039fcfdd2b772a42ef0f4f6b716ec31efc092
""" ====================== Demo Curvelinear Grid2 ====================== Custom grid and ticklines. This example demonstrates how to use GridHelperCurveLinear to define custom grids and ticklines by applying a transformation on the grid. As showcase on the plot, a 5x5 matrix is displayed on the axes. """ import numpy as np import matplotlib.pyplot as plt from mpl_toolkits.axisartist.grid_helper_curvelinear import \ GridHelperCurveLinear from mpl_toolkits.axisartist.grid_finder import MaxNLocator from mpl_toolkits.axisartist.axislines import Subplot import mpl_toolkits.axisartist.angle_helper as angle_helper def curvelinear_test1(fig): """ grid for custom transform. """ def tr(x, y): sgn = np.sign(x) x, y = np.abs(np.asarray(x)), np.asarray(y) return sgn*x**.5, y def inv_tr(x, y): sgn = np.sign(x) x, y = np.asarray(x), np.asarray(y) return sgn*x**2, y extreme_finder = angle_helper.ExtremeFinderCycle(20, 20, lon_cycle=None, lat_cycle=None, # (0, np.inf), lon_minmax=None, lat_minmax=None, ) grid_helper = GridHelperCurveLinear((tr, inv_tr), extreme_finder=extreme_finder, # better tick density grid_locator1=MaxNLocator(nbins=6), grid_locator2=MaxNLocator(nbins=6)) ax1 = Subplot(fig, 111, grid_helper=grid_helper) # ax1 will have a ticks and gridlines defined by the given # transform (+ transData of the Axes). Note that the transform of # the Axes itself (i.e., transData) is not affected by the given # transform. fig.add_subplot(ax1) ax1.imshow(np.arange(25).reshape(5, 5), vmax=50, cmap=plt.cm.gray_r, interpolation="nearest", origin="lower") if __name__ == "__main__": fig = plt.figure(figsize=(7, 4)) curvelinear_test1(fig) plt.show()
5df59a37cf44dbb4d2b9d748e947c8870e284ffd8a5a9d805c9e52f197e908a9
""" ========================== Axis Direction Demo Step02 ========================== """ import matplotlib.pyplot as plt import mpl_toolkits.axisartist as axisartist def setup_axes(fig, rect): ax = axisartist.Subplot(fig, rect) fig.add_axes(ax) ax.set_ylim(-0.1, 1.5) ax.set_yticks([0, 1]) #ax.axis[:].toggle(all=False) #ax.axis[:].line.set_visible(False) ax.axis[:].set_visible(False) ax.axis["x"] = ax.new_floating_axis(1, 0.5) ax.axis["x"].set_axisline_style("->", size=1.5) return ax fig = plt.figure(figsize=(6, 2.5)) fig.subplots_adjust(bottom=0.2, top=0.8) ax1 = setup_axes(fig, "121") ax1.axis["x"].set_ticklabel_direction("+") ax1.annotate("ticklabel direction=$+$", (0.5, 0), xycoords="axes fraction", xytext=(0, -10), textcoords="offset points", va="top", ha="center") ax2 = setup_axes(fig, "122") ax2.axis["x"].set_ticklabel_direction("-") ax2.annotate("ticklabel direction=$-$", (0.5, 0), xycoords="axes fraction", xytext=(0, -10), textcoords="offset points", va="top", ha="center") plt.show()
5169ac175c4f6432fcf064c53b66028d16bad813eb243eb9bc006fc9b7411d66
""" ================ Simple Axisline3 ================ """ import matplotlib.pyplot as plt from mpl_toolkits.axisartist.axislines import Subplot fig = plt.figure(figsize=(3, 3)) ax = Subplot(fig, 111) fig.add_subplot(ax) ax.axis["right"].set_visible(False) ax.axis["top"].set_visible(False) plt.show()