repo_name (string, 5-100 chars) | path (string, 4-375 chars) | copies (string, 991 distinct values) | size (string, 4-7 chars) | content (string, 666-1M chars) | license (string, 15 distinct values) |
---|---|---|---|---|---|
gdub/django | tests/gis_tests/geoapp/tests.py | 189 | 41436 |
from __future__ import unicode_literals
import re
import tempfile
from django.contrib.gis import gdal
from django.contrib.gis.db.models import Extent, MakeLine, Union
from django.contrib.gis.geos import (
GeometryCollection, GEOSGeometry, LinearRing, LineString, Point, Polygon,
fromstr,
)
from django.core.management import call_command
from django.db import connection
from django.test import TestCase, ignore_warnings, skipUnlessDBFeature
from django.utils import six
from django.utils.deprecation import (
RemovedInDjango20Warning, RemovedInDjango110Warning,
)
from ..utils import no_oracle, oracle, postgis, spatialite
from .models import (
City, Country, Feature, MinusOneSRID, NonConcreteModel, PennsylvaniaCity,
State, Track,
)
def postgis_bug_version():
spatial_version = getattr(connection.ops, "spatial_version", (0, 0, 0))
return spatial_version and (2, 0, 0) <= spatial_version <= (2, 0, 1)
@skipUnlessDBFeature("gis_enabled")
class GeoModelTest(TestCase):
fixtures = ['initial']
def test_fixtures(self):
"Testing geographic model initialization from fixtures."
# Ensuring that data was loaded from initial data fixtures.
self.assertEqual(2, Country.objects.count())
self.assertEqual(8, City.objects.count())
self.assertEqual(2, State.objects.count())
def test_proxy(self):
"Testing Lazy-Geometry support (using the GeometryProxy)."
# Testing on a Point
pnt = Point(0, 0)
nullcity = City(name='NullCity', point=pnt)
nullcity.save()
# Making sure TypeError is thrown when trying to set with an
# incompatible type.
for bad in [5, 2.0, LineString((0, 0), (1, 1))]:
try:
nullcity.point = bad
except TypeError:
pass
else:
self.fail('Should throw a TypeError')
# Now setting with a compatible GEOS Geometry, saving, and ensuring
# the save took, notice no SRID is explicitly set.
new = Point(5, 23)
nullcity.point = new
# Ensuring that the SRID is automatically set to that of the
# field after assignment, but before saving.
self.assertEqual(4326, nullcity.point.srid)
nullcity.save()
# Ensuring the point was saved correctly after saving
self.assertEqual(new, City.objects.get(name='NullCity').point)
# Setting the X and Y of the Point
nullcity.point.x = 23
nullcity.point.y = 5
# Checking assignments pre & post-save.
self.assertNotEqual(Point(23, 5), City.objects.get(name='NullCity').point)
nullcity.save()
self.assertEqual(Point(23, 5), City.objects.get(name='NullCity').point)
nullcity.delete()
# Testing on a Polygon
shell = LinearRing((0, 0), (0, 100), (100, 100), (100, 0), (0, 0))
inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40))
# Creating a State object using a built Polygon
ply = Polygon(shell, inner)
nullstate = State(name='NullState', poly=ply)
self.assertEqual(4326, nullstate.poly.srid) # SRID auto-set from None
nullstate.save()
ns = State.objects.get(name='NullState')
self.assertEqual(ply, ns.poly)
# Testing the `ogr` and `srs` lazy-geometry properties.
if gdal.HAS_GDAL:
self.assertIsInstance(ns.poly.ogr, gdal.OGRGeometry)
self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb)
self.assertIsInstance(ns.poly.srs, gdal.SpatialReference)
self.assertEqual('WGS 84', ns.poly.srs.name)
# Changing the interior ring on the poly attribute.
new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30), (30, 30))
ns.poly[1] = new_inner
ply[1] = new_inner
self.assertEqual(4326, ns.poly.srid)
ns.save()
self.assertEqual(ply, State.objects.get(name='NullState').poly)
ns.delete()
@skipUnlessDBFeature("supports_transform")
def test_lookup_insert_transform(self):
"Testing automatic transform for lookups and inserts."
# San Antonio in 'WGS84' (SRID 4326)
sa_4326 = 'POINT (-98.493183 29.424170)'
wgs_pnt = fromstr(sa_4326, srid=4326) # Our reference point in WGS84
# Oracle doesn't have SRID 3084, using 41157.
if oracle:
# San Antonio in 'Texas 4205, Southern Zone (1983, meters)' (SRID 41157)
# Used the following Oracle SQL to get this value:
# SELECT SDO_UTIL.TO_WKTGEOMETRY(
# SDO_CS.TRANSFORM(SDO_GEOMETRY('POINT (-98.493183 29.424170)', 4326), 41157))
# )
# FROM DUAL;
nad_wkt = 'POINT (300662.034646583 5416427.45974934)'
nad_srid = 41157
else:
# San Antonio in 'NAD83(HARN) / Texas Centric Lambert Conformal' (SRID 3084)
# Used ogr.py in gdal 1.4.1 for this transform
nad_wkt = 'POINT (1645978.362408288754523 6276356.025927528738976)'
nad_srid = 3084
# Constructing & querying with a point from a different SRID. Oracle
# `SDO_OVERLAPBDYINTERSECT` operates differently from
# `ST_Intersects`, so contains is used instead.
nad_pnt = fromstr(nad_wkt, srid=nad_srid)
if oracle:
tx = Country.objects.get(mpoly__contains=nad_pnt)
else:
tx = Country.objects.get(mpoly__intersects=nad_pnt)
self.assertEqual('Texas', tx.name)
# Creating San Antonio. Remember the Alamo.
sa = City.objects.create(name='San Antonio', point=nad_pnt)
# Now verifying that San Antonio was transformed correctly
sa = City.objects.get(name='San Antonio')
self.assertAlmostEqual(wgs_pnt.x, sa.point.x, 6)
self.assertAlmostEqual(wgs_pnt.y, sa.point.y, 6)
# If the GeometryField SRID is -1, then we shouldn't perform any
# transformation if the SRID of the input geometry is different.
if spatialite and connection.ops.spatial_version < (3, 0, 0):
# SpatiaLite < 3 does not support missing SRID values.
return
m1 = MinusOneSRID(geom=Point(17, 23, srid=4326))
m1.save()
self.assertEqual(-1, m1.geom.srid)
def test_createnull(self):
"Testing creating a model instance and the geometry being None"
c = City()
self.assertEqual(c.point, None)
def test_geometryfield(self):
"Testing the general GeometryField."
Feature(name='Point', geom=Point(1, 1)).save()
Feature(name='LineString', geom=LineString((0, 0), (1, 1), (5, 5))).save()
Feature(name='Polygon', geom=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0)))).save()
Feature(name='GeometryCollection',
geom=GeometryCollection(Point(2, 2), LineString((0, 0), (2, 2)),
Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))))).save()
f_1 = Feature.objects.get(name='Point')
self.assertIsInstance(f_1.geom, Point)
self.assertEqual((1.0, 1.0), f_1.geom.tuple)
f_2 = Feature.objects.get(name='LineString')
self.assertIsInstance(f_2.geom, LineString)
self.assertEqual(((0.0, 0.0), (1.0, 1.0), (5.0, 5.0)), f_2.geom.tuple)
f_3 = Feature.objects.get(name='Polygon')
self.assertIsInstance(f_3.geom, Polygon)
f_4 = Feature.objects.get(name='GeometryCollection')
self.assertIsInstance(f_4.geom, GeometryCollection)
self.assertEqual(f_3.geom, f_4.geom[2])
@skipUnlessDBFeature("supports_transform")
def test_inherited_geofields(self):
"Test GeoQuerySet methods on inherited Geometry fields."
# Creating a Pennsylvanian city.
PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)')
# All transformation SQL will need to be performed on the
# _parent_ table.
qs = PennsylvaniaCity.objects.transform(32128)
self.assertEqual(1, qs.count())
for pc in qs:
self.assertEqual(32128, pc.point.srid)
def test_raw_sql_query(self):
"Testing raw SQL query."
cities1 = City.objects.all()
# Only PostGIS would support a 'select *' query because of its recognized
# HEXEWKB format for geometry fields
as_text = 'ST_AsText(%s)' if postgis else connection.ops.select
cities2 = City.objects.raw(
'select id, name, %s from geoapp_city' % as_text % 'point'
)
self.assertEqual(len(cities1), len(list(cities2)))
self.assertIsInstance(cities2[0].point, Point)
def test_dumpdata_loaddata_cycle(self):
"""
Test a dumpdata/loaddata cycle with geographic data.
"""
out = six.StringIO()
original_data = list(City.objects.all().order_by('name'))
call_command('dumpdata', 'geoapp.City', stdout=out)
result = out.getvalue()
houston = City.objects.get(name='Houston')
self.assertIn('"point": "%s"' % houston.point.ewkt, result)
# Reload now dumped data
with tempfile.NamedTemporaryFile(mode='w', suffix='.json') as tmp:
tmp.write(result)
tmp.seek(0)
call_command('loaddata', tmp.name, verbosity=0)
self.assertListEqual(original_data, list(City.objects.all().order_by('name')))
@skipUnlessDBFeature("gis_enabled")
class GeoLookupTest(TestCase):
fixtures = ['initial']
def test_disjoint_lookup(self):
"Testing the `disjoint` lookup type."
ptown = City.objects.get(name='Pueblo')
qs1 = City.objects.filter(point__disjoint=ptown.point)
self.assertEqual(7, qs1.count())
if connection.features.supports_real_shape_operations:
qs2 = State.objects.filter(poly__disjoint=ptown.point)
self.assertEqual(1, qs2.count())
self.assertEqual('Kansas', qs2[0].name)
def test_contains_contained_lookups(self):
"Testing the 'contained', 'contains', and 'bbcontains' lookup types."
# Getting Texas, yes we were a country -- once ;)
texas = Country.objects.get(name='Texas')
# Seeing what cities are in Texas, should get Houston and Dallas,
# and Oklahoma City because 'contained' only checks on the
# _bounding box_ of the Geometries.
if connection.features.supports_contained_lookup:
qs = City.objects.filter(point__contained=texas.mpoly)
self.assertEqual(3, qs.count())
cities = ['Houston', 'Dallas', 'Oklahoma City']
for c in qs:
self.assertIn(c.name, cities)
# Pulling out some cities.
houston = City.objects.get(name='Houston')
wellington = City.objects.get(name='Wellington')
pueblo = City.objects.get(name='Pueblo')
okcity = City.objects.get(name='Oklahoma City')
lawrence = City.objects.get(name='Lawrence')
# Now testing contains on the countries using the points for
# Houston and Wellington.
tx = Country.objects.get(mpoly__contains=houston.point) # Query w/GEOSGeometry
nz = Country.objects.get(mpoly__contains=wellington.point.hex) # Query w/EWKBHEX
self.assertEqual('Texas', tx.name)
self.assertEqual('New Zealand', nz.name)
# Spatialite 2.3 thinks that Lawrence is in Puerto Rico (a NULL geometry).
if not (spatialite and connection.ops.spatial_version < (3, 0, 0)):
ks = State.objects.get(poly__contains=lawrence.point)
self.assertEqual('Kansas', ks.name)
# Pueblo and Oklahoma City (even though OK City is within the bounding box of Texas)
# are not contained in Texas or New Zealand.
self.assertEqual(len(Country.objects.filter(mpoly__contains=pueblo.point)), 0) # Query w/GEOSGeometry object
self.assertEqual(len(Country.objects.filter(mpoly__contains=okcity.point.wkt)),
0 if connection.features.supports_real_shape_operations else 1) # Query w/WKT
# OK City is contained w/in bounding box of Texas.
if connection.features.supports_bbcontains_lookup:
qs = Country.objects.filter(mpoly__bbcontains=okcity.point)
self.assertEqual(1, len(qs))
self.assertEqual('Texas', qs[0].name)
@skipUnlessDBFeature("supports_crosses_lookup")
def test_crosses_lookup(self):
Track.objects.create(
name='Line1',
line=LineString([(-95, 29), (-60, 0)])
)
self.assertEqual(
Track.objects.filter(line__crosses=LineString([(-95, 0), (-60, 29)])).count(),
1
)
self.assertEqual(
Track.objects.filter(line__crosses=LineString([(-95, 30), (0, 30)])).count(),
0
)
@skipUnlessDBFeature("supports_left_right_lookups")
def test_left_right_lookups(self):
"Testing the 'left' and 'right' lookup types."
# Left: A << B => true if xmax(A) < xmin(B)
# Right: A >> B => true if xmin(A) > xmax(B)
# See: BOX2D_left() and BOX2D_right() in lwgeom_box2dfloat4.c in PostGIS source.
# The left/right lookup tests are known failures on PostGIS 2.0/2.0.1
# http://trac.osgeo.org/postgis/ticket/2035
if postgis_bug_version():
self.skipTest("PostGIS 2.0/2.0.1 left and right lookups are known to be buggy.")
# Getting the borders for Colorado & Kansas
co_border = State.objects.get(name='Colorado').poly
ks_border = State.objects.get(name='Kansas').poly
# Note: Wellington has an 'X' value of 174, so it will not be considered
# to the left of CO.
# These cities should be strictly to the right of the CO border.
cities = ['Houston', 'Dallas', 'Oklahoma City',
'Lawrence', 'Chicago', 'Wellington']
qs = City.objects.filter(point__right=co_border)
self.assertEqual(6, len(qs))
for c in qs:
self.assertIn(c.name, cities)
# These cities should be strictly to the right of the KS border.
cities = ['Chicago', 'Wellington']
qs = City.objects.filter(point__right=ks_border)
self.assertEqual(2, len(qs))
for c in qs:
self.assertIn(c.name, cities)
# Note: Wellington has an 'X' value of 174, so it will not be considered
# to the left of CO.
vic = City.objects.get(point__left=co_border)
self.assertEqual('Victoria', vic.name)
cities = ['Pueblo', 'Victoria']
qs = City.objects.filter(point__left=ks_border)
self.assertEqual(2, len(qs))
for c in qs:
self.assertIn(c.name, cities)
def test_equals_lookups(self):
"Testing the 'same_as' and 'equals' lookup types."
pnt = fromstr('POINT (-95.363151 29.763374)', srid=4326)
c1 = City.objects.get(point=pnt)
c2 = City.objects.get(point__same_as=pnt)
c3 = City.objects.get(point__equals=pnt)
for c in [c1, c2, c3]:
self.assertEqual('Houston', c.name)
@skipUnlessDBFeature("supports_null_geometries")
def test_null_geometries(self):
"Testing NULL geometry support, and the `isnull` lookup type."
# Creating a state with a NULL boundary.
State.objects.create(name='Puerto Rico')
# Querying for both NULL and Non-NULL values.
nullqs = State.objects.filter(poly__isnull=True)
validqs = State.objects.filter(poly__isnull=False)
# Puerto Rico should be NULL (it's a commonwealth unincorporated territory)
self.assertEqual(1, len(nullqs))
self.assertEqual('Puerto Rico', nullqs[0].name)
# The valid states should be Colorado & Kansas
self.assertEqual(2, len(validqs))
state_names = [s.name for s in validqs]
self.assertIn('Colorado', state_names)
self.assertIn('Kansas', state_names)
# Saving another commonwealth w/a NULL geometry.
nmi = State.objects.create(name='Northern Mariana Islands', poly=None)
self.assertEqual(nmi.poly, None)
# Assigning a geometry and saving -- then UPDATE back to NULL.
nmi.poly = 'POLYGON((0 0,1 0,1 1,1 0,0 0))'
nmi.save()
State.objects.filter(name='Northern Mariana Islands').update(poly=None)
self.assertIsNone(State.objects.get(name='Northern Mariana Islands').poly)
@skipUnlessDBFeature("supports_relate_lookup")
def test_relate_lookup(self):
"Testing the 'relate' lookup type."
# To make things more interesting, we will have our Texas reference point in
# different SRIDs.
pnt1 = fromstr('POINT (649287.0363174 4177429.4494686)', srid=2847)
pnt2 = fromstr('POINT(-98.4919715741052 29.4333344025053)', srid=4326)
# Not passing in a geometry as first param should
# raise a ValueError when initializing the GeoQuerySet
self.assertRaises(ValueError, Country.objects.filter, mpoly__relate=(23, 'foo'))
# Making sure the right exception is raised for the given
# bad arguments.
for bad_args, e in [((pnt1, 0), ValueError), ((pnt2, 'T*T***FF*', 0), ValueError)]:
qs = Country.objects.filter(mpoly__relate=bad_args)
self.assertRaises(e, qs.count)
# Relate works differently for the different backends.
if postgis or spatialite:
contains_mask = 'T*T***FF*'
within_mask = 'T*F**F***'
intersects_mask = 'T********'
elif oracle:
contains_mask = 'contains'
within_mask = 'inside'
# TODO: This is not quite the same as the PostGIS mask above
intersects_mask = 'overlapbdyintersect'
# Testing contains relation mask.
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, contains_mask)).name)
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, contains_mask)).name)
# Testing within relation mask.
ks = State.objects.get(name='Kansas')
self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, within_mask)).name)
# Testing intersection relation mask.
if not oracle:
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, intersects_mask)).name)
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, intersects_mask)).name)
self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, intersects_mask)).name)
@skipUnlessDBFeature("gis_enabled")
@ignore_warnings(category=RemovedInDjango20Warning)
class GeoQuerySetTest(TestCase):
fixtures = ['initial']
# Please keep the tests in GeoQuerySet method's alphabetic order
@skipUnlessDBFeature("has_centroid_method")
def test_centroid(self):
"Testing the `centroid` GeoQuerySet method."
qs = State.objects.exclude(poly__isnull=True).centroid()
if oracle:
tol = 0.1
elif spatialite:
tol = 0.000001
else:
tol = 0.000000001
for s in qs:
self.assertTrue(s.poly.centroid.equals_exact(s.centroid, tol))
@skipUnlessDBFeature(
"has_difference_method", "has_intersection_method",
"has_sym_difference_method", "has_union_method")
def test_diff_intersection_union(self):
"Testing the `difference`, `intersection`, `sym_difference`, and `union` GeoQuerySet methods."
geom = Point(5, 23)
qs = Country.objects.all().difference(geom).sym_difference(geom).union(geom)
# XXX For some reason SpatiaLite does something screwy with the Texas geometry here. Also,
# XXX it doesn't like the null intersection.
if spatialite:
qs = qs.exclude(name='Texas')
else:
qs = qs.intersection(geom)
for c in qs:
if oracle:
# Should be able to execute the queries; however, they won't be the same
# as GEOS (because Oracle doesn't use GEOS internally like PostGIS or
# SpatiaLite).
pass
else:
self.assertEqual(c.mpoly.difference(geom), c.difference)
if not spatialite:
self.assertEqual(c.mpoly.intersection(geom), c.intersection)
# Ordering might differ in collections
self.assertSetEqual(set(g.wkt for g in c.mpoly.sym_difference(geom)),
set(g.wkt for g in c.sym_difference))
self.assertSetEqual(set(g.wkt for g in c.mpoly.union(geom)),
set(g.wkt for g in c.union))
@skipUnlessDBFeature("has_envelope_method")
def test_envelope(self):
"Testing the `envelope` GeoQuerySet method."
countries = Country.objects.all().envelope()
for country in countries:
self.assertIsInstance(country.envelope, Polygon)
@skipUnlessDBFeature("supports_extent_aggr")
@ignore_warnings(category=RemovedInDjango110Warning)
def test_extent(self):
"""
Testing the (deprecated) `extent` GeoQuerySet method and the Extent
aggregate.
"""
# Reference query:
# `SELECT ST_extent(point) FROM geoapp_city WHERE (name='Houston' or name='Dallas');`
# => BOX(-96.8016128540039 29.7633724212646,-95.3631439208984 32.7820587158203)
expected = (-96.8016128540039, 29.7633724212646, -95.3631439208984, 32.782058715820)
qs = City.objects.filter(name__in=('Houston', 'Dallas'))
extent1 = qs.extent()
extent2 = qs.aggregate(Extent('point'))['point__extent']
for extent in (extent1, extent2):
for val, exp in zip(extent, expected):
self.assertAlmostEqual(exp, val, 4)
self.assertIsNone(City.objects.filter(name=('Smalltown')).extent())
self.assertIsNone(City.objects.filter(name=('Smalltown')).aggregate(Extent('point'))['point__extent'])
@skipUnlessDBFeature("supports_extent_aggr")
def test_extent_with_limit(self):
"""
Testing if extent supports limit.
"""
extent1 = City.objects.all().aggregate(Extent('point'))['point__extent']
extent2 = City.objects.all()[:3].aggregate(Extent('point'))['point__extent']
self.assertNotEqual(extent1, extent2)
@skipUnlessDBFeature("has_force_rhr_method")
def test_force_rhr(self):
"Testing GeoQuerySet.force_rhr()."
rings = (
((0, 0), (5, 0), (0, 5), (0, 0)),
((1, 1), (1, 3), (3, 1), (1, 1)),
)
rhr_rings = (
((0, 0), (0, 5), (5, 0), (0, 0)),
((1, 1), (3, 1), (1, 3), (1, 1)),
)
State.objects.create(name='Foo', poly=Polygon(*rings))
s = State.objects.force_rhr().get(name='Foo')
self.assertEqual(rhr_rings, s.force_rhr.coords)
@skipUnlessDBFeature("has_geohash_method")
def test_geohash(self):
"Testing GeoQuerySet.geohash()."
# Reference query:
# SELECT ST_GeoHash(point) FROM geoapp_city WHERE name='Houston';
# SELECT ST_GeoHash(point, 5) FROM geoapp_city WHERE name='Houston';
ref_hash = '9vk1mfq8jx0c8e0386z6'
h1 = City.objects.geohash().get(name='Houston')
h2 = City.objects.geohash(precision=5).get(name='Houston')
self.assertEqual(ref_hash, h1.geohash)
self.assertEqual(ref_hash[:5], h2.geohash)
def test_geojson(self):
"Testing GeoJSON output from the database using GeoQuerySet.geojson()."
# Only PostGIS and SpatiaLite 3.0+ support GeoJSON.
if not connection.ops.geojson:
self.assertRaises(NotImplementedError, Country.objects.all().geojson, field_name='mpoly')
return
pueblo_json = '{"type":"Point","coordinates":[-104.609252,38.255001]}'
houston_json = (
'{"type":"Point","crs":{"type":"name","properties":'
'{"name":"EPSG:4326"}},"coordinates":[-95.363151,29.763374]}'
)
victoria_json = (
'{"type":"Point","bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],'
'"coordinates":[-123.305196,48.462611]}'
)
chicago_json = (
'{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},'
'"bbox":[-87.65018,41.85039,-87.65018,41.85039],"coordinates":[-87.65018,41.85039]}'
)
if spatialite:
victoria_json = (
'{"type":"Point","bbox":[-123.305196,48.462611,-123.305196,48.462611],'
'"coordinates":[-123.305196,48.462611]}'
)
# Precision argument should only be an integer
self.assertRaises(TypeError, City.objects.geojson, precision='foo')
# Reference queries and values.
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 0)
# FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Pueblo';
self.assertEqual(pueblo_json, City.objects.geojson().get(name='Pueblo').geojson)
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city"
# WHERE "geoapp_city"."name" = 'Houston';
# This time we want to include the CRS by using the `crs` keyword.
self.assertEqual(houston_json, City.objects.geojson(crs=True, model_att='json').get(name='Houston').json)
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city"
# WHERE "geoapp_city"."name" = 'Houston';
# This time we include the bounding box by using the `bbox` keyword.
self.assertEqual(victoria_json, City.objects.geojson(bbox=True).get(name='Victoria').geojson)
# SELECT ST_AsGeoJson("geoapp_city"."point", 5, 3) FROM "geoapp_city"
# WHERE "geoapp_city"."name" = 'Chicago';
# Finally, we set every available keyword.
self.assertEqual(
chicago_json,
City.objects.geojson(bbox=True, crs=True, precision=5).get(name='Chicago').geojson
)
@skipUnlessDBFeature("has_gml_method")
def test_gml(self):
"Testing GML output from the database using GeoQuerySet.gml()."
# Should throw a TypeError when trying to obtain GML from a
# non-geometry field.
qs = City.objects.all()
self.assertRaises(TypeError, qs.gml, field_name='name')
ptown1 = City.objects.gml(field_name='point', precision=9).get(name='Pueblo')
ptown2 = City.objects.gml(precision=9).get(name='Pueblo')
if oracle:
# No precision parameter for Oracle :-/
gml_regex = re.compile(
r'^<gml:Point srsName="SDO:4326" xmlns:gml="http://www.opengis.net/gml">'
r'<gml:coordinates decimal="\." cs="," ts=" ">-104.60925\d+,38.25500\d+ '
r'</gml:coordinates></gml:Point>'
)
elif spatialite and connection.ops.spatial_version < (3, 0, 0):
# Spatialite before 3.0 has extra colon in SrsName
gml_regex = re.compile(
r'^<gml:Point SrsName="EPSG::4326"><gml:coordinates decimal="\." '
r'cs="," ts=" ">-104.609251\d+,38.255001</gml:coordinates></gml:Point>'
)
else:
gml_regex = re.compile(
r'^<gml:Point srsName="EPSG:4326"><gml:coordinates>'
r'-104\.60925\d+,38\.255001</gml:coordinates></gml:Point>'
)
for ptown in [ptown1, ptown2]:
self.assertTrue(gml_regex.match(ptown.gml))
if postgis:
self.assertIn('<gml:pos srsDimension="2">', City.objects.gml(version=3).get(name='Pueblo').gml)
@skipUnlessDBFeature("has_kml_method")
def test_kml(self):
"Testing KML output from the database using GeoQuerySet.kml()."
# Should throw a TypeError when trying to obtain KML from a
# non-geometry field.
qs = City.objects.all()
self.assertRaises(TypeError, qs.kml, 'name')
# Ensuring the KML is as expected.
ptown1 = City.objects.kml(field_name='point', precision=9).get(name='Pueblo')
ptown2 = City.objects.kml(precision=9).get(name='Pueblo')
for ptown in [ptown1, ptown2]:
self.assertEqual('<Point><coordinates>-104.609252,38.255001</coordinates></Point>', ptown.kml)
@ignore_warnings(category=RemovedInDjango110Warning)
def test_make_line(self):
"""
Testing the (deprecated) `make_line` GeoQuerySet method and the MakeLine
aggregate.
"""
if not connection.features.supports_make_line_aggr:
# Only PostGIS has support for the MakeLine aggregate. For other
# backends, test that NotImplementedError is raised
self.assertRaises(
NotImplementedError,
City.objects.all().aggregate, MakeLine('point')
)
return
# Ensuring that a `TypeError` is raised on models without PointFields.
self.assertRaises(TypeError, State.objects.make_line)
self.assertRaises(TypeError, Country.objects.make_line)
# MakeLine on an inappropriate field returns simply None
self.assertIsNone(State.objects.aggregate(MakeLine('poly'))['poly__makeline'])
# Reference query:
# SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city;
ref_line = GEOSGeometry(
'LINESTRING(-95.363151 29.763374,-96.801611 32.782057,'
'-97.521157 34.464642,174.783117 -41.315268,-104.609252 38.255001,'
'-95.23506 38.971823,-87.650175 41.850385,-123.305196 48.462611)',
srid=4326
)
# We check for equality with a tolerance of 10e-5 which is a lower bound
# of the precisions of ref_line coordinates
line1 = City.objects.make_line()
line2 = City.objects.aggregate(MakeLine('point'))['point__makeline']
for line in (line1, line2):
self.assertTrue(ref_line.equals_exact(line, tolerance=10e-5),
"%s != %s" % (ref_line, line))
@skipUnlessDBFeature("has_num_geom_method")
def test_num_geom(self):
"Testing the `num_geom` GeoQuerySet method."
# Both 'countries' only have two geometries.
for c in Country.objects.num_geom():
self.assertEqual(2, c.num_geom)
for c in City.objects.filter(point__isnull=False).num_geom():
# Oracle and PostGIS 2.0+ will return 1 for the number of
# geometries on non-collections.
self.assertEqual(1, c.num_geom)
@skipUnlessDBFeature("supports_num_points_poly")
def test_num_points(self):
"Testing the `num_points` GeoQuerySet method."
for c in Country.objects.num_points():
self.assertEqual(c.mpoly.num_points, c.num_points)
if not oracle:
# Oracle cannot count vertices in Point geometries.
for c in City.objects.num_points():
self.assertEqual(1, c.num_points)
@skipUnlessDBFeature("has_point_on_surface_method")
def test_point_on_surface(self):
"Testing the `point_on_surface` GeoQuerySet method."
# Reference values.
if oracle:
# SELECT SDO_UTIL.TO_WKTGEOMETRY(SDO_GEOM.SDO_POINTONSURFACE(GEOAPP_COUNTRY.MPOLY, 0.05))
# FROM GEOAPP_COUNTRY;
ref = {'New Zealand': fromstr('POINT (174.616364 -36.100861)', srid=4326),
'Texas': fromstr('POINT (-103.002434 36.500397)', srid=4326),
}
else:
# Using GEOSGeometry to compute the reference point on surface values
# -- since PostGIS also uses GEOS these should be the same.
ref = {'New Zealand': Country.objects.get(name='New Zealand').mpoly.point_on_surface,
'Texas': Country.objects.get(name='Texas').mpoly.point_on_surface
}
for c in Country.objects.point_on_surface():
if spatialite:
# XXX This seems to be a WKT-translation-related precision issue?
tol = 0.00001
else:
tol = 0.000000001
self.assertTrue(ref[c.name].equals_exact(c.point_on_surface, tol))
@skipUnlessDBFeature("has_reverse_method")
def test_reverse_geom(self):
"Testing GeoQuerySet.reverse_geom()."
coords = [(-95.363151, 29.763374), (-95.448601, 29.713803)]
Track.objects.create(name='Foo', line=LineString(coords))
t = Track.objects.reverse_geom().get(name='Foo')
coords.reverse()
self.assertEqual(tuple(coords), t.reverse_geom.coords)
if oracle:
self.assertRaises(TypeError, State.objects.reverse_geom)
@skipUnlessDBFeature("has_scale_method")
def test_scale(self):
"Testing the `scale` GeoQuerySet method."
xfac, yfac = 2, 3
tol = 5 # XXX The low precision tolerance is for SpatiaLite
qs = Country.objects.scale(xfac, yfac, model_att='scaled')
for c in qs:
for p1, p2 in zip(c.mpoly, c.scaled):
for r1, r2 in zip(p1, p2):
for c1, c2 in zip(r1.coords, r2.coords):
self.assertAlmostEqual(c1[0] * xfac, c2[0], tol)
self.assertAlmostEqual(c1[1] * yfac, c2[1], tol)
@skipUnlessDBFeature("has_snap_to_grid_method")
def test_snap_to_grid(self):
"Testing GeoQuerySet.snap_to_grid()."
# Let's try and break snap_to_grid() with bad combinations of arguments.
for bad_args in ((), range(3), range(5)):
self.assertRaises(ValueError, Country.objects.snap_to_grid, *bad_args)
for bad_args in (('1.0',), (1.0, None), tuple(map(six.text_type, range(4)))):
self.assertRaises(TypeError, Country.objects.snap_to_grid, *bad_args)
# Boundary for San Marino, courtesy of Bjorn Sandvik of thematicmapping.org
# from the world borders dataset he provides.
wkt = ('MULTIPOLYGON(((12.41580 43.95795,12.45055 43.97972,12.45389 43.98167,'
'12.46250 43.98472,12.47167 43.98694,12.49278 43.98917,'
'12.50555 43.98861,12.51000 43.98694,12.51028 43.98277,'
'12.51167 43.94333,12.51056 43.93916,12.49639 43.92333,'
'12.49500 43.91472,12.48778 43.90583,12.47444 43.89722,'
'12.46472 43.89555,12.45917 43.89611,12.41639 43.90472,'
'12.41222 43.90610,12.40782 43.91366,12.40389 43.92667,'
'12.40500 43.94833,12.40889 43.95499,12.41580 43.95795)))')
Country.objects.create(name='San Marino', mpoly=fromstr(wkt))
# Because floating-point arithmetic isn't exact, we set a tolerance
# to pass into GEOS `equals_exact`.
tol = 0.000000001
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.1)) FROM "geoapp_country"
# WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr('MULTIPOLYGON(((12.4 44,12.5 44,12.5 43.9,12.4 43.9,12.4 44)))')
self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.1).get(name='San Marino').snap_to_grid, tol))
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.05, 0.23)) FROM "geoapp_country"
# WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr('MULTIPOLYGON(((12.4 43.93,12.45 43.93,12.5 43.93,12.45 43.93,12.4 43.93)))')
self.assertTrue(
ref.equals_exact(Country.objects.snap_to_grid(0.05, 0.23).get(name='San Marino').snap_to_grid, tol)
)
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.5, 0.17, 0.05, 0.23)) FROM "geoapp_country"
# WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr(
'MULTIPOLYGON(((12.4 43.87,12.45 43.87,12.45 44.1,12.5 44.1,12.5 43.87,12.45 43.87,12.4 43.87)))'
)
self.assertTrue(
ref.equals_exact(
Country.objects.snap_to_grid(0.05, 0.23, 0.5, 0.17).get(name='San Marino').snap_to_grid,
tol
)
)
@skipUnlessDBFeature("has_svg_method")
def test_svg(self):
"Testing SVG output using GeoQuerySet.svg()."
self.assertRaises(TypeError, City.objects.svg, precision='foo')
# SELECT AsSVG(geoapp_city.point, 0, 8) FROM geoapp_city WHERE name = 'Pueblo';
svg1 = 'cx="-104.609252" cy="-38.255001"'
# Even though relative, only one point so it's practically the same except for
# the 'c' letter prefix on the x,y values.
svg2 = svg1.replace('c', '')
self.assertEqual(svg1, City.objects.svg().get(name='Pueblo').svg)
self.assertEqual(svg2, City.objects.svg(relative=5).get(name='Pueblo').svg)
@skipUnlessDBFeature("has_transform_method")
def test_transform(self):
"Testing the transform() GeoQuerySet method."
# Pre-transformed points for Houston and Pueblo.
htown = fromstr('POINT(1947516.83115183 6322297.06040572)', srid=3084)
ptown = fromstr('POINT(992363.390841912 481455.395105533)', srid=2774)
prec = 3 # Precision is low due to version variations in PROJ and GDAL.
# Asserting the result of the transform operation with the values in
# the pre-transformed points. Oracle does not have the 3084 SRID.
if not oracle:
h = City.objects.transform(htown.srid).get(name='Houston')
self.assertEqual(3084, h.point.srid)
self.assertAlmostEqual(htown.x, h.point.x, prec)
self.assertAlmostEqual(htown.y, h.point.y, prec)
p1 = City.objects.transform(ptown.srid, field_name='point').get(name='Pueblo')
p2 = City.objects.transform(srid=ptown.srid).get(name='Pueblo')
for p in [p1, p2]:
self.assertEqual(2774, p.point.srid)
self.assertAlmostEqual(ptown.x, p.point.x, prec)
self.assertAlmostEqual(ptown.y, p.point.y, prec)
@skipUnlessDBFeature("has_translate_method")
def test_translate(self):
"Testing the `translate` GeoQuerySet method."
xfac, yfac = 5, -23
qs = Country.objects.translate(xfac, yfac, model_att='translated')
for c in qs:
for p1, p2 in zip(c.mpoly, c.translated):
for r1, r2 in zip(p1, p2):
for c1, c2 in zip(r1.coords, r2.coords):
# XXX The low precision is for SpatiaLite
self.assertAlmostEqual(c1[0] + xfac, c2[0], 5)
self.assertAlmostEqual(c1[1] + yfac, c2[1], 5)
# TODO: Oracle can be made to pass if
# union1 = union2 = fromstr('POINT (-97.5211570000000023 34.4646419999999978)')
# but this seems unexpected and should be investigated to determine the cause.
@skipUnlessDBFeature("has_unionagg_method")
@no_oracle
@ignore_warnings(category=RemovedInDjango110Warning)
def test_unionagg(self):
"""
Testing the (deprecated) `unionagg` (aggregate union) GeoQuerySet method
and the Union aggregate.
"""
tx = Country.objects.get(name='Texas').mpoly
# Houston, Dallas -- Ordering may differ depending on backend or GEOS version.
union1 = fromstr('MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)')
union2 = fromstr('MULTIPOINT(-95.363151 29.763374,-96.801611 32.782057)')
qs = City.objects.filter(point__within=tx)
self.assertRaises(TypeError, qs.unionagg, 'name')
self.assertRaises(ValueError, qs.aggregate, Union('name'))
# Using `field_name` keyword argument in one query and specifying an
# order in the other (which should not be used because this is
# an aggregate method on a spatial column)
u1 = qs.unionagg(field_name='point')
u2 = qs.order_by('name').unionagg()
u3 = qs.aggregate(Union('point'))['point__union']
u4 = qs.order_by('name').aggregate(Union('point'))['point__union']
tol = 0.00001
self.assertTrue(union1.equals_exact(u1, tol) or union2.equals_exact(u1, tol))
self.assertTrue(union1.equals_exact(u2, tol) or union2.equals_exact(u2, tol))
self.assertTrue(union1.equals_exact(u3, tol) or union2.equals_exact(u3, tol))
self.assertTrue(union1.equals_exact(u4, tol) or union2.equals_exact(u4, tol))
qs = City.objects.filter(name='NotACity')
self.assertIsNone(qs.unionagg(field_name='point'))
self.assertIsNone(qs.aggregate(Union('point'))['point__union'])
def test_within_subquery(self):
"""
Test that using a queryset inside a geo lookup is working (using a subquery)
(#14483).
"""
tex_cities = City.objects.filter(
point__within=Country.objects.filter(name='Texas').values('mpoly')).order_by('name')
expected = ['Dallas', 'Houston']
if not connection.features.supports_real_shape_operations:
expected.append('Oklahoma City')
self.assertEqual(
list(tex_cities.values_list('name', flat=True)),
expected
)
def test_non_concrete_field(self):
NonConcreteModel.objects.create(point=Point(0, 0), name='name')
list(NonConcreteModel.objects.all())
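# Hedged sketch (not part of this suite): every test class above is gated on
# skipUnlessDBFeature("gis_enabled"), so running it requires a spatial database
# backend configured in the test settings, along these illustrative lines:
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',  # spatial backend
        'NAME': 'geodjango_tests',  # hypothetical database name
    }
}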
| bsd-3-clause |
rbaindourov/v8-inspector | Source/chrome/net/tools/net_docs/net_docs.py | 15 | 3541 |
#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Reads, parses, and (optionally) writes as HTML the contents of Markdown
files passed as arguments. Intended for rendering network stack documentation
stored as Markdown in the source tree to a human-readable format."""
import argparse
import os.path
import sys
def nth_parent_directory(path, n):
for i in range(n):
path = os.path.dirname(path)
return path
# Go up the directory tree from this script and add src/third_party to sys.path
# so "import markdown" can find it in src/third_party/markdown.
SCRIPT_PATH = os.path.abspath(__file__)
SRC_PATH = nth_parent_directory(SCRIPT_PATH, 4)
THIRD_PARTY_PATH = os.path.join(SRC_PATH, 'third_party')
sys.path.insert(0, THIRD_PARTY_PATH)
import markdown
def ReadFile(filename):
with open(filename, 'r') as file:
return file.read()
def WriteFile(filename, contents):
dir = os.path.dirname(filename)
if not os.path.isdir(dir):
os.mkdir(dir)
with open(filename, 'w') as file:
file.write(contents)
TEMPLATE = """
<html>
<head>
<title>{title}</title>
</head>
<body>
{body}
</body>
</html>"""
def FormatPage(markdown_html, title):
# TODO(ttuttle): Add a navigation list / table of contents of available
# Markdown files, perhaps?
return TEMPLATE.format(title=title, body=markdown_html)
def ProcessDocs(input_filenames, input_pathname, output_pathname):
"""Processes a list of Markdown documentation files.
If input_pathname and output_pathname are specified, outputs HTML files
into the corresponding subdirectories of output_pathname. If one or both is
not specified, simply ensures the files exist and contain valid Markdown.
Args:
input_filenames: A list of filenames (absolute, or relative to $PWD) of
Markdown files to parse and possibly render.
input_pathname: The base directory of the input files. (Needed so they
can be placed in the same relative path in the output path.)
output_pathname: The output directory into which rendered Markdown files
go, using that relative path.
Returns:
nothing
Raises:
IOError: if any of the file operations fail (e.g. input_filenames
contains a non-existent file).
"""
outputting = (input_pathname is not None) and (output_pathname is not None)
markdown_parser = markdown.Markdown()
for input_filename in input_filenames:
markdown_text = ReadFile(input_filename)
markdown_html = markdown_parser.reset().convert(markdown_text)
if not outputting:
continue
full_html = FormatPage(markdown_html, title=input_filename)
rel_filename = os.path.relpath(input_filename, start=input_pathname)
output_filename = os.path.join(output_pathname, rel_filename) + '.html'
WriteFile(output_filename, full_html)
def main():
parser = argparse.ArgumentParser(
description='Parse and render Markdown documentation')
parser.add_argument('--input_path', default=None,
help="Input path for Markdown; required only if output_path set")
parser.add_argument('--output_path', default=None,
help="Output path for rendered HTML; if unspecified, won't output")
parser.add_argument('filenames', nargs=argparse.REMAINDER)
args = parser.parse_args()
ProcessDocs(args.filenames, args.input_path, args.output_path)
return 0
if __name__ == '__main__':
sys.exit(main())
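# Hedged usage sketch: the same conversion can be driven from Python rather
# than the command line (paths here are illustrative, not from this script).
import net_docs

net_docs.ProcessDocs(
    ['net/docs/index.md'],  # Markdown files to parse and render
    input_pathname='net/docs',  # base directory of the inputs
    output_pathname='out/net_docs',  # rendered HTML mirrors the input layout here
)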
| bsd-3-clause |
wrbutros/condominios | registro/serializers.py | 1 | 1679 |
from rest_framework import serializers
from models import Condominio, Edificio, Departamento, Servicio, LecturaServicio
from models import AdministradorEdificio, Conserje
class ConserjeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Conserje
class AdministradorEdificioSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = AdministradorEdificio
class CondominioSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Condominio
class EdificioSerializer(serializers.HyperlinkedModelSerializer):
condominio = CondominioSerializer()
class Meta:
model = Edificio
fields = (
'nombre',
'cantidadPisos',
'condominio'
)
class DepartamentoSerializer(serializers.HyperlinkedModelSerializer):
edificio = EdificioSerializer()
class Meta:
model = Departamento
fields = (
'numero',
'metrosCuadrados',
'cantidadBanos',
'cantidadPiezas',
'walkInCloset',
'porcentajeDominio',
'edificio'
)
class ServicioSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Servicio
fields = (
'nombre',
'unidad_medida'
)
class LecturaServicioSerializer(serializers.HyperlinkedModelSerializer):
servicio = ServicioSerializer()
departamento = DepartamentoSerializer()
class Meta:
model = LecturaServicio
fields = (
'servicio',
'departamento',
'fecha',
'lectura'
)
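# Hedged usage sketch (hypothetical view code, not part of this module): the
# nested serializers above make a LecturaServicio render its Servicio and
# Departamento inline rather than as hyperlinks.
from rest_framework.renderers import JSONRenderer

lectura = LecturaServicio.objects.select_related('servicio', 'departamento').first()
serializer = LecturaServicioSerializer(lectura, context={'request': request})  # `request` comes from the view
print(JSONRenderer().render(serializer.data))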
| gpl-3.0 |
harshilasu/GraphicMelon | y/google-cloud-sdk/platform/gsutil/third_party/boto/tests/integration/ec2/cloudwatch/__init__.py | 761 | 1104 |
# Copyright (c) 2006-2011 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
| gpl-3.0 |
davidobrien1985/ansible-modules-core | cloud/google/gc_storage.py | 101 | 16678 |
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: gc_storage
version_added: "1.4"
short_description: This module manages objects/buckets in Google Cloud Storage.
description:
- This module allows users to manage their objects/buckets in Google Cloud Storage. It allows upload and download operations and can set some canned permissions. It also allows retrieval of URLs for objects for use in playbooks, and retrieval of string contents of objects. This module requires setting the default project in GCS prior to playbook usage. See U(https://developers.google.com/storage/docs/reference/v1/apiversion1) for information about setting the default project.
options:
bucket:
description:
- Bucket name.
required: true
object:
description:
- Keyname of the object inside the bucket. Can be also be used to create "virtual directories" (see examples).
required: false
default: null
src:
description:
- The source file path when performing a PUT operation.
required: false
default: null
dest:
description:
- The destination file path when downloading an object/key with a GET operation.
required: false
force:
description:
- Forces an overwrite either locally on the filesystem or remotely with the object/key. Used with PUT and GET operations.
required: false
default: true
aliases: [ 'overwrite' ]
permission:
description:
- This option lets the user set the canned permissions on the object/bucket that are created. The permissions that can be set are 'private', 'public-read', 'authenticated-read'.
required: false
default: private
headers:
version_added: "2.0"
description:
- Headers to attach to object.
required: false
default: '{}'
expiration:
description:
- Time limit (in seconds) for the URL generated and returned by GCS when performing a mode=put or mode=get_url operation. This URL is only available when public-read is the acl for the object.
required: false
default: null
mode:
description:
- Switches the module behaviour between upload, download, get_url (return download url), get_str (download object as string), create (bucket) and delete (bucket).
required: true
default: null
choices: [ 'get', 'put', 'get_url', 'get_str', 'delete', 'create' ]
gs_secret_key:
description:
- GS secret key. If not set then the value of the GS_SECRET_KEY environment variable is used.
required: true
default: null
gs_access_key:
description:
- GS access key. If not set then the value of the GS_ACCESS_KEY environment variable is used.
required: true
default: null
requirements:
- "python >= 2.6"
- "boto >= 2.9"
author: "Benno Joy (@bennojoy)"
'''
EXAMPLES = '''
# upload some content
- gc_storage: bucket=mybucket object=key.txt src=/usr/local/myfile.txt mode=put permission=public-read
# upload some headers
- gc_storage: bucket=mybucket object=key.txt src=/usr/local/myfile.txt headers='{"Content-Encoding": "gzip"}'
# download some content
- gc_storage: bucket=mybucket object=key.txt dest=/usr/local/myfile.txt mode=get
# Download an object as a string to use else where in your playbook
- gc_storage: bucket=mybucket object=key.txt mode=get_str
# Create an empty bucket
- gc_storage: bucket=mybucket mode=create
# Create a bucket with key as directory
- gc_storage: bucket=mybucket object=/my/directory/path mode=create
# Delete a bucket and all contents
- gc_storage: bucket=mybucket mode=delete
'''
import os
import urlparse
import hashlib
try:
import boto
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def grant_check(module, gs, obj):
try:
acp = obj.get_acl()
if module.params.get('permission') == 'public-read':
grant = [ x for x in acp.entries.entry_list if x.scope.type == 'AllUsers']
if not grant:
obj.set_acl('public-read')
module.exit_json(changed=True, result="The objects permission as been set to public-read")
if module.params.get('permission') == 'authenticated-read':
grant = [ x for x in acp.entries.entry_list if x.scope.type == 'AllAuthenticatedUsers']
if not grant:
obj.set_acl('authenticated-read')
module.exit_json(changed=True, result="The objects permission as been set to authenticated-read")
except gs.provider.storage_response_error, e:
module.fail_json(msg= str(e))
return True
def key_check(module, gs, bucket, obj):
try:
bucket = gs.lookup(bucket)
key_check = bucket.get_key(obj)
except gs.provider.storage_response_error, e:
module.fail_json(msg= str(e))
if key_check:
grant_check(module, gs, key_check)
return True
else:
return False
def keysum(module, gs, bucket, obj):
bucket = gs.lookup(bucket)
key_check = bucket.get_key(obj)
if not key_check:
return None
md5_remote = key_check.etag[1:-1]
etag_multipart = '-' in md5_remote # Check for multipart, etag is not md5
if etag_multipart is True:
module.fail_json(msg="Files uploaded with multipart of gs are not supported with checksum, unable to compute checksum.")
return md5_remote
def bucket_check(module, gs, bucket):
try:
result = gs.lookup(bucket)
except gs.provider.storage_response_error, e:
module.fail_json(msg= str(e))
if result:
grant_check(module, gs, result)
return True
else:
return False
def create_bucket(module, gs, bucket):
try:
bucket = gs.create_bucket(bucket)
bucket.set_acl(module.params.get('permission'))
except gs.provider.storage_response_error, e:
module.fail_json(msg= str(e))
if bucket:
return True
def delete_bucket(module, gs, bucket):
try:
bucket = gs.lookup(bucket)
bucket_contents = bucket.list()
for key in bucket_contents:
bucket.delete_key(key.name)
bucket.delete()
return True
except gs.provider.storage_response_error, e:
module.fail_json(msg= str(e))
def delete_key(module, gs, bucket, obj):
try:
bucket = gs.lookup(bucket)
bucket.delete_key(obj)
module.exit_json(msg="Object deleted from bucket ", changed=True)
except gs.provider.storage_response_error, e:
module.fail_json(msg= str(e))
def create_dirkey(module, gs, bucket, obj):
try:
bucket = gs.lookup(bucket)
key = bucket.new_key(obj)
key.set_contents_from_string('')
module.exit_json(msg="Virtual directory %s created in bucket %s" % (obj, bucket.name), changed=True)
except gs.provider.storage_response_error, e:
module.fail_json(msg= str(e))
def path_check(path):
if os.path.exists(path):
return True
else:
return False
def transform_headers(headers):
"""
Boto url-encodes values unless we convert the value to `str`, so doing
this prevents 'max-age=100000' from being converted to "max-age%3D100000".
:param headers: Headers to convert
:type headers: dict
:rtype: dict
"""
for key, value in headers.items():
headers[key] = str(value)
return headers
def upload_gsfile(module, gs, bucket, obj, src, expiry):
try:
bucket = gs.lookup(bucket)
key = bucket.new_key(obj)
key.set_contents_from_filename(
filename=src,
headers=transform_headers(module.params.get('headers'))
)
key.set_acl(module.params.get('permission'))
url = key.generate_url(expiry)
module.exit_json(msg="PUT operation complete", url=url, changed=True)
except gs.provider.storage_copy_error, e:
module.fail_json(msg= str(e))
def download_gsfile(module, gs, bucket, obj, dest):
try:
bucket = gs.lookup(bucket)
key = bucket.lookup(obj)
key.get_contents_to_filename(dest)
module.exit_json(msg="GET operation complete", changed=True)
except gs.provider.storage_copy_error, e:
module.fail_json(msg= str(e))
def download_gsstr(module, gs, bucket, obj):
try:
bucket = gs.lookup(bucket)
key = bucket.lookup(obj)
contents = key.get_contents_as_string()
module.exit_json(msg="GET operation complete", contents=contents, changed=True)
except gs.provider.storage_copy_error, e:
module.fail_json(msg= str(e))
def get_download_url(module, gs, bucket, obj, expiry):
try:
bucket = gs.lookup(bucket)
key = bucket.lookup(obj)
url = key.generate_url(expiry)
module.exit_json(msg="Download url:", url=url, expiration=expiry, changed=True)
except gs.provider.storage_response_error, e:
module.fail_json(msg= str(e))
def handle_get(module, gs, bucket, obj, overwrite, dest):
md5_remote = keysum(module, gs, bucket, obj)
md5_local = module.md5(dest)
if md5_local == md5_remote:
module.exit_json(changed=False)
if md5_local != md5_remote and not overwrite:
module.exit_json(msg="WARNING: Checksums do not match. Use overwrite parameter to force download.", failed=True)
else:
download_gsfile(module, gs, bucket, obj, dest)
def handle_put(module, gs, bucket, obj, overwrite, src, expiration):
# Let's check to see if the bucket exists to get ground truth.
bucket_rc = bucket_check(module, gs, bucket)
key_rc = key_check(module, gs, bucket, obj)
# Let's check key state. Does it exist and if it does, compute the etag md5sum.
if bucket_rc and key_rc:
md5_remote = keysum(module, gs, bucket, obj)
md5_local = module.md5(src)
if md5_local == md5_remote:
module.exit_json(msg="Local and remote object are identical", changed=False)
if md5_local != md5_remote and not overwrite:
module.exit_json(msg="WARNING: Checksums do not match. Use overwrite parameter to force upload.", failed=True)
else:
upload_gsfile(module, gs, bucket, obj, src, expiration)
if not bucket_rc:
create_bucket(module, gs, bucket)
upload_gsfile(module, gs, bucket, obj, src, expiration)
# If bucket exists but key doesn't, just upload.
if bucket_rc and not key_rc:
upload_gsfile(module, gs, bucket, obj, src, expiration)
def handle_delete(module, gs, bucket, obj):
if bucket and not obj:
if bucket_check(module, gs, bucket):
module.exit_json(msg="Bucket %s and all keys have been deleted."%bucket, changed=delete_bucket(module, gs, bucket))
else:
module.exit_json(msg="Bucket does not exist.", changed=False)
if bucket and obj:
if bucket_check(module, gs, bucket):
if key_check(module, gs, bucket, obj):
module.exit_json(msg="Object has been deleted.", changed=delete_key(module, gs, bucket, obj))
else:
module.exit_json(msg="Object does not exists.", changed=False)
else:
module.exit_json(msg="Bucket does not exist.", changed=False)
else:
module.fail_json(msg="Bucket or Bucket & object parameter is required.", failed=True)
def handle_create(module, gs, bucket, obj):
if bucket and not obj:
if bucket_check(module, gs, bucket):
module.exit_json(msg="Bucket already exists.", changed=False)
else:
module.exit_json(msg="Bucket created successfully", changed=create_bucket(module, gs, bucket))
if bucket and obj:
if obj.endswith('/'):
dirobj = obj
else:
dirobj = obj + "/"
if bucket_check(module, gs, bucket):
if key_check(module, gs, bucket, dirobj):
module.exit_json(msg="Bucket %s and key %s already exists."% (bucket, obj), changed=False)
else:
create_dirkey(module, gs, bucket, dirobj)
else:
create_bucket(module, gs, bucket)
create_dirkey(module, gs, bucket, dirobj)
def main():
module = AnsibleModule(
argument_spec = dict(
bucket = dict(required=True),
object = dict(default=None),
src = dict(default=None),
dest = dict(default=None),
expiration = dict(default=600, aliases=['expiry']),
mode = dict(choices=['get', 'put', 'delete', 'create', 'get_url', 'get_str'], required=True),
permission = dict(choices=['private', 'public-read', 'authenticated-read'], default='private'),
headers = dict(type='dict', default={}),
gs_secret_key = dict(no_log=True, required=True),
gs_access_key = dict(required=True),
overwrite = dict(default=True, type='bool', aliases=['force']),
),
)
if not HAS_BOTO:
module.fail_json(msg='boto 2.9+ required for this module')
bucket = module.params.get('bucket')
obj = module.params.get('object')
src = module.params.get('src')
dest = module.params.get('dest')
if dest:
dest = os.path.expanduser(dest)
mode = module.params.get('mode')
expiry = module.params.get('expiration')
gs_secret_key = module.params.get('gs_secret_key')
gs_access_key = module.params.get('gs_access_key')
overwrite = module.params.get('overwrite')
if mode == 'put':
if not src or not obj:
module.fail_json(msg="When using PUT, src, bucket, object are mandatory parameters")
if mode == 'get':
if not dest or not obj:
module.fail_json(msg="When using GET, dest, bucket, object are mandatory parameters")
if obj:
obj = os.path.expanduser(module.params['object'])
try:
gs = boto.connect_gs(gs_access_key, gs_secret_key)
except boto.exception.NoAuthHandlerFound, e:
module.fail_json(msg = str(e))
if mode == 'get':
if not bucket_check(module, gs, bucket) or not key_check(module, gs, bucket, obj):
module.fail_json(msg="Target bucket/key cannot be found", failed=True)
if not path_check(dest):
download_gsfile(module, gs, bucket, obj, dest)
else:
handle_get(module, gs, bucket, obj, overwrite, dest)
if mode == 'put':
if not path_check(src):
module.fail_json(msg="Local object for PUT does not exist", failed=True)
handle_put(module, gs, bucket, obj, overwrite, src, expiry)
# Support for deleting an object if we have both params.
if mode == 'delete':
handle_delete(module, gs, bucket, obj)
if mode == 'create':
handle_create(module, gs, bucket, obj)
if mode == 'get_url':
if bucket and obj:
if bucket_check(module, gs, bucket) and key_check(module, gs, bucket, obj):
get_download_url(module, gs, bucket, obj, expiry)
else:
module.fail_json(msg="Key/Bucket does not exist", failed=True)
else:
module.fail_json(msg="Bucket and Object parameters must be set", failed=True)
# --------------------------- Get the String contents of an Object -------------------------
if mode == 'get_str':
if bucket and obj:
if bucket_check(module, gs, bucket) and key_check(module, gs, bucket, obj):
download_gsstr(module, gs, bucket, obj)
else:
module.fail_json(msg="Key/Bucket does not exist", failed=True)
else:
module.fail_json(msg="Bucket and Object parameters must be set", failed=True)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
frrp/ipcoin | contrib/testgen/base58.py | 2139 | 2818 |
'''
Bitcoin base58 encoding and decoding.
Based on https://bitcointalk.org/index.php?topic=1026.0 (public domain)
'''
import hashlib
# for compatibility with following code...
class SHA256:
new = hashlib.sha256
if str != bytes:
# Python 3.x
def ord(c):
return c
def chr(n):
return bytes( (n,) )
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
b58chars = __b58chars
def b58encode(v):
""" encode v, which is a string of bytes, to base58.
"""
long_value = 0
for (i, c) in enumerate(v[::-1]):
long_value += (256**i) * ord(c)
result = ''
while long_value >= __b58base:
div, mod = divmod(long_value, __b58base)
result = __b58chars[mod] + result
long_value = div
result = __b58chars[long_value] + result
# Bitcoin does a little leading-zero-compression:
# leading 0-bytes in the input become leading-1s
nPad = 0
for c in v:
if c == '\0' or c == 0: nPad += 1  # bytes iterate as ints on Python 3
else: break
return (__b58chars[0]*nPad) + result
def b58decode(v, length = None):
""" decode v into a string of len bytes
"""
long_value = 0
for (i, c) in enumerate(v[::-1]):
long_value += __b58chars.find(c) * (__b58base**i)
result = bytes()
while long_value >= 256:
div, mod = divmod(long_value, 256)
result = chr(mod) + result
long_value = div
result = chr(long_value) + result
nPad = 0
for c in v:
if c == __b58chars[0]: nPad += 1
else: break
result = chr(0)*nPad + result
if length is not None and len(result) != length:
return None
return result
def checksum(v):
"""Return 32-bit checksum based on SHA256"""
return SHA256.new(SHA256.new(v).digest()).digest()[0:4]
def b58encode_chk(v):
"""b58encode a string, with 32-bit checksum"""
return b58encode(v + checksum(v))
def b58decode_chk(v):
"""decode a base58 string, check and remove checksum"""
result = b58decode(v)
if result is None:
return None
h3 = checksum(result[:-4])
if result[-4:] == h3:
return result[:-4]
else:
return None
def get_bcaddress_version(strAddress):
""" Returns None if strAddress is invalid. Otherwise returns integer version of address. """
addr = b58decode_chk(strAddress)
    if addr is None or len(addr) != 21: return None
version = addr[0]
return ord(version)
if __name__ == '__main__':
# Test case (from http://gitorious.org/bitcoin/python-base58.git)
    assert get_bcaddress_version('15VjRaDX9zpbA8LVnbrCAFzrVzN7ixHNsC') == 0
_ohai = 'o hai'.encode('ascii')
_tmp = b58encode(_ohai)
assert _tmp == 'DYB3oMS'
assert b58decode(_tmp, 5) == _ohai
print("Tests passed")
|
mit
|
eufarn7sp/egads
|
egads/thirdparty/quantities/constants/naturalunits.py
|
4
|
1604
|
# -*- coding: utf-8 -*-
"""
"""
from __future__ import absolute_import
from ._utils import _cd
from ..unitquantity import UnitConstant
natural_unit_of_action = UnitConstant(
'natural_unit_of_action',
_cd('natural unit of action'),
symbol='hbar',
u_symbol='ħ'
)
natural_unit_of_energy = UnitConstant(
'natural_unit_of_energy',
_cd('natural unit of energy'),
symbol='(m_e*c**2)',
u_symbol='(mₑ·c²)'
)
natural_unit_of_length = UnitConstant(
'natural_unit_of_length',
_cd('natural unit of length'),
symbol='lambdabar_C',
u_symbol='ƛ_C'
)
natural_unit_of_mass = UnitConstant(
'natural_unit_of_mass',
_cd('natural unit of mass'),
symbol='m_e',
u_symbol='mₑ'
)
natural_unit_of_momentum = UnitConstant(
'natural_unit_of_momentum',
_cd('natural unit of momentum'),
symbol='(m_e*c)',
u_symbol='(mₑ·c)'
)
natural_unit_of_time = UnitConstant(
'natural_unit_of_time',
_cd('natural unit of time'),
symbol='(hbar/(m_e*c**2))',
u_symbol='(ħ/(mₑ·c²))'
)
natural_unit_of_velocity = UnitConstant(
'natural_unit_of_velocity',
_cd('natural unit of velocity'),
symbol='c'
)
natural_unit_of_action_in_eV_s = UnitConstant(
'natural_unit_of_action_in_eV_s',
_cd('natural unit of action in eV s')
)
natural_unit_of_energy_in_MeV = UnitConstant(
'natural_unit_of_energy_in_MeV',
_cd('natural unit of energy in MeV')
)
natural_unit_of_momentum_in_MeV_per_c = UnitConstant(
'natural_unit_of_momentum_in_MeV_per_c',
_cd('natural unit of momentum in MeV/c')
)
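# Example conversions (illustrative; uses the standard `quantities` rescale API):
#   natural_unit_of_velocity.rescale('m/s')  # 299792458 m/s (speed of light)
#   natural_unit_of_energy.rescale('MeV')    # ~0.511 MeV (electron rest energy)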
del UnitConstant, _cd
|
gpl-3.0
|
woutersmet/Zeosummer
|
lib/zeobuilder/expressions.py
|
2
|
2479
|
# Zeobuilder is an extensible GUI-toolkit for molecular model construction.
# Copyright (C) 2007 - 2009 Toon Verstraelen <[email protected]>, Center
# for Molecular Modeling (CMM), Ghent University, Ghent, Belgium; all rights
# reserved unless otherwise stated.
#
# This file is part of Zeobuilder.
#
# Zeobuilder is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# In addition to the regulations of the GNU General Public License,
# publications and communications based in parts on this program or on
# parts of this program are required to cite the following article:
#
# "ZEOBUILDER: a GUI toolkit for the construction of complex molecules on the
# nanoscale with building blocks", Toon Verstraelen, Veronique Van Speybroeck
# and Michel Waroquier, Journal of Chemical Information and Modeling, Vol. 48
# (7), 1530-1541, 2008
# DOI:10.1021/ci8000748
#
# Zeobuilder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
from zeobuilder import context
from molmod.data.periodic import periodic
from molmod.data.bonds import bonds, BOND_SINGLE, BOND_DOUBLE, BOND_TRIPLE
import molmod.units
class Expression(object):
l = {
"periodic": periodic,
"bonds": bonds,
"BOND_SINGLE": BOND_SINGLE,
"BOND_DOUBLE": BOND_DOUBLE,
"BOND_TRIPLE": BOND_TRIPLE,
}
for key, val in molmod.units.__dict__.iteritems():
if isinstance(val, float):
l[key] = val
def __init__(self, code="True"):
self.compiled = compile("(%s)" % code, "<string>", 'eval')
self.code = code
self.variables = ("node",)
def compile_as(self, name):
self.compiled = compile("(%s)" % self.code, name, 'eval')
def __call__(self, *variables):
g = {"__builtins__": __builtins__}
g.update(self.l)
for name, variable in zip(self.variables, variables):
g[name] = variable
return eval(self.compiled, g)
def add_locals(l):
Expression.l.update(l)
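# Illustrative usage (a sketch; expression bodies may reference the injected
# names above -- periodic, bonds, BOND_* and unit constants -- plus `node`):
#
#   expr = Expression("node is not None")
#   expr.compile_as("<node filter>")
#   selected = [node for node in nodes if expr(node)]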
|
gpl-3.0
|
danieljaouen/ansible
|
lib/ansible/modules/cloud/google/gcp_storage_bucket_access_control.py
|
12
|
12033
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ["preview"],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_storage_bucket_access_control
description:
- The BucketAccessControls resource represents the Access Control Lists (ACLs) for
buckets within Google Cloud Storage. ACLs let you specify who has access to your
data and to what extent.
- 'There are three roles that can be assigned to an entity: READERs can get the bucket,
though no acl property will be returned, and list the bucket''s objects. WRITERs
are READERs, and they can insert objects into the bucket and delete the bucket''s
objects. OWNERs are WRITERs, and they can get the acl property of a bucket, update
a bucket, and call all BucketAccessControls methods on the bucket. For more information,
see Access Control, with the caveat that this API uses READER, WRITER, and OWNER
instead of READ, WRITE, and FULL_CONTROL.'
short_description: Creates a GCP BucketAccessControl
version_added: 2.6
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
state:
description:
- Whether the given object should exist in GCP
choices: ['present', 'absent']
default: 'present'
bucket:
description:
- The name of the bucket.
required: true
entity:
description:
- 'The entity holding the permission, in one of the following forms: user-userId
user-email group-groupId group-email domain-domain project-team-projectId allUsers
allAuthenticatedUsers Examples: The user [email protected] would be
[email protected].'
- The group [email protected] would be [email protected].
- To refer to all members of the Google Apps for Business domain example.com, the
entity would be domain-example.com.
required: true
entity_id:
description:
- The ID for the entity.
required: false
project_team:
description:
- The project team associated with the entity.
required: false
suboptions:
project_number:
description:
- The project team associated with the entity.
required: false
team:
description:
- The team.
required: false
choices: ['editors', 'owners', 'viewers']
role:
description:
- The access permission for the entity.
required: false
choices: ['OWNER', 'READER', 'WRITER']
extends_documentation_fragment: gcp
'''
EXAMPLES = '''
- name: create a bucket
gcp_storage_bucket:
name: "bucket-bac"
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: bucket
- name: create a bucket access control
gcp_storage_bucket_access_control:
bucket: "{{ bucket }}"
entity: [email protected]
role: WRITER
project: "test_project"
auth_kind: "service_account"
service_account_file: "/tmp/auth.pem"
state: present
'''
RETURN = '''
bucket:
description:
- The name of the bucket.
returned: success
type: dict
domain:
description:
- The domain associated with the entity.
returned: success
type: str
email:
description:
- The email address associated with the entity.
returned: success
type: str
entity:
description:
- 'The entity holding the permission, in one of the following forms: user-userId
user-email group-groupId group-email domain-domain project-team-projectId allUsers
allAuthenticatedUsers Examples: The user [email protected] would be
[email protected].'
- The group [email protected] would be [email protected].
- To refer to all members of the Google Apps for Business domain example.com, the
entity would be domain-example.com.
returned: success
type: str
entity_id:
description:
- The ID for the entity.
returned: success
type: str
id:
description:
- The ID of the access-control entry.
returned: success
type: str
project_team:
description:
- The project team associated with the entity.
returned: success
type: complex
contains:
project_number:
description:
- The project team associated with the entity.
returned: success
type: str
team:
description:
- The team.
returned: success
type: str
role:
description:
- The access permission for the entity.
returned: success
type: str
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, remove_nones_from_dict, replace_resource_dict
import json
################################################################################
# Main
################################################################################
def main():
"""Main function"""
module = GcpModule(
argument_spec=dict(
state=dict(default='present', choices=['present', 'absent'], type='str'),
bucket=dict(required=True, type='dict'),
entity=dict(required=True, type='str'),
entity_id=dict(type='str'),
project_team=dict(type='dict', options=dict(
project_number=dict(type='str'),
team=dict(type='str', choices=['editors', 'owners', 'viewers'])
)),
role=dict(type='str', choices=['OWNER', 'READER', 'WRITER'])
)
)
if not module.params['scopes']:
module.params['scopes'] = ['https://www.googleapis.com/auth/devstorage.full_control']
state = module.params['state']
kind = 'storage#bucketAccessControl'
fetch = fetch_resource(module, self_link(module), kind)
changed = False
if fetch:
if state == 'present':
if is_different(module, fetch):
fetch = update(module, self_link(module), kind)
changed = True
else:
delete(module, self_link(module), kind)
fetch = {}
changed = True
else:
if state == 'present':
fetch = create(module, collection(module), kind)
changed = True
else:
fetch = {}
fetch.update({'changed': changed})
module.exit_json(**fetch)
def create(module, link, kind):
auth = GcpSession(module, 'storage')
return return_if_object(module, auth.post(link, resource_to_request(module)), kind)
def update(module, link, kind):
auth = GcpSession(module, 'storage')
return return_if_object(module, auth.put(link, resource_to_request(module)), kind)
def delete(module, link, kind):
auth = GcpSession(module, 'storage')
return return_if_object(module, auth.delete(link), kind)
def resource_to_request(module):
request = {
u'kind': 'storage#bucketAccessControl',
u'bucket': replace_resource_dict(module.params.get(u'bucket', {}), 'name'),
u'entity': module.params.get('entity'),
u'entityId': module.params.get('entity_id'),
u'projectTeam': BucketAccessControlProjectTeam(module.params.get('project_team', {}), module).to_request(),
u'role': module.params.get('role')
}
return_vals = {}
for k, v in request.items():
if v:
return_vals[k] = v
return return_vals
def fetch_resource(module, link, kind):
auth = GcpSession(module, 'storage')
return return_if_object(module, auth.get(link), kind)
def self_link(module):
    res = {
        'bucket': replace_resource_dict(module.params['bucket'], 'name'),
        'entity': module.params['entity'],
    }
    return "https://www.googleapis.com/storage/v1/b/{bucket}/acl/{entity}".format(**res)
def collection(module):
    res = {'bucket': replace_resource_dict(module.params['bucket'], 'name')}
    return "https://www.googleapis.com/storage/v1/b/{bucket}/acl".format(**res)
def return_if_object(module, response, kind):
# If not found, return nothing.
if response.status_code == 404:
return None
# If no content, return nothing.
if response.status_code == 204:
return None
try:
module.raise_for_status(response)
result = response.json()
except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
module.fail_json(msg="Invalid JSON response with error: %s" % inst)
if navigate_hash(result, ['error', 'errors']):
module.fail_json(msg=navigate_hash(result, ['error', 'errors']))
if result['kind'] != kind:
module.fail_json(msg="Incorrect result: {kind}".format(**result))
return result
def is_different(module, response):
request = resource_to_request(module)
response = response_to_hash(module, response)
# Remove all output-only from response.
response_vals = {}
for k, v in response.items():
if k in request:
response_vals[k] = v
request_vals = {}
for k, v in request.items():
if k in response:
request_vals[k] = v
return GcpRequest(request_vals) != GcpRequest(response_vals)
# Remove unnecessary properties from the response.
# This is for doing comparisons with Ansible's current parameters.
def response_to_hash(module, response):
return {
u'bucket': response.get(u'bucket'),
u'domain': response.get(u'domain'),
u'email': response.get(u'email'),
u'entity': response.get(u'entity'),
u'entityId': response.get(u'entityId'),
u'id': response.get(u'id'),
u'projectTeam': BucketAccessControlProjectTeam(response.get(u'projectTeam', {}), module).from_response(),
u'role': response.get(u'role')
}
class BucketAccessControlProjectTeam(object):
def __init__(self, request, module):
self.module = module
if request:
self.request = request
else:
self.request = {}
def to_request(self):
return remove_nones_from_dict({
u'projectNumber': self.request.get('project_number'),
u'team': self.request.get('team')
})
def from_response(self):
return remove_nones_from_dict({
u'projectNumber': self.request.get(u'projectNumber'),
u'team': self.request.get(u'team')
})
if __name__ == '__main__':
main()
|
gpl-3.0
|
prakxys/flask
|
Work/TriviaMVA/TriviaMVA/env/Lib/site-packages/setuptools/tests/test_upload_docs.py
|
522
|
2139
|
"""build_ext tests
"""
import sys, os, shutil, tempfile, unittest, site, zipfile
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
SETUP_PY = """\
from setuptools import setup
setup(name='foo')
"""
class TestUploadDocsTest(unittest.TestCase):
def setUp(self):
self.dir = tempfile.mkdtemp()
setup = os.path.join(self.dir, 'setup.py')
f = open(setup, 'w')
f.write(SETUP_PY)
f.close()
self.old_cwd = os.getcwd()
os.chdir(self.dir)
self.upload_dir = os.path.join(self.dir, 'build')
os.mkdir(self.upload_dir)
# A test document.
f = open(os.path.join(self.upload_dir, 'index.html'), 'w')
f.write("Hello world.")
f.close()
# An empty folder.
os.mkdir(os.path.join(self.upload_dir, 'empty'))
if sys.version >= "2.6":
self.old_base = site.USER_BASE
site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp()
self.old_site = site.USER_SITE
site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp()
def tearDown(self):
os.chdir(self.old_cwd)
shutil.rmtree(self.dir)
if sys.version >= "2.6":
shutil.rmtree(site.USER_BASE)
shutil.rmtree(site.USER_SITE)
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
def test_create_zipfile(self):
# Test to make sure zipfile creation handles common cases.
# This explicitly includes a folder containing an empty folder.
dist = Distribution()
cmd = upload_docs(dist)
cmd.upload_dir = self.upload_dir
cmd.target_dir = self.upload_dir
tmp_dir = tempfile.mkdtemp()
tmp_file = os.path.join(tmp_dir, 'foo.zip')
try:
            cmd.create_zipfile(tmp_file)
            assert zipfile.is_zipfile(tmp_file)
            zip_file = zipfile.ZipFile(tmp_file)  # reopen to inspect the archive contents
assert zip_file.namelist() == ['index.html']
zip_file.close()
finally:
shutil.rmtree(tmp_dir)
|
apache-2.0
|
roryk/recipes
|
recipes/deepvariant/dv_call_variants.py
|
28
|
1904
|
#!/opt/anaconda1anaconda2anaconda3/bin/python
#
# Wrapper script for DeepVariant call_variants
BINARY_DIR="/opt/anaconda1anaconda2anaconda3/BINARYSUB"
MODEL_DIRS= {"wgs": "/opt/anaconda1anaconda2anaconda3/WGSMODELSUB",
"wes": "/opt/anaconda1anaconda2anaconda3/WESMODELSUB"}
import argparse
import os
import subprocess
import sys
def real_dirname(path):
"""Return the symlink-resolved, canonicalized directory-portion of path."""
return os.path.realpath(path)
class DVHelp(argparse._HelpAction):
def __call__(self, parser, namespace, values, option_string=None):
print("Baseline DeepVariant arguments")
        print(subprocess.check_output([sys.executable, "%s/call_variants.zip" % real_dirname(BINARY_DIR), "--help"]).decode())
print()
print("Wrapper arguments")
parser.print_help()
def main():
parser = argparse.ArgumentParser(description="DeepVariant call_variants wrapper", add_help=False)
parser.add_argument("--cores", default=1)
parser.add_argument("--outfile", required=True)
parser.add_argument("--examples", required=True, help="Example directory from make_examples")
parser.add_argument("--sample", required=True, help="Sample name")
parser.add_argument("--model", default="wgs", choices=sorted(MODEL_DIRS.keys()),
help="DeepVariant trained model to use, defaults to wgs")
parser.add_argument("-h", "--help", action=DVHelp)
args = parser.parse_args()
bin_dir = real_dirname(BINARY_DIR)
model_dir = real_dirname(MODEL_DIRS[args.model])
py_exe = sys.executable
cmd = ("{py_exe} {bin_dir}/call_variants.zip "
"--outfile {args.outfile} --examples {args.examples}/{args.sample}.tfrecord@{args.cores}.gz "
"--checkpoint {model_dir}/model.ckpt")
sys.exit(subprocess.call(cmd.format(**locals()), shell=True))
if __name__ == '__main__':
main()
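# Example invocation (hypothetical paths and sample name):
#   dv_call_variants.py --outfile calls.tfrecord.gz \
#       --examples /path/to/examples --sample NA12878 --cores 4 --model wgs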
|
mit
|
gdi2290/django
|
django/contrib/postgres/fields/hstore.py
|
14
|
4495
|
import json
from django.contrib.postgres import forms
from django.contrib.postgres.fields.array import ArrayField
from django.core import exceptions
from django.db.models import Field, Lookup, Transform, TextField
from django.utils import six
from django.utils.translation import ugettext_lazy as _
__all__ = ['HStoreField']
class HStoreField(Field):
empty_strings_allowed = False
description = _('Map of strings to strings')
default_error_messages = {
'not_a_string': _('The value of "%(key)s" is not a string.'),
}
def db_type(self, connection):
return 'hstore'
def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
if lookup_type == 'contains':
return [self.get_prep_value(value)]
return super(HStoreField, self).get_db_prep_lookup(lookup_type, value,
connection, prepared=False)
def get_transform(self, name):
transform = super(HStoreField, self).get_transform(name)
if transform:
return transform
return KeyTransformFactory(name)
def validate(self, value, model_instance):
super(HStoreField, self).validate(value, model_instance)
for key, val in value.items():
if not isinstance(val, six.string_types):
raise exceptions.ValidationError(
self.error_messages['not_a_string'],
code='not_a_string',
params={'key': key},
)
def to_python(self, value):
if isinstance(value, six.string_types):
value = json.loads(value)
return value
def value_to_string(self, obj):
value = self._get_val_from_obj(obj)
return json.dumps(value)
def formfield(self, **kwargs):
defaults = {
'form_class': forms.HStoreField,
}
defaults.update(kwargs)
return super(HStoreField, self).formfield(**defaults)
@HStoreField.register_lookup
class HStoreContainsLookup(Lookup):
lookup_name = 'contains'
def as_sql(self, compiler, connection):
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params = lhs_params + rhs_params
return '%s @> %s' % (lhs, rhs), params
@HStoreField.register_lookup
class HStoreContainedByLookup(Lookup):
lookup_name = 'contained_by'
def as_sql(self, compiler, connection):
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params = lhs_params + rhs_params
return '%s <@ %s' % (lhs, rhs), params
@HStoreField.register_lookup
class HasKeyLookup(Lookup):
lookup_name = 'has_key'
def as_sql(self, compiler, connection):
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params = lhs_params + rhs_params
return '%s ? %s' % (lhs, rhs), params
@HStoreField.register_lookup
class HasKeysLookup(Lookup):
lookup_name = 'has_keys'
def as_sql(self, compiler, connection):
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params = lhs_params + rhs_params
return '%s ?& %s' % (lhs, rhs), params
class KeyTransform(Transform):
output_field = TextField()
def __init__(self, key_name, *args, **kwargs):
super(KeyTransform, self).__init__(*args, **kwargs)
self.key_name = key_name
def as_sql(self, compiler, connection):
lhs, params = compiler.compile(self.lhs)
return "%s -> '%s'" % (lhs, self.key_name), params
class KeyTransformFactory(object):
def __init__(self, key_name):
self.key_name = key_name
def __call__(self, *args, **kwargs):
return KeyTransform(self.key_name, *args, **kwargs)
@HStoreField.register_lookup
class KeysTransform(Transform):
lookup_name = 'keys'
output_field = ArrayField(TextField())
def as_sql(self, compiler, connection):
lhs, params = compiler.compile(self.lhs)
return 'akeys(%s)' % lhs, params
@HStoreField.register_lookup
class ValuesTransform(Transform):
lookup_name = 'values'
output_field = ArrayField(TextField())
def as_sql(self, compiler, connection):
lhs, params = compiler.compile(self.lhs)
return 'avals(%s)' % lhs, params
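# Illustrative ORM usage (assumes a model with `data = HStoreField()`):
#   Dog.objects.filter(data__contains={'breed': 'collie'})  # HStoreContainsLookup
#   Dog.objects.filter(data__has_key='breed')               # HasKeyLookup
#   Dog.objects.filter(data__breed='collie')                # KeyTransform on 'breed'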
|
bsd-3-clause
|
stianrh/askbot-nordic
|
askbot/migrations/0168_add_twitter_fields_to_user_profile.py
|
13
|
35766
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from askbot.migrations_api import safe_add_column
class Migration(SchemaMigration):
def forwards(self, orm):
"""add fields twitter_access_token, twitter_handle, social_sharing_mode"""
safe_add_column(
'auth_user',
'twitter_access_token',
self.gf('django.db.models.fields.CharField')(default='', max_length=256)
)
safe_add_column(
'auth_user',
'twitter_handle',
self.gf('django.db.models.fields.CharField')(default='', max_length=32)
)
safe_add_column(
'auth_user',
'social_sharing_mode',
self.gf('django.db.models.fields.IntegerField')(default=0)
)
def backwards(self, orm):
self.delete_column('auth_user', 'twitter_access_token')
self.delete_column('auth_user', 'twitter_handle')
self.delete_column('auth_user', 'social_sharing_mode')
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']", 'null': 'True'}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'recipients': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'incoming_activity'", 'symmetrical': 'False', 'through': "orm['askbot.ActivityAuditStatus']", 'to': "orm['auth.User']"}),
'summary': ('django.db.models.fields.TextField', [], {'default': "''"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.activityauditstatus': {
'Meta': {'unique_together': "(('user', 'activity'),)", 'object_name': 'ActivityAuditStatus'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Activity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Post']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.askwidget': {
'Meta': {'object_name': 'AskWidget'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Group']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'include_text_field': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'inner_style': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'outer_style': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Tag']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'askbot.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.BadgeData']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'askbot.badgedata': {
'Meta': {'ordering': "('slug',)", 'object_name': 'BadgeData'},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': "orm['askbot.Award']", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
'askbot.bulktagsubscription': {
'Meta': {'ordering': "['-date_added']", 'object_name': 'BulkTagSubscription'},
'date_added': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['askbot.Group']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['askbot.Tag']", 'symmetrical': 'False'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False'})
},
'askbot.draftanswer': {
'Meta': {'object_name': 'DraftAnswer'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'draft_answers'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'draft_answers'", 'to': "orm['askbot.Thread']"})
},
'askbot.draftquestion': {
'Meta': {'object_name': 'DraftQuestion'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125', 'null': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True'})
},
'askbot.emailfeedsetting': {
'Meta': {'unique_together': "(('subscriber', 'feed_type'),)", 'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'askbot.favoritequestion': {
'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Thread']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
},
'askbot.group': {
'Meta': {'object_name': 'Group', '_ormbases': ['auth.Group']},
'description': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'described_group'", 'unique': 'True', 'null': 'True', 'to': "orm['askbot.Post']"}),
'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}),
'is_vip': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'logo_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'moderate_answers_to_enquirers': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderate_email': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'openness': ('django.db.models.fields.SmallIntegerField', [], {'default': '2'}),
'preapproved_email_domains': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'preapproved_emails': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'})
},
'askbot.groupmembership': {
'Meta': {'object_name': 'GroupMembership', '_ormbases': ['auth.AuthUserGroups']},
'authusergroups_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.AuthUserGroups']", 'unique': 'True', 'primary_key': 'True'}),
'level': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'})
},
'askbot.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'askbot.post': {
'Meta': {'object_name': 'Post'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'posts'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_posts'", 'null': 'True', 'to': "orm['auth.User']"}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'group_posts'", 'symmetrical': 'False', 'through': "orm['askbot.PostToGroup']", 'to': "orm['askbot.Group']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'language_code': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '16'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_posts'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_posts'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'old_answer_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'old_comment_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'old_question_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'points': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_column': "'score'"}),
'post_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'summary': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'posts'", 'null': 'True', 'blank': 'True', 'to': "orm['askbot.Thread']"}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.postflagreason': {
'Meta': {'object_name': 'PostFlagReason'},
'added_at': ('django.db.models.fields.DateTimeField', [], {}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'details': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'post_reject_reasons'", 'to': "orm['askbot.Post']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'askbot.postrevision': {
'Meta': {'ordering': "('-revision',)", 'unique_together': "(('post', 'revision'),)", 'object_name': 'PostRevision'},
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'approved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'approved_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'postrevisions'", 'to': "orm['auth.User']"}),
'by_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'revisions'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '125', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300', 'blank': 'True'})
},
'askbot.posttogroup': {
'Meta': {'unique_together': "(('post', 'group'),)", 'object_name': 'PostToGroup', 'db_table': "'askbot_post_groups'"},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']"})
},
'askbot.questionview': {
'Meta': {'object_name': 'QuestionView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Post']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
},
'askbot.questionwidget': {
'Meta': {'object_name': 'QuestionWidget'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Group']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order_by': ('django.db.models.fields.CharField', [], {'default': "'-added_at'", 'max_length': '18'}),
'question_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '7'}),
'search_query': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'style': ('django.db.models.fields.TextField', [], {'default': '"\\n@import url(\'http://fonts.googleapis.com/css?family=Yanone+Kaffeesatz:300,400,700\');\\nbody {\\n overflow: hidden;\\n}\\n\\n#container {\\n width: 200px;\\n height: 350px;\\n}\\nul {\\n list-style: none;\\n padding: 5px;\\n margin: 5px;\\n}\\nli {\\n border-bottom: #CCC 1px solid;\\n padding-bottom: 5px;\\n padding-top: 5px;\\n}\\nli:last-child {\\n border: none;\\n}\\na {\\n text-decoration: none;\\n color: #464646;\\n font-family: \'Yanone Kaffeesatz\', sans-serif;\\n font-size: 15px;\\n}\\n"', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'askbot.replyaddress': {
'Meta': {'object_name': 'ReplyAddress'},
'address': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '25'}),
'allowed_from_email': ('django.db.models.fields.EmailField', [], {'max_length': '150'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reply_addresses'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'reply_action': ('django.db.models.fields.CharField', [], {'default': "'auto_answer_or_comment'", 'max_length': '32'}),
'response_post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'edit_addresses'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'used_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']", 'null': 'True', 'blank': 'True'}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.tag': {
'Meta': {'ordering': "('-used_count', 'name')", 'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'suggested_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'suggested_tags'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'tag_wiki': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'described_tag'", 'unique': 'True', 'null': 'True', 'to': "orm['askbot.Post']"}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.thread': {
'Meta': {'object_name': 'Thread'},
'accepted_answer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'answer_accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'unused_favorite_threads'", 'symmetrical': 'False', 'through': "orm['askbot.FavoriteQuestion']", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_threads'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'group_threads'", 'symmetrical': 'False', 'through': "orm['askbot.ThreadToGroup']", 'to': "orm['askbot.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '16'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'unused_last_active_in_threads'", 'to': "orm['auth.User']"}),
'points': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_column': "'score'"}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'threads'", 'symmetrical': 'False', 'to': "orm['askbot.Tag']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.threadtogroup': {
'Meta': {'unique_together': "(('thread', 'group'),)", 'object_name': 'ThreadToGroup', 'db_table': "'askbot_thread_groups'"},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Thread']"}),
'visibility': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'})
},
'askbot.vote': {
'Meta': {'unique_together': "(('user', 'voted_post'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'voted_post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['askbot.Post']"})
},
'auth.authusergroups': {
'Meta': {'unique_together': "(('group', 'user'),)", 'object_name': 'AuthUserGroups', 'db_table': "'auth_user_groups'", 'managed': 'False'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 4, 14, 23, 39, 36, 994613)'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_signature': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_fake': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'languages': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '128'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 4, 14, 23, 39, 36, 994431)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'show_marked_tags': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'subscribed_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['askbot']
|
gpl-3.0
|
pyload/pyload
|
src/pyload/plugins/accounts/UptoboxCom.py
|
1
|
1145
|
# -*- coding: utf-8 -*-
import re
from ..base.xfs_account import XFSAccount
class UptoboxCom(XFSAccount):
__name__ = "UptoboxCom"
__type__ = "account"
__version__ = "0.25"
__status__ = "testing"
__description__ = """Uptobox.com account plugin"""
__license__ = "GPLv3"
__authors__ = [
("benbox69", "[email protected]"),
("GammaC0de", "nitzo2001[AT]yahoo[DOT]com"),
]
PLUGIN_DOMAIN = "uptobox.com"
LOGIN_URL = "https://uptobox.com/login"
LOGIN_SKIP_PATTERN = r"https://uptobox\.com/logout"
PREMIUM_PATTERN = r"Premium member"
VALID_UNTIL_PATTERN = r"class='expiration-date .+?'>(\d{1,2} [\w^_]+ \d{4})"
def signin(self, user, password, data):
html = self.load(self.LOGIN_URL, cookies=self.COOKIES)
if re.search(self.LOGIN_SKIP_PATTERN, html):
self.skip_login()
html = self.load(
self.LOGIN_URL,
post={"login": user, "password": password},
ref=self.LOGIN_URL,
cookies=self.COOKIES,
)
if re.search(self.LOGIN_SKIP_PATTERN, html) is None:
self.fail_login()
|
agpl-3.0
|
ESOedX/edx-platform
|
lms/djangoapps/bulk_email/tests/test_tasks.py
|
1
|
23176
|
# -*- coding: utf-8 -*-
"""
Unit tests for LMS instructor-initiated background tasks.
Runs tasks on answers to course problems to validate that code
paths actually work.
"""
from __future__ import absolute_import, print_function
import json
from itertools import chain, cycle, repeat
from smtplib import SMTPAuthenticationError, SMTPConnectError, SMTPDataError, SMTPServerDisconnected
from uuid import uuid4
from boto.exception import AWSConnectionError
from boto.ses.exceptions import (
SESAddressBlacklistedError,
SESAddressNotVerifiedError,
SESDailyQuotaExceededError,
SESDomainEndsWithDotError,
SESDomainNotConfirmedError,
SESIdentityNotVerifiedError,
SESIllegalAddressError,
SESLocalAddressCharacterError,
SESMaxSendingRateExceededError
)
from celery.states import FAILURE, SUCCESS
from django.conf import settings
from django.core.management import call_command
from mock import Mock, patch
from opaque_keys.edx.locator import CourseLocator
from six.moves import range
from bulk_email.models import SEND_TO_LEARNERS, SEND_TO_MYSELF, SEND_TO_STAFF, CourseEmail, Optout
from bulk_email.tasks import _get_course_email_context
from lms.djangoapps.instructor_task.models import InstructorTask
from lms.djangoapps.instructor_task.subtasks import SubtaskStatus, update_subtask_status
from lms.djangoapps.instructor_task.tasks import send_bulk_course_email
from lms.djangoapps.instructor_task.tests.factories import InstructorTaskFactory
from lms.djangoapps.instructor_task.tests.test_base import InstructorTaskCourseTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class TestTaskFailure(Exception):
"""Dummy exception used for unit tests."""
pass
def my_update_subtask_status(entry_id, current_task_id, new_subtask_status):
"""
Check whether a subtask has been updated before really updating.
Check whether a subtask which has been retried
has had the retry already write its results here before the code
that was invoking the retry had a chance to update this status.
This is the norm in "eager" mode (used by tests) where the retry is called
and run to completion before control is returned to the code that
invoked the retry. If the retries eventually end in failure (e.g. due to
a maximum number of retries being attempted), the "eager" code will return
the error for each retry as it is popped off the stack. We want to just ignore
the later updates that are called as the result of the earlier retries.
This should not be an issue in production, where status is updated before
a task is retried, and is then updated afterwards if the retry fails.
"""
entry = InstructorTask.objects.get(pk=entry_id)
subtask_dict = json.loads(entry.subtasks)
subtask_status_info = subtask_dict['status']
current_subtask_status = SubtaskStatus.from_dict(subtask_status_info[current_task_id])
current_retry_count = current_subtask_status.get_retry_count()
new_retry_count = new_subtask_status.get_retry_count()
if current_retry_count <= new_retry_count:
update_subtask_status(entry_id, current_task_id, new_subtask_status)
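# Illustration (hypothetical counts): if the stored SubtaskStatus already
# records retry_count=3 and a stale callback arrives with retry_count=2, the
# condition above drops the late update; a status carrying retry_count >= 3
# is written through to the InstructorTask entry.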
@patch('bulk_email.models.html_to_text', Mock(return_value='Mocking CourseEmail.text_message', autospec=True))
class TestBulkEmailInstructorTask(InstructorTaskCourseTestCase):
"""Tests instructor task that send bulk email."""
def setUp(self):
super(TestBulkEmailInstructorTask, self).setUp()
self.initialize_course()
self.instructor = self.create_instructor('instructor')
# load initial content (since we don't run migrations as part of tests):
call_command("loaddata", "course_email_template.json")
def _create_input_entry(self, course_id=None):
"""
Creates a InstructorTask entry for testing.
Overrides the base class version in that this creates CourseEmail.
"""
targets = [SEND_TO_MYSELF, SEND_TO_STAFF, SEND_TO_LEARNERS]
course_id = course_id or self.course.id
course_email = CourseEmail.create(
course_id, self.instructor, targets, "Test Subject", "<p>This is a test message</p>"
)
task_input = {'email_id': course_email.id}
task_id = str(uuid4())
instructor_task = InstructorTaskFactory.create(
course_id=course_id,
requester=self.instructor,
task_input=json.dumps(task_input),
task_key='dummy value',
task_id=task_id,
)
return instructor_task
def _run_task_with_mock_celery(self, task_class, entry_id, task_id):
"""Mock was not needed for some tests, testing to see if it's needed at all."""
task_args = [entry_id, {}]
return task_class.apply(task_args, task_id=task_id).get()
def test_email_missing_current_task(self):
task_entry = self._create_input_entry()
with self.assertRaises(ValueError):
send_bulk_course_email(task_entry.id, {})
def test_email_undefined_course(self):
# Check that we fail when passing in a course that doesn't exist.
task_entry = self._create_input_entry(course_id=CourseLocator("bogus", "course", "id"))
with self.assertRaises(ValueError):
self._run_task_with_mock_celery(send_bulk_course_email, task_entry.id, task_entry.task_id)
def test_bad_task_id_on_update(self):
task_entry = self._create_input_entry()
def dummy_update_subtask_status(entry_id, _current_task_id, new_subtask_status):
"""Passes a bad value for task_id to test update_subtask_status"""
bogus_task_id = "this-is-bogus"
update_subtask_status(entry_id, bogus_task_id, new_subtask_status)
with self.assertRaises(ValueError):
with patch('bulk_email.tasks.update_subtask_status', dummy_update_subtask_status):
send_bulk_course_email(task_entry.id, {})
def _create_students(self, num_students):
"""Create students for testing"""
return [self.create_student('robot%d' % i) for i in range(num_students)]
def _assert_single_subtask_status(self, entry, succeeded, failed=0, skipped=0, retried_nomax=0, retried_withmax=0):
"""Compare counts with 'subtasks' entry in InstructorTask table."""
subtask_info = json.loads(entry.subtasks)
# verify subtask-level counts:
self.assertEquals(subtask_info.get('total'), 1)
self.assertEquals(subtask_info.get('succeeded'), 1 if succeeded > 0 else 0)
self.assertEquals(subtask_info.get('failed'), 0 if succeeded > 0 else 1)
# verify individual subtask status:
subtask_status_info = subtask_info.get('status')
task_id_list = list(subtask_status_info.keys())
self.assertEquals(len(task_id_list), 1)
task_id = task_id_list[0]
subtask_status = subtask_status_info.get(task_id)
print(u"Testing subtask status: {}".format(subtask_status))
self.assertEquals(subtask_status.get('task_id'), task_id)
self.assertEquals(subtask_status.get('attempted'), succeeded + failed)
self.assertEquals(subtask_status.get('succeeded'), succeeded)
self.assertEquals(subtask_status.get('skipped'), skipped)
self.assertEquals(subtask_status.get('failed'), failed)
self.assertEquals(subtask_status.get('retried_nomax'), retried_nomax)
self.assertEquals(subtask_status.get('retried_withmax'), retried_withmax)
self.assertEquals(subtask_status.get('state'), SUCCESS if succeeded > 0 else FAILURE)
def _test_run_with_task(
self, task_class, action_name, total, succeeded,
failed=0, skipped=0, retried_nomax=0, retried_withmax=0):
"""Run a task and check the number of emails processed."""
task_entry = self._create_input_entry()
parent_status = self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id)
# check return value
self.assertEquals(parent_status.get('total'), total)
self.assertEquals(parent_status.get('action_name'), action_name)
# compare with task_output entry in InstructorTask table:
entry = InstructorTask.objects.get(id=task_entry.id)
status = json.loads(entry.task_output)
self.assertEquals(status.get('attempted'), succeeded + failed)
self.assertEquals(status.get('succeeded'), succeeded)
self.assertEquals(status.get('skipped'), skipped)
self.assertEquals(status.get('failed'), failed)
self.assertEquals(status.get('total'), total)
self.assertEquals(status.get('action_name'), action_name)
self.assertGreater(status.get('duration_ms'), 0)
self.assertEquals(entry.task_state, SUCCESS)
self._assert_single_subtask_status(entry, succeeded, failed, skipped, retried_nomax, retried_withmax)
return entry
def test_successful(self):
# Select number of emails to fit into a single subtask.
num_emails = settings.BULK_EMAIL_EMAILS_PER_TASK
# We also send email to the instructor:
self._create_students(num_emails - 1)
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
get_conn.return_value.send_messages.side_effect = cycle([None])
self._test_run_with_task(send_bulk_course_email, 'emailed', num_emails, num_emails)
def test_successful_twice(self):
# Select number of emails to fit into a single subtask.
num_emails = settings.BULK_EMAIL_EMAILS_PER_TASK
# We also send email to the instructor:
self._create_students(num_emails - 1)
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
get_conn.return_value.send_messages.side_effect = cycle([None])
task_entry = self._test_run_with_task(send_bulk_course_email, 'emailed', num_emails, num_emails)
# submit the same task a second time, and confirm that it is not run again.
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
get_conn.return_value.send_messages.side_effect = cycle([Exception("This should not happen!")])
parent_status = self._run_task_with_mock_celery(send_bulk_course_email, task_entry.id, task_entry.task_id)
self.assertEquals(parent_status.get('total'), num_emails)
self.assertEquals(parent_status.get('succeeded'), num_emails)
self.assertEquals(parent_status.get('failed'), 0)
def test_unactivated_user(self):
# Select number of emails to fit into a single subtask.
num_emails = settings.BULK_EMAIL_EMAILS_PER_TASK
# We also send email to the instructor:
students = self._create_students(num_emails - 1)
# mark a student as not yet having activated their email:
student = students[0]
student.is_active = False
student.save()
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
get_conn.return_value.send_messages.side_effect = cycle([None])
self._test_run_with_task(send_bulk_course_email, 'emailed', num_emails - 1, num_emails - 1)
def test_skipped(self):
# Select number of emails to fit into a single subtask.
num_emails = settings.BULK_EMAIL_EMAILS_PER_TASK
# We also send email to the instructor:
students = self._create_students(num_emails - 1)
# have every fourth student optout:
expected_skipped = int((num_emails + 3) / 4.0)
expected_succeeds = num_emails - expected_skipped
for index in range(0, num_emails, 4):
Optout.objects.create(user=students[index], course_id=self.course.id)
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
get_conn.return_value.send_messages.side_effect = cycle([None])
self._test_run_with_task(
send_bulk_course_email, 'emailed', num_emails, expected_succeeds, skipped=expected_skipped
)
def _test_email_address_failures(self, exception):
"""Test that celery handles bad address errors by failing and not retrying."""
# Select number of emails to fit into a single subtask.
num_emails = settings.BULK_EMAIL_EMAILS_PER_TASK
# We also send email to the instructor:
self._create_students(num_emails - 1)
expected_fails = int((num_emails + 3) / 4.0)
expected_succeeds = num_emails - expected_fails
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
# have every fourth email fail due to some address failure:
get_conn.return_value.send_messages.side_effect = cycle([exception, None, None, None])
self._test_run_with_task(
send_bulk_course_email, 'emailed', num_emails, expected_succeeds, failed=expected_fails
)
def test_smtp_blacklisted_user(self):
# Test that celery handles permanent SMTPDataErrors by failing and not retrying.
self._test_email_address_failures(SMTPDataError(554, "Email address is blacklisted"))
def test_ses_blacklisted_user(self):
# Test that celery handles permanent SMTPDataErrors by failing and not retrying.
self._test_email_address_failures(SESAddressBlacklistedError(554, "Email address is blacklisted"))
def test_ses_illegal_address(self):
# Test that celery handles permanent SMTPDataErrors by failing and not retrying.
self._test_email_address_failures(SESIllegalAddressError(554, "Email address is illegal"))
def test_ses_local_address_character_error(self):
# Test that celery handles permanent SMTPDataErrors by failing and not retrying.
self._test_email_address_failures(SESLocalAddressCharacterError(554, "Email address contains a bad character"))
def test_ses_domain_ends_with_dot(self):
# Test that celery handles permanent SMTPDataErrors by failing and not retrying.
self._test_email_address_failures(SESDomainEndsWithDotError(554, "Email address ends with a dot"))
def test_bulk_email_skip_with_non_ascii_emails(self):
"""
        Tests that bulk email skips email addresses containing non-ASCII
        characters and does not fail.
"""
num_emails = 10
emails_with_non_ascii_chars = 3
num_of_course_instructors = 1
students = [self.create_student('robot%d' % i) for i in range(num_emails)]
for student in students[:emails_with_non_ascii_chars]:
student.email = '{username}@tesá.com'.format(username=student.username)
student.save()
total = num_emails + num_of_course_instructors
expected_succeeds = num_emails - emails_with_non_ascii_chars + num_of_course_instructors
expected_fails = emails_with_non_ascii_chars
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
get_conn.return_value.send_messages.side_effect = cycle([None])
self._test_run_with_task(
task_class=send_bulk_course_email,
action_name='emailed',
total=total,
succeeded=expected_succeeds,
failed=expected_fails
)
def _test_retry_after_limited_retry_error(self, exception):
"""Test that celery handles connection failures by retrying."""
# If we want the batch to succeed, we need to send fewer emails
# than the max retries, so that the max is not triggered.
num_emails = settings.BULK_EMAIL_MAX_RETRIES
# We also send email to the instructor:
self._create_students(num_emails - 1)
expected_fails = 0
expected_succeeds = num_emails
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
# Have every other mail attempt fail due to disconnection.
get_conn.return_value.send_messages.side_effect = cycle([exception, None])
self._test_run_with_task(
send_bulk_course_email,
'emailed',
num_emails,
expected_succeeds,
failed=expected_fails,
retried_withmax=num_emails
)
def _test_max_retry_limit_causes_failure(self, exception):
"""Test that celery can hit a maximum number of retries."""
# Doesn't really matter how many recipients, since we expect
# to fail on the first.
num_emails = 10
# We also send email to the instructor:
self._create_students(num_emails - 1)
expected_fails = num_emails
expected_succeeds = 0
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
# always fail to connect, triggering repeated retries until limit is hit:
get_conn.return_value.send_messages.side_effect = cycle([exception])
with patch('bulk_email.tasks.update_subtask_status', my_update_subtask_status):
self._test_run_with_task(
send_bulk_course_email,
'emailed',
num_emails,
expected_succeeds,
failed=expected_fails,
retried_withmax=(settings.BULK_EMAIL_MAX_RETRIES + 1)
)
def test_retry_after_smtp_disconnect(self):
self._test_retry_after_limited_retry_error(SMTPServerDisconnected(425, "Disconnecting"))
def test_max_retry_after_smtp_disconnect(self):
self._test_max_retry_limit_causes_failure(SMTPServerDisconnected(425, "Disconnecting"))
def test_retry_after_smtp_connect_error(self):
self._test_retry_after_limited_retry_error(SMTPConnectError(424, "Bad Connection"))
def test_max_retry_after_smtp_connect_error(self):
self._test_max_retry_limit_causes_failure(SMTPConnectError(424, "Bad Connection"))
def test_retry_after_aws_connect_error(self):
self._test_retry_after_limited_retry_error(
AWSConnectionError("Unable to provide secure connection through proxy")
)
def test_max_retry_after_aws_connect_error(self):
self._test_max_retry_limit_causes_failure(
AWSConnectionError("Unable to provide secure connection through proxy")
)
def test_retry_after_general_error(self):
self._test_retry_after_limited_retry_error(Exception("This is some random exception."))
def test_max_retry_after_general_error(self):
self._test_max_retry_limit_causes_failure(Exception("This is some random exception."))
def _test_retry_after_unlimited_retry_error(self, exception):
"""Test that celery handles throttling failures by retrying."""
num_emails = 8
# We also send email to the instructor:
self._create_students(num_emails - 1)
expected_fails = 0
expected_succeeds = num_emails
# Note that because celery in eager mode will call retries synchronously,
# each retry will increase the stack depth. It turns out that there is a
# maximum depth at which a RuntimeError is raised ("maximum recursion depth
# exceeded"). The maximum recursion depth is 90, so
# num_emails * expected_retries < 90.
expected_retries = 10
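        # Sanity check (illustrative, under the constraint stated above):
        # 8 emails * 10 retries = 80 nested retry calls, comfortably below
        # the ~90-deep recursion ceiling.
        assert num_emails * expected_retries < 90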
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
# Cycle through N throttling errors followed by a success.
get_conn.return_value.send_messages.side_effect = cycle(
chain(repeat(exception, expected_retries), [None])
)
self._test_run_with_task(
send_bulk_course_email,
'emailed',
num_emails,
expected_succeeds,
failed=expected_fails,
retried_nomax=(expected_retries * num_emails)
)
def test_retry_after_smtp_throttling_error(self):
self._test_retry_after_unlimited_retry_error(SMTPDataError(455, "Throttling: Sending rate exceeded"))
def test_retry_after_ses_throttling_error(self):
self._test_retry_after_unlimited_retry_error(
SESMaxSendingRateExceededError(455, "Throttling: Sending rate exceeded")
)
def _test_immediate_failure(self, exception):
"""Test that celery can hit a maximum number of retries."""
# Doesn't really matter how many recipients, since we expect
# to fail on the first.
num_emails = 10
# We also send email to the instructor:
self._create_students(num_emails - 1)
expected_fails = num_emails
expected_succeeds = 0
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
# always fail to connect, triggering repeated retries until limit is hit:
get_conn.return_value.send_messages.side_effect = cycle([exception])
self._test_run_with_task(
send_bulk_course_email,
'emailed',
num_emails,
expected_succeeds,
failed=expected_fails,
)
def test_failure_on_unhandled_smtp(self):
self._test_immediate_failure(SMTPAuthenticationError(403, "That password doesn't work!"))
def test_failure_on_ses_quota_exceeded(self):
self._test_immediate_failure(SESDailyQuotaExceededError(403, "You're done for the day!"))
def test_failure_on_ses_address_not_verified(self):
self._test_immediate_failure(SESAddressNotVerifiedError(403, "Who *are* you?"))
def test_failure_on_ses_identity_not_verified(self):
self._test_immediate_failure(SESIdentityNotVerifiedError(403, "May I please see an ID!"))
def test_failure_on_ses_domain_not_confirmed(self):
self._test_immediate_failure(SESDomainNotConfirmedError(403, "You're out of bounds!"))
def test_bulk_emails_with_unicode_course_image_name(self):
# Test bulk email with unicode characters in course image name
course_image = u'在淡水測試.jpg'
self.course = CourseFactory.create(course_image=course_image)
num_emails = 2
# We also send email to the instructor:
self._create_students(num_emails - 1)
with patch('bulk_email.tasks.get_connection', autospec=True) as get_conn:
get_conn.return_value.send_messages.side_effect = cycle([None])
self._test_run_with_task(send_bulk_course_email, 'emailed', num_emails, num_emails)
def test_get_course_email_context_has_correct_keys(self):
result = _get_course_email_context(self.course)
self.assertIn('course_title', result)
self.assertIn('course_root', result)
self.assertIn('course_language', result)
self.assertIn('course_url', result)
self.assertIn('course_image_url', result)
self.assertIn('course_end_date', result)
self.assertIn('account_settings_url', result)
self.assertIn('email_settings_url', result)
self.assertIn('platform_name', result)
|
agpl-3.0
|
mgedmin/ansible
|
lib/ansible/modules/cloud/cloudstack/cs_zone_facts.py
|
48
|
5787
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2016, René Moser <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: cs_zone_facts
short_description: Gathering facts about zones from Apache CloudStack based clouds.
description:
  - Gather facts about a zone from the CloudStack API.
version_added: "2.1"
author: "René Moser (@resmo)"
options:
name:
description:
- Name of the zone.
required: true
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- cs_zone_facts:
name: ch-gva-1
delegate_to: localhost
- debug:
var: cloudstack_zone
'''
RETURN = '''
---
cloudstack_zone.id:
description: UUID of the zone.
returned: success
type: string
sample: 04589590-ac63-4ffc-93f5-b698b8ac38b6
cloudstack_zone.name:
description: Name of the zone.
returned: success
type: string
sample: zone01
cloudstack_zone.dns1:
description: First DNS for the zone.
returned: success
type: string
sample: 8.8.8.8
cloudstack_zone.dns2:
description: Second DNS for the zone.
returned: success
type: string
sample: 8.8.4.4
cloudstack_zone.internal_dns1:
description: First internal DNS for the zone.
returned: success
type: string
sample: 8.8.8.8
cloudstack_zone.internal_dns2:
description: Second internal DNS for the zone.
returned: success
type: string
sample: 8.8.4.4
cloudstack_zone.dns1_ipv6:
description: First IPv6 DNS for the zone.
returned: success
type: string
sample: "2001:4860:4860::8888"
cloudstack_zone.dns2_ipv6:
description: Second IPv6 DNS for the zone.
returned: success
type: string
sample: "2001:4860:4860::8844"
cloudstack_zone.allocation_state:
description: State of the zone.
returned: success
type: string
sample: Enabled
cloudstack_zone.domain:
description: Domain the zone is related to.
returned: success
type: string
sample: ROOT
cloudstack_zone.network_domain:
description: Network domain for the zone.
returned: success
type: string
sample: example.com
cloudstack_zone.network_type:
description: Network type for the zone.
returned: success
type: string
sample: basic
cloudstack_zone.local_storage_enabled:
description: Local storage offering enabled.
returned: success
type: bool
sample: false
cloudstack_zone.securitygroups_enabled:
description: Security groups support is enabled.
returned: success
type: bool
sample: false
cloudstack_zone.guest_cidr_address:
description: Guest CIDR address for the zone
returned: success
type: string
sample: 10.1.1.0/24
cloudstack_zone.dhcp_provider:
description: DHCP provider for the zone
returned: success
type: string
sample: VirtualRouter
cloudstack_zone.zone_token:
description: Zone token
returned: success
type: string
sample: ccb0a60c-79c8-3230-ab8b-8bdbe8c45bb7
cloudstack_zone.tags:
description: List of resource tags associated with the zone.
returned: success
type: dict
sample: [ { "key": "foo", "value": "bar" } ]
'''
import base64
# import cloudstack common
from ansible.module_utils.cloudstack import *
class AnsibleCloudStackZoneFacts(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackZoneFacts, self).__init__(module)
self.returns = {
'dns1': 'dns1',
'dns2': 'dns2',
'internaldns1': 'internal_dns1',
'internaldns2': 'internal_dns2',
'ipv6dns1': 'dns1_ipv6',
'ipv6dns2': 'dns2_ipv6',
'domain': 'network_domain',
'networktype': 'network_type',
'securitygroupsenabled': 'securitygroups_enabled',
'localstorageenabled': 'local_storage_enabled',
'guestcidraddress': 'guest_cidr_address',
'dhcpprovider': 'dhcp_provider',
'allocationstate': 'allocation_state',
'zonetoken': 'zone_token',
}
self.facts = {
'cloudstack_zone': None,
}
def get_zone(self):
if not self.zone:
# TODO: add param key signature in get_zone()
self.module.params['zone'] = self.module.params.get('name')
super(AnsibleCloudStackZoneFacts, self).get_zone()
return self.zone
def run(self):
zone = self.get_zone()
self.facts['cloudstack_zone'] = self.get_result(zone)
return self.facts
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
name = dict(required=True),
))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=False,
)
cs_zone_facts = AnsibleCloudStackZoneFacts(module=module).run()
cs_facts_result = dict(changed=False, ansible_facts=cs_zone_facts)
module.exit_json(**cs_facts_result)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
SUSE/kiwi
|
test/unit/tasks/system_build_test.py
|
1
|
12177
|
import logging
import sys
import mock
import os
from pytest import fixture
from mock import patch, call
import kiwi
from ..test_helper import argv_kiwi_tests
from kiwi.tasks.system_build import SystemBuildTask
class TestSystemBuildTask:
@fixture(autouse=True)
def inject_fixtures(self, caplog):
self._caplog = caplog
def setup(self):
sys.argv = [
sys.argv[0], '--profile', 'vmxFlavour', 'system', 'build',
'--description', '../data/description',
'--target-dir', 'some-target'
]
self.abs_target_dir = os.path.abspath('some-target')
kiwi.tasks.system_build.Privileges = mock.Mock()
kiwi.tasks.system_build.Path = mock.Mock()
kiwi.tasks.system_build.Help = mock.Mock(
return_value=mock.Mock()
)
self.manager = mock.Mock()
self.system_prepare = mock.Mock()
self.system_prepare.setup_repositories = mock.Mock(
return_value=self.manager
)
self.runtime_checker = mock.Mock()
kiwi.tasks.base.RuntimeChecker = mock.Mock(
return_value=self.runtime_checker
)
self.runtime_config = mock.Mock()
self.runtime_config.get_disabled_runtime_checks.return_value = []
kiwi.tasks.base.RuntimeConfig = mock.Mock(
return_value=self.runtime_config
)
kiwi.tasks.system_build.SystemPrepare = mock.Mock(
return_value=self.system_prepare
)
self.setup = mock.Mock()
kiwi.tasks.system_build.SystemSetup = mock.Mock(
return_value=self.setup
)
self.profile = mock.Mock()
self.profile.dot_profile = dict()
kiwi.tasks.system_build.Profile = mock.Mock(
return_value=self.profile
)
self.result = mock.Mock()
self.builder = mock.MagicMock()
self.builder.create = mock.Mock(
return_value=self.result
)
kiwi.tasks.system_build.ImageBuilder.new = mock.Mock(
return_value=self.builder
)
self.task = SystemBuildTask()
def teardown(self):
sys.argv = argv_kiwi_tests
def _init_command_args(self):
self.task.command_args = {}
self.task.command_args['help'] = False
self.task.command_args['build'] = False
self.task.command_args['--allow-existing-root'] = True
self.task.command_args['--description'] = '../data/description'
self.task.command_args['--target-dir'] = 'some-target'
self.task.command_args['--set-repo'] = None
self.task.command_args['--add-repo'] = []
self.task.command_args['--add-package'] = []
self.task.command_args['--add-bootstrap-package'] = []
self.task.command_args['--delete-package'] = []
self.task.command_args['--ignore-repos'] = False
self.task.command_args['--ignore-repos-used-for-build'] = False
self.task.command_args['--set-container-derived-from'] = None
self.task.command_args['--set-container-tag'] = None
self.task.command_args['--add-container-label'] = []
self.task.command_args['--clear-cache'] = False
self.task.command_args['--signing-key'] = None
@patch('kiwi.logger.Logger.set_logfile')
def test_process_system_build(self, mock_log):
self._init_command_args()
self.task.command_args['build'] = True
self.task.process()
self.runtime_checker.\
check_boot_description_exists.assert_called_once_with()
self.runtime_checker.\
check_initrd_selection_required.assert_called_once_with()
self.runtime_checker.\
check_consistent_kernel_in_boot_and_system_image.\
assert_called_once_with()
self.runtime_checker.\
check_container_tool_chain_installed.assert_called_once_with()
self.runtime_checker.\
check_volume_setup_defines_multiple_fullsize_volumes.\
assert_called_once_with()
self.runtime_checker.\
check_volume_setup_has_no_root_definition.\
assert_called_once_with()
self.runtime_checker.\
check_volume_label_used_with_lvm.assert_called_once_with()
self.runtime_checker.\
check_swap_name_used_with_lvm.assert_called_once_with()
self.runtime_checker.\
check_xen_uniquely_setup_as_server_or_guest.\
assert_called_once_with()
self.runtime_checker.\
check_target_directory_not_in_shared_cache.\
assert_called_once_with(self.abs_target_dir)
self.runtime_checker.\
check_mediacheck_installed.assert_called_once_with()
self.runtime_checker.\
check_dracut_module_for_live_iso_in_package_list.\
assert_called_once_with()
self.runtime_checker.\
check_repositories_configured.assert_called_once_with()
self.runtime_checker.\
check_dracut_module_for_disk_overlay_in_package_list.\
assert_called_once_with()
self.runtime_checker.\
check_dracut_module_for_disk_oem_in_package_list.\
assert_called_once_with()
self.runtime_checker.\
check_dracut_module_for_oem_install_in_package_list.\
assert_called_once_with()
self.runtime_checker.\
check_efi_mode_for_disk_overlay_correctly_setup.\
assert_called_once_with()
self.runtime_checker.\
check_architecture_supports_iso_firmware_setup.\
assert_called_once_with()
self.system_prepare.setup_repositories.assert_called_once_with(
False, None
)
self.system_prepare.install_bootstrap.assert_called_once_with(
self.manager, []
)
self.system_prepare.install_system.assert_called_once_with(
self.manager
)
self.profile.create.assert_called_once_with(
self.abs_target_dir + '/build/image-root/.profile'
)
self.setup.import_description.assert_called_once_with()
self.setup.import_overlay_files.assert_called_once_with()
self.setup.import_repositories_marked_as_imageinclude.\
assert_called_once_with()
self.setup.call_config_script.assert_called_once_with()
self.setup.import_image_identifier.assert_called_once_with()
self.setup.setup_groups.assert_called_once_with()
self.setup.setup_users.assert_called_once_with()
self.setup.setup_keyboard_map.assert_called_once_with()
self.setup.setup_locale.assert_called_once_with()
self.setup.setup_plymouth_splash.assert_called_once_with()
self.setup.setup_timezone.assert_called_once_with()
self.setup.setup_permissions.assert_called_once_with()
self.system_prepare.pinch_system.assert_has_calls(
[call(force=False), call(force=True)]
)
assert self.system_prepare.clean_package_manager_leftovers.called
self.setup.call_image_script.assert_called_once_with()
self.builder.create.assert_called_once_with()
self.result.print_results.assert_called_once_with()
self.result.dump.assert_called_once_with(
os.sep.join([self.abs_target_dir, 'kiwi.result'])
)
@patch('kiwi.logger.Logger.set_logfile')
def test_process_system_build_add_package(self, mock_log):
self._init_command_args()
self.task.command_args['--add-package'] = ['vim']
self.task.process()
self.system_prepare.setup_repositories.assert_called_once_with(
False, None
)
self.system_prepare.install_packages.assert_called_once_with(
self.manager, ['vim']
)
@patch('kiwi.logger.Logger.set_logfile')
def test_process_system_update_delete_package(self, mock_log):
self._init_command_args()
self.task.command_args['--delete-package'] = ['vim']
self.task.process()
self.system_prepare.setup_repositories.assert_called_once_with(
False, None
)
self.system_prepare.delete_packages.assert_called_once_with(
self.manager, ['vim']
)
@patch('kiwi.xml_state.XMLState.set_container_config_tag')
@patch('kiwi.logger.Logger.set_logfile')
def test_process_system_build_prepare_stage_set_container_tag(
self, mock_log, mock_set_container_tag
):
self._init_command_args()
self.task.command_args['--set-container-tag'] = 'new_tag'
self.task.process()
mock_set_container_tag.assert_called_once_with(
'new_tag'
)
@patch('kiwi.xml_state.XMLState.add_container_config_label')
@patch('kiwi.logger.Logger.set_logfile')
def test_process_system_build_add_container_label(
self, mock_log, mock_add_container_label
):
self._init_command_args()
self.task.command_args['--add-container-label'] = [
'newLabel=value', 'anotherLabel=my=crazy value'
]
self.task.process()
mock_add_container_label.assert_has_calls([
call('newLabel', 'value'),
call('anotherLabel', 'my=crazy value')
])
@patch('kiwi.logger.Logger.set_logfile')
def test_process_system_build_add_container_label_invalid_format(
self, mock_logger
):
self._init_command_args()
self.task.command_args['--add-container-label'] = ['newLabel:value']
with self._caplog.at_level(logging.WARNING):
self.task.process()
@patch('kiwi.xml_state.XMLState.set_derived_from_image_uri')
@patch('kiwi.logger.Logger.set_logfile')
def test_process_system_build_prepare_stage_set_derived_from_uri(
self, mock_log, mock_set_derived_from_uri
):
self._init_command_args()
self.task.command_args['--set-container-derived-from'] = 'file:///new'
self.task.process()
mock_set_derived_from_uri.assert_called_once_with(
'file:///new'
)
@patch('kiwi.xml_state.XMLState.set_repository')
@patch('kiwi.logger.Logger.set_logfile')
def test_process_system_build_prepare_stage_set_repo(
self, mock_log, mock_set_repo
):
self._init_command_args()
self.task.command_args['--set-repo'] = 'http://example.com,yast2,alias'
self.task.process()
mock_set_repo.assert_called_once_with(
'http://example.com', 'yast2', 'alias', None, None, None
)
@patch('kiwi.xml_state.XMLState.add_repository')
@patch('kiwi.logger.Logger.set_logfile')
def test_process_system_build_prepare_stage_add_repo(
self, mock_log, mock_add_repo
):
self._init_command_args()
self.task.command_args['--add-repo'] = [
'http://example.com,yast2,alias,99,false,true'
]
self.task.process()
mock_add_repo.assert_called_once_with(
'http://example.com', 'yast2', 'alias', '99', False, True
)
def test_process_system_build_help(self):
self._init_command_args()
self.task.command_args['help'] = True
self.task.command_args['build'] = True
self.task.process()
self.task.manual.show.assert_called_once_with(
'kiwi::system::build'
)
@patch('kiwi.xml_state.XMLState.delete_repository_sections')
@patch('kiwi.logger.Logger.set_logfile')
def test_process_system_prepare_ignore_repos(
self, mock_log, mock_delete_repos
):
self._init_command_args()
self.task.command_args['--ignore-repos'] = True
self.task.process()
mock_delete_repos.assert_called_once_with()
@patch('kiwi.xml_state.XMLState.delete_repository_sections_used_for_build')
@patch('kiwi.logger.Logger.set_logfile')
def test_process_system_prepare_ignore_repos_used_for_build(
self, mock_log, mock_delete_repos
):
self._init_command_args()
self.task.command_args['--ignore-repos-used-for-build'] = True
self.task.process()
mock_delete_repos.assert_called_once_with()
|
gpl-3.0
|
RusDavies/ansible-modules-core
|
database/postgresql/postgresql_user.py
|
89
|
25160
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: postgresql_user
short_description: Adds or removes a user (role) from a PostgreSQL database.
description:
- Add or remove PostgreSQL users (roles) from a remote host and, optionally,
grant the users access to an existing database or tables.
- The fundamental function of the module is to create, or delete, roles from
a PostgreSQL cluster. Privilege assignment, or removal, is an optional
step, which works on one database at a time. This allows for the module to
     be called several times in the same playbook to modify the permissions on
different databases, or to grant permissions to already existing users.
   - A user cannot be removed until all the privileges have been stripped from
     the user. In such a situation, if the module tries to remove the user it
     will fail. To avoid this, the fail_on_user option signals the module to
     try to remove the user, but to keep going if that is not possible; the
     module will report whether changes happened and, separately, whether the
     user was removed.
version_added: "0.6"
options:
name:
description:
- name of the user (role) to add or remove
required: true
default: null
password:
description:
- set the user's password, before 1.4 this was required.
- "When passing an encrypted password, the encrypted parameter must also be true, and it must be generated with the format C('str[\\"md5\\"] + md5[ password + username ]'), resulting in a total of 35 characters. An easy way to do this is: C(echo \\"md5`echo -n \\"verysecretpasswordJOE\\" | md5`\\"). Note that if encrypted is set, the stored password will be hashed whether or not it is pre-encrypted."
required: false
default: null
db:
description:
- name of database where permissions will be granted
required: false
default: null
fail_on_user:
description:
- if C(yes), fail when user can't be removed. Otherwise just log and continue
required: false
default: 'yes'
choices: [ "yes", "no" ]
port:
description:
- Database port to connect to.
required: false
default: 5432
login_user:
description:
- User (role) used to authenticate with PostgreSQL
required: false
default: postgres
login_password:
description:
- Password used to authenticate with PostgreSQL
required: false
default: null
login_host:
description:
- Host running PostgreSQL.
required: false
default: localhost
login_unix_socket:
description:
- Path to a Unix domain socket for local connections
required: false
default: null
priv:
description:
- "PostgreSQL privileges string in the format: C(table:priv1,priv2)"
required: false
default: null
role_attr_flags:
description:
- "PostgreSQL role attributes string in the format: CREATEDB,CREATEROLE,SUPERUSER"
required: false
default: ""
choices: [ "[NO]SUPERUSER","[NO]CREATEROLE", "[NO]CREATEUSER", "[NO]CREATEDB",
"[NO]INHERIT", "[NO]LOGIN", "[NO]REPLICATION" ]
state:
description:
- The user (role) state
required: false
default: present
choices: [ "present", "absent" ]
encrypted:
description:
- whether the password is stored hashed in the database. boolean. Passwords can be passed already hashed or unhashed, and postgresql ensures the stored password is hashed when encrypted is set.
required: false
default: false
version_added: '1.4'
expires:
description:
- sets the user's password expiration.
required: false
default: null
version_added: '1.4'
no_password_changes:
description:
- if C(yes), don't inspect database for password changes. Effective when C(pg_authid) is not accessible (such as AWS RDS). Otherwise, make password changes as necessary.
required: false
default: 'no'
choices: [ "yes", "no" ]
version_added: '2.0'
notes:
- The default authentication assumes that you are either logging in as or
sudo'ing to the postgres account on the host.
- This module uses psycopg2, a Python PostgreSQL database adapter. You must
ensure that psycopg2 is installed on the host before using this module. If
the remote host is the PostgreSQL server (which is the default case), then
PostgreSQL must also be installed on the remote host. For Ubuntu-based
systems, install the postgresql, libpq-dev, and python-psycopg2 packages
on the remote host before using this module.
- If the passlib library is installed, then passwords that are encrypted
in the DB but not encrypted when passed as arguments can be checked for
changes. If the passlib library is not installed, unencrypted passwords
stored in the DB encrypted will be assumed to have changed.
- If you specify PUBLIC as the user, then the privilege changes will apply
to all users. You may not specify password or role_attr_flags when the
PUBLIC user is specified.
requirements: [ psycopg2 ]
author: "Lorin Hochstein (@lorin)"
'''
EXAMPLES = '''
# Create django user and grant access to database and products table
- postgresql_user: db=acme name=django password=ceec4eif7ya priv=CONNECT/products:ALL
# Create rails user, grant privilege to create other databases and demote rails from super user status
- postgresql_user: name=rails password=secret role_attr_flags=CREATEDB,NOSUPERUSER
# Remove test user privileges from acme
- postgresql_user: db=acme name=test priv=ALL/products:ALL state=absent fail_on_user=no
# Remove test user from test database and the cluster
- postgresql_user: db=test name=test priv=ALL state=absent
# Example privileges string format
INSERT,UPDATE/table:SELECT/anothertable:ALL
# Remove an existing user's password
- postgresql_user: db=test user=test password=NULL
'''
import re
import itertools
try:
import psycopg2
import psycopg2.extras
except ImportError:
postgresqldb_found = False
else:
postgresqldb_found = True
_flags = ('SUPERUSER', 'CREATEROLE', 'CREATEUSER', 'CREATEDB', 'INHERIT', 'LOGIN', 'REPLICATION')
VALID_FLAGS = frozenset(itertools.chain(_flags, ('NO%s' % f for f in _flags)))
VALID_PRIVS = dict(table=frozenset(('SELECT', 'INSERT', 'UPDATE', 'DELETE', 'TRUNCATE', 'REFERENCES', 'TRIGGER', 'ALL')),
database=frozenset(('CREATE', 'CONNECT', 'TEMPORARY', 'TEMP', 'ALL')),
)
# map to cope with idiosyncrasies of SUPERUSER and LOGIN
PRIV_TO_AUTHID_COLUMN = dict(SUPERUSER='rolsuper', CREATEROLE='rolcreaterole',
CREATEUSER='rolcreateuser', CREATEDB='rolcreatedb',
INHERIT='rolinherit', LOGIN='rolcanlogin',
REPLICATION='rolreplication')
class InvalidFlagsError(Exception):
pass
class InvalidPrivsError(Exception):
pass
# ===========================================
# PostgreSQL module specific support methods.
#
def user_exists(cursor, user):
# The PUBLIC user is a special case that is always there
if user == 'PUBLIC':
return True
query = "SELECT rolname FROM pg_roles WHERE rolname=%(user)s"
cursor.execute(query, {'user': user})
return cursor.rowcount > 0
def user_add(cursor, user, password, role_attr_flags, encrypted, expires):
"""Create a new database user (role)."""
# Note: role_attr_flags escaped by parse_role_attrs and encrypted is a literal
query_password_data = dict(password=password, expires=expires)
query = ['CREATE USER %(user)s' % { "user": pg_quote_identifier(user, 'role')}]
if password is not None:
query.append("WITH %(crypt)s" % { "crypt": encrypted })
query.append("PASSWORD %(password)s")
if expires is not None:
query.append("VALID UNTIL %(expires)s")
query.append(role_attr_flags)
query = ' '.join(query)
cursor.execute(query, query_password_data)
return True
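# For illustration (hypothetical values): user_add(cursor, 'django', 'secret',
# 'CREATEDB LOGIN', 'ENCRYPTED', None) executes roughly
#   CREATE USER "django" WITH ENCRYPTED PASSWORD %(password)s CREATEDB LOGIN
# with the password bound by psycopg2 rather than interpolated into the SQL.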
def user_alter(cursor, module, user, password, role_attr_flags, encrypted, expires, no_password_changes):
"""Change user password and/or attributes. Return True if changed, False otherwise."""
changed = False
# Note: role_attr_flags escaped by parse_role_attrs and encrypted is a literal
if user == 'PUBLIC':
if password is not None:
module.fail_json(msg="cannot change the password for PUBLIC user")
elif role_attr_flags != '':
module.fail_json(msg="cannot change the role_attr_flags for PUBLIC user")
else:
return False
# Handle passwords.
if not no_password_changes and (password is not None or role_attr_flags != ''):
# Select password and all flag-like columns in order to verify changes.
query_password_data = dict(password=password, expires=expires)
select = "SELECT * FROM pg_authid where rolname=%(user)s"
cursor.execute(select, {"user": user})
# Grab current role attributes.
current_role_attrs = cursor.fetchone()
# Do we actually need to do anything?
pwchanging = False
if password is not None:
if encrypted:
if password.startswith('md5'):
if password != current_role_attrs['rolpassword']:
pwchanging = True
else:
try:
from passlib.hash import postgres_md5 as pm
if pm.encrypt(password, user) != current_role_attrs['rolpassword']:
pwchanging = True
except ImportError:
# Cannot check if passlib is not installed, so assume password is different
pwchanging = True
else:
if password != current_role_attrs['rolpassword']:
pwchanging = True
role_attr_flags_changing = False
if role_attr_flags:
role_attr_flags_dict = {}
for r in role_attr_flags.split(' '):
if r.startswith('NO'):
role_attr_flags_dict[r.replace('NO', '', 1)] = False
else:
role_attr_flags_dict[r] = True
for role_attr_name, role_attr_value in role_attr_flags_dict.items():
if current_role_attrs[PRIV_TO_AUTHID_COLUMN[role_attr_name]] != role_attr_value:
role_attr_flags_changing = True
        expires_changing = (expires is not None and expires != current_role_attrs['rolvaliduntil'])
if not pwchanging and not role_attr_flags_changing and not expires_changing:
return False
alter = ['ALTER USER %(user)s' % {"user": pg_quote_identifier(user, 'role')}]
if pwchanging:
alter.append("WITH %(crypt)s" % {"crypt": encrypted})
alter.append("PASSWORD %(password)s")
alter.append(role_attr_flags)
elif role_attr_flags:
alter.append('WITH %s' % role_attr_flags)
if expires is not None:
alter.append("VALID UNTIL %(expires)s")
try:
cursor.execute(' '.join(alter), query_password_data)
except psycopg2.InternalError, e:
if e.pgcode == '25006':
# Handle errors due to read-only transactions indicated by pgcode 25006
# ERROR: cannot execute ALTER ROLE in a read-only transaction
changed = False
module.fail_json(msg=e.pgerror)
return changed
else:
raise psycopg2.InternalError, e
# Grab new role attributes.
cursor.execute(select, {"user": user})
new_role_attrs = cursor.fetchone()
# Detect any differences between current_ and new_role_attrs.
for i in range(len(current_role_attrs)):
if current_role_attrs[i] != new_role_attrs[i]:
changed = True
return changed
def user_delete(cursor, user):
"""Try to remove a user. Returns True if successful otherwise False"""
cursor.execute("SAVEPOINT ansible_pgsql_user_delete")
try:
cursor.execute("DROP USER %s" % pg_quote_identifier(user, 'role'))
except:
cursor.execute("ROLLBACK TO SAVEPOINT ansible_pgsql_user_delete")
cursor.execute("RELEASE SAVEPOINT ansible_pgsql_user_delete")
return False
cursor.execute("RELEASE SAVEPOINT ansible_pgsql_user_delete")
return True
def has_table_privileges(cursor, user, table, privs):
"""
Return the difference between the privileges that a user already has and
the privileges that they desire to have.
:returns: tuple of:
* privileges that they have and were requested
* privileges they currently hold but were not requested
* privileges requested that they do not hold
"""
cur_privs = get_table_privileges(cursor, user, table)
have_currently = cur_privs.intersection(privs)
other_current = cur_privs.difference(privs)
desired = privs.difference(cur_privs)
return (have_currently, other_current, desired)
def get_table_privileges(cursor, user, table):
if '.' in table:
schema, table = table.split('.', 1)
else:
schema = 'public'
query = '''SELECT privilege_type FROM information_schema.role_table_grants
WHERE grantee=%s AND table_name=%s AND table_schema=%s'''
cursor.execute(query, (user, table, schema))
return frozenset([x[0] for x in cursor.fetchall()])
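# For illustration (hypothetical grants): if information_schema lists SELECT
# and INSERT rows for the user, get_table_privileges(cursor, 'django',
# 'public.products') returns frozenset(['SELECT', 'INSERT']).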
def grant_table_privileges(cursor, user, table, privs):
# Note: priv escaped by parse_privs
privs = ', '.join(privs)
query = 'GRANT %s ON TABLE %s TO %s' % (
privs, pg_quote_identifier(table, 'table'), pg_quote_identifier(user, 'role') )
cursor.execute(query)
def revoke_table_privileges(cursor, user, table, privs):
# Note: priv escaped by parse_privs
privs = ', '.join(privs)
query = 'REVOKE %s ON TABLE %s FROM %s' % (
privs, pg_quote_identifier(table, 'table'), pg_quote_identifier(user, 'role') )
cursor.execute(query)
def get_database_privileges(cursor, user, db):
priv_map = {
'C':'CREATE',
'T':'TEMPORARY',
'c':'CONNECT',
}
query = 'SELECT datacl FROM pg_database WHERE datname = %s'
cursor.execute(query, (db,))
datacl = cursor.fetchone()[0]
if datacl is None:
return set()
r = re.search('%s=(C?T?c?)/[a-z]+\,?' % user, datacl)
if r is None:
return set()
o = set()
for v in r.group(1):
o.add(priv_map[v])
return normalize_privileges(o, 'database')
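# Worked example (hypothetical ACL): for datacl '{django=CTc/postgres}' and
# user 'django', the regex captures 'CTc', which maps through priv_map to
# set(['CREATE', 'TEMPORARY', 'CONNECT']) after normalization.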
def has_database_privileges(cursor, user, db, privs):
"""
Return the difference between the privileges that a user already has and
the privileges that they desire to have.
:returns: tuple of:
* privileges that they have and were requested
* privileges they currently hold but were not requested
* privileges requested that they do not hold
"""
cur_privs = get_database_privileges(cursor, user, db)
have_currently = cur_privs.intersection(privs)
other_current = cur_privs.difference(privs)
desired = privs.difference(cur_privs)
return (have_currently, other_current, desired)
def grant_database_privileges(cursor, user, db, privs):
# Note: priv escaped by parse_privs
    privs = ', '.join(privs)
if user == "PUBLIC":
query = 'GRANT %s ON DATABASE %s TO PUBLIC' % (
privs, pg_quote_identifier(db, 'database'))
else:
query = 'GRANT %s ON DATABASE %s TO %s' % (
privs, pg_quote_identifier(db, 'database'),
pg_quote_identifier(user, 'role'))
cursor.execute(query)
def revoke_database_privileges(cursor, user, db, privs):
# Note: priv escaped by parse_privs
privs = ', '.join(privs)
if user == "PUBLIC":
query = 'REVOKE %s ON DATABASE %s FROM PUBLIC' % (
privs, pg_quote_identifier(db, 'database'))
else:
query = 'REVOKE %s ON DATABASE %s FROM %s' % (
privs, pg_quote_identifier(db, 'database'),
pg_quote_identifier(user, 'role'))
cursor.execute(query)
def revoke_privileges(cursor, user, privs):
if privs is None:
return False
revoke_funcs = dict(table=revoke_table_privileges, database=revoke_database_privileges)
check_funcs = dict(table=has_table_privileges, database=has_database_privileges)
changed = False
for type_ in privs:
for name, privileges in privs[type_].iteritems():
# Check that any of the privileges requested to be removed are
# currently granted to the user
differences = check_funcs[type_](cursor, user, name, privileges)
if differences[0]:
revoke_funcs[type_](cursor, user, name, privileges)
changed = True
return changed
def grant_privileges(cursor, user, privs):
if privs is None:
return False
grant_funcs = dict(table=grant_table_privileges, database=grant_database_privileges)
check_funcs = dict(table=has_table_privileges, database=has_database_privileges)
changed = False
for type_ in privs:
for name, privileges in privs[type_].iteritems():
# Check that any of the privileges requested for the user are
# currently missing
differences = check_funcs[type_](cursor, user, name, privileges)
if differences[2]:
grant_funcs[type_](cursor, user, name, privileges)
changed = True
return changed
def parse_role_attrs(role_attr_flags):
"""
Parse role attributes string for user creation.
Format:
attributes[,attributes,...]
Where:
attributes := CREATEDB,CREATEROLE,NOSUPERUSER,...
[ "[NO]SUPERUSER","[NO]CREATEROLE", "[NO]CREATEUSER", "[NO]CREATEDB",
"[NO]INHERIT", "[NO]LOGIN", "[NO]REPLICATION" ]
"""
if ',' in role_attr_flags:
flag_set = frozenset(r.upper() for r in role_attr_flags.split(","))
elif role_attr_flags:
flag_set = frozenset((role_attr_flags.upper(),))
else:
flag_set = frozenset()
if not flag_set.issubset(VALID_FLAGS):
raise InvalidFlagsError('Invalid role_attr_flags specified: %s' %
' '.join(flag_set.difference(VALID_FLAGS)))
o_flags = ' '.join(flag_set)
return o_flags
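# For example, parse_role_attrs('createdb,nologin') yields 'CREATEDB NOLOGIN'
# (flag order may vary, since a frozenset is joined), while an unknown flag
# such as 'FLY' raises InvalidFlagsError.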
def normalize_privileges(privs, type_):
new_privs = set(privs)
if 'ALL' in new_privs:
new_privs.update(VALID_PRIVS[type_])
new_privs.remove('ALL')
if 'TEMP' in new_privs:
new_privs.add('TEMPORARY')
new_privs.remove('TEMP')
return new_privs
def parse_privs(privs, db):
"""
Parse privilege string to determine permissions for database db.
Format:
privileges[/privileges/...]
Where:
privileges := DATABASE_PRIVILEGES[,DATABASE_PRIVILEGES,...] |
TABLE_NAME:TABLE_PRIVILEGES[,TABLE_PRIVILEGES,...]
"""
if privs is None:
return privs
o_privs = {
'database':{},
'table':{}
}
for token in privs.split('/'):
if ':' not in token:
type_ = 'database'
name = db
priv_set = frozenset(x.strip().upper() for x in token.split(',') if x.strip())
else:
type_ = 'table'
name, privileges = token.split(':', 1)
priv_set = frozenset(x.strip().upper() for x in privileges.split(',') if x.strip())
if not priv_set.issubset(VALID_PRIVS[type_]):
raise InvalidPrivsError('Invalid privs specified for %s: %s' %
(type_, ' '.join(priv_set.difference(VALID_PRIVS[type_]))))
priv_set = normalize_privileges(priv_set, type_)
o_privs[type_][name] = priv_set
return o_privs
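# For example, parse_privs('CONNECT/products:ALL', 'acme') returns
#   {'database': {'acme': set(['CONNECT'])},
#    'table': {'products': set(['SELECT', 'INSERT', 'UPDATE', 'DELETE',
#                               'TRUNCATE', 'REFERENCES', 'TRIGGER'])}}
# because normalize_privileges() expands ALL into the concrete table grants.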
# ===========================================
# Module execution.
#
def main():
module = AnsibleModule(
argument_spec=dict(
login_user=dict(default="postgres"),
login_password=dict(default=""),
login_host=dict(default=""),
login_unix_socket=dict(default=""),
user=dict(required=True, aliases=['name']),
password=dict(default=None),
state=dict(default="present", choices=["absent", "present"]),
priv=dict(default=None),
db=dict(default=''),
port=dict(default='5432'),
fail_on_user=dict(type='bool', default='yes'),
role_attr_flags=dict(default=''),
encrypted=dict(type='bool', default='no'),
no_password_changes=dict(type='bool', default='no'),
expires=dict(default=None)
),
supports_check_mode = True
)
user = module.params["user"]
password = module.params["password"]
state = module.params["state"]
fail_on_user = module.params["fail_on_user"]
db = module.params["db"]
if db == '' and module.params["priv"] is not None:
module.fail_json(msg="privileges require a database to be specified")
privs = parse_privs(module.params["priv"], db)
port = module.params["port"]
no_password_changes = module.params["no_password_changes"]
try:
role_attr_flags = parse_role_attrs(module.params["role_attr_flags"])
except InvalidFlagsError, e:
module.fail_json(msg=str(e))
if module.params["encrypted"]:
encrypted = "ENCRYPTED"
else:
encrypted = "UNENCRYPTED"
expires = module.params["expires"]
if not postgresqldb_found:
module.fail_json(msg="the python psycopg2 module is required")
# To use defaults values, keyword arguments must be absent, so
# check which values are empty and don't include in the **kw
# dictionary
params_map = {
"login_host":"host",
"login_user":"user",
"login_password":"password",
"port":"port",
"db":"database"
}
kw = dict( (params_map[k], v) for (k, v) in module.params.iteritems()
if k in params_map and v != "" )
# If a login_unix_socket is specified, incorporate it here.
is_localhost = "host" not in kw or kw["host"] == "" or kw["host"] == "localhost"
if is_localhost and module.params["login_unix_socket"] != "":
kw["host"] = module.params["login_unix_socket"]
try:
db_connection = psycopg2.connect(**kw)
cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
except Exception, e:
module.fail_json(msg="unable to connect to database: %s" % e)
kw = dict(user=user)
changed = False
user_removed = False
if state == "present":
if user_exists(cursor, user):
try:
changed = user_alter(cursor, module, user, password, role_attr_flags, encrypted, expires, no_password_changes)
except SQLParseError, e:
module.fail_json(msg=str(e))
else:
try:
changed = user_add(cursor, user, password, role_attr_flags, encrypted, expires)
except SQLParseError, e:
module.fail_json(msg=str(e))
try:
changed = grant_privileges(cursor, user, privs) or changed
except SQLParseError, e:
module.fail_json(msg=str(e))
else:
if user_exists(cursor, user):
if module.check_mode:
changed = True
kw['user_removed'] = True
else:
try:
changed = revoke_privileges(cursor, user, privs)
user_removed = user_delete(cursor, user)
except SQLParseError, e:
module.fail_json(msg=str(e))
changed = changed or user_removed
if fail_on_user and not user_removed:
msg = "unable to remove user"
module.fail_json(msg=msg)
kw['user_removed'] = user_removed
if changed:
if module.check_mode:
db_connection.rollback()
else:
db_connection.commit()
kw['changed'] = changed
module.exit_json(**kw)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.database import *
main()
|
gpl-3.0
|
heke123/chromium-crosswalk
|
tools/grit/grit/clique.py
|
25
|
18007
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Collections of messages and their translations, called cliques. Also
collections of cliques (uber-cliques).
'''
import re
import types
from grit import constants
from grit import exception
from grit import lazy_re
from grit import pseudo
from grit import pseudo_rtl
from grit import tclib
class UberClique(object):
'''A factory (NOT a singleton factory) for making cliques. It has several
methods for working with the cliques created using the factory.
'''
def __init__(self):
# A map from message ID to list of cliques whose source messages have
# that ID. This will contain all cliques created using this factory.
# Different messages can have the same ID because they have the
# same translateable portion and placeholder names, but occur in different
# places in the resource tree.
#
# Each list of cliques is kept sorted by description, to achieve
# stable results from the BestClique method, see below.
self.cliques_ = {}
# A map of clique IDs to list of languages to indicate translations where we
# fell back to English.
self.fallback_translations_ = {}
# A map of clique IDs to list of languages to indicate missing translations.
self.missing_translations_ = {}
def _AddMissingTranslation(self, lang, clique, is_error):
tl = self.fallback_translations_
if is_error:
tl = self.missing_translations_
id = clique.GetId()
if id not in tl:
tl[id] = {}
if lang not in tl[id]:
tl[id][lang] = 1
def HasMissingTranslations(self):
return len(self.missing_translations_) > 0
def MissingTranslationsReport(self):
'''Returns a string suitable for printing to report missing
and fallback translations to the user.
'''
def ReportTranslation(clique, langs):
text = clique.GetMessage().GetPresentableContent()
# The text 'error' (usually 'Error:' but we are conservative)
# can trigger some build environments (Visual Studio, we're
# looking at you) to consider invocation of grit to have failed,
# so we make sure never to output that word.
extract = re.sub('(?i)error', 'REDACTED', text[0:40])[0:40]
ellipsis = ''
if len(text) > 40:
ellipsis = '...'
langs_extract = langs[0:6]
describe_langs = ','.join(langs_extract)
if len(langs) > 6:
describe_langs += " and %d more" % (len(langs) - 6)
return " %s \"%s%s\" %s" % (clique.GetId(), extract, ellipsis,
describe_langs)
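    # A report line produced by ReportTranslation looks like (hypothetical
    # message and languages):
    #   IDS_EXAMPLE "REDACTED loading page" fr,de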
lines = []
if len(self.fallback_translations_):
lines.append(
"WARNING: Fell back to English for the following translations:")
for (id, langs) in self.fallback_translations_.items():
lines.append(ReportTranslation(self.cliques_[id][0], langs.keys()))
if len(self.missing_translations_):
lines.append("ERROR: The following translations are MISSING:")
for (id, langs) in self.missing_translations_.items():
lines.append(ReportTranslation(self.cliques_[id][0], langs.keys()))
return '\n'.join(lines)
def MakeClique(self, message, translateable=True):
'''Create a new clique initialized with a message.
Args:
message: tclib.Message()
translateable: True | False
'''
clique = MessageClique(self, message, translateable)
# Enable others to find this clique by its message ID
if message.GetId() in self.cliques_:
presentable_text = clique.GetMessage().GetPresentableContent()
if not message.HasAssignedId():
for c in self.cliques_[message.GetId()]:
assert c.GetMessage().GetPresentableContent() == presentable_text
self.cliques_[message.GetId()].append(clique)
# We need to keep each list of cliques sorted by description, to
# achieve stable results from the BestClique method, see below.
self.cliques_[message.GetId()].sort(
key=lambda c:c.GetMessage().GetDescription())
else:
self.cliques_[message.GetId()] = [clique]
return clique
def FindCliqueAndAddTranslation(self, translation, language):
'''Adds the specified translation to the clique with the source message
it is a translation of.
Args:
translation: tclib.Translation()
language: 'en' | 'fr' ...
Return:
True if the source message was found, otherwise false.
'''
if translation.GetId() in self.cliques_:
for clique in self.cliques_[translation.GetId()]:
clique.AddTranslation(translation, language)
return True
else:
return False
def BestClique(self, id):
'''Returns the "best" clique from a list of cliques. All the cliques
must have the same ID. The "best" clique is chosen in the following
order of preference:
- The first clique that has a non-ID-based description.
    - If no such clique is found, the first clique with an ID-based description.
- Otherwise the first clique.
This method is stable in terms of always returning a clique with
an identical description (on different runs of GRIT on the same
data) because self.cliques_ is sorted by description.
'''
clique_list = self.cliques_[id]
clique_with_id = None
clique_default = None
for clique in clique_list:
if not clique_default:
clique_default = clique
description = clique.GetMessage().GetDescription()
      if description:
if not description.startswith('ID:'):
# this is the preferred case so we exit right away
return clique
elif not clique_with_id:
clique_with_id = clique
if clique_with_id:
return clique_with_id
else:
return clique_default
def BestCliquePerId(self):
    '''Iterates over the list of all cliques and yields the best clique (as
    determined by BestClique) for each ID.
    '''
for id in self.cliques_:
yield self.BestClique(id)
def BestCliqueByOriginalText(self, text, meaning):
    '''Finds the "best" (as in BestClique()) clique that has original text
    'text' and meaning 'meaning', and returns its source message. Returns
    None if there is no such clique.
    '''
# If needed, this can be optimized by maintaining a map of
# fingerprints of original text+meaning to cliques.
for c in self.BestCliquePerId():
msg = c.GetMessage()
if msg.GetRealContent() == text and msg.GetMeaning() == meaning:
return msg
return None
def AllMessageIds(self):
'''Returns a list of all defined message IDs.
'''
return self.cliques_.keys()
def AllCliques(self):
'''Iterates over all cliques. Note that this can return multiple cliques
with the same ID.
'''
for cliques in self.cliques_.values():
for c in cliques:
yield c
def GenerateXtbParserCallback(self, lang, debug=False):
'''Creates a callback function as required by grit.xtb_reader.Parse().
This callback will create Translation objects for each message from
the XTB that exists in this uberclique, and add them as translations for
the relevant cliques. The callback will add translations to the language
specified by 'lang'
Args:
lang: 'fr'
debug: True | False
'''
def Callback(id, structure):
if id not in self.cliques_:
if debug: print "Ignoring translation #%s" % id
return
if debug: print "Adding translation #%s" % id
# We fetch placeholder information from the original message (the XTB file
# only contains placeholder names).
original_msg = self.BestClique(id).GetMessage()
translation = tclib.Translation(id=id)
for is_ph,text in structure:
if not is_ph:
translation.AppendText(text)
else:
found_placeholder = False
for ph in original_msg.GetPlaceholders():
if ph.GetPresentation() == text:
translation.AppendPlaceholder(tclib.Placeholder(
ph.GetPresentation(), ph.GetOriginal(), ph.GetExample()))
found_placeholder = True
break
if not found_placeholder:
raise exception.MismatchingPlaceholders(
'Translation for message ID %s had <ph name="%s"/>, no match\n'
'in original message' % (id, text))
self.FindCliqueAndAddTranslation(translation, lang)
return Callback
class CustomType(object):
'''A base class you should implement if you wish to specify a custom type
for a message clique (i.e. custom validation and optional modification of
translations).'''
def Validate(self, message):
'''Returns true if the message (a tclib.Message object) is valid,
otherwise false.
'''
raise NotImplementedError()
def ValidateAndModify(self, lang, translation):
'''Returns true if the translation (a tclib.Translation object) is valid,
otherwise false. The language is also passed in. This method may modify
the translation that is passed in, if it so wishes.
'''
raise NotImplementedError()
def ModifyTextPart(self, lang, text):
'''If you call ModifyEachTextPart, it will turn around and call this method
for each text part of the translation. You should return the modified
version of the text, or just the original text to not change anything.
'''
raise NotImplementedError()
def ModifyEachTextPart(self, lang, translation):
'''Call this to easily modify one or more of the textual parts of a
translation. It will call ModifyTextPart for each part of the
translation.
'''
contents = translation.GetContent()
for ix in range(len(contents)):
if (isinstance(contents[ix], types.StringTypes)):
contents[ix] = self.ModifyTextPart(lang, contents[ix])
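# --- Editor's illustration (not part of grit): a minimal, hypothetical
# CustomType subclass sketching how the interface above fits together.
# It accepts every message and upper-cases each text part of translations.
class _UpperCaseExampleType(CustomType):
  '''Example only: validates everything, upper-cases translations.'''
  def Validate(self, message):
    return True
  def ValidateAndModify(self, lang, translation):
    self.ModifyEachTextPart(lang, translation)
    return True
  def ModifyTextPart(self, lang, text):
    return text.upper()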
class OneOffCustomType(CustomType):
'''A very simple custom type that performs the validation expressed by
the input expression on all languages including the source language.
The expression can access the variables 'lang', 'msg' and 'text()' where
'lang' is the language of 'msg', 'msg' is the message or translation being
validated and 'text()' returns the real contents of 'msg' (for shorthand).
'''
def __init__(self, expression):
self.expr = expression
def Validate(self, message):
return self.ValidateAndModify(MessageClique.source_language, message)
def ValidateAndModify(self, lang, msg):
def text():
return msg.GetRealContent()
return eval(self.expr, {},
{'lang' : lang,
'text' : text,
'msg' : msg,
})
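# --- Editor's sketch (not part of grit): OneOffCustomType usage. Assumes
# tclib.Message accepts a ``text`` keyword, as the Translation construction
# elsewhere in this file suggests.
#
#   no_tabs = OneOffCustomType("'\\t' not in text()")
#   no_tabs.Validate(tclib.Message(text='no tabs here'))  # True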
class MessageClique(object):
'''A message along with all of its translations. Also code to bring
translations together with their original message.'''
# change this to the language code of Messages you add to cliques_.
# TODO(joi) Actually change this based on the <grit> node's source language
source_language = 'en'
# A constant translation we use when asked for a translation into the
# special language constants.CONSTANT_LANGUAGE.
CONSTANT_TRANSLATION = tclib.Translation(text='TTTTTT')
# A pattern to match messages that are empty or whitespace only.
WHITESPACE_MESSAGE = lazy_re.compile(u'^\s*$')
def __init__(self, uber_clique, message, translateable=True,
custom_type=None):
'''Create a new clique initialized with just a message.
    Note that messages whose body consists only of whitespace are implicitly
    marked non-translateable.
Args:
uber_clique: Our uber-clique (collection of cliques)
message: tclib.Message()
translateable: True | False
custom_type: instance of clique.CustomType interface
'''
# Our parent
self.uber_clique = uber_clique
# If not translateable, we only store the original message.
self.translateable = translateable
# We implicitly mark messages that have a whitespace-only body as
# non-translateable.
if MessageClique.WHITESPACE_MESSAGE.match(message.GetRealContent()):
self.translateable = False
# A mapping of language identifiers to tclib.BaseMessage and its
# subclasses (i.e. tclib.Message and tclib.Translation).
self.clique = { MessageClique.source_language : message }
    # A list of the "shortcut groups" this clique is part of. Within any
    # given shortcut group, no shortcut key (e.g. &J) may appear more than
    # once in each language across all cliques that belong to the group.
self.shortcut_groups = []
# An instance of the CustomType interface, or None. If this is set, it will
# be used to validate the original message and translations thereof, and
# will also get a chance to modify translations of the message.
self.SetCustomType(custom_type)
def GetMessage(self):
'''Retrieves the tclib.Message that is the source for this clique.'''
return self.clique[MessageClique.source_language]
def GetId(self):
'''Retrieves the message ID of the messages in this clique.'''
return self.GetMessage().GetId()
def IsTranslateable(self):
return self.translateable
def AddToShortcutGroup(self, group):
self.shortcut_groups.append(group)
def SetCustomType(self, custom_type):
'''Makes this clique use custom_type for validating messages and
translations, and optionally modifying translations.
'''
self.custom_type = custom_type
if custom_type and not custom_type.Validate(self.GetMessage()):
raise exception.InvalidMessage(self.GetMessage().GetRealContent())
def MessageForLanguage(self, lang, pseudo_if_no_match=True,
fallback_to_english=False):
'''Returns the message/translation for the specified language, providing
a pseudotranslation if there is no available translation and a pseudo-
translation is requested.
The translation of any message whatsoever in the special language
'x_constant' is the message "TTTTTT".
Args:
lang: 'en'
pseudo_if_no_match: True
fallback_to_english: False
Return:
tclib.BaseMessage
'''
if not self.translateable:
return self.GetMessage()
if lang == constants.CONSTANT_LANGUAGE:
return self.CONSTANT_TRANSLATION
    if lang in self.clique:
      return self.clique[lang]
if lang == constants.FAKE_BIDI:
return pseudo_rtl.PseudoRTLMessage(self.GetMessage())
if fallback_to_english:
self.uber_clique._AddMissingTranslation(lang, self, is_error=False)
return self.GetMessage()
# If we're not supposed to generate pseudotranslations, we add an error
# report to a list of errors, then fail at a higher level, so that we
# get a list of all messages that are missing translations.
if not pseudo_if_no_match:
self.uber_clique._AddMissingTranslation(lang, self, is_error=True)
return pseudo.PseudoMessage(self.GetMessage())
  def AllMessagesThatMatch(self, lang_re, include_pseudo=True):
    '''Returns a map of all messages whose language matches 'lang_re',
    including the pseudotranslation if requested. (If this clique is not
    translateable, a list containing only the source message is returned.)
Args:
lang_re: re.compile('fr|en')
include_pseudo: True
Return:
{ 'en' : tclib.Message,
'fr' : tclib.Translation,
pseudo.PSEUDO_LANG : tclib.Translation }
'''
if not self.translateable:
return [self.GetMessage()]
matches = {}
for msglang in self.clique:
if lang_re.match(msglang):
matches[msglang] = self.clique[msglang]
if include_pseudo:
matches[pseudo.PSEUDO_LANG] = pseudo.PseudoMessage(self.GetMessage())
return matches
def AddTranslation(self, translation, language):
'''Add a translation to this clique. The translation must have the same
ID as the message that is the source for this clique.
If this clique is not translateable, the function just returns.
Args:
translation: tclib.Translation()
language: 'en'
Throws:
grit.exception.InvalidTranslation if the translation you're trying to add
doesn't have the same message ID as the source message of this clique.
'''
if not self.translateable:
return
if translation.GetId() != self.GetId():
raise exception.InvalidTranslation(
'Msg ID %s, transl ID %s' % (self.GetId(), translation.GetId()))
    assert language not in self.clique
# Because two messages can differ in the original content of their
# placeholders yet share the same ID (because they are otherwise the
# same), the translation we are getting may have different original
# content for placeholders than our message, yet it is still the right
# translation for our message (because it is for the same ID). We must
# therefore fetch the original content of placeholders from our original
# English message.
#
# See grit.clique_unittest.MessageCliqueUnittest.testSemiIdenticalCliques
# for a concrete explanation of why this is necessary.
original = self.MessageForLanguage(self.source_language, False)
if len(original.GetPlaceholders()) != len(translation.GetPlaceholders()):
print ("ERROR: '%s' translation of message id %s does not match" %
(language, translation.GetId()))
assert False
transl_msg = tclib.Translation(id=self.GetId(),
text=translation.GetPresentableContent(),
placeholders=original.GetPlaceholders())
if (self.custom_type and
not self.custom_type.ValidateAndModify(language, transl_msg)):
print "WARNING: %s translation failed validation: %s" % (
language, transl_msg.GetId())
self.clique[language] = transl_msg
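# --- Editor's demo (a minimal sketch, not part of grit). Assumes
# tclib.Message and tclib.Translation accept the ``text``/``id`` keywords
# used elsewhere in this file.
if __name__ == '__main__':
  factory = UberClique()
  hello = factory.MakeClique(tclib.Message(text='Hello'))
  factory.FindCliqueAndAddTranslation(
      tclib.Translation(id=hello.GetId(), text='Bonjour'), 'fr')
  # Prints the French translation; a pseudotranslation would be returned
  # for languages that have none.
  print hello.MessageForLanguage('fr').GetPresentableContent()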
|
bsd-3-clause
|
Argon-Zhou/django
|
tests/admin_checks/models.py
|
98
|
1724
|
"""
Tests of ModelAdmin system checks logic.
"""
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
class Album(models.Model):
title = models.CharField(max_length=150)
@python_2_unicode_compatible
class Song(models.Model):
title = models.CharField(max_length=150)
album = models.ForeignKey(Album)
original_release = models.DateField(editable=False)
class Meta:
ordering = ('title',)
def __str__(self):
return self.title
def readonly_method_on_model(self):
# does nothing
pass
class TwoAlbumFKAndAnE(models.Model):
album1 = models.ForeignKey(Album, related_name="album1_set")
album2 = models.ForeignKey(Album, related_name="album2_set")
e = models.CharField(max_length=1)
class Author(models.Model):
name = models.CharField(max_length=100)
class Book(models.Model):
name = models.CharField(max_length=100)
subtitle = models.CharField(max_length=100)
price = models.FloatField()
authors = models.ManyToManyField(Author, through='AuthorsBooks')
class AuthorsBooks(models.Model):
author = models.ForeignKey(Author)
book = models.ForeignKey(Book)
featured = models.BooleanField()
class State(models.Model):
name = models.CharField(max_length=15)
class City(models.Model):
state = models.ForeignKey(State)
class Influence(models.Model):
name = models.TextField()
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
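# --- Editor's sketch (not part of the test models): how Influence's generic
# foreign key is populated. Requires a configured database, so shown as a
# comment only.
#
#   album = Album.objects.create(title='Blue')
#   Influence.objects.create(
#       name='jazz',
#       content_type=ContentType.objects.get_for_model(Album),
#       object_id=album.pk,
#   )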
|
bsd-3-clause
|
badloop/SickRage
|
lib/unidecode/x0b7.py
|
253
|
4833
|
data = (
'ddwim', # 0x00
'ddwib', # 0x01
'ddwibs', # 0x02
'ddwis', # 0x03
'ddwiss', # 0x04
'ddwing', # 0x05
'ddwij', # 0x06
'ddwic', # 0x07
'ddwik', # 0x08
'ddwit', # 0x09
'ddwip', # 0x0a
'ddwih', # 0x0b
'ddyu', # 0x0c
'ddyug', # 0x0d
'ddyugg', # 0x0e
'ddyugs', # 0x0f
'ddyun', # 0x10
'ddyunj', # 0x11
'ddyunh', # 0x12
'ddyud', # 0x13
'ddyul', # 0x14
'ddyulg', # 0x15
'ddyulm', # 0x16
'ddyulb', # 0x17
'ddyuls', # 0x18
'ddyult', # 0x19
'ddyulp', # 0x1a
'ddyulh', # 0x1b
'ddyum', # 0x1c
'ddyub', # 0x1d
'ddyubs', # 0x1e
'ddyus', # 0x1f
'ddyuss', # 0x20
'ddyung', # 0x21
'ddyuj', # 0x22
'ddyuc', # 0x23
'ddyuk', # 0x24
'ddyut', # 0x25
'ddyup', # 0x26
'ddyuh', # 0x27
'ddeu', # 0x28
'ddeug', # 0x29
'ddeugg', # 0x2a
'ddeugs', # 0x2b
'ddeun', # 0x2c
'ddeunj', # 0x2d
'ddeunh', # 0x2e
'ddeud', # 0x2f
'ddeul', # 0x30
'ddeulg', # 0x31
'ddeulm', # 0x32
'ddeulb', # 0x33
'ddeuls', # 0x34
'ddeult', # 0x35
'ddeulp', # 0x36
'ddeulh', # 0x37
'ddeum', # 0x38
'ddeub', # 0x39
'ddeubs', # 0x3a
'ddeus', # 0x3b
'ddeuss', # 0x3c
'ddeung', # 0x3d
'ddeuj', # 0x3e
'ddeuc', # 0x3f
'ddeuk', # 0x40
'ddeut', # 0x41
'ddeup', # 0x42
'ddeuh', # 0x43
'ddyi', # 0x44
'ddyig', # 0x45
'ddyigg', # 0x46
'ddyigs', # 0x47
'ddyin', # 0x48
'ddyinj', # 0x49
'ddyinh', # 0x4a
'ddyid', # 0x4b
'ddyil', # 0x4c
'ddyilg', # 0x4d
'ddyilm', # 0x4e
'ddyilb', # 0x4f
'ddyils', # 0x50
'ddyilt', # 0x51
'ddyilp', # 0x52
'ddyilh', # 0x53
'ddyim', # 0x54
'ddyib', # 0x55
'ddyibs', # 0x56
'ddyis', # 0x57
'ddyiss', # 0x58
'ddying', # 0x59
'ddyij', # 0x5a
'ddyic', # 0x5b
'ddyik', # 0x5c
'ddyit', # 0x5d
'ddyip', # 0x5e
'ddyih', # 0x5f
'ddi', # 0x60
'ddig', # 0x61
'ddigg', # 0x62
'ddigs', # 0x63
'ddin', # 0x64
'ddinj', # 0x65
'ddinh', # 0x66
'ddid', # 0x67
'ddil', # 0x68
'ddilg', # 0x69
'ddilm', # 0x6a
'ddilb', # 0x6b
'ddils', # 0x6c
'ddilt', # 0x6d
'ddilp', # 0x6e
'ddilh', # 0x6f
'ddim', # 0x70
'ddib', # 0x71
'ddibs', # 0x72
'ddis', # 0x73
'ddiss', # 0x74
'dding', # 0x75
'ddij', # 0x76
'ddic', # 0x77
'ddik', # 0x78
'ddit', # 0x79
'ddip', # 0x7a
'ddih', # 0x7b
'ra', # 0x7c
'rag', # 0x7d
'ragg', # 0x7e
'rags', # 0x7f
'ran', # 0x80
'ranj', # 0x81
'ranh', # 0x82
'rad', # 0x83
'ral', # 0x84
'ralg', # 0x85
'ralm', # 0x86
'ralb', # 0x87
'rals', # 0x88
'ralt', # 0x89
'ralp', # 0x8a
'ralh', # 0x8b
'ram', # 0x8c
'rab', # 0x8d
'rabs', # 0x8e
'ras', # 0x8f
'rass', # 0x90
'rang', # 0x91
'raj', # 0x92
'rac', # 0x93
'rak', # 0x94
'rat', # 0x95
'rap', # 0x96
'rah', # 0x97
'rae', # 0x98
'raeg', # 0x99
'raegg', # 0x9a
'raegs', # 0x9b
'raen', # 0x9c
'raenj', # 0x9d
'raenh', # 0x9e
'raed', # 0x9f
'rael', # 0xa0
'raelg', # 0xa1
'raelm', # 0xa2
'raelb', # 0xa3
'raels', # 0xa4
'raelt', # 0xa5
'raelp', # 0xa6
'raelh', # 0xa7
'raem', # 0xa8
'raeb', # 0xa9
'raebs', # 0xaa
'raes', # 0xab
'raess', # 0xac
'raeng', # 0xad
'raej', # 0xae
'raec', # 0xaf
'raek', # 0xb0
'raet', # 0xb1
'raep', # 0xb2
'raeh', # 0xb3
'rya', # 0xb4
'ryag', # 0xb5
'ryagg', # 0xb6
'ryags', # 0xb7
'ryan', # 0xb8
'ryanj', # 0xb9
'ryanh', # 0xba
'ryad', # 0xbb
'ryal', # 0xbc
'ryalg', # 0xbd
'ryalm', # 0xbe
'ryalb', # 0xbf
'ryals', # 0xc0
'ryalt', # 0xc1
'ryalp', # 0xc2
'ryalh', # 0xc3
'ryam', # 0xc4
'ryab', # 0xc5
'ryabs', # 0xc6
'ryas', # 0xc7
'ryass', # 0xc8
'ryang', # 0xc9
'ryaj', # 0xca
'ryac', # 0xcb
'ryak', # 0xcc
'ryat', # 0xcd
'ryap', # 0xce
'ryah', # 0xcf
'ryae', # 0xd0
'ryaeg', # 0xd1
'ryaegg', # 0xd2
'ryaegs', # 0xd3
'ryaen', # 0xd4
'ryaenj', # 0xd5
'ryaenh', # 0xd6
'ryaed', # 0xd7
'ryael', # 0xd8
'ryaelg', # 0xd9
'ryaelm', # 0xda
'ryaelb', # 0xdb
'ryaels', # 0xdc
'ryaelt', # 0xdd
'ryaelp', # 0xde
'ryaelh', # 0xdf
'ryaem', # 0xe0
'ryaeb', # 0xe1
'ryaebs', # 0xe2
'ryaes', # 0xe3
'ryaess', # 0xe4
'ryaeng', # 0xe5
'ryaej', # 0xe6
'ryaec', # 0xe7
'ryaek', # 0xe8
'ryaet', # 0xe9
'ryaep', # 0xea
'ryaeh', # 0xeb
'reo', # 0xec
'reog', # 0xed
'reogg', # 0xee
'reogs', # 0xef
'reon', # 0xf0
'reonj', # 0xf1
'reonh', # 0xf2
'reod', # 0xf3
'reol', # 0xf4
'reolg', # 0xf5
'reolm', # 0xf6
'reolb', # 0xf7
'reols', # 0xf8
'reolt', # 0xf9
'reolp', # 0xfa
'reolh', # 0xfb
'reom', # 0xfc
'reob', # 0xfd
'reobs', # 0xfe
'reos', # 0xff
)
|
gpl-3.0
|
Krossom/python-for-android
|
python-modules/twisted/twisted/persisted/crefutil.py
|
61
|
4644
|
# -*- test-case-name: twisted.test.test_persisted -*-
# Copyright (c) 2001-2008 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Utility classes for dealing with circular references.
"""
from twisted.python import log, reflect
try:
from new import instancemethod
except:
from org.python.core import PyMethod
instancemethod = PyMethod
class NotKnown:
def __init__(self):
self.dependants = []
self.resolved = 0
def addDependant(self, mutableObject, key):
assert not self.resolved
self.dependants.append( (mutableObject, key) )
resolvedObject = None
def resolveDependants(self, newObject):
self.resolved = 1
self.resolvedObject = newObject
for mut, key in self.dependants:
mut[key] = newObject
if isinstance(newObject, NotKnown):
newObject.addDependant(mut, key)
def __hash__(self):
assert 0, "I am not to be used as a dictionary key."
class _Container(NotKnown):
"""
Helper class to resolve circular references on container objects.
"""
def __init__(self, l, containerType):
"""
        @param l: The list of objects which may contain some not-yet-referenced
            objects.
@param containerType: A type of container objects (e.g., C{tuple} or
C{set}).
"""
NotKnown.__init__(self)
self.containerType = containerType
self.l = l
self.locs = range(len(l))
for idx in xrange(len(l)):
if not isinstance(l[idx], NotKnown):
self.locs.remove(idx)
else:
l[idx].addDependant(self, idx)
if not self.locs:
self.resolveDependants(self.containerType(self.l))
def __setitem__(self, n, obj):
"""
        Change the value of one contained object, and resolve references once
        all objects have been referenced.
"""
self.l[n] = obj
if not isinstance(obj, NotKnown):
self.locs.remove(n)
if not self.locs:
self.resolveDependants(self.containerType(self.l))
class _Tuple(_Container):
"""
    Manage a tuple containing circular references. Deprecated: use
    C{_Container} instead.
"""
def __init__(self, l):
"""
        @param l: The list of objects which may contain some not-yet-referenced
            objects.
"""
_Container.__init__(self, l, tuple)
class _InstanceMethod(NotKnown):
def __init__(self, im_name, im_self, im_class):
NotKnown.__init__(self)
self.my_class = im_class
self.name = im_name
        # im_self _must_ be a NotKnown, since addDependant is called on it
        # below.
im_self.addDependant(self, 0)
def __call__(self, *args, **kw):
import traceback
log.msg('instance method %s.%s' % (reflect.qual(self.my_class), self.name))
log.msg('being called with %r %r' % (args, kw))
traceback.print_stack(file=log.logfile)
assert 0
def __setitem__(self, n, obj):
assert n == 0, "only zero index allowed"
if not isinstance(obj, NotKnown):
self.resolveDependants(instancemethod(self.my_class.__dict__[self.name],
obj,
self.my_class))
class _DictKeyAndValue:
def __init__(self, dict):
self.dict = dict
def __setitem__(self, n, obj):
if n not in (1, 0):
raise RuntimeError("DictKeyAndValue should only ever be called with 0 or 1")
if n: # value
self.value = obj
else:
self.key = obj
if hasattr(self, "key") and hasattr(self, "value"):
self.dict[self.key] = self.value
class _Dereference(NotKnown):
def __init__(self, id):
NotKnown.__init__(self)
self.id = id
from twisted.internet.defer import Deferred
class _Catcher:
def catch(self, value):
self.value = value
class _Defer(Deferred, NotKnown):
def __init__(self):
Deferred.__init__(self)
NotKnown.__init__(self)
self.pause()
wasset = 0
def __setitem__(self, n, obj):
if self.wasset:
raise RuntimeError('setitem should only be called once, setting %r to %r' % (n, obj))
else:
self.wasset = 1
self.callback(obj)
def addDependant(self, dep, key):
# by the time I'm adding a dependant, I'm *not* adding any more
# callbacks
NotKnown.addDependant(self, dep, key)
self.unpause()
        resolved = self.result
        self.resolveDependants(resolved)
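# --- Editor's demo (a minimal sketch, not part of Twisted): _Container
# defers building its tuple until every NotKnown slot has been resolved.
if __name__ == '__main__':
    unknown = NotKnown()
    holder = {}
    tup = _Container([1, unknown, 3], tuple)
    tup.addDependant(holder, 'result')
    unknown.resolveDependants(2)  # slot 1 filled; the tuple can now build
    print holder['result']        # (1, 2, 3)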
|
apache-2.0
|
feigames/Odoo
|
addons/portal_claim/portal_claim.py
|
315
|
1871
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import SUPERUSER_ID
from openerp.osv import osv
class crm_claim(osv.osv):
_inherit = "crm.claim"
def _get_default_partner_id(self, cr, uid, context=None):
""" Gives default partner_id """
if context is None:
context = {}
if context.get('portal'):
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
# Special case for portal users, as they are not allowed to call name_get on res.partner
# We save this call for the web client by returning it in default get
return self.pool['res.partner'].name_get(cr, SUPERUSER_ID, [user.partner_id.id], context=context)[0]
return False
_defaults = {
'partner_id': lambda s, cr, uid, c: s._get_default_partner_id(cr, uid, c),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
yan12125/youtube-dl
|
youtube_dl/extractor/rmcdecouverte.py
|
20
|
2017
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from .brightcove import BrightcoveLegacyIE
from ..compat import (
compat_parse_qs,
compat_urlparse,
)
from ..utils import smuggle_url
class RMCDecouverteIE(InfoExtractor):
_VALID_URL = r'https?://rmcdecouverte\.bfmtv\.com/(?:(?:[^/]+/)*program_(?P<id>\d+)|(?P<live_id>mediaplayer-direct))'
_TESTS = [{
'url': 'https://rmcdecouverte.bfmtv.com/wheeler-dealers-occasions-a-saisir/program_2566/',
'info_dict': {
'id': '5983675500001',
'ext': 'mp4',
'title': 'CORVETTE',
'description': 'md5:c1e8295521e45ffebf635d6a7658f506',
'uploader_id': '1969646226001',
'upload_date': '20181226',
'timestamp': 1545861635,
},
'params': {
'skip_download': True,
},
'skip': 'only available for a week',
}, {
# live, geo restricted, bypassable
'url': 'https://rmcdecouverte.bfmtv.com/mediaplayer-direct/',
'only_matching': True,
}]
BRIGHTCOVE_URL_TEMPLATE = 'http://players.brightcove.net/1969646226001/default_default/index.html?videoId=%s'
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
display_id = mobj.group('id') or mobj.group('live_id')
webpage = self._download_webpage(url, display_id)
brightcove_legacy_url = BrightcoveLegacyIE._extract_brightcove_url(webpage)
if brightcove_legacy_url:
brightcove_id = compat_parse_qs(compat_urlparse.urlparse(
brightcove_legacy_url).query)['@videoPlayer'][0]
else:
brightcove_id = self._search_regex(
r'data-video-id=["\'](\d+)', webpage, 'brightcove id')
return self.url_result(
smuggle_url(
self.BRIGHTCOVE_URL_TEMPLATE % brightcove_id,
{'geo_countries': ['FR']}),
'BrightcoveNew', brightcove_id)
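# --- Editor's sketch (not part of youtube-dl): extractors are normally
# driven through YoutubeDL rather than instantiated directly, e.g.:
#
#   from youtube_dl import YoutubeDL
#   with YoutubeDL({'skip_download': True}) as ydl:
#       ydl.extract_info('https://rmcdecouverte.bfmtv.com/mediaplayer-direct/')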
|
unlicense
|
arikpoz/mxnet
|
example/recommenders/movielens_data.py
|
15
|
1636
|
"""MovieLens data handling: download, parse, and expose as DataIter
"""
import os
import mxnet as mx
def load_mldata_iter(filename, batch_size):
"""Not particularly fast code to parse the text file and load it into three NDArray's
and product an NDArrayIter
"""
user = []
item = []
score = []
    with open(filename) as f:
for line in f:
tks = line.strip().split('\t')
if len(tks) != 4:
continue
user.append(int(tks[0]))
item.append(int(tks[1]))
score.append(float(tks[2]))
user = mx.nd.array(user)
item = mx.nd.array(item)
score = mx.nd.array(score)
return mx.io.NDArrayIter(data={'user':user,'item':item},label={'score':score},
batch_size=batch_size, shuffle=True)
def ensure_local_data(prefix):
if not os.path.exists("%s.zip" % prefix):
print("Downloading MovieLens data: %s" % prefix)
os.system("wget http://files.grouplens.org/datasets/movielens/%s.zip" % prefix)
os.system("unzip %s.zip" % prefix)
def get_data_iter(batch_size, prefix='ml-100k'):
"""Returns a pair of NDArrayDataIter, one for train, one for test.
"""
ensure_local_data(prefix)
return (load_mldata_iter('./%s/u1.base' % prefix, batch_size),
load_mldata_iter('./%s/u1.test' % prefix, batch_size))
def max_id(fname):
mu = 0
mi = 0
    for line in open(fname):
tks = line.strip().split('\t')
if len(tks) != 4:
continue
mu = max(mu, int(tks[0]))
mi = max(mi, int(tks[1]))
return mu + 1, mi + 1
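# --- Editor's demo (a minimal sketch, not part of the original example).
# Assumes the ml-100k archive unpacks to ./ml-100k, as ensure_local_data
# expects, and that it contains the standard u.data ratings file.
if __name__ == '__main__':
    train_iter, test_iter = get_data_iter(batch_size=64)
    max_user, max_item = max_id('./ml-100k/u.data')
    print("%d users, %d items" % (max_user, max_item))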
|
apache-2.0
|
couwbat/couwbatns3
|
ns-allinone-3.23/ns-3.23/src/click/bindings/modulegen__gcc_ILP32.py
|
124
|
289391
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
root_module = Module('ns.click', cpp_namespace='::ns3')
return root_module
def register_types(module):
root_module = module.get_root()
## log.h (module 'core'): ns3::LogLevel [enumeration]
module.add_enum('LogLevel', ['LOG_NONE', 'LOG_ERROR', 'LOG_LEVEL_ERROR', 'LOG_WARN', 'LOG_LEVEL_WARN', 'LOG_DEBUG', 'LOG_LEVEL_DEBUG', 'LOG_INFO', 'LOG_LEVEL_INFO', 'LOG_FUNCTION', 'LOG_LEVEL_FUNCTION', 'LOG_LOGIC', 'LOG_LEVEL_LOGIC', 'LOG_ALL', 'LOG_LEVEL_ALL', 'LOG_PREFIX_FUNC', 'LOG_PREFIX_TIME', 'LOG_PREFIX_NODE', 'LOG_PREFIX_LEVEL', 'LOG_PREFIX_ALL'], import_from_module='ns.core')
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## buffer.h (module 'network'): ns3::Buffer [class]
module.add_class('Buffer', import_from_module='ns.network')
## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
## packet.h (module 'network'): ns3::ByteTagIterator [class]
module.add_class('ByteTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
module.add_class('ByteTagList', import_from_module='ns.network')
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## hash.h (module 'core'): ns3::Hasher [class]
module.add_class('Hasher', import_from_module='ns.core')
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
module.add_class('Inet6SocketAddress', import_from_module='ns.network')
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
root_module['ns3::Inet6SocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
module.add_class('InetSocketAddress', import_from_module='ns.network')
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
root_module['ns3::InetSocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address', import_from_module='ns.network')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress [class]
module.add_class('Ipv4InterfaceAddress', import_from_module='ns.internet')
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e [enumeration]
module.add_enum('InterfaceAddressScope_e', ['HOST', 'LINK', 'GLOBAL'], outer_class=root_module['ns3::Ipv4InterfaceAddress'], import_from_module='ns.internet')
## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
module.add_class('Ipv6Prefix', import_from_module='ns.network')
## log.h (module 'core'): ns3::LogComponent [class]
module.add_class('LogComponent', import_from_module='ns.core')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
module.add_class('PacketMetadata', import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [enumeration]
module.add_enum('', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet.h (module 'network'): ns3::PacketTagIterator [class]
module.add_class('PacketTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
module.add_class('PacketTagList', import_from_module='ns.network')
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData_e [enumeration]
module.add_enum('TagData_e', ['MAX_SIZE'], outer_class=root_module['ns3::PacketTagList::TagData'], import_from_module='ns.network')
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## system-wall-clock-ms.h (module 'core'): ns3::SystemWallClockMs [class]
module.add_class('SystemWallClockMs', import_from_module='ns.core')
## tag.h (module 'network'): ns3::Tag [class]
module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
module.add_class('TagBuffer', import_from_module='ns.network')
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## chunk.h (module 'network'): ns3::Chunk [class]
module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## header.h (module 'network'): ns3::Header [class]
module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## ipv4-header.h (module 'internet'): ns3::Ipv4Header [class]
module.add_class('Ipv4Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::DscpType [enumeration]
module.add_enum('DscpType', ['DscpDefault', 'DSCP_CS1', 'DSCP_AF11', 'DSCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::EcnType [enumeration]
module.add_enum('EcnType', ['ECN_NotECT', 'ECN_ECT1', 'ECN_ECT0', 'ECN_CE'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4MulticastRoute', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4MulticastRoute>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4Route', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4Route>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## socket.h (module 'network'): ns3::Socket [class]
module.add_class('Socket', import_from_module='ns.network', parent=root_module['ns3::Object'])
## socket.h (module 'network'): ns3::Socket::SocketErrno [enumeration]
module.add_enum('SocketErrno', ['ERROR_NOTERROR', 'ERROR_ISCONN', 'ERROR_NOTCONN', 'ERROR_MSGSIZE', 'ERROR_AGAIN', 'ERROR_SHUTDOWN', 'ERROR_OPNOTSUPP', 'ERROR_AFNOSUPPORT', 'ERROR_INVAL', 'ERROR_BADF', 'ERROR_NOROUTETOHOST', 'ERROR_NODEV', 'ERROR_ADDRNOTAVAIL', 'ERROR_ADDRINUSE', 'SOCKET_ERRNO_LAST'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
## socket.h (module 'network'): ns3::Socket::SocketType [enumeration]
module.add_enum('SocketType', ['NS3_SOCK_STREAM', 'NS3_SOCK_SEQPACKET', 'NS3_SOCK_DGRAM', 'NS3_SOCK_RAW'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
## socket.h (module 'network'): ns3::SocketAddressTag [class]
module.add_class('SocketAddressTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketIpTosTag [class]
module.add_class('SocketIpTosTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketIpTtlTag [class]
module.add_class('SocketIpTtlTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketIpv6HopLimitTag [class]
module.add_class('SocketIpv6HopLimitTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketIpv6TclassTag [class]
module.add_class('SocketIpv6TclassTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketSetDontFragmentTag [class]
module.add_class('SocketSetDontFragmentTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## trailer.h (module 'network'): ns3::Trailer [class]
module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## ipv4.h (module 'internet'): ns3::Ipv4 [class]
module.add_class('Ipv4', import_from_module='ns.internet', parent=root_module['ns3::Object'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-interface.h (module 'internet'): ns3::Ipv4Interface [class]
module.add_class('Ipv4Interface', import_from_module='ns.internet', parent=root_module['ns3::Object'])
## ipv4-l3-click-protocol.h (module 'click'): ns3::Ipv4L3ClickProtocol [class]
module.add_class('Ipv4L3ClickProtocol', parent=root_module['ns3::Ipv4'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute [class]
module.add_class('Ipv4MulticastRoute', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
## ipv4-route.h (module 'internet'): ns3::Ipv4Route [class]
module.add_class('Ipv4Route', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol [class]
module.add_class('Ipv4RoutingProtocol', import_from_module='ns.internet', parent=root_module['ns3::Object'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## net-device.h (module 'network'): ns3::NetDevice [class]
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
## nix-vector.h (module 'network'): ns3::NixVector [class]
module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper [class]
module.add_class('OutputStreamWrapper', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
## packet.h (module 'network'): ns3::Packet [class]
module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## address.h (module 'network'): ns3::AddressChecker [class]
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## address.h (module 'network'): ns3::AddressValue [class]
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-click-routing.h (module 'click'): ns3::Ipv4ClickRouting [class]
module.add_class('Ipv4ClickRouting', parent=root_module['ns3::Ipv4RoutingProtocol'])
module.add_container('std::map< unsigned int, unsigned int >', ('unsigned int', 'unsigned int'), container_type='map')
typehandlers.add_type_alias('void ( * ) ( std::ostream & ) *', 'ns3::LogTimePrinter')
typehandlers.add_type_alias('void ( * ) ( std::ostream & ) **', 'ns3::LogTimePrinter*')
typehandlers.add_type_alias('void ( * ) ( std::ostream & ) *&', 'ns3::LogTimePrinter&')
typehandlers.add_type_alias('void ( * ) ( std::ostream & ) *', 'ns3::LogNodePrinter')
typehandlers.add_type_alias('void ( * ) ( std::ostream & ) **', 'ns3::LogNodePrinter*')
typehandlers.add_type_alias('void ( * ) ( std::ostream & ) *&', 'ns3::LogNodePrinter&')
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
## Register a nested module for the namespace Hash
nested_module = module.add_cpp_namespace('Hash')
register_types_ns3_Hash(nested_module)
def register_types_ns3_FatalImpl(module):
root_module = module.get_root()
def register_types_ns3_Hash(module):
root_module = module.get_root()
## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
typehandlers.add_type_alias('uint32_t ( * ) ( char const *, size_t ) *', 'ns3::Hash::Hash32Function_ptr')
typehandlers.add_type_alias('uint32_t ( * ) ( char const *, size_t ) **', 'ns3::Hash::Hash32Function_ptr*')
typehandlers.add_type_alias('uint32_t ( * ) ( char const *, size_t ) *&', 'ns3::Hash::Hash32Function_ptr&')
typehandlers.add_type_alias('uint64_t ( * ) ( char const *, size_t ) *', 'ns3::Hash::Hash64Function_ptr')
typehandlers.add_type_alias('uint64_t ( * ) ( char const *, size_t ) **', 'ns3::Hash::Hash64Function_ptr*')
typehandlers.add_type_alias('uint64_t ( * ) ( char const *, size_t ) *&', 'ns3::Hash::Hash64Function_ptr&')
## Register a nested module for the namespace Function
nested_module = module.add_cpp_namespace('Function')
register_types_ns3_Hash_Function(nested_module)
def register_types_ns3_Hash_Function(module):
root_module = module.get_root()
## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a [class]
module.add_class('Fnv1a', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class]
module.add_class('Hash32', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class]
module.add_class('Hash64', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class]
module.add_class('Murmur3', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
def register_methods(root_module):
register_Ns3Address_methods(root_module, root_module['ns3::Address'])
register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
register_Ns3Inet6SocketAddress_methods(root_module, root_module['ns3::Inet6SocketAddress'])
register_Ns3InetSocketAddress_methods(root_module, root_module['ns3::InetSocketAddress'])
register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
register_Ns3Ipv4InterfaceAddress_methods(root_module, root_module['ns3::Ipv4InterfaceAddress'])
register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
register_Ns3LogComponent_methods(root_module, root_module['ns3::LogComponent'])
register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
register_Ns3SystemWallClockMs_methods(root_module, root_module['ns3::SystemWallClockMs'])
register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
register_Ns3Header_methods(root_module, root_module['ns3::Header'])
register_Ns3Ipv4Header_methods(root_module, root_module['ns3::Ipv4Header'])
register_Ns3Object_methods(root_module, root_module['ns3::Object'])
register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
register_Ns3SimpleRefCount__Ns3Ipv4MulticastRoute_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4MulticastRoute__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
register_Ns3SimpleRefCount__Ns3Ipv4Route_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4Route__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
register_Ns3Socket_methods(root_module, root_module['ns3::Socket'])
register_Ns3SocketAddressTag_methods(root_module, root_module['ns3::SocketAddressTag'])
register_Ns3SocketIpTosTag_methods(root_module, root_module['ns3::SocketIpTosTag'])
register_Ns3SocketIpTtlTag_methods(root_module, root_module['ns3::SocketIpTtlTag'])
register_Ns3SocketIpv6HopLimitTag_methods(root_module, root_module['ns3::SocketIpv6HopLimitTag'])
register_Ns3SocketIpv6TclassTag_methods(root_module, root_module['ns3::SocketIpv6TclassTag'])
register_Ns3SocketSetDontFragmentTag_methods(root_module, root_module['ns3::SocketSetDontFragmentTag'])
register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
register_Ns3Ipv4_methods(root_module, root_module['ns3::Ipv4'])
register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
register_Ns3Ipv4Interface_methods(root_module, root_module['ns3::Ipv4Interface'])
register_Ns3Ipv4L3ClickProtocol_methods(root_module, root_module['ns3::Ipv4L3ClickProtocol'])
register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
register_Ns3Ipv4MulticastRoute_methods(root_module, root_module['ns3::Ipv4MulticastRoute'])
register_Ns3Ipv4Route_methods(root_module, root_module['ns3::Ipv4Route'])
register_Ns3Ipv4RoutingProtocol_methods(root_module, root_module['ns3::Ipv4RoutingProtocol'])
register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
register_Ns3OutputStreamWrapper_methods(root_module, root_module['ns3::OutputStreamWrapper'])
register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
register_Ns3Ipv4ClickRouting_methods(root_module, root_module['ns3::Ipv4ClickRouting'])
register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
return
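## Sketch (not emitted by pybindgen) of the lookup pattern this dispatcher
## relies on: the root module keeps exactly one wrapper object per fully
## qualified C++ name, so each register_*_methods helper above mutates the
## same object that the register_types pass created earlier.
def _sketch_lookup_registered_class(root_module):
    cls = root_module['ns3::Address']
    return cls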
def register_Ns3Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## address.h (module 'network'): ns3::Address::Address() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [copy constructor]
cls.add_constructor([param('ns3::Address const &', 'address')])
## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
cls.add_method('CheckCompatible',
'bool',
[param('uint8_t', 'type'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyAllFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
cls.add_method('CopyAllTo',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
cls.add_method('CopyTo',
'uint32_t',
[param('uint8_t *', 'buffer')],
is_const=True)
## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'buffer')])
## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
cls.add_method('GetLength',
'uint8_t',
[],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
cls.add_method('IsInvalid',
'bool',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
cls.add_method('IsMatchingType',
'bool',
[param('uint8_t', 'type')],
is_const=True)
## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
cls.add_method('Register',
'uint8_t',
[],
is_static=True)
## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'buffer')],
is_const=True)
return
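## Sketch (not emitted by pybindgen) of the ns3::Address wrapper these
## declarations produce in use; the ns.network module name follows the
## import_from_module hints used throughout this file.
def _sketch_address_usage():
    import ns.network
    addr = ns.network.Address()
    assert addr.IsInvalid()          # a default-constructed Address is invalid
    assert addr.GetLength() == 0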
def register_Ns3AttributeConstructionList_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
cls.add_method('Begin',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
cls.add_method('End',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('Find',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True)
return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
return
def register_Ns3Buffer_methods(root_module, cls):
## buffer.h (module 'network'): ns3::Buffer::Buffer() [constructor]
cls.add_constructor([])
## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize) [constructor]
cls.add_constructor([param('uint32_t', 'dataSize')])
## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize, bool initialize) [constructor]
cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
## buffer.h (module 'network'): ns3::Buffer::Buffer(ns3::Buffer const & o) [copy constructor]
cls.add_constructor([param('ns3::Buffer const &', 'o')])
## buffer.h (module 'network'): bool ns3::Buffer::AddAtEnd(uint32_t end) [member function]
cls.add_method('AddAtEnd',
'bool',
[param('uint32_t', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(ns3::Buffer const & o) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::Buffer const &', 'o')])
## buffer.h (module 'network'): bool ns3::Buffer::AddAtStart(uint32_t start) [member function]
cls.add_method('AddAtStart',
'bool',
[param('uint32_t', 'start')])
## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::Begin() const [member function]
cls.add_method('Begin',
'ns3::Buffer::Iterator',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::CopyData(std::ostream * os, uint32_t size) const [member function]
cls.add_method('CopyData',
'void',
[param('std::ostream *', 'os'), param('uint32_t', 'size')],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::CopyData(uint8_t * buffer, uint32_t size) const [member function]
cls.add_method('CopyData',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
is_const=True)
## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFragment(uint32_t start, uint32_t length) const [member function]
cls.add_method('CreateFragment',
'ns3::Buffer',
[param('uint32_t', 'start'), param('uint32_t', 'length')],
is_const=True)
## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFullCopy() const [member function]
cls.add_method('CreateFullCopy',
'ns3::Buffer',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::End() const [member function]
cls.add_method('End',
'ns3::Buffer::Iterator',
[],
is_const=True)
## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentEndOffset() const [member function]
cls.add_method('GetCurrentEndOffset',
'int32_t',
[],
is_const=True)
## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentStartOffset() const [member function]
cls.add_method('GetCurrentStartOffset',
'int32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint8_t const * ns3::Buffer::PeekData() const [member function]
cls.add_method('PeekData',
'uint8_t const *',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::RemoveAtEnd(uint32_t end) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::RemoveAtStart(uint32_t start) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'start')])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
def register_Ns3BufferIterator_methods(root_module, cls):
## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator(ns3::Buffer::Iterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator() [constructor]
cls.add_constructor([])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size) [member function]
cls.add_method('CalculateIpChecksum',
'uint16_t',
[param('uint16_t', 'size')])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size, uint32_t initialChecksum) [member function]
cls.add_method('CalculateIpChecksum',
'uint16_t',
[param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetDistanceFrom(ns3::Buffer::Iterator const & o) const [member function]
cls.add_method('GetDistanceFrom',
'uint32_t',
[param('ns3::Buffer::Iterator const &', 'o')],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsEnd() const [member function]
cls.add_method('IsEnd',
'bool',
[],
is_const=True)
## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsStart() const [member function]
cls.add_method('IsStart',
'bool',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next() [member function]
cls.add_method('Next',
'void',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next(uint32_t delta) [member function]
cls.add_method('Next',
'void',
[param('uint32_t', 'delta')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev() [member function]
cls.add_method('Prev',
'void',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev(uint32_t delta) [member function]
cls.add_method('Prev',
'void',
[param('uint32_t', 'delta')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadLsbtohU16() [member function]
cls.add_method('ReadLsbtohU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadLsbtohU32() [member function]
cls.add_method('ReadLsbtohU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadLsbtohU64() [member function]
cls.add_method('ReadLsbtohU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadNtohU16() [member function]
cls.add_method('ReadNtohU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadNtohU32() [member function]
cls.add_method('ReadNtohU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadNtohU64() [member function]
cls.add_method('ReadNtohU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
cls.add_method('Write',
'void',
[param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU16(uint16_t data) [member function]
cls.add_method('WriteHtolsbU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU32(uint32_t data) [member function]
cls.add_method('WriteHtolsbU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU64(uint64_t data) [member function]
cls.add_method('WriteHtolsbU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU16(uint16_t data) [member function]
cls.add_method('WriteHtonU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU32(uint32_t data) [member function]
cls.add_method('WriteHtonU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU64(uint64_t data) [member function]
cls.add_method('WriteHtonU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU64(uint64_t data) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data, uint32_t len) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'data'), param('uint32_t', 'len')])
return
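## Sketch (not emitted by pybindgen) combining the Buffer and
## Buffer::Iterator wrappers registered above: reserve space, write a word
## in network byte order, read it back. Iterators reference the buffer's
## storage, so a write through a fresh Begin() iterator is visible to the
## next one.
def _sketch_buffer_roundtrip():
    import ns.network
    buf = ns.network.Buffer()
    buf.AddAtStart(4)
    buf.Begin().WriteHtonU32(0xcafef00d)
    assert buf.Begin().ReadNtohU32() == 0xcafef00d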
def register_Ns3ByteTagIterator_methods(root_module, cls):
## packet.h (module 'network'): ns3::ByteTagIterator::ByteTagIterator(ns3::ByteTagIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
## packet.h (module 'network'): bool ns3::ByteTagIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet.h (module 'network'): ns3::ByteTagIterator::Item ns3::ByteTagIterator::Next() [member function]
cls.add_method('Next',
'ns3::ByteTagIterator::Item',
[])
return
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
## packet.h (module 'network'): ns3::ByteTagIterator::Item::Item(ns3::ByteTagIterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetEnd() const [member function]
cls.add_method('GetEnd',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetStart() const [member function]
cls.add_method('GetStart',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): void ns3::ByteTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
cls.add_method('GetTag',
'void',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::TypeId ns3::ByteTagIterator::Item::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
return
def register_Ns3ByteTagList_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList() [constructor]
cls.add_constructor([])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList(ns3::ByteTagList const & o) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
## byte-tag-list.h (module 'network'): ns3::TagBuffer ns3::ByteTagList::Add(ns3::TypeId tid, uint32_t bufferSize, int32_t start, int32_t end) [member function]
cls.add_method('Add',
'ns3::TagBuffer',
[param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'), param('int32_t', 'start'), param('int32_t', 'end')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::Add(ns3::ByteTagList const & o) [member function]
cls.add_method('Add',
'void',
[param('ns3::ByteTagList const &', 'o')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtEnd(int32_t adjustment, int32_t appendOffset) [member function]
cls.add_method('AddAtEnd',
'void',
[param('int32_t', 'adjustment'), param('int32_t', 'appendOffset')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtStart(int32_t adjustment, int32_t prependOffset) [member function]
cls.add_method('AddAtStart',
'void',
[param('int32_t', 'adjustment'), param('int32_t', 'prependOffset')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator ns3::ByteTagList::Begin(int32_t offsetStart, int32_t offsetEnd) const [member function]
cls.add_method('Begin',
'ns3::ByteTagList::Iterator',
[param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')],
is_const=True)
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::RemoveAll() [member function]
cls.add_method('RemoveAll',
'void',
[])
return
def register_Ns3ByteTagListIterator_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Iterator(ns3::ByteTagList::Iterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
## byte-tag-list.h (module 'network'): uint32_t ns3::ByteTagList::Iterator::GetOffsetStart() const [member function]
cls.add_method('GetOffsetStart',
'uint32_t',
[],
is_const=True)
## byte-tag-list.h (module 'network'): bool ns3::ByteTagList::Iterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item ns3::ByteTagList::Iterator::Next() [member function]
cls.add_method('Next',
'ns3::ByteTagList::Iterator::Item',
[])
return
def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::ByteTagList::Iterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::TagBuffer buf) [constructor]
cls.add_constructor([param('ns3::TagBuffer', 'buf')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::buf [variable]
cls.add_instance_attribute('buf', 'ns3::TagBuffer', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::end [variable]
cls.add_instance_attribute('end', 'int32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::size [variable]
cls.add_instance_attribute('size', 'uint32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::start [variable]
cls.add_instance_attribute('start', 'int32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3CallbackBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
cls.add_method('GetImpl',
'ns3::Ptr< ns3::CallbackImplBase >',
[],
is_const=True)
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
visibility='protected')
## callback.h (module 'core'): static std::string ns3::CallbackBase::Demangle(std::string const & mangled) [member function]
cls.add_method('Demangle',
'std::string',
[param('std::string const &', 'mangled')],
is_static=True, visibility='protected')
return
def register_Ns3Hasher_methods(root_module, cls):
## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Hasher const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
## hash.h (module 'core'): ns3::Hasher::Hasher() [constructor]
cls.add_constructor([])
## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Ptr<ns3::Hash::Implementation> hp) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')])
## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(std::string const s) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('std::string const', 's')])
## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')])
## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(std::string const s) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('std::string const', 's')])
## hash.h (module 'core'): ns3::Hasher & ns3::Hasher::clear() [member function]
cls.add_method('clear',
'ns3::Hasher &',
[])
return
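## Sketch (not emitted by pybindgen) driving the Hasher wrapper these
## registrations produce; assumes the generated module is importable as
## ns.core, per the import_from_module annotations above. The default
## constructor selects ns-3's default implementation.
def _sketch_hasher_usage():
    import ns.core
    h = ns.core.Hasher()
    h32 = h.GetHash32("hello")       # std::string overload
    h.clear()                        # reset state between inputs
    h64 = h.GetHash64("hello")
    return h32, h64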
def register_Ns3Inet6SocketAddress_methods(root_module, cls):
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(ns3::Inet6SocketAddress const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Inet6SocketAddress const &', 'arg0')])
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(ns3::Ipv6Address ipv6, uint16_t port) [constructor]
cls.add_constructor([param('ns3::Ipv6Address', 'ipv6'), param('uint16_t', 'port')])
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(ns3::Ipv6Address ipv6) [constructor]
cls.add_constructor([param('ns3::Ipv6Address', 'ipv6')])
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(uint16_t port) [constructor]
cls.add_constructor([param('uint16_t', 'port')])
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(char const * ipv6, uint16_t port) [constructor]
cls.add_constructor([param('char const *', 'ipv6'), param('uint16_t', 'port')])
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(char const * ipv6) [constructor]
cls.add_constructor([param('char const *', 'ipv6')])
## inet6-socket-address.h (module 'network'): static ns3::Inet6SocketAddress ns3::Inet6SocketAddress::ConvertFrom(ns3::Address const & addr) [member function]
cls.add_method('ConvertFrom',
'ns3::Inet6SocketAddress',
[param('ns3::Address const &', 'addr')],
is_static=True)
## inet6-socket-address.h (module 'network'): ns3::Ipv6Address ns3::Inet6SocketAddress::GetIpv6() const [member function]
cls.add_method('GetIpv6',
'ns3::Ipv6Address',
[],
is_const=True)
## inet6-socket-address.h (module 'network'): uint16_t ns3::Inet6SocketAddress::GetPort() const [member function]
cls.add_method('GetPort',
'uint16_t',
[],
is_const=True)
## inet6-socket-address.h (module 'network'): static bool ns3::Inet6SocketAddress::IsMatchingType(ns3::Address const & addr) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'addr')],
is_static=True)
## inet6-socket-address.h (module 'network'): void ns3::Inet6SocketAddress::SetIpv6(ns3::Ipv6Address ipv6) [member function]
cls.add_method('SetIpv6',
'void',
[param('ns3::Ipv6Address', 'ipv6')])
## inet6-socket-address.h (module 'network'): void ns3::Inet6SocketAddress::SetPort(uint16_t port) [member function]
cls.add_method('SetPort',
'void',
[param('uint16_t', 'port')])
return
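## Sketch (not emitted by pybindgen) for the IPv6 socket-address wrapper,
## mirroring the (char const *, uint16_t) constructor registered above.
def _sketch_inet6_usage():
    import ns.network
    sa = ns.network.Inet6SocketAddress("2001:db8::1", 9)
    assert sa.GetPort() == 9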
def register_Ns3InetSocketAddress_methods(root_module, cls):
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(ns3::InetSocketAddress const & arg0) [copy constructor]
cls.add_constructor([param('ns3::InetSocketAddress const &', 'arg0')])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(ns3::Ipv4Address ipv4, uint16_t port) [constructor]
cls.add_constructor([param('ns3::Ipv4Address', 'ipv4'), param('uint16_t', 'port')])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(ns3::Ipv4Address ipv4) [constructor]
cls.add_constructor([param('ns3::Ipv4Address', 'ipv4')])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(uint16_t port) [constructor]
cls.add_constructor([param('uint16_t', 'port')])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(char const * ipv4, uint16_t port) [constructor]
cls.add_constructor([param('char const *', 'ipv4'), param('uint16_t', 'port')])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(char const * ipv4) [constructor]
cls.add_constructor([param('char const *', 'ipv4')])
## inet-socket-address.h (module 'network'): static ns3::InetSocketAddress ns3::InetSocketAddress::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::InetSocketAddress',
[param('ns3::Address const &', 'address')],
is_static=True)
## inet-socket-address.h (module 'network'): ns3::Ipv4Address ns3::InetSocketAddress::GetIpv4() const [member function]
cls.add_method('GetIpv4',
'ns3::Ipv4Address',
[],
is_const=True)
## inet-socket-address.h (module 'network'): uint16_t ns3::InetSocketAddress::GetPort() const [member function]
cls.add_method('GetPort',
'uint16_t',
[],
is_const=True)
## inet-socket-address.h (module 'network'): static bool ns3::InetSocketAddress::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## inet-socket-address.h (module 'network'): void ns3::InetSocketAddress::SetIpv4(ns3::Ipv4Address address) [member function]
cls.add_method('SetIpv4',
'void',
[param('ns3::Ipv4Address', 'address')])
## inet-socket-address.h (module 'network'): void ns3::InetSocketAddress::SetPort(uint16_t port) [member function]
cls.add_method('SetPort',
'void',
[param('uint16_t', 'port')])
return
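## Sketch (not emitted by pybindgen) for the IPv4 counterpart; Ipv4Address
## is registered just below, also under ns.network.
def _sketch_inet_usage():
    import ns.network
    sa = ns.network.InetSocketAddress(ns.network.Ipv4Address("10.1.1.1"), 80)
    sa.SetPort(8080)
    assert sa.GetPort() == 8080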
def register_Ns3Ipv4Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(ns3::Ipv4Address const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(uint32_t address) [constructor]
cls.add_constructor([param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::CombineMask(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('CombineMask',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv4Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv4Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Address::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetBroadcast() [member function]
cls.add_method('GetBroadcast',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::GetSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('GetSubnetDirectedBroadcast',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsEqual(ns3::Ipv4Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Address const &', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalMulticast() const [member function]
cls.add_method('IsLocalMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): static bool ns3::Ipv4Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('IsSubnetDirectedBroadcast',
'bool',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(uint32_t address) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
return
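## Sketch (not emitted by pybindgen) exercising the mask arithmetic bound
## above: CombineMask yields the network address, and
## GetSubnetDirectedBroadcast the matching broadcast address.
def _sketch_ipv4_mask_math():
    import ns.network
    host = ns.network.Ipv4Address("192.168.1.7")
    mask = ns.network.Ipv4Mask("255.255.255.0")
    assert host.CombineMask(mask).IsEqual(ns.network.Ipv4Address("192.168.1.0"))
    assert host.GetSubnetDirectedBroadcast(mask).IsEqual(
        ns.network.Ipv4Address("192.168.1.255"))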
def register_Ns3Ipv4InterfaceAddress_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::Ipv4InterfaceAddress() [constructor]
cls.add_constructor([])
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::Ipv4InterfaceAddress(ns3::Ipv4Address local, ns3::Ipv4Mask mask) [constructor]
cls.add_constructor([param('ns3::Ipv4Address', 'local'), param('ns3::Ipv4Mask', 'mask')])
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::Ipv4InterfaceAddress(ns3::Ipv4InterfaceAddress const & o) [copy constructor]
cls.add_constructor([param('ns3::Ipv4InterfaceAddress const &', 'o')])
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4InterfaceAddress::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4InterfaceAddress::GetLocal() const [member function]
cls.add_method('GetLocal',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4Mask ns3::Ipv4InterfaceAddress::GetMask() const [member function]
cls.add_method('GetMask',
'ns3::Ipv4Mask',
[],
is_const=True)
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e ns3::Ipv4InterfaceAddress::GetScope() const [member function]
cls.add_method('GetScope',
'ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e',
[],
is_const=True)
## ipv4-interface-address.h (module 'internet'): bool ns3::Ipv4InterfaceAddress::IsSecondary() const [member function]
cls.add_method('IsSecondary',
'bool',
[],
is_const=True)
## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetBroadcast(ns3::Ipv4Address broadcast) [member function]
cls.add_method('SetBroadcast',
'void',
[param('ns3::Ipv4Address', 'broadcast')])
## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetLocal(ns3::Ipv4Address local) [member function]
cls.add_method('SetLocal',
'void',
[param('ns3::Ipv4Address', 'local')])
## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetMask(ns3::Ipv4Mask mask) [member function]
cls.add_method('SetMask',
'void',
[param('ns3::Ipv4Mask', 'mask')])
## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetPrimary() [member function]
cls.add_method('SetPrimary',
'void',
[])
## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetScope(ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e scope) [member function]
cls.add_method('SetScope',
'void',
[param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')])
## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetSecondary() [member function]
cls.add_method('SetSecondary',
'void',
[])
return
def register_Ns3Ipv4Mask_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(ns3::Ipv4Mask const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(uint32_t mask) [constructor]
cls.add_constructor([param('uint32_t', 'mask')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(char const * mask) [constructor]
cls.add_constructor([param('char const *', 'mask')])
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::GetInverse() const [member function]
cls.add_method('GetInverse',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): uint16_t ns3::Ipv4Mask::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint16_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsEqual(ns3::Ipv4Mask other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Mask', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsMatch(ns3::Ipv4Address a, ns3::Ipv4Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Set(uint32_t mask) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'mask')])
return
def register_Ns3Ipv6Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(uint8_t * address) [constructor]
cls.add_constructor([param('uint8_t *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const & addr) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const * addr) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6Address::CombinePrefix(ns3::Ipv6Prefix const & prefix) [member function]
cls.add_method('CombinePrefix',
'ns3::Ipv6Address',
[param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv6Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv6Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllHostsMulticast() [member function]
cls.add_method('GetAllHostsMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllNodesMulticast() [member function]
cls.add_method('GetAllNodesMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllRoutersMulticast() [member function]
cls.add_method('GetAllRoutersMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv6Address::GetIpv4MappedAddress() const [member function]
cls.add_method('GetIpv4MappedAddress',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllHostsMulticast() const [member function]
cls.add_method('IsAllHostsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllNodesMulticast() const [member function]
cls.add_method('IsAllNodesMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllRoutersMulticast() const [member function]
cls.add_method('IsAllRoutersMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAny() const [member function]
cls.add_method('IsAny',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsDocumentation() const [member function]
cls.add_method('IsDocumentation',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsEqual(ns3::Ipv6Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Address const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsIpv4MappedAddress() const [member function]
cls.add_method('IsIpv4MappedAddress',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocal() const [member function]
cls.add_method('IsLinkLocal',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocalMulticast() const [member function]
cls.add_method('IsLinkLocalMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLocalhost() const [member function]
cls.add_method('IsLocalhost',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static bool ns3::Ipv6Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsSolicitedMulticast() const [member function]
cls.add_method('IsSolicitedMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac16Address addr, ns3::Ipv6Address prefix) [member function]
cls.add_method('MakeAutoconfiguredAddress',
'ns3::Ipv6Address',
[param('ns3::Mac16Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac48Address addr, ns3::Ipv6Address prefix) [member function]
cls.add_method('MakeAutoconfiguredAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac64Address addr, ns3::Ipv6Address prefix) [member function]
cls.add_method('MakeAutoconfiguredAddress',
'ns3::Ipv6Address',
[param('ns3::Mac64Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac16Address mac) [member function]
cls.add_method('MakeAutoconfiguredLinkLocalAddress',
'ns3::Ipv6Address',
[param('ns3::Mac16Address', 'mac')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac48Address mac) [member function]
cls.add_method('MakeAutoconfiguredLinkLocalAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'mac')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac64Address mac) [member function]
cls.add_method('MakeAutoconfiguredLinkLocalAddress',
'ns3::Ipv6Address',
[param('ns3::Mac64Address', 'mac')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeIpv4MappedAddress(ns3::Ipv4Address addr) [member function]
cls.add_method('MakeIpv4MappedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv4Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeSolicitedAddress(ns3::Ipv6Address addr) [member function]
cls.add_method('MakeSolicitedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv6Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(uint8_t * address) [member function]
cls.add_method('Set',
'void',
[param('uint8_t *', 'address')])
return
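## Sketch (not emitted by pybindgen) for the Ipv6Address wrapper;
## 2001:db8::/32 is the documentation prefix, so IsDocumentation() should
## hold for the address below.
def _sketch_ipv6_usage():
    import ns.network
    a = ns.network.Ipv6Address("2001:db8::1")
    assert a.IsDocumentation()
    assert not a.IsMulticast()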
def register_Ns3Ipv6Prefix_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t * prefix) [constructor]
cls.add_constructor([param('uint8_t *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(char const * prefix) [constructor]
cls.add_constructor([param('char const *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t prefix) [constructor]
cls.add_constructor([param('uint8_t', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const & prefix) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const * prefix) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): uint8_t ns3::Ipv6Prefix::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint8_t',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsEqual(ns3::Ipv6Prefix const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Prefix const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsMatch(ns3::Ipv6Address a, ns3::Ipv6Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
return
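# Usage sketch for the Ipv6Prefix bindings above, again assuming the
# modular `ns.network` import; the /64 length is illustrative:
#
#   import ns.network
#   prefix = ns.network.Ipv6Prefix(64)        # uint8_t prefix-length constructor
#   assert prefix.GetPrefixLength() == 64
#   full = ns.network.Ipv6Prefix.GetOnes()    # the all-ones (/128) prefix
#   full.IsEqual(prefix)                      # -> False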
def register_Ns3LogComponent_methods(root_module, cls):
## log.h (module 'core'): ns3::LogComponent::LogComponent(ns3::LogComponent const & arg0) [copy constructor]
cls.add_constructor([param('ns3::LogComponent const &', 'arg0')])
## log.h (module 'core'): ns3::LogComponent::LogComponent(std::string const & name) [constructor]
cls.add_constructor([param('std::string const &', 'name')])
## log.h (module 'core'): void ns3::LogComponent::Disable(ns3::LogLevel level) [member function]
cls.add_method('Disable',
'void',
[param('ns3::LogLevel', 'level')])
## log.h (module 'core'): void ns3::LogComponent::Enable(ns3::LogLevel level) [member function]
cls.add_method('Enable',
'void',
[param('ns3::LogLevel', 'level')])
## log.h (module 'core'): void ns3::LogComponent::EnvVarCheck(std::string const & name) [member function]
cls.add_method('EnvVarCheck',
'void',
[param('std::string const &', 'name')])
## log.h (module 'core'): std::string ns3::LogComponent::GetLevelLabel(ns3::LogLevel const level) const [member function]
cls.add_method('GetLevelLabel',
'std::string',
[param('ns3::LogLevel const', 'level')],
is_const=True)
## log.h (module 'core'): bool ns3::LogComponent::IsEnabled(ns3::LogLevel level) const [member function]
cls.add_method('IsEnabled',
'bool',
[param('ns3::LogLevel', 'level')],
is_const=True)
## log.h (module 'core'): bool ns3::LogComponent::IsNoneEnabled() const [member function]
cls.add_method('IsNoneEnabled',
'bool',
[],
is_const=True)
## log.h (module 'core'): char const * ns3::LogComponent::Name() const [member function]
cls.add_method('Name',
'char const *',
[],
is_const=True)
return
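# Hedged sketch of driving a LogComponent from Python, assuming ns-3's
# LogLevel enumerators (LOG_LEVEL_INFO, LOG_INFO, ...) are exposed at the
# top level of the compiled `ns.core` module, as pybindgen normally does:
#
#   import ns.core
#   comp = ns.core.LogComponent("MyComponent")   # component name is illustrative
#   comp.Enable(ns.core.LOG_LEVEL_INFO)
#   comp.IsEnabled(ns.core.LOG_INFO)             # -> True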
def register_Ns3ObjectBase_methods(root_module, cls):
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
cls.add_constructor([])
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & attribute) const [member function]
cls.add_method('GetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'attribute')],
is_const=True)
## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
cls.add_method('ConstructSelf',
'void',
[param('ns3::AttributeConstructionList const &', 'attributes')],
visibility='protected')
## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
cls.add_method('NotifyConstructionCompleted',
'void',
[],
visibility='protected', is_virtual=True)
return
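# The ObjectBase attribute API registered above works through out-parameters;
# a sketch using ns3::Node, whose TypeId defines an "Id" attribute, and
# assuming `ns.core` / `ns.network` imports:
#
#   import ns.core, ns.network
#   node = ns.network.Node()
#   value = ns.core.UintegerValue()
#   node.GetAttribute("Id", value)   # fills `value` in place
#   print(value.Get())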
def register_Ns3ObjectDeleter_methods(root_module, cls):
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
cls.add_constructor([])
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
cls.add_method('Delete',
'void',
[param('ns3::Object *', 'object')],
is_static=True)
return
def register_Ns3PacketMetadata_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(uint64_t uid, uint32_t size) [constructor]
cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(ns3::PacketMetadata const & o) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata const &', 'o')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddAtEnd(ns3::PacketMetadata const & o) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::PacketMetadata const &', 'o')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddHeader(ns3::Header const & header, uint32_t size) [member function]
cls.add_method('AddHeader',
'void',
[param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddPaddingAtEnd(uint32_t end) [member function]
cls.add_method('AddPaddingAtEnd',
'void',
[param('uint32_t', 'end')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddTrailer(ns3::Trailer const & trailer, uint32_t size) [member function]
cls.add_method('AddTrailer',
'void',
[param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::PacketMetadata::BeginItem(ns3::Buffer buffer) const [member function]
cls.add_method('BeginItem',
'ns3::PacketMetadata::ItemIterator',
[param('ns3::Buffer', 'buffer')],
is_const=True)
## packet-metadata.h (module 'network'): ns3::PacketMetadata ns3::PacketMetadata::CreateFragment(uint32_t start, uint32_t end) const [member function]
cls.add_method('CreateFragment',
'ns3::PacketMetadata',
[param('uint32_t', 'start'), param('uint32_t', 'end')],
is_const=True)
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::Enable() [member function]
cls.add_method('Enable',
'void',
[],
is_static=True)
## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::EnableChecking() [member function]
cls.add_method('EnableChecking',
'void',
[],
is_static=True)
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packet-metadata.h (module 'network'): uint64_t ns3::PacketMetadata::GetUid() const [member function]
cls.add_method('GetUid',
'uint64_t',
[],
is_const=True)
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtEnd(uint32_t end) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'end')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtStart(uint32_t start) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'start')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveHeader(ns3::Header const & header, uint32_t size) [member function]
cls.add_method('RemoveHeader',
'void',
[param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveTrailer(ns3::Trailer const & trailer, uint32_t size) [member function]
cls.add_method('RemoveTrailer',
'void',
[param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
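# PacketMetadata tracking is disabled by default for speed; the static
# switches registered above turn it on process-wide (sketch, assuming
# the `ns.network` import):
#
#   import ns.network
#   ns.network.PacketMetadata.Enable()          # record per-packet history
#   ns.network.PacketMetadata.EnableChecking()  # additionally self-check it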
def register_Ns3PacketMetadataItem_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item() [constructor]
cls.add_constructor([])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item(ns3::PacketMetadata::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::current [variable]
cls.add_instance_attribute('current', 'ns3::Buffer::Iterator', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentSize [variable]
cls.add_instance_attribute('currentSize', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromEnd [variable]
cls.add_instance_attribute('currentTrimedFromEnd', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromStart [variable]
cls.add_instance_attribute('currentTrimedFromStart', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::isFragment [variable]
cls.add_instance_attribute('isFragment', 'bool', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3PacketMetadataItemIterator_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata::ItemIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata const * metadata, ns3::Buffer buffer) [constructor]
cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'), param('ns3::Buffer', 'buffer')])
## packet-metadata.h (module 'network'): bool ns3::PacketMetadata::ItemIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item ns3::PacketMetadata::ItemIterator::Next() [member function]
cls.add_method('Next',
'ns3::PacketMetadata::Item',
[])
return
def register_Ns3PacketTagIterator_methods(root_module, cls):
## packet.h (module 'network'): ns3::PacketTagIterator::PacketTagIterator(ns3::PacketTagIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')])
## packet.h (module 'network'): bool ns3::PacketTagIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet.h (module 'network'): ns3::PacketTagIterator::Item ns3::PacketTagIterator::Next() [member function]
cls.add_method('Next',
'ns3::PacketTagIterator::Item',
[])
return
def register_Ns3PacketTagIteratorItem_methods(root_module, cls):
## packet.h (module 'network'): ns3::PacketTagIterator::Item::Item(ns3::PacketTagIterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')])
## packet.h (module 'network'): void ns3::PacketTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
cls.add_method('GetTag',
'void',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::TypeId ns3::PacketTagIterator::Item::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
return
def register_Ns3PacketTagList_methods(root_module, cls):
## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList() [constructor]
cls.add_constructor([])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList(ns3::PacketTagList const & o) [copy constructor]
cls.add_constructor([param('ns3::PacketTagList const &', 'o')])
## packet-tag-list.h (module 'network'): void ns3::PacketTagList::Add(ns3::Tag const & tag) const [member function]
cls.add_method('Add',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData const * ns3::PacketTagList::Head() const [member function]
cls.add_method('Head',
'ns3::PacketTagList::TagData const *',
[],
is_const=True)
## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Peek(ns3::Tag & tag) const [member function]
cls.add_method('Peek',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Remove(ns3::Tag & tag) [member function]
cls.add_method('Remove',
'bool',
[param('ns3::Tag &', 'tag')])
## packet-tag-list.h (module 'network'): void ns3::PacketTagList::RemoveAll() [member function]
cls.add_method('RemoveAll',
'void',
[])
## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Replace(ns3::Tag & tag) [member function]
cls.add_method('Replace',
'bool',
[param('ns3::Tag &', 'tag')])
return
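# Note: PacketTagList is the copy-on-write list behind Packet's by-value
# tag API; Add/Peek/Remove/Replace match tags via the TypeId stored in the
# TagData nodes registered next, so it is rarely driven directly from Python.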
def register_Ns3PacketTagListTagData_methods(root_module, cls):
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData() [constructor]
cls.add_constructor([])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData(ns3::PacketTagList::TagData const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::count [variable]
cls.add_instance_attribute('count', 'uint32_t', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::data [variable]
cls.add_instance_attribute('data', 'uint8_t [ 20 ]', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::next [variable]
cls.add_instance_attribute('next', 'ns3::PacketTagList::TagData *', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
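# Note: this SimpleRefCount<Object, ObjectBase, ObjectDeleter> base gives
# ns3::Object its intrusive reference count, which the Ptr<> smart-pointer
# wrappers rely on; only the static Cleanup() hook is exposed here.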
def register_Ns3SystemWallClockMs_methods(root_module, cls):
## system-wall-clock-ms.h (module 'core'): ns3::SystemWallClockMs::SystemWallClockMs(ns3::SystemWallClockMs const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SystemWallClockMs const &', 'arg0')])
## system-wall-clock-ms.h (module 'core'): ns3::SystemWallClockMs::SystemWallClockMs() [constructor]
cls.add_constructor([])
## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::End() [member function]
cls.add_method('End',
'int64_t',
[])
## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::GetElapsedReal() const [member function]
cls.add_method('GetElapsedReal',
'int64_t',
[],
is_const=True)
## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::GetElapsedSystem() const [member function]
cls.add_method('GetElapsedSystem',
'int64_t',
[],
is_const=True)
## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::GetElapsedUser() const [member function]
cls.add_method('GetElapsedUser',
'int64_t',
[],
is_const=True)
## system-wall-clock-ms.h (module 'core'): void ns3::SystemWallClockMs::Start() [member function]
cls.add_method('Start',
'void',
[])
return
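# SystemWallClockMs is a simple stopwatch; a sketch assuming `ns.core`:
#
#   import ns.core
#   timer = ns.core.SystemWallClockMs()
#   timer.Start()
#   # ... run some work ...
#   elapsed = timer.End()      # stop; wall-clock ms since Start()
#   timer.GetElapsedReal()     # re-reads the measurement captured by End()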
def register_Ns3Tag_methods(root_module, cls):
## tag.h (module 'network'): ns3::Tag::Tag() [constructor]
cls.add_constructor([])
## tag.h (module 'network'): ns3::Tag::Tag(ns3::Tag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Tag const &', 'arg0')])
## tag.h (module 'network'): void ns3::Tag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_pure_virtual=True, is_virtual=True)
## tag.h (module 'network'): uint32_t ns3::Tag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## tag.h (module 'network'): static ns3::TypeId ns3::Tag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## tag.h (module 'network'): void ns3::Tag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## tag.h (module 'network'): void ns3::Tag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
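# ns3::Tag is abstract as registered here (Serialize, Deserialize,
# GetSerializedSize and Print are pure virtual), so Python code instantiates
# concrete in-tree subclasses rather than Tag itself; a correct subclass
# writes and reads exactly GetSerializedSize() bytes through the TagBuffer.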
def register_Ns3TagBuffer_methods(root_module, cls):
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(ns3::TagBuffer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(uint8_t * start, uint8_t * end) [constructor]
cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::CopyFrom(ns3::TagBuffer o) [member function]
cls.add_method('CopyFrom',
'void',
[param('ns3::TagBuffer', 'o')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): double ns3::TagBuffer::ReadDouble() [member function]
cls.add_method('ReadDouble',
'double',
[])
## tag-buffer.h (module 'network'): uint16_t ns3::TagBuffer::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## tag-buffer.h (module 'network'): uint32_t ns3::TagBuffer::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## tag-buffer.h (module 'network'): uint64_t ns3::TagBuffer::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## tag-buffer.h (module 'network'): uint8_t ns3::TagBuffer::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::TrimAtEnd(uint32_t trim) [member function]
cls.add_method('TrimAtEnd',
'void',
[param('uint32_t', 'trim')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteDouble(double v) [member function]
cls.add_method('WriteDouble',
'void',
[param('double', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU64(uint64_t v) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU8(uint8_t v) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'v')])
return
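# TagBuffer is the raw stream handed to Tag::Serialize/Deserialize; the
# contract is that writes and reads stay symmetric, e.g. a WriteU32 in
# Serialize pairs with a ReadU32 at the same offset in Deserialize. Its
# pointer-based constructor makes it awkward to build directly from Python,
# so it normally only appears as a callback argument.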
def register_Ns3TypeId_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
cls.add_constructor([param('char const *', 'name')])
## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
cls.add_constructor([param('ns3::TypeId const &', 'o')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
cls.add_method('GetAttribute',
'ns3::TypeId::AttributeInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
cls.add_method('GetAttributeFullName',
'std::string',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
cls.add_method('GetAttributeN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
cls.add_method('GetConstructor',
'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
cls.add_method('GetGroupName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetHash() const [member function]
cls.add_method('GetHash',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
cls.add_method('GetName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
cls.add_method('GetParent',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
cls.add_method('GetRegistered',
'ns3::TypeId',
[param('uint32_t', 'i')],
is_static=True)
## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
cls.add_method('GetRegisteredN',
'uint32_t',
[],
is_static=True)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
cls.add_method('GetTraceSource',
'ns3::TypeId::TraceSourceInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
cls.add_method('GetTraceSourceN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
cls.add_method('GetUid',
'uint16_t',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
cls.add_method('HasConstructor',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
cls.add_method('HasParent',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
cls.add_method('HideFromDocumentation',
'ns3::TypeId',
[])
## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
cls.add_method('IsChildOf',
'bool',
[param('ns3::TypeId', 'other')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
cls.add_method('LookupAttributeByName',
'bool',
[param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(uint32_t hash) [member function]
cls.add_method('LookupByHash',
'ns3::TypeId',
[param('uint32_t', 'hash')],
is_static=True)
## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(uint32_t hash, ns3::TypeId * tid) [member function]
cls.add_method('LookupByHashFailSafe',
'bool',
[param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')],
is_static=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
cls.add_method('LookupByName',
'ns3::TypeId',
[param('std::string', 'name')],
is_static=True)
## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
cls.add_method('LookupTraceSourceByName',
'ns3::Ptr< ns3::TraceSourceAccessor const >',
[param('std::string', 'name')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
cls.add_method('MustHideFromDocumentation',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
cls.add_method('SetAttributeInitialValue',
'bool',
[param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
cls.add_method('SetGroupName',
'ns3::TypeId',
[param('std::string', 'groupName')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
cls.add_method('SetParent',
'ns3::TypeId',
[param('ns3::TypeId', 'tid')])
## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
cls.add_method('SetUid',
'void',
[param('uint16_t', 'tid')])
return
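# TypeId is the introspection entry point; a lookup sketch using only the
# methods registered above (assuming `ns.core`; the class name is illustrative):
#
#   import ns.core
#   tid = ns.core.TypeId.LookupByName("ns3::Node")
#   for i in range(tid.GetAttributeN()):
#       print(tid.GetAttributeFullName(i))   # e.g. "ns3::Node::Id"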
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
return
def register_Ns3Empty_methods(root_module, cls):
## empty.h (module 'core'): ns3::empty::empty() [constructor]
cls.add_constructor([])
## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
cls.add_constructor([param('ns3::empty const &', 'arg0')])
return
def register_Ns3Chunk_methods(root_module, cls):
## chunk.h (module 'network'): ns3::Chunk::Chunk() [constructor]
cls.add_constructor([])
## chunk.h (module 'network'): ns3::Chunk::Chunk(ns3::Chunk const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
## chunk.h (module 'network'): uint32_t ns3::Chunk::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_virtual=True)
## chunk.h (module 'network'): static ns3::TypeId ns3::Chunk::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## chunk.h (module 'network'): void ns3::Chunk::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Header_methods(root_module, cls):
cls.add_output_stream_operator()
## header.h (module 'network'): ns3::Header::Header() [constructor]
cls.add_constructor([])
## header.h (module 'network'): ns3::Header::Header(ns3::Header const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Header const &', 'arg0')])
## header.h (module 'network'): uint32_t ns3::Header::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_virtual=True)
## header.h (module 'network'): uint32_t ns3::Header::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## header.h (module 'network'): static ns3::TypeId ns3::Header::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## header.h (module 'network'): void ns3::Header::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## header.h (module 'network'): void ns3::Header::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Ipv4Header_methods(root_module, cls):
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::Ipv4Header(ns3::Ipv4Header const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Header const &', 'arg0')])
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::Ipv4Header() [constructor]
cls.add_constructor([])
## ipv4-header.h (module 'internet'): uint32_t ns3::Ipv4Header::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## ipv4-header.h (module 'internet'): std::string ns3::Ipv4Header::DscpTypeToString(ns3::Ipv4Header::DscpType dscp) const [member function]
cls.add_method('DscpTypeToString',
'std::string',
[param('ns3::Ipv4Header::DscpType', 'dscp')],
is_const=True)
## ipv4-header.h (module 'internet'): std::string ns3::Ipv4Header::EcnTypeToString(ns3::Ipv4Header::EcnType ecn) const [member function]
cls.add_method('EcnTypeToString',
'std::string',
[param('ns3::Ipv4Header::EcnType', 'ecn')],
is_const=True)
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::EnableChecksum() [member function]
cls.add_method('EnableChecksum',
'void',
[])
## ipv4-header.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Header::GetDestination() const [member function]
cls.add_method('GetDestination',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::DscpType ns3::Ipv4Header::GetDscp() const [member function]
cls.add_method('GetDscp',
'ns3::Ipv4Header::DscpType',
[],
is_const=True)
## ipv4-header.h (module 'internet'): ns3::Ipv4Header::EcnType ns3::Ipv4Header::GetEcn() const [member function]
cls.add_method('GetEcn',
'ns3::Ipv4Header::EcnType',
[],
is_const=True)
## ipv4-header.h (module 'internet'): uint16_t ns3::Ipv4Header::GetFragmentOffset() const [member function]
cls.add_method('GetFragmentOffset',
'uint16_t',
[],
is_const=True)
## ipv4-header.h (module 'internet'): uint16_t ns3::Ipv4Header::GetIdentification() const [member function]
cls.add_method('GetIdentification',
'uint16_t',
[],
is_const=True)
## ipv4-header.h (module 'internet'): ns3::TypeId ns3::Ipv4Header::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## ipv4-header.h (module 'internet'): uint16_t ns3::Ipv4Header::GetPayloadSize() const [member function]
cls.add_method('GetPayloadSize',
'uint16_t',
[],
is_const=True)
## ipv4-header.h (module 'internet'): uint8_t ns3::Ipv4Header::GetProtocol() const [member function]
cls.add_method('GetProtocol',
'uint8_t',
[],
is_const=True)
## ipv4-header.h (module 'internet'): uint32_t ns3::Ipv4Header::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## ipv4-header.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Header::GetSource() const [member function]
cls.add_method('GetSource',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-header.h (module 'internet'): uint8_t ns3::Ipv4Header::GetTos() const [member function]
cls.add_method('GetTos',
'uint8_t',
[],
is_const=True)
## ipv4-header.h (module 'internet'): uint8_t ns3::Ipv4Header::GetTtl() const [member function]
cls.add_method('GetTtl',
'uint8_t',
[],
is_const=True)
## ipv4-header.h (module 'internet'): static ns3::TypeId ns3::Ipv4Header::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv4-header.h (module 'internet'): bool ns3::Ipv4Header::IsChecksumOk() const [member function]
cls.add_method('IsChecksumOk',
'bool',
[],
is_const=True)
## ipv4-header.h (module 'internet'): bool ns3::Ipv4Header::IsDontFragment() const [member function]
cls.add_method('IsDontFragment',
'bool',
[],
is_const=True)
## ipv4-header.h (module 'internet'): bool ns3::Ipv4Header::IsLastFragment() const [member function]
cls.add_method('IsLastFragment',
'bool',
[],
is_const=True)
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetDestination(ns3::Ipv4Address destination) [member function]
cls.add_method('SetDestination',
'void',
[param('ns3::Ipv4Address', 'destination')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetDontFragment() [member function]
cls.add_method('SetDontFragment',
'void',
[])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetDscp(ns3::Ipv4Header::DscpType dscp) [member function]
cls.add_method('SetDscp',
'void',
[param('ns3::Ipv4Header::DscpType', 'dscp')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetEcn(ns3::Ipv4Header::EcnType ecn) [member function]
cls.add_method('SetEcn',
'void',
[param('ns3::Ipv4Header::EcnType', 'ecn')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetFragmentOffset(uint16_t offsetBytes) [member function]
cls.add_method('SetFragmentOffset',
'void',
[param('uint16_t', 'offsetBytes')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetIdentification(uint16_t identification) [member function]
cls.add_method('SetIdentification',
'void',
[param('uint16_t', 'identification')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetLastFragment() [member function]
cls.add_method('SetLastFragment',
'void',
[])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetMayFragment() [member function]
cls.add_method('SetMayFragment',
'void',
[])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetMoreFragments() [member function]
cls.add_method('SetMoreFragments',
'void',
[])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetPayloadSize(uint16_t size) [member function]
cls.add_method('SetPayloadSize',
'void',
[param('uint16_t', 'size')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetProtocol(uint8_t num) [member function]
cls.add_method('SetProtocol',
'void',
[param('uint8_t', 'num')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetSource(ns3::Ipv4Address source) [member function]
cls.add_method('SetSource',
'void',
[param('ns3::Ipv4Address', 'source')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetTos(uint8_t tos) [member function]
cls.add_method('SetTos',
'void',
[param('uint8_t', 'tos')])
## ipv4-header.h (module 'internet'): void ns3::Ipv4Header::SetTtl(uint8_t ttl) [member function]
cls.add_method('SetTtl',
'void',
[param('uint8_t', 'ttl')])
return
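# Building an Ipv4Header from Python with the setters registered above
# (sketch, assuming `ns.internet` / `ns.network` imports; values illustrative):
#
#   import ns.internet, ns.network
#   hdr = ns.internet.Ipv4Header()
#   hdr.SetSource(ns.network.Ipv4Address("10.1.1.1"))
#   hdr.SetDestination(ns.network.Ipv4Address("10.1.1.2"))
#   hdr.SetProtocol(17)        # UDP
#   hdr.SetTtl(64)
#   hdr.SetPayloadSize(100)
#   hdr.GetSerializedSize()    # -> 20 bytes for an option-less IPv4 header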
def register_Ns3Object_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::Object() [constructor]
cls.add_constructor([])
## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
cls.add_method('AggregateObject',
'void',
[param('ns3::Ptr< ns3::Object >', 'other')])
## object.h (module 'core'): void ns3::Object::Dispose() [member function]
cls.add_method('Dispose',
'void',
[])
## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
cls.add_method('GetAggregateIterator',
'ns3::Object::AggregateIterator',
[],
is_const=True)
## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object.h (module 'core'): void ns3::Object::Initialize() [member function]
cls.add_method('Initialize',
'void',
[])
## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
cls.add_constructor([param('ns3::Object const &', 'o')],
visibility='protected')
## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::DoInitialize() [member function]
cls.add_method('DoInitialize',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
cls.add_method('NotifyNewAggregate',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
cls.add_constructor([])
## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
cls.add_method('Next',
'ns3::Ptr< ns3::Object const >',
[])
return
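# Walking the objects aggregated onto an ns3::Object uses the iterator
# registered above (sketch, assuming `ns.network`):
#
#   import ns.network
#   node = ns.network.Node()
#   it = node.GetAggregateIterator()
#   while it.HasNext():
#       print(it.Next().GetInstanceTypeId().GetName())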
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Ipv4MulticastRoute_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4MulticastRoute__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter< ns3::Ipv4MulticastRoute > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Ipv4Route_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4Route__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter< ns3::Ipv4Route > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount(ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::SimpleRefCount(ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter< ns3::OutputStreamWrapper > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3Socket_methods(root_module, cls):
## socket.h (module 'network'): ns3::Socket::Socket(ns3::Socket const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Socket const &', 'arg0')])
## socket.h (module 'network'): ns3::Socket::Socket() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): int ns3::Socket::Bind(ns3::Address const & address) [member function]
cls.add_method('Bind',
'int',
[param('ns3::Address const &', 'address')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Bind() [member function]
cls.add_method('Bind',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Bind6() [member function]
cls.add_method('Bind6',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::BindToNetDevice(ns3::Ptr<ns3::NetDevice> netdevice) [member function]
cls.add_method('BindToNetDevice',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'netdevice')],
is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Close() [member function]
cls.add_method('Close',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Connect(ns3::Address const & address) [member function]
cls.add_method('Connect',
'int',
[param('ns3::Address const &', 'address')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): static ns3::Ptr<ns3::Socket> ns3::Socket::CreateSocket(ns3::Ptr<ns3::Node> node, ns3::TypeId tid) [member function]
cls.add_method('CreateSocket',
'ns3::Ptr< ns3::Socket >',
[param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::TypeId', 'tid')],
is_static=True)
## socket.h (module 'network'): bool ns3::Socket::GetAllowBroadcast() const [member function]
cls.add_method('GetAllowBroadcast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Socket::GetBoundNetDevice() [member function]
cls.add_method('GetBoundNetDevice',
'ns3::Ptr< ns3::NetDevice >',
[])
## socket.h (module 'network'): ns3::Socket::SocketErrno ns3::Socket::GetErrno() const [member function]
cls.add_method('GetErrno',
'ns3::Socket::SocketErrno',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpTos() const [member function]
cls.add_method('GetIpTos',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpTtl() const [member function]
cls.add_method('GetIpTtl',
'uint8_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpv6HopLimit() const [member function]
cls.add_method('GetIpv6HopLimit',
'uint8_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpv6Tclass() const [member function]
cls.add_method('GetIpv6Tclass',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Node> ns3::Socket::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::Socket::GetRxAvailable() const [member function]
cls.add_method('GetRxAvailable',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::GetSockName(ns3::Address & address) const [member function]
cls.add_method('GetSockName',
'int',
[param('ns3::Address &', 'address')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): ns3::Socket::SocketType ns3::Socket::GetSocketType() const [member function]
cls.add_method('GetSocketType',
'ns3::Socket::SocketType',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::Socket::GetTxAvailable() const [member function]
cls.add_method('GetTxAvailable',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): static ns3::TypeId ns3::Socket::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpRecvTos() const [member function]
cls.add_method('IsIpRecvTos',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpRecvTtl() const [member function]
cls.add_method('IsIpRecvTtl',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpv6RecvHopLimit() const [member function]
cls.add_method('IsIpv6RecvHopLimit',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpv6RecvTclass() const [member function]
cls.add_method('IsIpv6RecvTclass',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsRecvPktInfo() const [member function]
cls.add_method('IsRecvPktInfo',
'bool',
[],
is_const=True)
## socket.h (module 'network'): int ns3::Socket::Listen() [member function]
cls.add_method('Listen',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::Recv(uint32_t maxSize, uint32_t flags) [member function]
cls.add_method('Recv',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'maxSize'), param('uint32_t', 'flags')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::Recv() [member function]
cls.add_method('Recv',
'ns3::Ptr< ns3::Packet >',
[])
## socket.h (module 'network'): int ns3::Socket::Recv(uint8_t * buf, uint32_t size, uint32_t flags) [member function]
cls.add_method('Recv',
'int',
[param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')])
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::RecvFrom(uint32_t maxSize, uint32_t flags, ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'maxSize'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::RecvFrom(ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'ns3::Ptr< ns3::Packet >',
[param('ns3::Address &', 'fromAddress')])
## socket.h (module 'network'): int ns3::Socket::RecvFrom(uint8_t * buf, uint32_t size, uint32_t flags, ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'int',
[param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')])
## socket.h (module 'network'): int ns3::Socket::Send(ns3::Ptr<ns3::Packet> p, uint32_t flags) [member function]
cls.add_method('Send',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Send(ns3::Ptr<ns3::Packet> p) [member function]
cls.add_method('Send',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p')])
## socket.h (module 'network'): int ns3::Socket::Send(uint8_t const * buf, uint32_t size, uint32_t flags) [member function]
cls.add_method('Send',
'int',
[param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')])
## socket.h (module 'network'): int ns3::Socket::SendTo(ns3::Ptr<ns3::Packet> p, uint32_t flags, ns3::Address const & toAddress) [member function]
cls.add_method('SendTo',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags'), param('ns3::Address const &', 'toAddress')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::SendTo(uint8_t const * buf, uint32_t size, uint32_t flags, ns3::Address const & address) [member function]
cls.add_method('SendTo',
'int',
[param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address const &', 'address')])
## socket.h (module 'network'): void ns3::Socket::SetAcceptCallback(ns3::Callback<bool, ns3::Ptr<ns3::Socket>, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionRequest, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> newConnectionCreated) [member function]
cls.add_method('SetAcceptCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionRequest'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'newConnectionCreated')])
## socket.h (module 'network'): bool ns3::Socket::SetAllowBroadcast(bool allowBroadcast) [member function]
cls.add_method('SetAllowBroadcast',
'bool',
[param('bool', 'allowBroadcast')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::SetCloseCallbacks(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> normalClose, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> errorClose) [member function]
cls.add_method('SetCloseCallbacks',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'normalClose'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'errorClose')])
## socket.h (module 'network'): void ns3::Socket::SetConnectCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionSucceeded, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionFailed) [member function]
cls.add_method('SetConnectCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionSucceeded'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionFailed')])
## socket.h (module 'network'): void ns3::Socket::SetDataSentCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> dataSent) [member function]
cls.add_method('SetDataSentCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'dataSent')])
## socket.h (module 'network'): void ns3::Socket::SetIpRecvTos(bool ipv4RecvTos) [member function]
cls.add_method('SetIpRecvTos',
'void',
[param('bool', 'ipv4RecvTos')])
## socket.h (module 'network'): void ns3::Socket::SetIpRecvTtl(bool ipv4RecvTtl) [member function]
cls.add_method('SetIpRecvTtl',
'void',
[param('bool', 'ipv4RecvTtl')])
## socket.h (module 'network'): void ns3::Socket::SetIpTos(uint8_t ipTos) [member function]
cls.add_method('SetIpTos',
'void',
[param('uint8_t', 'ipTos')])
## socket.h (module 'network'): void ns3::Socket::SetIpTtl(uint8_t ipTtl) [member function]
cls.add_method('SetIpTtl',
'void',
[param('uint8_t', 'ipTtl')],
is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::SetIpv6HopLimit(uint8_t ipHopLimit) [member function]
cls.add_method('SetIpv6HopLimit',
'void',
[param('uint8_t', 'ipHopLimit')],
is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::SetIpv6RecvHopLimit(bool ipv6RecvHopLimit) [member function]
cls.add_method('SetIpv6RecvHopLimit',
'void',
[param('bool', 'ipv6RecvHopLimit')])
## socket.h (module 'network'): void ns3::Socket::SetIpv6RecvTclass(bool ipv6RecvTclass) [member function]
cls.add_method('SetIpv6RecvTclass',
'void',
[param('bool', 'ipv6RecvTclass')])
## socket.h (module 'network'): void ns3::Socket::SetIpv6Tclass(int ipTclass) [member function]
cls.add_method('SetIpv6Tclass',
'void',
[param('int', 'ipTclass')])
## socket.h (module 'network'): void ns3::Socket::SetRecvCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> arg0) [member function]
cls.add_method('SetRecvCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'arg0')])
## socket.h (module 'network'): void ns3::Socket::SetRecvPktInfo(bool flag) [member function]
cls.add_method('SetRecvPktInfo',
'void',
[param('bool', 'flag')])
## socket.h (module 'network'): void ns3::Socket::SetSendCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> sendCb) [member function]
cls.add_method('SetSendCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'sendCb')])
## socket.h (module 'network'): int ns3::Socket::ShutdownRecv() [member function]
cls.add_method('ShutdownRecv',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::ShutdownSend() [member function]
cls.add_method('ShutdownSend',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## socket.h (module 'network'): bool ns3::Socket::IsManualIpTos() const [member function]
cls.add_method('IsManualIpTos',
'bool',
[],
is_const=True, visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::IsManualIpTtl() const [member function]
cls.add_method('IsManualIpTtl',
'bool',
[],
is_const=True, visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::IsManualIpv6HopLimit() const [member function]
cls.add_method('IsManualIpv6HopLimit',
'bool',
[],
is_const=True, visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::IsManualIpv6Tclass() const [member function]
cls.add_method('IsManualIpv6Tclass',
'bool',
[],
is_const=True, visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyConnectionFailed() [member function]
cls.add_method('NotifyConnectionFailed',
'void',
[],
visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::NotifyConnectionRequest(ns3::Address const & from) [member function]
cls.add_method('NotifyConnectionRequest',
'bool',
[param('ns3::Address const &', 'from')],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyConnectionSucceeded() [member function]
cls.add_method('NotifyConnectionSucceeded',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyDataRecv() [member function]
cls.add_method('NotifyDataRecv',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyDataSent(uint32_t size) [member function]
cls.add_method('NotifyDataSent',
'void',
[param('uint32_t', 'size')],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyErrorClose() [member function]
cls.add_method('NotifyErrorClose',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyNewConnectionCreated(ns3::Ptr<ns3::Socket> socket, ns3::Address const & from) [member function]
cls.add_method('NotifyNewConnectionCreated',
'void',
[param('ns3::Ptr< ns3::Socket >', 'socket'), param('ns3::Address const &', 'from')],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyNormalClose() [member function]
cls.add_method('NotifyNormalClose',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifySend(uint32_t spaceAvailable) [member function]
cls.add_method('NotifySend',
'void',
[param('uint32_t', 'spaceAvailable')],
visibility='protected')
return
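
# NOTE (editorial): the registration above only declares the wrapped API; a
# rough sketch of how it surfaces on the Python side is given below. This is
# illustrative only: it assumes the built bindings import as `ns3`, that
# `node` carries a UDP stack, and that Python callables are accepted where
# ns3::Callback parameters appear (as in the stock ns-3 Python examples).
def _example_socket_usage(node):
    import ns3
    tid = ns3.TypeId.LookupByName('ns3::UdpSocketFactory')
    sock = ns3.Socket.CreateSocket(node, tid)  # static factory registered above
    sock.Bind()                                # parameterless Bind() overload
    def on_recv(s):
        pkt = s.Recv()                         # parameterless Recv() overload
        return pkt
    sock.SetRecvCallback(on_recv)
    return sock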
def register_Ns3SocketAddressTag_methods(root_module, cls):
## socket.h (module 'network'): ns3::SocketAddressTag::SocketAddressTag(ns3::SocketAddressTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketAddressTag const &', 'arg0')])
## socket.h (module 'network'): ns3::SocketAddressTag::SocketAddressTag() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): void ns3::SocketAddressTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## socket.h (module 'network'): ns3::Address ns3::SocketAddressTag::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_const=True)
## socket.h (module 'network'): ns3::TypeId ns3::SocketAddressTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::SocketAddressTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): static ns3::TypeId ns3::SocketAddressTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): void ns3::SocketAddressTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketAddressTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketAddressTag::SetAddress(ns3::Address addr) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'addr')])
return
def register_Ns3SocketIpTosTag_methods(root_module, cls):
## socket.h (module 'network'): ns3::SocketIpTosTag::SocketIpTosTag(ns3::SocketIpTosTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketIpTosTag const &', 'arg0')])
## socket.h (module 'network'): ns3::SocketIpTosTag::SocketIpTosTag() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): void ns3::SocketIpTosTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## socket.h (module 'network'): ns3::TypeId ns3::SocketIpTosTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::SocketIpTosTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::SocketIpTosTag::GetTos() const [member function]
cls.add_method('GetTos',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): static ns3::TypeId ns3::SocketIpTosTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): void ns3::SocketIpTosTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpTosTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpTosTag::SetTos(uint8_t tos) [member function]
cls.add_method('SetTos',
'void',
[param('uint8_t', 'tos')])
return
def register_Ns3SocketIpTtlTag_methods(root_module, cls):
## socket.h (module 'network'): ns3::SocketIpTtlTag::SocketIpTtlTag(ns3::SocketIpTtlTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketIpTtlTag const &', 'arg0')])
## socket.h (module 'network'): ns3::SocketIpTtlTag::SocketIpTtlTag() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): void ns3::SocketIpTtlTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## socket.h (module 'network'): ns3::TypeId ns3::SocketIpTtlTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::SocketIpTtlTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::SocketIpTtlTag::GetTtl() const [member function]
cls.add_method('GetTtl',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): static ns3::TypeId ns3::SocketIpTtlTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): void ns3::SocketIpTtlTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpTtlTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpTtlTag::SetTtl(uint8_t ttl) [member function]
cls.add_method('SetTtl',
'void',
[param('uint8_t', 'ttl')])
return
def register_Ns3SocketIpv6HopLimitTag_methods(root_module, cls):
## socket.h (module 'network'): ns3::SocketIpv6HopLimitTag::SocketIpv6HopLimitTag(ns3::SocketIpv6HopLimitTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketIpv6HopLimitTag const &', 'arg0')])
## socket.h (module 'network'): ns3::SocketIpv6HopLimitTag::SocketIpv6HopLimitTag() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): void ns3::SocketIpv6HopLimitTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::SocketIpv6HopLimitTag::GetHopLimit() const [member function]
cls.add_method('GetHopLimit',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): ns3::TypeId ns3::SocketIpv6HopLimitTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::SocketIpv6HopLimitTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): static ns3::TypeId ns3::SocketIpv6HopLimitTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): void ns3::SocketIpv6HopLimitTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpv6HopLimitTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpv6HopLimitTag::SetHopLimit(uint8_t hopLimit) [member function]
cls.add_method('SetHopLimit',
'void',
[param('uint8_t', 'hopLimit')])
return
def register_Ns3SocketIpv6TclassTag_methods(root_module, cls):
## socket.h (module 'network'): ns3::SocketIpv6TclassTag::SocketIpv6TclassTag(ns3::SocketIpv6TclassTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketIpv6TclassTag const &', 'arg0')])
## socket.h (module 'network'): ns3::SocketIpv6TclassTag::SocketIpv6TclassTag() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): void ns3::SocketIpv6TclassTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## socket.h (module 'network'): ns3::TypeId ns3::SocketIpv6TclassTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::SocketIpv6TclassTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::SocketIpv6TclassTag::GetTclass() const [member function]
cls.add_method('GetTclass',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): static ns3::TypeId ns3::SocketIpv6TclassTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): void ns3::SocketIpv6TclassTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpv6TclassTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpv6TclassTag::SetTclass(uint8_t tclass) [member function]
cls.add_method('SetTclass',
'void',
[param('uint8_t', 'tclass')])
return
def register_Ns3SocketSetDontFragmentTag_methods(root_module, cls):
## socket.h (module 'network'): ns3::SocketSetDontFragmentTag::SocketSetDontFragmentTag(ns3::SocketSetDontFragmentTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketSetDontFragmentTag const &', 'arg0')])
## socket.h (module 'network'): ns3::SocketSetDontFragmentTag::SocketSetDontFragmentTag() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Disable() [member function]
cls.add_method('Disable',
'void',
[])
## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Enable() [member function]
cls.add_method('Enable',
'void',
[])
## socket.h (module 'network'): ns3::TypeId ns3::SocketSetDontFragmentTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::SocketSetDontFragmentTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): static ns3::TypeId ns3::SocketSetDontFragmentTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): bool ns3::SocketSetDontFragmentTag::IsEnabled() const [member function]
cls.add_method('IsEnabled',
'bool',
[],
is_const=True)
## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
return
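
# NOTE (editorial): SocketAddressTag, SocketIpTosTag, SocketIpTtlTag,
# SocketIpv6HopLimitTag, SocketIpv6TclassTag and SocketSetDontFragmentTag are
# all ns3::Tag subclasses, so each block above repeats the same Tag interface
# (Deserialize/Serialize over a ns3::TagBuffer, GetSerializedSize, Print,
# GetTypeId/GetInstanceTypeId) plus tag-specific accessors. A hypothetical
# new tag would be registered the same way; 'GetValue' below is a placeholder
# accessor, not a real ns-3 method.
def _example_tag_registration(cls):
    cls.add_constructor([])
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_const=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetValue', 'uint8_t', [], is_const=True)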
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
cls.add_constructor([])
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Connect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('ConnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Disconnect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('DisconnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
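
# NOTE (editorial): in the Connect/Disconnect registrations above,
# transfer_ownership=False on the 'obj' pointer parameter tells PyBindGen
# that the C++ callee does not take ownership of the ObjectBase*, so the
# generated wrapper keeps the Python-held object alive and does not hand
# it over.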
def register_Ns3Trailer_methods(root_module, cls):
cls.add_output_stream_operator()
## trailer.h (module 'network'): ns3::Trailer::Trailer() [constructor]
cls.add_constructor([])
## trailer.h (module 'network'): ns3::Trailer::Trailer(ns3::Trailer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Trailer const &', 'arg0')])
## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator end) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'end')],
is_pure_virtual=True, is_virtual=True)
## trailer.h (module 'network'): uint32_t ns3::Trailer::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trailer.h (module 'network'): static ns3::TypeId ns3::Trailer::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## trailer.h (module 'network'): void ns3::Trailer::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trailer.h (module 'network'): void ns3::Trailer::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeAccessor_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
cls.add_method('Get',
'bool',
[param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
cls.add_method('HasGetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
cls.add_method('HasSetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
cls.add_method('Set',
'bool',
[param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeChecker_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
cls.add_method('Check',
'bool',
[param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
cls.add_method('Copy',
'bool',
[param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
cls.add_method('CreateValidValue',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::AttributeValue const &', 'value')],
is_const=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
cls.add_method('GetUnderlyingTypeInformation',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
cls.add_method('GetValueTypeName',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
cls.add_method('HasUnderlyingTypeInformation',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
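
# NOTE (editorial): AttributeAccessor, AttributeChecker and AttributeValue
# form ns-3's attribute triad: the accessor reads/writes an attribute on an
# ObjectBase, the checker validates and copies candidate values, and the
# value itself round-trips through SerializeToString/DeserializeFromString.
# All three are abstract here (is_pure_virtual=True), so only the concrete
# *Value/*Checker pairs registered elsewhere in this file are instantiable
# from Python.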
def register_Ns3CallbackChecker_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
return
def register_Ns3CallbackImplBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackValue_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
cls.add_method('Set',
'void',
[param('ns3::CallbackBase', 'base')])
return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, visibility='private', is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
visibility='private', is_virtual=True)
## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3Ipv4_methods(root_module, cls):
## ipv4.h (module 'internet'): ns3::Ipv4::Ipv4(ns3::Ipv4 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4 const &', 'arg0')])
## ipv4.h (module 'internet'): ns3::Ipv4::Ipv4() [constructor]
cls.add_constructor([])
## ipv4.h (module 'internet'): bool ns3::Ipv4::AddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('AddAddress',
'bool',
[param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('AddInterface',
'uint32_t',
[param('ns3::Ptr< ns3::NetDevice >', 'device')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ptr<ns3::Socket> ns3::Ipv4::CreateRawSocket() [member function]
cls.add_method('CreateRawSocket',
'ns3::Ptr< ns3::Socket >',
[],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::DeleteRawSocket(ns3::Ptr<ns3::Socket> socket) [member function]
cls.add_method('DeleteRawSocket',
'void',
[param('ns3::Ptr< ns3::Socket >', 'socket')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4::GetAddress(uint32_t interface, uint32_t addressIndex) const [member function]
cls.add_method('GetAddress',
'ns3::Ipv4InterfaceAddress',
[param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForAddress(ns3::Ipv4Address address) const [member function]
cls.add_method('GetInterfaceForAddress',
'int32_t',
[param('ns3::Ipv4Address', 'address')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
cls.add_method('GetInterfaceForDevice',
'int32_t',
[param('ns3::Ptr< ns3::NetDevice const >', 'device')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForPrefix(ns3::Ipv4Address address, ns3::Ipv4Mask mask) const [member function]
cls.add_method('GetInterfaceForPrefix',
'int32_t',
[param('ns3::Ipv4Address', 'address'), param('ns3::Ipv4Mask', 'mask')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): uint16_t ns3::Ipv4::GetMetric(uint32_t interface) const [member function]
cls.add_method('GetMetric',
'uint16_t',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): uint16_t ns3::Ipv4::GetMtu(uint32_t interface) const [member function]
cls.add_method('GetMtu',
'uint16_t',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::GetNAddresses(uint32_t interface) const [member function]
cls.add_method('GetNAddresses',
'uint32_t',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::GetNInterfaces() const [member function]
cls.add_method('GetNInterfaces',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4::GetNetDevice(uint32_t interface) [member function]
cls.add_method('GetNetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4::GetProtocol(int protocolNumber) const [member function]
cls.add_method('GetProtocol',
'ns3::Ptr< ns3::IpL4Protocol >',
[param('int', 'protocolNumber')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4::GetRoutingProtocol() const [member function]
cls.add_method('GetRoutingProtocol',
'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): static ns3::TypeId ns3::Ipv4::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
cls.add_method('Insert',
'void',
[param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::IsDestinationAddress(ns3::Ipv4Address address, uint32_t iif) const [member function]
cls.add_method('IsDestinationAddress',
'bool',
[param('ns3::Ipv4Address', 'address'), param('uint32_t', 'iif')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::IsForwarding(uint32_t interface) const [member function]
cls.add_method('IsForwarding',
'bool',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::IsUp(uint32_t interface) const [member function]
cls.add_method('IsUp',
'bool',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::RemoveAddress(uint32_t interface, uint32_t addressIndex) [member function]
cls.add_method('RemoveAddress',
'bool',
[param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::RemoveAddress(uint32_t interface, ns3::Ipv4Address address) [member function]
cls.add_method('RemoveAddress',
'bool',
[param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'address')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4::SelectSourceAddress(ns3::Ptr<const ns3::NetDevice> device, ns3::Ipv4Address dst, ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e scope) [member function]
cls.add_method('SelectSourceAddress',
'ns3::Ipv4Address',
[param('ns3::Ptr< ns3::NetDevice const >', 'device'), param('ns3::Ipv4Address', 'dst'), param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Address source, ns3::Ipv4Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv4Route> route) [member function]
cls.add_method('Send',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Address', 'source'), param('ns3::Ipv4Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SendWithHeader(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Header ipHeader, ns3::Ptr<ns3::Ipv4Route> route) [member function]
cls.add_method('SendWithHeader',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Header', 'ipHeader'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetDown(uint32_t interface) [member function]
cls.add_method('SetDown',
'void',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetForwarding(uint32_t interface, bool val) [member function]
cls.add_method('SetForwarding',
'void',
[param('uint32_t', 'interface'), param('bool', 'val')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetMetric(uint32_t interface, uint16_t metric) [member function]
cls.add_method('SetMetric',
'void',
[param('uint32_t', 'interface'), param('uint16_t', 'metric')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetRoutingProtocol(ns3::Ptr<ns3::Ipv4RoutingProtocol> routingProtocol) [member function]
cls.add_method('SetRoutingProtocol',
'void',
[param('ns3::Ptr< ns3::Ipv4RoutingProtocol >', 'routingProtocol')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetUp(uint32_t interface) [member function]
cls.add_method('SetUp',
'void',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
## ipv4.h (module 'internet'): ns3::Ipv4::IF_ANY [variable]
cls.add_static_attribute('IF_ANY', 'uint32_t const', is_const=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::GetIpForward() const [member function]
cls.add_method('GetIpForward',
'bool',
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
## ipv4.h (module 'internet'): bool ns3::Ipv4::GetWeakEsModel() const [member function]
cls.add_method('GetWeakEsModel',
'bool',
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetIpForward(bool forward) [member function]
cls.add_method('SetIpForward',
'void',
[param('bool', 'forward')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## ipv4.h (module 'internet'): void ns3::Ipv4::SetWeakEsModel(bool model) [member function]
cls.add_method('SetWeakEsModel',
'void',
[param('bool', 'model')],
is_pure_virtual=True, visibility='private', is_virtual=True)
return
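
# NOTE (editorial): ns3::Ipv4 is registered as an abstract interface: every
# operation above is flagged is_pure_virtual=True, and GetIpForward /
# SetIpForward / GetWeakEsModel / SetWeakEsModel additionally carry
# visibility='private', mirroring their C++ access level; they are normally
# reached through the IpForward and WeakEsModel attributes rather than
# called directly.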
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker(ns3::Ipv4AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Address const &', 'value')])
return
def register_Ns3Ipv4Interface_methods(root_module, cls):
## ipv4-interface.h (module 'internet'): ns3::Ipv4Interface::Ipv4Interface(ns3::Ipv4Interface const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Interface const &', 'arg0')])
## ipv4-interface.h (module 'internet'): ns3::Ipv4Interface::Ipv4Interface() [constructor]
cls.add_constructor([])
## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::AddAddress(ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('AddAddress',
'bool',
[param('ns3::Ipv4InterfaceAddress', 'address')])
## ipv4-interface.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4Interface::GetAddress(uint32_t index) const [member function]
cls.add_method('GetAddress',
'ns3::Ipv4InterfaceAddress',
[param('uint32_t', 'index')],
is_const=True)
## ipv4-interface.h (module 'internet'): ns3::Ptr<ns3::ArpCache> ns3::Ipv4Interface::GetArpCache() const [member function]
cls.add_method('GetArpCache',
'ns3::Ptr< ns3::ArpCache >',
[],
is_const=True)
## ipv4-interface.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4Interface::GetDevice() const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[],
is_const=True)
## ipv4-interface.h (module 'internet'): uint16_t ns3::Ipv4Interface::GetMetric() const [member function]
cls.add_method('GetMetric',
'uint16_t',
[],
is_const=True)
## ipv4-interface.h (module 'internet'): uint32_t ns3::Ipv4Interface::GetNAddresses() const [member function]
cls.add_method('GetNAddresses',
'uint32_t',
[],
is_const=True)
## ipv4-interface.h (module 'internet'): static ns3::TypeId ns3::Ipv4Interface::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::IsDown() const [member function]
cls.add_method('IsDown',
'bool',
[],
is_const=True)
## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::IsForwarding() const [member function]
cls.add_method('IsForwarding',
'bool',
[],
is_const=True)
## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::IsUp() const [member function]
cls.add_method('IsUp',
'bool',
[],
is_const=True)
## ipv4-interface.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4Interface::RemoveAddress(uint32_t index) [member function]
cls.add_method('RemoveAddress',
'ns3::Ipv4InterfaceAddress',
[param('uint32_t', 'index')])
## ipv4-interface.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4Interface::RemoveAddress(ns3::Ipv4Address address) [member function]
cls.add_method('RemoveAddress',
'ns3::Ipv4InterfaceAddress',
[param('ns3::Ipv4Address', 'address')])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::Send(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Address dest) [member function]
cls.add_method('Send',
'void',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Address', 'dest')])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetArpCache(ns3::Ptr<ns3::ArpCache> arpCache) [member function]
cls.add_method('SetArpCache',
'void',
[param('ns3::Ptr< ns3::ArpCache >', 'arpCache')])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('SetDevice',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetDown() [member function]
cls.add_method('SetDown',
'void',
[])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetForwarding(bool val) [member function]
cls.add_method('SetForwarding',
'void',
[param('bool', 'val')])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetMetric(uint16_t metric) [member function]
cls.add_method('SetMetric',
'void',
[param('uint16_t', 'metric')])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetUp() [member function]
cls.add_method('SetUp',
'void',
[])
## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3Ipv4L3ClickProtocol_methods(root_module, cls):
## ipv4-l3-click-protocol.h (module 'click'): ns3::Ipv4L3ClickProtocol::Ipv4L3ClickProtocol() [constructor]
cls.add_constructor([])
## ipv4-l3-click-protocol.h (module 'click'): ns3::Ipv4L3ClickProtocol::Ipv4L3ClickProtocol(ns3::Ipv4L3ClickProtocol const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4L3ClickProtocol const &', 'arg0')])
return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker(ns3::Ipv4MaskChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])
return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4MaskValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4Mask const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4MaskValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4MaskValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Mask ns3::Ipv4MaskValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Mask',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4MaskValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4MaskValue::Set(ns3::Ipv4Mask const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Mask const &', 'value')])
return
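
# NOTE (editorial): Ipv4AddressChecker/Ipv4AddressValue and
# Ipv4MaskChecker/Ipv4MaskValue follow the usual checker/value pairing: the
# checker needs only its constructors, while the value adds Get/Set and
# overrides Copy/SerializeToString/DeserializeFromString. A rough sketch of
# the resulting Python-side use (illustrative only; assumes the built
# bindings import as `ns3`):
def _example_ipv4_address_value():
    import ns3
    value = ns3.Ipv4AddressValue(ns3.Ipv4Address('10.1.1.1'))
    return value.Get()                         # -> ns3.Ipv4Address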
def register_Ns3Ipv4MulticastRoute_methods(root_module, cls):
## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute::Ipv4MulticastRoute(ns3::Ipv4MulticastRoute const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MulticastRoute const &', 'arg0')])
## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute::Ipv4MulticastRoute() [constructor]
cls.add_constructor([])
## ipv4-route.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4MulticastRoute::GetGroup() const [member function]
cls.add_method('GetGroup',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-route.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4MulticastRoute::GetOrigin() const [member function]
cls.add_method('GetOrigin',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-route.h (module 'internet'): uint32_t ns3::Ipv4MulticastRoute::GetOutputTtl(uint32_t oif) [member function]
cls.add_method('GetOutputTtl',
'uint32_t',
[param('uint32_t', 'oif')],
deprecated=True)
## ipv4-route.h (module 'internet'): std::map<unsigned int, unsigned int, std::less<unsigned int>, std::allocator<std::pair<unsigned int const, unsigned int> > > ns3::Ipv4MulticastRoute::GetOutputTtlMap() const [member function]
cls.add_method('GetOutputTtlMap',
'std::map< unsigned int, unsigned int >',
[],
is_const=True)
## ipv4-route.h (module 'internet'): uint32_t ns3::Ipv4MulticastRoute::GetParent() const [member function]
cls.add_method('GetParent',
'uint32_t',
[],
is_const=True)
## ipv4-route.h (module 'internet'): void ns3::Ipv4MulticastRoute::SetGroup(ns3::Ipv4Address const group) [member function]
cls.add_method('SetGroup',
'void',
[param('ns3::Ipv4Address const', 'group')])
## ipv4-route.h (module 'internet'): void ns3::Ipv4MulticastRoute::SetOrigin(ns3::Ipv4Address const origin) [member function]
cls.add_method('SetOrigin',
'void',
[param('ns3::Ipv4Address const', 'origin')])
## ipv4-route.h (module 'internet'): void ns3::Ipv4MulticastRoute::SetOutputTtl(uint32_t oif, uint32_t ttl) [member function]
cls.add_method('SetOutputTtl',
'void',
[param('uint32_t', 'oif'), param('uint32_t', 'ttl')])
## ipv4-route.h (module 'internet'): void ns3::Ipv4MulticastRoute::SetParent(uint32_t iif) [member function]
cls.add_method('SetParent',
'void',
[param('uint32_t', 'iif')])
## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute::MAX_INTERFACES [variable]
cls.add_static_attribute('MAX_INTERFACES', 'uint32_t const', is_const=True)
## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute::MAX_TTL [variable]
cls.add_static_attribute('MAX_TTL', 'uint32_t const', is_const=True)
return
def register_Ns3Ipv4Route_methods(root_module, cls):
cls.add_output_stream_operator()
## ipv4-route.h (module 'internet'): ns3::Ipv4Route::Ipv4Route(ns3::Ipv4Route const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Route const &', 'arg0')])
## ipv4-route.h (module 'internet'): ns3::Ipv4Route::Ipv4Route() [constructor]
cls.add_constructor([])
## ipv4-route.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Route::GetDestination() const [member function]
cls.add_method('GetDestination',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-route.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Route::GetGateway() const [member function]
cls.add_method('GetGateway',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-route.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4Route::GetOutputDevice() const [member function]
cls.add_method('GetOutputDevice',
'ns3::Ptr< ns3::NetDevice >',
[],
is_const=True)
## ipv4-route.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4Route::GetSource() const [member function]
cls.add_method('GetSource',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-route.h (module 'internet'): void ns3::Ipv4Route::SetDestination(ns3::Ipv4Address dest) [member function]
cls.add_method('SetDestination',
'void',
[param('ns3::Ipv4Address', 'dest')])
## ipv4-route.h (module 'internet'): void ns3::Ipv4Route::SetGateway(ns3::Ipv4Address gw) [member function]
cls.add_method('SetGateway',
'void',
[param('ns3::Ipv4Address', 'gw')])
## ipv4-route.h (module 'internet'): void ns3::Ipv4Route::SetOutputDevice(ns3::Ptr<ns3::NetDevice> outputDevice) [member function]
cls.add_method('SetOutputDevice',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'outputDevice')])
## ipv4-route.h (module 'internet'): void ns3::Ipv4Route::SetSource(ns3::Ipv4Address src) [member function]
cls.add_method('SetSource',
'void',
[param('ns3::Ipv4Address', 'src')])
return
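# Illustrative sketch (assumed usage, not produced by the API scan): once
# the bindings are compiled, the Ipv4Route accessors registered above map
# onto plain Python calls, e.g.
#   route = ns.internet.Ipv4Route()
#   route.SetDestination(ns.network.Ipv4Address("10.1.1.2"))
#   route.SetGateway(ns.network.Ipv4Address("10.1.1.1"))
#   print(route)  # rendered through the output-stream operator added above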
def register_Ns3Ipv4RoutingProtocol_methods(root_module, cls):
## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol::Ipv4RoutingProtocol() [constructor]
cls.add_constructor([])
## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol::Ipv4RoutingProtocol(ns3::Ipv4RoutingProtocol const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4RoutingProtocol const &', 'arg0')])
## ipv4-routing-protocol.h (module 'internet'): static ns3::TypeId ns3::Ipv4RoutingProtocol::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyAddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('NotifyAddAddress',
'void',
[param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
is_pure_virtual=True, is_virtual=True)
## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyInterfaceDown(uint32_t interface) [member function]
cls.add_method('NotifyInterfaceDown',
'void',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyInterfaceUp(uint32_t interface) [member function]
cls.add_method('NotifyInterfaceUp',
'void',
[param('uint32_t', 'interface')],
is_pure_virtual=True, is_virtual=True)
## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyRemoveAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('NotifyRemoveAddress',
'void',
[param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
is_pure_virtual=True, is_virtual=True)
## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::PrintRoutingTable(ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
cls.add_method('PrintRoutingTable',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## ipv4-routing-protocol.h (module 'internet'): bool ns3::Ipv4RoutingProtocol::RouteInput(ns3::Ptr<const ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<const ns3::NetDevice> idev, ns3::Callback<void,ns3::Ptr<ns3::Ipv4Route>,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ucb, ns3::Callback<void,ns3::Ptr<ns3::Ipv4MulticastRoute>,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> mcb, ns3::Callback<void,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,unsigned int,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> lcb, ns3::Callback<void,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,ns3::Socket::SocketErrno,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ecb) [member function]
cls.add_method('RouteInput',
'bool',
[param('ns3::Ptr< ns3::Packet const >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice const >', 'idev'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'mcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'lcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')],
is_pure_virtual=True, is_virtual=True)
## ipv4-routing-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4Route> ns3::Ipv4RoutingProtocol::RouteOutput(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<ns3::NetDevice> oif, ns3::Socket::SocketErrno & sockerr) [member function]
cls.add_method('RouteOutput',
'ns3::Ptr< ns3::Ipv4Route >',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice >', 'oif'), param('ns3::Socket::SocketErrno &', 'sockerr')],
is_pure_virtual=True, is_virtual=True)
## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::SetIpv4(ns3::Ptr<ns3::Ipv4> ipv4) [member function]
cls.add_method('SetIpv4',
'void',
[param('ns3::Ptr< ns3::Ipv4 >', 'ipv4')],
is_pure_virtual=True, is_virtual=True)
return
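# Note (assumed semantics, not produced by the API scan): the four Callback
# parameters of RouteInput above are ns-3's standard routing hooks -- ucb
# (unicast forward), mcb (multicast forward), lcb (local deliver) and ecb
# (error). Every method in this interface is pure virtual, so concrete
# protocols such as the click module's Ipv4ClickRouting must override them.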
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker(ns3::Ipv6AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Address const &', 'value')])
return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker(ns3::Ipv6PrefixChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])
return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6PrefixValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6Prefix const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6PrefixValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6PrefixValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix ns3::Ipv6PrefixValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Prefix',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6PrefixValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6PrefixValue::Set(ns3::Ipv6Prefix const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Prefix const &', 'value')])
return
def register_Ns3NetDevice_methods(root_module, cls):
## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
cls.add_constructor([])
## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('AddLinkChangeCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::Channel >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
cls.add_method('GetIfIndex',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
cls.add_method('GetMtu',
'uint16_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv4Address', 'multicastGroup')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv6Address', 'addr')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
cls.add_method('IsBridge',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
cls.add_method('IsLinkUp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
cls.add_method('IsPointToPoint',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
cls.add_method('NeedsArp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('Send',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('SendFrom',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'address')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
cls.add_method('SetIfIndex',
'void',
[param('uint32_t const', 'index')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
cls.add_method('SetMtu',
'bool',
[param('uint16_t const', 'mtu')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::Callback<bool,ns3::Ptr<ns3::NetDevice>,ns3::Ptr<const ns3::Packet>,short unsigned int,const ns3::Address&,const ns3::Address&,ns3::NetDevice::PacketType,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
cls.add_method('SetPromiscReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::Callback<bool,ns3::Ptr<ns3::NetDevice>,ns3::Ptr<const ns3::Packet>,short unsigned int,const ns3::Address&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
cls.add_method('SetReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3NixVector_methods(root_module, cls):
cls.add_output_stream_operator()
## nix-vector.h (module 'network'): ns3::NixVector::NixVector() [constructor]
cls.add_constructor([])
## nix-vector.h (module 'network'): ns3::NixVector::NixVector(ns3::NixVector const & o) [copy constructor]
cls.add_constructor([param('ns3::NixVector const &', 'o')])
## nix-vector.h (module 'network'): void ns3::NixVector::AddNeighborIndex(uint32_t newBits, uint32_t numberOfBits) [member function]
cls.add_method('AddNeighborIndex',
'void',
[param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::BitCount(uint32_t numberOfNeighbors) const [member function]
cls.add_method('BitCount',
'uint32_t',
[param('uint32_t', 'numberOfNeighbors')],
is_const=True)
## nix-vector.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::NixVector::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::NixVector >',
[],
is_const=True)
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Deserialize(uint32_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint32_t const *', 'buffer'), param('uint32_t', 'size')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::ExtractNeighborIndex(uint32_t numberOfBits) [member function]
cls.add_method('ExtractNeighborIndex',
'uint32_t',
[param('uint32_t', 'numberOfBits')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetRemainingBits() [member function]
cls.add_method('GetRemainingBits',
'uint32_t',
[])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Serialize(uint32_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
def register_Ns3OutputStreamWrapper_methods(root_module, cls):
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(ns3::OutputStreamWrapper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::OutputStreamWrapper const &', 'arg0')])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(std::string filename, std::_Ios_Openmode filemode) [constructor]
cls.add_constructor([param('std::string', 'filename'), param('std::_Ios_Openmode', 'filemode')])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(std::ostream * os) [constructor]
cls.add_constructor([param('std::ostream *', 'os')])
## output-stream-wrapper.h (module 'network'): std::ostream * ns3::OutputStreamWrapper::GetStream() [member function]
cls.add_method('GetStream',
'std::ostream *',
[])
return
def register_Ns3Packet_methods(root_module, cls):
cls.add_output_stream_operator()
## packet.h (module 'network'): ns3::Packet::Packet() [constructor]
cls.add_constructor([])
## packet.h (module 'network'): ns3::Packet::Packet(ns3::Packet const & o) [copy constructor]
cls.add_constructor([param('ns3::Packet const &', 'o')])
## packet.h (module 'network'): ns3::Packet::Packet(uint32_t size) [constructor]
cls.add_constructor([param('uint32_t', 'size')])
## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size, bool magic) [constructor]
cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')])
## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size) [constructor]
cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::AddAtEnd(ns3::Ptr<const ns3::Packet> packet) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## packet.h (module 'network'): void ns3::Packet::AddByteTag(ns3::Tag const & tag) const [member function]
cls.add_method('AddByteTag',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::AddHeader(ns3::Header const & header) [member function]
cls.add_method('AddHeader',
'void',
[param('ns3::Header const &', 'header')])
## packet.h (module 'network'): void ns3::Packet::AddPacketTag(ns3::Tag const & tag) const [member function]
cls.add_method('AddPacketTag',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::AddPaddingAtEnd(uint32_t size) [member function]
cls.add_method('AddPaddingAtEnd',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::AddTrailer(ns3::Trailer const & trailer) [member function]
cls.add_method('AddTrailer',
'void',
[param('ns3::Trailer const &', 'trailer')])
## packet.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::Packet::BeginItem() const [member function]
cls.add_method('BeginItem',
'ns3::PacketMetadata::ItemIterator',
[],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::Packet >',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::CopyData(uint8_t * buffer, uint32_t size) const [member function]
cls.add_method('CopyData',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::CopyData(std::ostream * os, uint32_t size) const [member function]
cls.add_method('CopyData',
'void',
[param('std::ostream *', 'os'), param('uint32_t', 'size')],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::CreateFragment(uint32_t start, uint32_t length) const [member function]
cls.add_method('CreateFragment',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'start'), param('uint32_t', 'length')],
is_const=True)
## packet.h (module 'network'): static void ns3::Packet::EnableChecking() [member function]
cls.add_method('EnableChecking',
'void',
[],
is_static=True)
## packet.h (module 'network'): static void ns3::Packet::EnablePrinting() [member function]
cls.add_method('EnablePrinting',
'void',
[],
is_static=True)
## packet.h (module 'network'): bool ns3::Packet::FindFirstMatchingByteTag(ns3::Tag & tag) const [member function]
cls.add_method('FindFirstMatchingByteTag',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::ByteTagIterator ns3::Packet::GetByteTagIterator() const [member function]
cls.add_method('GetByteTagIterator',
'ns3::ByteTagIterator',
[],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::Packet::GetNixVector() const [member function]
cls.add_method('GetNixVector',
'ns3::Ptr< ns3::NixVector >',
[],
is_const=True)
## packet.h (module 'network'): ns3::PacketTagIterator ns3::Packet::GetPacketTagIterator() const [member function]
cls.add_method('GetPacketTagIterator',
'ns3::PacketTagIterator',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint64_t ns3::Packet::GetUid() const [member function]
cls.add_method('GetUid',
'uint64_t',
[],
is_const=True)
## packet.h (module 'network'): uint8_t const * ns3::Packet::PeekData() const [member function]
cls.add_method('PeekData',
'uint8_t const *',
[],
deprecated=True, is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header) const [member function]
cls.add_method('PeekHeader',
'uint32_t',
[param('ns3::Header &', 'header')],
is_const=True)
## packet.h (module 'network'): bool ns3::Packet::PeekPacketTag(ns3::Tag & tag) const [member function]
cls.add_method('PeekPacketTag',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::PeekTrailer(ns3::Trailer & trailer) [member function]
cls.add_method('PeekTrailer',
'uint32_t',
[param('ns3::Trailer &', 'trailer')])
## packet.h (module 'network'): void ns3::Packet::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::PrintByteTags(std::ostream & os) const [member function]
cls.add_method('PrintByteTags',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::PrintPacketTags(std::ostream & os) const [member function]
cls.add_method('PrintPacketTags',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::RemoveAllByteTags() [member function]
cls.add_method('RemoveAllByteTags',
'void',
[])
## packet.h (module 'network'): void ns3::Packet::RemoveAllPacketTags() [member function]
cls.add_method('RemoveAllPacketTags',
'void',
[])
## packet.h (module 'network'): void ns3::Packet::RemoveAtEnd(uint32_t size) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::RemoveAtStart(uint32_t size) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header) [member function]
cls.add_method('RemoveHeader',
'uint32_t',
[param('ns3::Header &', 'header')])
## packet.h (module 'network'): bool ns3::Packet::RemovePacketTag(ns3::Tag & tag) [member function]
cls.add_method('RemovePacketTag',
'bool',
[param('ns3::Tag &', 'tag')])
## packet.h (module 'network'): uint32_t ns3::Packet::RemoveTrailer(ns3::Trailer & trailer) [member function]
cls.add_method('RemoveTrailer',
'uint32_t',
[param('ns3::Trailer &', 'trailer')])
## packet.h (module 'network'): bool ns3::Packet::ReplacePacketTag(ns3::Tag & tag) [member function]
cls.add_method('ReplacePacketTag',
'bool',
[param('ns3::Tag &', 'tag')])
## packet.h (module 'network'): uint32_t ns3::Packet::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::SetNixVector(ns3::Ptr<ns3::NixVector> arg0) [member function]
cls.add_method('SetNixVector',
'void',
[param('ns3::Ptr< ns3::NixVector >', 'arg0')])
return
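# Illustrative sketch (assumed usage, not produced by the API scan): the
# Packet API registered above supports the usual ns-3 header workflow from
# Python, e.g.
#   pkt = ns.network.Packet(128)        # packet with 128 dummy payload bytes
#   pkt.AddHeader(hdr)                  # hdr: any bound ns3::Header subclass
#   frag = pkt.CreateFragment(0, 64)    # first 64 bytes as a new packet
#   size = pkt.GetSize()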
def register_Ns3TypeIdChecker_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
return
def register_Ns3TypeIdValue_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
cls.add_constructor([param('ns3::TypeId const &', 'value')])
## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
cls.add_method('Get',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::TypeId const &', 'value')])
return
def register_Ns3AddressChecker_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressChecker::AddressChecker() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressChecker::AddressChecker(ns3::AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')])
return
def register_Ns3AddressValue_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressValue::AddressValue() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::Address const & value) [constructor]
cls.add_constructor([param('ns3::Address const &', 'value')])
## address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## address.h (module 'network'): bool ns3::AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## address.h (module 'network'): ns3::Address ns3::AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Address',
[],
is_const=True)
## address.h (module 'network'): std::string ns3::AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## address.h (module 'network'): void ns3::AddressValue::Set(ns3::Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Address const &', 'value')])
return
def register_Ns3Ipv4ClickRouting_methods(root_module, cls):
## ipv4-click-routing.h (module 'click'): ns3::Ipv4ClickRouting::Ipv4ClickRouting() [constructor]
cls.add_constructor([])
## ipv4-click-routing.h (module 'click'): ns3::Ipv4ClickRouting::Ipv4ClickRouting(ns3::Ipv4ClickRouting const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4ClickRouting const &', 'arg0')])
return
def register_Ns3HashImplementation_methods(root_module, cls):
## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() [constructor]
cls.add_constructor([])
## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_pure_virtual=True, is_virtual=True)
## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member function]
cls.add_method('clear',
'void',
[],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a(ns3::Hash::Function::Fnv1a const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a() [constructor]
cls.add_constructor([])
## hash-fnv.h (module 'core'): uint32_t ns3::Hash::Function::Fnv1a::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-fnv.h (module 'core'): uint64_t ns3::Hash::Function::Fnv1a::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-fnv.h (module 'core'): void ns3::Hash::Function::Fnv1a::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_Ns3HashFunctionHash32_methods(root_module, cls):
## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Function::Hash32 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Hash32Function_ptr hp) [constructor]
cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash32::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): void ns3::Hash::Function::Hash32::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_Ns3HashFunctionHash64_methods(root_module, cls):
## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Function::Hash64 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Hash64Function_ptr hp) [constructor]
cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash64::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): uint64_t ns3::Hash::Function::Hash64::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): void ns3::Hash::Function::Hash64::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor]
cls.add_constructor([])
## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
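# Illustrative sketch (assumed usage and module path, not produced by the
# API scan): each hasher registered above implements
# ns3::Hash::Implementation, exposing GetHash32/GetHash64 over a buffer and
# its length, e.g.
#   h = ns.core.Hash.Function.Murmur3()
#   h32 = h.GetHash32("example", 7)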
def register_functions(root_module):
module = root_module
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
register_functions_ns3_Hash(module.get_submodule('Hash'), root_module)
return
def register_functions_ns3_FatalImpl(module, root_module):
return
def register_functions_ns3_Hash(module, root_module):
register_functions_ns3_Hash_Function(module.get_submodule('Function'), root_module)
return
def register_functions_ns3_Hash_Function(module, root_module):
return
def main():
out = FileCodeSink(sys.stdout)
root_module = module_init()
register_types(root_module)
register_methods(root_module)
register_functions(root_module)
root_module.generate(out)
if __name__ == '__main__':
main()
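# Note on usage (assumed workflow): main() streams the generated C++
# binding code to stdout via FileCodeSink(sys.stdout), so a typical
# invocation redirects it into the module's bindings source, e.g.
#   python <this scan module> > ns3module.cc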
|
gpl-2.0
|
dcherian/tools
|
ROMS/pmacc/tools/post_tools/rompy/tags/rompy-0.1.6/test.py
|
4
|
8114
|
#!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
from rompy import rompy, plot_utils, utils
map1 = False
map2 = False
map3 = False
map4 = False
map5 = False
map6 = False
map7 = False
map8 = False
map9 = False
map10 = False
map11 = False
map1 = True
# map2 = True
# map3 = True
# map4 = True
# map5 = True
# map6 = True
# map7 = True
# map8 = True
# map9 = True
# map10 = True
# map11 = True
if map1:
print('map1')
(data, coords) = rompy.extract('ocean_his_0001.nc',varname='h')
plot_utils.plot_surface(coords['xm'],coords['ym'],data)
plot_utils.plot_map(coords['xm'],coords['ym'],data,filename='/Users/lederer/tmp/rompy.map.png')
del data
del coords
if map2:
print('map2')
# full domain
# x = np.linspace(-127.,-122.,100)
# y = np.linspace(44.,50.,100)
# Puget Sound area
# x = np.linspace(-123.,-122.,500)
# y = np.linspace(47.,48.,500)
# Hood Canal
x = np.linspace(-123.25,-122.5,400)
y = np.linspace(47.33,48.0,400)
(data, coords) = rompy.extract('ocean_his_0001.nc', varname='zeta',extraction_type='points', x=x, y=y)
plot_utils.plot_map(coords['xm'],coords['ym'],data,filename='/Users/lederer/tmp/rompy.map2.png',resolution='h')
# plot_utils.plot_surface(coords['xm'],coords['ym'],data,filename='/Users/lederer/tmp/rompy.map2.png')
if map3:
print('map3')
(data, coords) = rompy.extract('ocean_his_0001.nc',varname='v',extraction_type='full')
print(data.shape)
for key in coords:
print(key, coords[key].shape)
plot_utils.plot_profile(data[:,20,20],coords['zm'][:,20,20],filename='/Users/lederer/tmp/rompy.profile.png')
if map4:
print('map4')
(data, coords) = rompy.extract('ocean_his_0001.nc',varname='salt',extraction_type='surface')
plot_utils.plot_map(coords['xm'],coords['ym'],data,filename='/Users/lederer/tmp/rompy.map4.png',resolution='h')
if map5:
print('map5')
# middle of the Pacific
# x = np.linspace(-126.0,-125.0,1001)
# y = np.linspace(45.0,46.0,1001)
# Hood Canal PRISM cruise, February 2009
x,y = utils.hood_canal_xy()
# cs = np.linspace(-0.96103753,-0.00143376,10)
(data, coords) = rompy.extract('ocean_his_0001.nc',varname='salt',extraction_type='profile',x=x,y=y)#,cs=cs)
fig = Figure(facecolor='white')
ax = fig.add_subplot(111)
# my_plot = ax.pcolormesh(np.arange(data.shape[1]),coords['zm'],data,clim=(0,35),colorbar=True)
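# Build a 2-D station-index grid with np.tile so contourf/contour receive
# X, Z and data arrays of matching shape (one column per station).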
my_plot = ax.contourf(np.tile(np.arange(data.shape[1]),(coords['zm'].shape[0],1)),coords['zm'],data,100)
my_plot2 = ax.contour(np.tile(np.arange(data.shape[1]),(coords['zm'].shape[0],1)),coords['zm'],data,100,linewidths=1,linestyles=None)
ax.fill_between(np.arange(data.shape[1]),coords['zm'][0,:],ax.get_ylim()[0],color='grey')
fig.colorbar(my_plot,ax=ax)
ax.set_title('Hood Canal Salinity from a ROMS run')
ax.set_ylabel('depth in meters')
ax.set_xticks(np.arange(data.shape[1]))
ax.set_xticklabels(utils.hood_canal_station_list())
ax.set_xlabel('station ID')
FigureCanvas(fig).print_png('/Users/lederer/tmp/rompy.map5.png')
if map6:
print('map6')
# middle of the Pacific
# x = np.linspace(-126.0,-125.0,1001)
# y = np.linspace(45.0,46.0,1001)
# Main Basin stations
x,y = utils.main_basin_xy()
# cs = np.linspace(-0.96103753,-0.00143376,10)
(data, coords) = rompy.extract('ocean_his_0001.nc',varname='salt',extraction_type='profile',x=x,y=y)#,cs=cs)
fig = Figure(facecolor='white')
ax = fig.add_subplot(111)
# my_plot = ax.pcolormesh(np.arange(data.shape[1]),coords['zm'],data,clim=(0,35),colorbar=True)
my_plot = ax.contourf(np.tile(np.arange(data.shape[1]),(coords['zm'].shape[0],1)),coords['zm'],data,100)
my_plot2 = ax.contour(np.tile(np.arange(data.shape[1]),(coords['zm'].shape[0],1)),coords['zm'],data,100,linewidths=1,linestyles=None)
ax.fill_between(np.arange(data.shape[1]),coords['zm'][0,:],ax.get_ylim()[0],color='grey')
fig.colorbar(my_plot,ax=ax)
ax.set_title('Main Basin Salinity from a ROMS run')
ax.set_ylabel('depth in meters')
ax.set_xticks(np.arange(data.shape[1]))
ax.set_xticklabels(utils.main_basin_station_list())
ax.set_xlabel('station ID')
FigureCanvas(fig).print_png('/Users/lederer/tmp/rompy.map6.png')
if map7: # Main Basin
print('map7')
n = 10
x,y = utils.high_res_main_basin_xy(n=n)
# Salinity
(data, coords) = rompy.extract('ocean_his_0001.nc', varname='salt', extraction_type='profile', x=x, y=y)
plot_utils.plot_mickett(coords=coords, data=data, varname='Salinity', region='Main Basin', filename='/Users/lederer/tmp/rompy.mickett_main_salt.png', n=n, x_axis_offset=utils.offset_region(coords), clim=[0,20,32,32], cmap='banas_hsv_cm', labeled_contour_gap=2)
# Temperature
(data, coords) = rompy.extract('ocean_his_0001.nc',varname='temp',extraction_type='profile',x=x,y=y)
plot_utils.plot_mickett(coords=coords, data=data, varname='Temperature', region='Main Basin', filename='/Users/lederer/tmp/rompy.mickett_main_temp.png', n=n, x_axis_offset=utils.offset_region(coords), clim=[0,20], cmap='banas_hsv_cm', labeled_contour_gap=2)
if map8: # Hood Canal
print('map8')
n=10
x,y = utils.high_res_hood_canal_xy(n=n)
# Salinity
(data, coords) = rompy.extract('ocean_his_0001.nc', varname='salt', extraction_type='profile', x=x, y=y)
plot_utils.plot_mickett(coords=coords, data=data, varname='Salinity', region='Hood Canal', filename='/Users/lederer/tmp/rompy.mickett_hood_salt.png', n=n, x_axis_offset=utils.offset_region(coords), clim=[0,20,32,32], cmap='banas_hsv_cm')
# Temperature
(data, coords) = rompy.extract('ocean_his_0001.nc', varname='temp', extraction_type='profile', x=x, y=y)
plot_utils.plot_mickett(coords=coords, data=data, varname='Temperature', region='Hood Canal', filename='/Users/lederer/tmp/rompy.mickett_hood_temp.png', n=n, x_axis_offset=utils.offset_region(coords), clim=[0,20], cmap='banas_hsv_cm')
if map9: # velocity in Hood Canal
print('map9')
n=20
x,y = utils.high_res_hood_canal_xy(n=n)
(u, coords) = rompy.extract('ocean_his_0001.nc',varname='u',extraction_type='profile',x=x,y=y)
(v, coords) = rompy.extract('ocean_his_0001.nc',varname='v',extraction_type='profile',x=x,y=y)
data = np.zeros(u.shape)
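# Project (u, v) onto the local along-track direction: x_vec points from
# station i toward station i+1 (backwards at the last station), and
# dot(x_vec, u_vec)/|x_vec| is the velocity component along the transect.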
for i in range(u.shape[1]):
if i == u.shape[1]-1:
x_vec = np.array([x[i] - x[i-1], y[i] - y[i-1]])
else:
x_vec = np.array([x[i+1] - x[i], y[i+1] - y[i]])
for j in range(u.shape[0]):
u_vec = np.array([u[j,i], v[j,i]])
data[j,i] = np.dot(x_vec,u_vec)/(np.sqrt(np.dot(x_vec,x_vec)))
data = np.ma.array(data, mask=np.abs(data) > 100)
plot_utils.plot_mickett(coords=coords,data=data,varname='U', region='Hood Canal', filename='/Users/lederer/tmp/rompy.mickett_hood_U.png', n=n, clim=[-2,2], x_axis_offset=utils.offset_region(coords),cmap='red_blue')
if map10: # velocity in Main Basin
print('map10')
n=3
x,y = utils.high_res_main_basin_xy(n=n)
(u, coords) = rompy.extract('ocean_his_0001.nc',varname='u',extraction_type='profile',x=x,y=y)
(v, coords) = rompy.extract('ocean_his_0001.nc',varname='v',extraction_type='profile',x=x,y=y)
data = np.zeros(u.shape)
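# Same along-track projection as in map9: keep only the velocity component
# aligned with the station transect.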
for i in range(u.shape[1]):
if i == u.shape[1]-1:
x_vec = np.array([x[i] - x[i-1], y[i] - y[i-1]])
else:
x_vec = np.array([x[i+1] - x[i], y[i+1] - y[i]])
for j in range(u.shape[0]):
u_vec = np.array([u[j,i], v[j,i]])
data[j,i] = np.dot(x_vec,u_vec)/(np.sqrt(np.dot(x_vec,x_vec)))
data = np.ma.array(data, mask=np.abs(data) > 100)
plot_utils.plot_mickett(coords=coords,data=data,varname='U', region='Main Basin', filename='/Users/lederer/tmp/rompy.mickett_main_U.png', n=n, clim=[-2,2], x_axis_offset=utils.offset_region(coords),cmap='red_blue')
if map11:
print('map11')
n = 5
x,y = utils.high_res_hood_canal_xy(n=n)
# x,y = utils.high_res_main_basin_xy(n=n)
(data, coords) = rompy.extract('ocean_his_0001.nc', varname='salt', extraction_type='profile', x=x, y=y)
plot_utils.plot_parker(coords=coords, data=data, varname='Salinity', region='Hood Canal', filename='/Users/lederer/tmp/rompy.parker_hood_salt.png', n=n, x_axis_offset=utils.offset_region(coords), clim=[0,20,32,32], cmap='banas_hsv_cm')
|
mit
|
CVL-GitHub/karaage
|
karaage/legacy/people/south_migrations/0001_initial.py
|
3
|
11036
|
# encoding: utf-8
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Institute'
db.create_table('institute', (
('gid', self.gf('django.db.models.fields.IntegerField')()),
('active_delegate', self.gf('django.db.models.fields.related.ForeignKey')(related_name='active_delegate', blank=True, null=True, to=orm['people.Person'])),
('delegate', self.gf('django.db.models.fields.related.ForeignKey')(related_name='delegate', blank=True, null=True, to=orm['people.Person'])),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
))
db.send_create_signal('people', ['Institute'])
# Adding M2M table for field sub_delegates on 'Institute'
db.create_table('institute_sub_delegates', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('institute', models.ForeignKey(orm['people.institute'], null=False)),
('person', models.ForeignKey(orm['people.person'], null=False))
))
db.create_unique('institute_sub_delegates', ['institute_id', 'person_id'])
# Adding model 'Person'
db.create_table('person', (
('comment', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('supervisor', self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True)),
('telephone', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
('postcode', self.gf('django.db.models.fields.CharField')(max_length=8, null=True, blank=True)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('date_approved', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('city', self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=10, null=True, blank=True)),
('state', self.gf('django.db.models.fields.CharField')(max_length=4, null=True, blank=True)),
('approved_by', self.gf('django.db.models.fields.related.ForeignKey')(related_name='user_approver', blank=True, null=True, to=orm['people.Person'])),
('department', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
('last_usage', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('website', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
('fax', self.gf('django.db.models.fields.CharField')(max_length=50, null=True, blank=True)),
('expires', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], unique=True)),
('address', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
('mobile', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
('country', self.gf('django.db.models.fields.CharField')(max_length=2)),
('date_deleted', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('deleted_by', self.gf('django.db.models.fields.related.ForeignKey')(related_name='user_deletor', blank=True, null=True, to=orm['people.Person'])),
('institute', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['people.Institute'])),
('position', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
))
db.send_create_signal('people', ['Person'])
def backwards(self, orm):
# Deleting model 'Institute'
db.delete_table('institute')
# Removing M2M table for field sub_delegates on 'Institute'
db.delete_table('institute_sub_delegates')
# Deleting model 'Person'
db.delete_table('person')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'unique': 'True'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '30', 'unique': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'people.institute': {
'Meta': {'object_name': 'Institute', 'db_table': "'institute'"},
'active_delegate': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'active_delegate'", 'blank': 'True', 'null': 'True', 'to': "orm['people.Person']"}),
'delegate': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'delegate'", 'blank': 'True', 'null': 'True', 'to': "orm['people.Person']"}),
'gid': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'sub_delegates': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'sub_delegates'", 'blank': 'True', 'null': 'True', 'to': "orm['people.Person']"})
},
'people.person': {
'Meta': {'object_name': 'Person', 'db_table': "'person'"},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'approved_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_approver'", 'blank': 'True', 'null': 'True', 'to': "orm['people.Person']"}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'date_approved': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_deleted': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_deletor'", 'blank': 'True', 'null': 'True', 'to': "orm['people.Person']"}),
'department': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'expires': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'institute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Institute']"}),
'last_usage': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'mobile': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'supervisor': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'telephone': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['people']
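# Illustrative sketch, not part of the original migration: the frozen
# `models` dict above is what South uses to rebuild historical model
# classes, which it passes to forwards()/backwards() as `orm`, so data
# migrations query the schema as it existed at this point in history.
# A hypothetical forwards() (the filter and field edits are assumptions):
#
# def forwards(self, orm):
#     for person in orm['people.Person'].objects.filter(
#             date_deleted__isnull=False):
#         person.comment = (person.comment or '') + ' [account deleted]'
#         person.save()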
|
gpl-3.0
|
cloudbase/cinder
|
cinder/tests/unit/volume/drivers/emc/vnx/test_utils.py
|
5
|
6969
|
# Copyright (c) 2016 EMC Corporation, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from cinder import exception
from cinder import test
from cinder.tests.unit.volume.drivers.emc.vnx import fake_exception \
as storops_ex
from cinder.tests.unit.volume.drivers.emc.vnx import fake_storops as storops
from cinder.tests.unit.volume.drivers.emc.vnx import res_mock
from cinder.tests.unit.volume.drivers.emc.vnx import utils as ut_utils
from cinder.volume.drivers.emc.vnx import common
from cinder.volume.drivers.emc.vnx import utils
class TestUtils(test.TestCase):
def setUp(self):
super(TestUtils, self).setUp()
self.origin_timeout = common.DEFAULT_TIMEOUT
common.DEFAULT_TIMEOUT = 0.05
def tearDown(self):
super(TestUtils, self).tearDown()
common.DEFAULT_TIMEOUT = self.origin_timeout
def test_wait_until(self):
mock_testmethod = mock.Mock(return_value=True)
utils.wait_until(mock_testmethod, interval=0)
mock_testmethod.assert_has_calls([mock.call()])
def test_wait_until_with_exception(self):
mock_testmethod = mock.Mock(
side_effect=storops_ex.VNXAttachSnapError('Unknown error'))
mock_testmethod.__name__ = 'test_method'
self.assertRaises(storops_ex.VNXAttachSnapError,
utils.wait_until,
mock_testmethod,
timeout=1,
interval=0,
reraise_arbiter=(
lambda ex: not isinstance(
ex, storops_ex.VNXCreateLunError)))
mock_testmethod.assert_has_calls([mock.call()])
def test_wait_until_with_params(self):
mock_testmethod = mock.Mock(return_value=True)
utils.wait_until(mock_testmethod,
param1=1,
param2='test')
mock_testmethod.assert_has_calls(
[mock.call(param1=1, param2='test')])
@res_mock.mock_driver_input
def test_retype_need_migration_when_host_changed(self, driver_in):
volume = driver_in['volume']
another_host = driver_in['host']
re = utils.retype_need_migration(
volume, None, None, another_host)
self.assertTrue(re)
@res_mock.mock_driver_input
def test_retype_need_migration_for_smp_volume(self, driver_in):
volume = driver_in['volume']
host = driver_in['host']
re = utils.retype_need_migration(
volume, None, None, host)
self.assertTrue(re)
@res_mock.mock_driver_input
def test_retype_need_migration_when_provision_changed(
self, driver_in):
volume = driver_in['volume']
host = driver_in['host']
old_spec = common.ExtraSpecs({'provisioning:type': 'thin'})
new_spec = common.ExtraSpecs({'provisioning:type': 'deduplicated'})
re = utils.retype_need_migration(
volume, old_spec.provision, new_spec.provision, host)
self.assertTrue(re)
@res_mock.mock_driver_input
def test_retype_not_need_migration_when_provision_changed(
self, driver_in):
volume = driver_in['volume']
host = driver_in['host']
old_spec = common.ExtraSpecs({'provisioning:type': 'thick'})
new_spec = common.ExtraSpecs({'provisioning:type': 'compressed'})
re = utils.retype_need_migration(
volume, old_spec.provision, new_spec.provision, host)
self.assertFalse(re)
@res_mock.mock_driver_input
def test_retype_not_need_migration(self, driver_in):
volume = driver_in['volume']
host = driver_in['host']
old_spec = common.ExtraSpecs({'storagetype:tiering': 'auto'})
new_spec = common.ExtraSpecs(
{'storagetype:tiering': 'starthighthenauto'})
re = utils.retype_need_migration(
volume, old_spec.provision, new_spec.provision, host)
self.assertFalse(re)
def test_retype_need_change_tier(self):
re = utils.retype_need_change_tier(
storops.VNXTieringEnum.AUTO, storops.VNXTieringEnum.HIGH_AUTO)
self.assertTrue(re)
def test_retype_need_turn_on_compression(self):
re = utils.retype_need_turn_on_compression(
storops.VNXProvisionEnum.THIN,
storops.VNXProvisionEnum.COMPRESSED)
self.assertTrue(re)
re = utils.retype_need_turn_on_compression(
storops.VNXProvisionEnum.THICK,
storops.VNXProvisionEnum.COMPRESSED)
self.assertTrue(re)
def test_retype_not_need_turn_on_compression(self):
re = utils.retype_need_turn_on_compression(
storops.VNXProvisionEnum.DEDUPED,
storops.VNXProvisionEnum.COMPRESSED)
self.assertFalse(re)
@ut_utils.patch_extra_specs({'provisioning:type': 'compressed'})
@res_mock.mock_driver_input
def test_validate_cg_type(self, mocked_input):
cg = mocked_input['cg']
self.assertRaises(exception.InvalidInput,
utils.validate_cg_type,
cg)
@res_mock.mock_driver_input
def test_get_base_lun_name(self, mocked):
volume = mocked['volume']
self.assertEqual(
'test',
utils.get_base_lun_name(volume))
def test_convert_to_tgt_list_and_itor_tgt_map(self):
zone_mapping = {
    'san_1': {'initiator_port_wwn_list': ['wwn1_1'],
              'target_port_wwn_list': ['wwnt_1', 'wwnt_2']},
    'san_2': {'initiator_port_wwn_list': ['wwn2_1', 'wwn2_2'],
              'target_port_wwn_list': ['wwnt_1', 'wwnt_3']},
}
tgt_wwns, itor_tgt_map = (
utils.convert_to_tgt_list_and_itor_tgt_map(zone_mapping))
self.assertEqual(set(['wwnt_1', 'wwnt_2', 'wwnt_3']), set(tgt_wwns))
self.assertEqual({'wwn1_1': ['wwnt_1', 'wwnt_2'],
'wwn2_1': ['wwnt_1', 'wwnt_3'],
'wwn2_2': ['wwnt_1', 'wwnt_3']},
itor_tgt_map)
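# Illustrative sketch, not part of the original test module: as exercised
# above, utils.wait_until polls a callable until it returns a truthy value,
# forwarding extra keyword arguments, honoring timeout/interval, and
# re-raising exceptions unless reraise_arbiter filters them. A hypothetical
# caller might look like this (the client/LUN names are assumptions, not
# cinder API guarantees):
#
# def _lun_ready(client=None, lun_id=None):
#     return client.get_lun(lun_id).state == 'Ready'
#
# utils.wait_until(_lun_ready, timeout=10, interval=1,
#                  client=client, lun_id=5)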
|
apache-2.0
|
engdan77/edoAutoHomeMobile
|
twisted/conch/ssh/service.py
|
69
|
1408
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
The parent class for all the SSH services. Currently implemented services
are ssh-userauth and ssh-connection.
Maintainer: Paul Swartz
"""
from twisted.python import log
class SSHService(log.Logger):
name = None # this is the ssh name for the service
protocolMessages = {} # these map #'s -> protocol names
transport = None # gets set later
def serviceStarted(self):
"""
called when the service is active on the transport.
"""
def serviceStopped(self):
"""
called when the service is stopped, either by the connection ending
or by another service being started
"""
def logPrefix(self):
return "SSHService %s on %s" % (self.name,
self.transport.transport.logPrefix())
def packetReceived(self, messageNum, packet):
"""
called when we receive a packet on the transport
"""
#print self.protocolMessages
if messageNum in self.protocolMessages:
messageType = self.protocolMessages[messageNum]
f = getattr(self, 'ssh_%s' % messageType[4:], None)
if f is not None:
return f(packet)
log.msg("couldn't handle %r" % messageNum)
log.msg(repr(packet))
self.transport.sendUnimplemented()
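# Illustrative sketch, not part of the original module: a minimal concrete
# service following the dispatch convention in packetReceived() above.
# protocolMessages maps packet numbers to 'MSG_*' names, and the 'MSG_'
# prefix is stripped to locate an ssh_* handler. The message number and
# name below are made up for illustration.
class DummySSHService(SSHService):
    name = 'ssh-dummy'
    protocolMessages = {200: 'MSG_DUMMY_REQUEST'}
    def ssh_DUMMY_REQUEST(self, packet):
        log.msg('dummy request: %r' % (packet,))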
|
mit
|
ianyh/heroku-buildpack-python-opencv
|
vendor/.heroku/lib/python2.7/test/test_datetime.py
|
9
|
134923
|
"""Test date/time type.
See http://www.zope.org/Members/fdrake/DateTimeWiki/TestCases
"""
from __future__ import division
import sys
import pickle
import cPickle
import unittest
from test import test_support
from datetime import MINYEAR, MAXYEAR
from datetime import timedelta
from datetime import tzinfo
from datetime import time
from datetime import date, datetime
pickle_choices = [(pickler, unpickler, proto)
for pickler in pickle, cPickle
for unpickler in pickle, cPickle
for proto in range(3)]
assert len(pickle_choices) == 2*2*3
# An arbitrary collection of objects of non-datetime types, for testing
# mixed-type comparisons.
OTHERSTUFF = (10, 10L, 34.5, "abc", {}, [], ())
#############################################################################
# module tests
class TestModule(unittest.TestCase):
def test_constants(self):
import datetime
self.assertEqual(datetime.MINYEAR, 1)
self.assertEqual(datetime.MAXYEAR, 9999)
#############################################################################
# tzinfo tests
class FixedOffset(tzinfo):
def __init__(self, offset, name, dstoffset=42):
if isinstance(offset, int):
offset = timedelta(minutes=offset)
if isinstance(dstoffset, int):
dstoffset = timedelta(minutes=dstoffset)
self.__offset = offset
self.__name = name
self.__dstoffset = dstoffset
def __repr__(self):
return self.__name.lower()
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
return self.__dstoffset
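# Illustrative usage, not part of the original tests: integer offsets are
# interpreted as minutes, so
#
# est = FixedOffset(-300, "EST", 60)
# est.utcoffset(None) == timedelta(minutes=-300)
# est.tzname(None) == "EST"
# est.dst(None) == timedelta(minutes=60)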
class PicklableFixedOffset(FixedOffset):
def __init__(self, offset=None, name=None, dstoffset=None):
FixedOffset.__init__(self, offset, name, dstoffset)
class TestTZInfo(unittest.TestCase):
def test_non_abstractness(self):
# In order to allow subclasses to get pickled, the C implementation
# wasn't able to get away with having __init__ raise
# NotImplementedError.
useless = tzinfo()
dt = datetime.max
self.assertRaises(NotImplementedError, useless.tzname, dt)
self.assertRaises(NotImplementedError, useless.utcoffset, dt)
self.assertRaises(NotImplementedError, useless.dst, dt)
def test_subclass_must_override(self):
class NotEnough(tzinfo):
def __init__(self, offset, name):
self.__offset = offset
self.__name = name
self.assertTrue(issubclass(NotEnough, tzinfo))
ne = NotEnough(3, "NotByALongShot")
self.assertIsInstance(ne, tzinfo)
dt = datetime.now()
self.assertRaises(NotImplementedError, ne.tzname, dt)
self.assertRaises(NotImplementedError, ne.utcoffset, dt)
self.assertRaises(NotImplementedError, ne.dst, dt)
def test_normal(self):
fo = FixedOffset(3, "Three")
self.assertIsInstance(fo, tzinfo)
for dt in datetime.now(), None:
self.assertEqual(fo.utcoffset(dt), timedelta(minutes=3))
self.assertEqual(fo.tzname(dt), "Three")
self.assertEqual(fo.dst(dt), timedelta(minutes=42))
def test_pickling_base(self):
# There's no point to pickling tzinfo objects on their own (they
# carry no data), but they need to be picklable anyway, or else
# concrete subclasses can't be pickled.
orig = tzinfo.__new__(tzinfo)
self.assertTrue(type(orig) is tzinfo)
for pickler, unpickler, proto in pickle_choices:
green = pickler.dumps(orig, proto)
derived = unpickler.loads(green)
self.assertTrue(type(derived) is tzinfo)
def test_pickling_subclass(self):
# Make sure we can pickle/unpickle an instance of a subclass.
offset = timedelta(minutes=-300)
orig = PicklableFixedOffset(offset, 'cookie')
self.assertIsInstance(orig, tzinfo)
self.assertTrue(type(orig) is PicklableFixedOffset)
self.assertEqual(orig.utcoffset(None), offset)
self.assertEqual(orig.tzname(None), 'cookie')
for pickler, unpickler, proto in pickle_choices:
green = pickler.dumps(orig, proto)
derived = unpickler.loads(green)
self.assertIsInstance(derived, tzinfo)
self.assertTrue(type(derived) is PicklableFixedOffset)
self.assertEqual(derived.utcoffset(None), offset)
self.assertEqual(derived.tzname(None), 'cookie')
#############################################################################
# Base class for testing a particular aspect of timedelta, time, date and
# datetime comparisons.
class HarmlessMixedComparison:
# Test that __eq__ and __ne__ don't complain for mixed-type comparisons.
# Subclasses must define 'theclass', and theclass(1, 1, 1) must be a
# legit constructor.
def test_harmless_mixed_comparison(self):
me = self.theclass(1, 1, 1)
self.assertFalse(me == ())
self.assertTrue(me != ())
self.assertFalse(() == me)
self.assertTrue(() != me)
self.assertIn(me, [1, 20L, [], me])
self.assertIn([], [me, 1, 20L, []])
def test_harmful_mixed_comparison(self):
me = self.theclass(1, 1, 1)
self.assertRaises(TypeError, lambda: me < ())
self.assertRaises(TypeError, lambda: me <= ())
self.assertRaises(TypeError, lambda: me > ())
self.assertRaises(TypeError, lambda: me >= ())
self.assertRaises(TypeError, lambda: () < me)
self.assertRaises(TypeError, lambda: () <= me)
self.assertRaises(TypeError, lambda: () > me)
self.assertRaises(TypeError, lambda: () >= me)
self.assertRaises(TypeError, cmp, (), me)
self.assertRaises(TypeError, cmp, me, ())
#############################################################################
# timedelta tests
class TestTimeDelta(HarmlessMixedComparison, unittest.TestCase):
theclass = timedelta
def test_constructor(self):
eq = self.assertEqual
td = timedelta
# Check keyword args to constructor
eq(td(), td(weeks=0, days=0, hours=0, minutes=0, seconds=0,
milliseconds=0, microseconds=0))
eq(td(1), td(days=1))
eq(td(0, 1), td(seconds=1))
eq(td(0, 0, 1), td(microseconds=1))
eq(td(weeks=1), td(days=7))
eq(td(days=1), td(hours=24))
eq(td(hours=1), td(minutes=60))
eq(td(minutes=1), td(seconds=60))
eq(td(seconds=1), td(milliseconds=1000))
eq(td(milliseconds=1), td(microseconds=1000))
# Check float args to constructor
eq(td(weeks=1.0/7), td(days=1))
eq(td(days=1.0/24), td(hours=1))
eq(td(hours=1.0/60), td(minutes=1))
eq(td(minutes=1.0/60), td(seconds=1))
eq(td(seconds=0.001), td(milliseconds=1))
eq(td(milliseconds=0.001), td(microseconds=1))
def test_computations(self):
eq = self.assertEqual
td = timedelta
a = td(7) # One week
b = td(0, 60) # One minute
c = td(0, 0, 1000) # One millisecond
eq(a+b+c, td(7, 60, 1000))
eq(a-b, td(6, 24*3600 - 60))
eq(-a, td(-7))
eq(+a, td(7))
eq(-b, td(-1, 24*3600 - 60))
eq(-c, td(-1, 24*3600 - 1, 999000))
eq(abs(a), a)
eq(abs(-a), a)
eq(td(6, 24*3600), a)
eq(td(0, 0, 60*1000000), b)
eq(a*10, td(70))
eq(a*10, 10*a)
eq(a*10L, 10*a)
eq(b*10, td(0, 600))
eq(10*b, td(0, 600))
eq(b*10L, td(0, 600))
eq(c*10, td(0, 0, 10000))
eq(10*c, td(0, 0, 10000))
eq(c*10L, td(0, 0, 10000))
eq(a*-1, -a)
eq(b*-2, -b-b)
eq(c*-2, -c+-c)
eq(b*(60*24), (b*60)*24)
eq(b*(60*24), (60*b)*24)
eq(c*1000, td(0, 1))
eq(1000*c, td(0, 1))
eq(a//7, td(1))
eq(b//10, td(0, 6))
eq(c//1000, td(0, 0, 1))
eq(a//10, td(0, 7*24*360))
eq(a//3600000, td(0, 0, 7*24*1000))
# Issue #11576
eq(td(999999999, 86399, 999999) - td(999999999, 86399, 999998),
td(0, 0, 1))
eq(td(999999999, 1, 1) - td(999999999, 1, 0),
td(0, 0, 1))
def test_disallowed_computations(self):
a = timedelta(42)
# Add/sub ints, longs, floats should be illegal
for i in 1, 1L, 1.0:
self.assertRaises(TypeError, lambda: a+i)
self.assertRaises(TypeError, lambda: a-i)
self.assertRaises(TypeError, lambda: i+a)
self.assertRaises(TypeError, lambda: i-a)
# Mul/div by float isn't supported.
x = 2.3
self.assertRaises(TypeError, lambda: a*x)
self.assertRaises(TypeError, lambda: x*a)
self.assertRaises(TypeError, lambda: a/x)
self.assertRaises(TypeError, lambda: x/a)
self.assertRaises(TypeError, lambda: a // x)
self.assertRaises(TypeError, lambda: x // a)
# Division of int by timedelta doesn't make sense.
# Division by zero doesn't make sense.
for zero in 0, 0L:
self.assertRaises(TypeError, lambda: zero // a)
self.assertRaises(ZeroDivisionError, lambda: a // zero)
def test_basic_attributes(self):
days, seconds, us = 1, 7, 31
td = timedelta(days, seconds, us)
self.assertEqual(td.days, days)
self.assertEqual(td.seconds, seconds)
self.assertEqual(td.microseconds, us)
def test_total_seconds(self):
td = timedelta(days=365)
self.assertEqual(td.total_seconds(), 31536000.0)
for total_seconds in [123456.789012, -123456.789012, 0.123456, 0, 1e6]:
td = timedelta(seconds=total_seconds)
self.assertEqual(td.total_seconds(), total_seconds)
# Issue8644: Test that td.total_seconds() has the same
# accuracy as td / timedelta(seconds=1).
for ms in [-1, -2, -123]:
td = timedelta(microseconds=ms)
self.assertEqual(td.total_seconds(),
((24*3600*td.days + td.seconds)*10**6
+ td.microseconds)/10**6)
def test_carries(self):
t1 = timedelta(days=100,
weeks=-7,
hours=-24*(100-49),
minutes=-3,
seconds=12,
microseconds=(3*60 - 12) * 1e6 + 1)
t2 = timedelta(microseconds=1)
self.assertEqual(t1, t2)
def test_hash_equality(self):
t1 = timedelta(days=100,
weeks=-7,
hours=-24*(100-49),
minutes=-3,
seconds=12,
microseconds=(3*60 - 12) * 1000000)
t2 = timedelta()
self.assertEqual(hash(t1), hash(t2))
t1 += timedelta(weeks=7)
t2 += timedelta(days=7*7)
self.assertEqual(t1, t2)
self.assertEqual(hash(t1), hash(t2))
d = {t1: 1}
d[t2] = 2
self.assertEqual(len(d), 1)
self.assertEqual(d[t1], 2)
def test_pickling(self):
args = 12, 34, 56
orig = timedelta(*args)
for pickler, unpickler, proto in pickle_choices:
green = pickler.dumps(orig, proto)
derived = unpickler.loads(green)
self.assertEqual(orig, derived)
def test_compare(self):
t1 = timedelta(2, 3, 4)
t2 = timedelta(2, 3, 4)
self.assertTrue(t1 == t2)
self.assertTrue(t1 <= t2)
self.assertTrue(t1 >= t2)
self.assertTrue(not t1 != t2)
self.assertTrue(not t1 < t2)
self.assertTrue(not t1 > t2)
self.assertEqual(cmp(t1, t2), 0)
self.assertEqual(cmp(t2, t1), 0)
for args in (3, 3, 3), (2, 4, 4), (2, 3, 5):
t2 = timedelta(*args) # this is larger than t1
self.assertTrue(t1 < t2)
self.assertTrue(t2 > t1)
self.assertTrue(t1 <= t2)
self.assertTrue(t2 >= t1)
self.assertTrue(t1 != t2)
self.assertTrue(t2 != t1)
self.assertTrue(not t1 == t2)
self.assertTrue(not t2 == t1)
self.assertTrue(not t1 > t2)
self.assertTrue(not t2 < t1)
self.assertTrue(not t1 >= t2)
self.assertTrue(not t2 <= t1)
self.assertEqual(cmp(t1, t2), -1)
self.assertEqual(cmp(t2, t1), 1)
for badarg in OTHERSTUFF:
self.assertEqual(t1 == badarg, False)
self.assertEqual(t1 != badarg, True)
self.assertEqual(badarg == t1, False)
self.assertEqual(badarg != t1, True)
self.assertRaises(TypeError, lambda: t1 <= badarg)
self.assertRaises(TypeError, lambda: t1 < badarg)
self.assertRaises(TypeError, lambda: t1 > badarg)
self.assertRaises(TypeError, lambda: t1 >= badarg)
self.assertRaises(TypeError, lambda: badarg <= t1)
self.assertRaises(TypeError, lambda: badarg < t1)
self.assertRaises(TypeError, lambda: badarg > t1)
self.assertRaises(TypeError, lambda: badarg >= t1)
def test_str(self):
td = timedelta
eq = self.assertEqual
eq(str(td(1)), "1 day, 0:00:00")
eq(str(td(-1)), "-1 day, 0:00:00")
eq(str(td(2)), "2 days, 0:00:00")
eq(str(td(-2)), "-2 days, 0:00:00")
eq(str(td(hours=12, minutes=58, seconds=59)), "12:58:59")
eq(str(td(hours=2, minutes=3, seconds=4)), "2:03:04")
eq(str(td(weeks=-30, hours=23, minutes=12, seconds=34)),
"-210 days, 23:12:34")
eq(str(td(milliseconds=1)), "0:00:00.001000")
eq(str(td(microseconds=3)), "0:00:00.000003")
eq(str(td(days=999999999, hours=23, minutes=59, seconds=59,
microseconds=999999)),
"999999999 days, 23:59:59.999999")
def test_roundtrip(self):
for td in (timedelta(days=999999999, hours=23, minutes=59,
seconds=59, microseconds=999999),
timedelta(days=-999999999),
timedelta(days=1, seconds=2, microseconds=3)):
# Verify td -> string -> td identity.
s = repr(td)
self.assertTrue(s.startswith('datetime.'))
s = s[9:]
td2 = eval(s)
self.assertEqual(td, td2)
# Verify identity via reconstructing from pieces.
td2 = timedelta(td.days, td.seconds, td.microseconds)
self.assertEqual(td, td2)
def test_resolution_info(self):
self.assertIsInstance(timedelta.min, timedelta)
self.assertIsInstance(timedelta.max, timedelta)
self.assertIsInstance(timedelta.resolution, timedelta)
self.assertTrue(timedelta.max > timedelta.min)
self.assertEqual(timedelta.min, timedelta(-999999999))
self.assertEqual(timedelta.max, timedelta(999999999, 24*3600-1, 1e6-1))
self.assertEqual(timedelta.resolution, timedelta(0, 0, 1))
def test_overflow(self):
tiny = timedelta.resolution
td = timedelta.min + tiny
td -= tiny # no problem
self.assertRaises(OverflowError, td.__sub__, tiny)
self.assertRaises(OverflowError, td.__add__, -tiny)
td = timedelta.max - tiny
td += tiny # no problem
self.assertRaises(OverflowError, td.__add__, tiny)
self.assertRaises(OverflowError, td.__sub__, -tiny)
self.assertRaises(OverflowError, lambda: -timedelta.max)
def test_microsecond_rounding(self):
td = timedelta
eq = self.assertEqual
# Single-field rounding.
eq(td(milliseconds=0.4/1000), td(0)) # rounds to 0
eq(td(milliseconds=-0.4/1000), td(0)) # rounds to 0
eq(td(milliseconds=0.6/1000), td(microseconds=1))
eq(td(milliseconds=-0.6/1000), td(microseconds=-1))
# Rounding due to contributions from more than one field.
us_per_hour = 3600e6
us_per_day = us_per_hour * 24
eq(td(days=.4/us_per_day), td(0))
eq(td(hours=.2/us_per_hour), td(0))
eq(td(days=.4/us_per_day, hours=.2/us_per_hour), td(microseconds=1))
eq(td(days=-.4/us_per_day), td(0))
eq(td(hours=-.2/us_per_hour), td(0))
eq(td(days=-.4/us_per_day, hours=-.2/us_per_hour), td(microseconds=-1))
def test_massive_normalization(self):
td = timedelta(microseconds=-1)
self.assertEqual((td.days, td.seconds, td.microseconds),
(-1, 24*3600-1, 999999))
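# Illustrative note, not part of the original tests: timedelta always
# normalizes so that 0 <= microseconds < 10**6 and 0 <= seconds < 24*3600,
# with days absorbing the sign, e.g.
#
# assert timedelta(0, -1) == timedelta(-1, 24*3600 - 1)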
def test_bool(self):
self.assertTrue(timedelta(1))
self.assertTrue(timedelta(0, 1))
self.assertTrue(timedelta(0, 0, 1))
self.assertTrue(timedelta(microseconds=1))
self.assertTrue(not timedelta(0))
def test_subclass_timedelta(self):
class T(timedelta):
@staticmethod
def from_td(td):
return T(td.days, td.seconds, td.microseconds)
def as_hours(self):
sum = (self.days * 24 +
self.seconds / 3600.0 +
self.microseconds / 3600e6)
return round(sum)
t1 = T(days=1)
self.assertTrue(type(t1) is T)
self.assertEqual(t1.as_hours(), 24)
t2 = T(days=-1, seconds=-3600)
self.assertTrue(type(t2) is T)
self.assertEqual(t2.as_hours(), -25)
t3 = t1 + t2
self.assertTrue(type(t3) is timedelta)
t4 = T.from_td(t3)
self.assertTrue(type(t4) is T)
self.assertEqual(t3.days, t4.days)
self.assertEqual(t3.seconds, t4.seconds)
self.assertEqual(t3.microseconds, t4.microseconds)
self.assertEqual(str(t3), str(t4))
self.assertEqual(t4.as_hours(), -1)
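# Illustrative note, not part of the original tests: as asserted above,
# arithmetic on a timedelta subclass returns plain timedelta, so a subclass
# that wants to stay closed under arithmetic must rebuild itself, as
# T.from_td() does.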
#############################################################################
# date tests
class TestDateOnly(unittest.TestCase):
# Tests here won't pass if also run on datetime objects, so don't
# subclass this to test datetimes too.
def test_delta_non_days_ignored(self):
dt = date(2000, 1, 2)
delta = timedelta(days=1, hours=2, minutes=3, seconds=4,
microseconds=5)
days = timedelta(delta.days)
self.assertEqual(days, timedelta(1))
dt2 = dt + delta
self.assertEqual(dt2, dt + days)
dt2 = delta + dt
self.assertEqual(dt2, dt + days)
dt2 = dt - delta
self.assertEqual(dt2, dt - days)
delta = -delta
days = timedelta(delta.days)
self.assertEqual(days, timedelta(-2))
dt2 = dt + delta
self.assertEqual(dt2, dt + days)
dt2 = delta + dt
self.assertEqual(dt2, dt + days)
dt2 = dt - delta
self.assertEqual(dt2, dt - days)
class SubclassDate(date):
sub_var = 1
class TestDate(HarmlessMixedComparison, unittest.TestCase):
# Tests here should pass for both dates and datetimes, except for a
# few tests that TestDateTime overrides.
theclass = date
def test_basic_attributes(self):
dt = self.theclass(2002, 3, 1)
self.assertEqual(dt.year, 2002)
self.assertEqual(dt.month, 3)
self.assertEqual(dt.day, 1)
def test_roundtrip(self):
for dt in (self.theclass(1, 2, 3),
self.theclass.today()):
# Verify dt -> string -> date identity.
s = repr(dt)
self.assertTrue(s.startswith('datetime.'))
s = s[9:]
dt2 = eval(s)
self.assertEqual(dt, dt2)
# Verify identity via reconstructing from pieces.
dt2 = self.theclass(dt.year, dt.month, dt.day)
self.assertEqual(dt, dt2)
def test_ordinal_conversions(self):
# Check some fixed values.
for y, m, d, n in [(1, 1, 1, 1), # calendar origin
(1, 12, 31, 365),
(2, 1, 1, 366),
# first example from "Calendrical Calculations"
(1945, 11, 12, 710347)]:
d = self.theclass(y, m, d)
self.assertEqual(n, d.toordinal())
fromord = self.theclass.fromordinal(n)
self.assertEqual(d, fromord)
if hasattr(fromord, "hour"):
# if we're checking something fancier than a date, verify
# the extra fields have been zeroed out
self.assertEqual(fromord.hour, 0)
self.assertEqual(fromord.minute, 0)
self.assertEqual(fromord.second, 0)
self.assertEqual(fromord.microsecond, 0)
# Check first and last days of year spottily across the whole
# range of years supported.
for year in xrange(MINYEAR, MAXYEAR+1, 7):
# Verify (year, 1, 1) -> ordinal -> y, m, d is identity.
d = self.theclass(year, 1, 1)
n = d.toordinal()
d2 = self.theclass.fromordinal(n)
self.assertEqual(d, d2)
# Verify that moving back a day gets to the end of year-1.
if year > 1:
d = self.theclass.fromordinal(n-1)
d2 = self.theclass(year-1, 12, 31)
self.assertEqual(d, d2)
self.assertEqual(d2.toordinal(), n-1)
# Test every day in a leap-year and a non-leap year.
dim = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
for year, isleap in (2000, True), (2002, False):
n = self.theclass(year, 1, 1).toordinal()
for month, maxday in zip(range(1, 13), dim):
if month == 2 and isleap:
maxday += 1
for day in range(1, maxday+1):
d = self.theclass(year, month, day)
self.assertEqual(d.toordinal(), n)
self.assertEqual(d, self.theclass.fromordinal(n))
n += 1
def test_extreme_ordinals(self):
a = self.theclass.min
a = self.theclass(a.year, a.month, a.day) # get rid of time parts
aord = a.toordinal()
b = a.fromordinal(aord)
self.assertEqual(a, b)
self.assertRaises(ValueError, lambda: a.fromordinal(aord - 1))
b = a + timedelta(days=1)
self.assertEqual(b.toordinal(), aord + 1)
self.assertEqual(b, self.theclass.fromordinal(aord + 1))
a = self.theclass.max
a = self.theclass(a.year, a.month, a.day) # get rid of time parts
aord = a.toordinal()
b = a.fromordinal(aord)
self.assertEqual(a, b)
self.assertRaises(ValueError, lambda: a.fromordinal(aord + 1))
b = a - timedelta(days=1)
self.assertEqual(b.toordinal(), aord - 1)
self.assertEqual(b, self.theclass.fromordinal(aord - 1))
def test_bad_constructor_arguments(self):
# bad years
self.theclass(MINYEAR, 1, 1) # no exception
self.theclass(MAXYEAR, 1, 1) # no exception
self.assertRaises(ValueError, self.theclass, MINYEAR-1, 1, 1)
self.assertRaises(ValueError, self.theclass, MAXYEAR+1, 1, 1)
# bad months
self.theclass(2000, 1, 1) # no exception
self.theclass(2000, 12, 1) # no exception
self.assertRaises(ValueError, self.theclass, 2000, 0, 1)
self.assertRaises(ValueError, self.theclass, 2000, 13, 1)
# bad days
self.theclass(2000, 2, 29) # no exception
self.theclass(2004, 2, 29) # no exception
self.theclass(2400, 2, 29) # no exception
self.assertRaises(ValueError, self.theclass, 2000, 2, 30)
self.assertRaises(ValueError, self.theclass, 2001, 2, 29)
self.assertRaises(ValueError, self.theclass, 2100, 2, 29)
self.assertRaises(ValueError, self.theclass, 1900, 2, 29)
self.assertRaises(ValueError, self.theclass, 2000, 1, 0)
self.assertRaises(ValueError, self.theclass, 2000, 1, 32)
def test_hash_equality(self):
d = self.theclass(2000, 12, 31)
# same thing
e = self.theclass(2000, 12, 31)
self.assertEqual(d, e)
self.assertEqual(hash(d), hash(e))
dic = {d: 1}
dic[e] = 2
self.assertEqual(len(dic), 1)
self.assertEqual(dic[d], 2)
self.assertEqual(dic[e], 2)
d = self.theclass(2001, 1, 1)
# same thing
e = self.theclass(2001, 1, 1)
self.assertEqual(d, e)
self.assertEqual(hash(d), hash(e))
dic = {d: 1}
dic[e] = 2
self.assertEqual(len(dic), 1)
self.assertEqual(dic[d], 2)
self.assertEqual(dic[e], 2)
def test_computations(self):
a = self.theclass(2002, 1, 31)
b = self.theclass(1956, 1, 31)
diff = a-b
self.assertEqual(diff.days, 46*365 + len(range(1956, 2002, 4)))
self.assertEqual(diff.seconds, 0)
self.assertEqual(diff.microseconds, 0)
day = timedelta(1)
week = timedelta(7)
a = self.theclass(2002, 3, 2)
self.assertEqual(a + day, self.theclass(2002, 3, 3))
self.assertEqual(day + a, self.theclass(2002, 3, 3))
self.assertEqual(a - day, self.theclass(2002, 3, 1))
self.assertEqual(-day + a, self.theclass(2002, 3, 1))
self.assertEqual(a + week, self.theclass(2002, 3, 9))
self.assertEqual(a - week, self.theclass(2002, 2, 23))
self.assertEqual(a + 52*week, self.theclass(2003, 3, 1))
self.assertEqual(a - 52*week, self.theclass(2001, 3, 3))
self.assertEqual((a + week) - a, week)
self.assertEqual((a + day) - a, day)
self.assertEqual((a - week) - a, -week)
self.assertEqual((a - day) - a, -day)
self.assertEqual(a - (a + week), -week)
self.assertEqual(a - (a + day), -day)
self.assertEqual(a - (a - week), week)
self.assertEqual(a - (a - day), day)
# Add/sub ints, longs, floats should be illegal
for i in 1, 1L, 1.0:
self.assertRaises(TypeError, lambda: a+i)
self.assertRaises(TypeError, lambda: a-i)
self.assertRaises(TypeError, lambda: i+a)
self.assertRaises(TypeError, lambda: i-a)
# delta - date is senseless.
self.assertRaises(TypeError, lambda: day - a)
# mixing date and (delta or date) via * or // is senseless
self.assertRaises(TypeError, lambda: day * a)
self.assertRaises(TypeError, lambda: a * day)
self.assertRaises(TypeError, lambda: day // a)
self.assertRaises(TypeError, lambda: a // day)
self.assertRaises(TypeError, lambda: a * a)
self.assertRaises(TypeError, lambda: a // a)
# date + date is senseless
self.assertRaises(TypeError, lambda: a + a)
def test_overflow(self):
tiny = self.theclass.resolution
for delta in [tiny, timedelta(1), timedelta(2)]:
dt = self.theclass.min + delta
dt -= delta # no problem
self.assertRaises(OverflowError, dt.__sub__, delta)
self.assertRaises(OverflowError, dt.__add__, -delta)
dt = self.theclass.max - delta
dt += delta # no problem
self.assertRaises(OverflowError, dt.__add__, delta)
self.assertRaises(OverflowError, dt.__sub__, -delta)
def test_fromtimestamp(self):
import time
# Try an arbitrary fixed value.
year, month, day = 1999, 9, 19
ts = time.mktime((year, month, day, 0, 0, 0, 0, 0, -1))
d = self.theclass.fromtimestamp(ts)
self.assertEqual(d.year, year)
self.assertEqual(d.month, month)
self.assertEqual(d.day, day)
def test_insane_fromtimestamp(self):
# It's possible that some platform maps time_t to double,
# and that this test will fail there. This test should
# exempt such platforms (provided they return reasonable
# results!).
for insane in -1e200, 1e200:
self.assertRaises(ValueError, self.theclass.fromtimestamp,
insane)
def test_today(self):
import time
# We claim that today() is like fromtimestamp(time.time()), so
# prove it.
for dummy in range(3):
today = self.theclass.today()
ts = time.time()
todayagain = self.theclass.fromtimestamp(ts)
if today == todayagain:
break
# There are several legit reasons that could fail:
# 1. It recently became midnight, between the today() and the
# time() calls.
# 2. The platform time() has such fine resolution that we'll
# never get the same value twice.
# 3. The platform time() has poor resolution, and we just
# happened to call today() right before a resolution quantum
# boundary.
# 4. The system clock got fiddled between calls.
# In any case, wait a little while and try again.
time.sleep(0.1)
# It worked or it didn't. If it didn't, assume it's reason #2, and
# let the test pass if they're within half a second of each other.
self.assertTrue(today == todayagain or
abs(todayagain - today) < timedelta(seconds=0.5))
def test_weekday(self):
for i in range(7):
# March 4, 2002 is a Monday
self.assertEqual(self.theclass(2002, 3, 4+i).weekday(), i)
self.assertEqual(self.theclass(2002, 3, 4+i).isoweekday(), i+1)
# January 2, 1956 is a Monday
self.assertEqual(self.theclass(1956, 1, 2+i).weekday(), i)
self.assertEqual(self.theclass(1956, 1, 2+i).isoweekday(), i+1)
def test_isocalendar(self):
# Check examples from
# http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm
for i in range(7):
d = self.theclass(2003, 12, 22+i)
self.assertEqual(d.isocalendar(), (2003, 52, i+1))
d = self.theclass(2003, 12, 29) + timedelta(i)
self.assertEqual(d.isocalendar(), (2004, 1, i+1))
d = self.theclass(2004, 1, 5+i)
self.assertEqual(d.isocalendar(), (2004, 2, i+1))
d = self.theclass(2009, 12, 21+i)
self.assertEqual(d.isocalendar(), (2009, 52, i+1))
d = self.theclass(2009, 12, 28) + timedelta(i)
self.assertEqual(d.isocalendar(), (2009, 53, i+1))
d = self.theclass(2010, 1, 4+i)
self.assertEqual(d.isocalendar(), (2010, 1, i+1))
def test_iso_long_years(self):
# Calculate long ISO years and compare to table from
# http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm
ISO_LONG_YEARS_TABLE = """
4 32 60 88
9 37 65 93
15 43 71 99
20 48 76
26 54 82
105 133 161 189
111 139 167 195
116 144 172
122 150 178
128 156 184
201 229 257 285
207 235 263 291
212 240 268 296
218 246 274
224 252 280
303 331 359 387
308 336 364 392
314 342 370 398
320 348 376
325 353 381
"""
iso_long_years = map(int, ISO_LONG_YEARS_TABLE.split())
iso_long_years.sort()
L = []
for i in range(400):
d = self.theclass(2000+i, 12, 31)
d1 = self.theclass(1600+i, 12, 31)
self.assertEqual(d.isocalendar()[1:], d1.isocalendar()[1:])
if d.isocalendar()[1] == 53:
L.append(i)
self.assertEqual(L, iso_long_years)
def test_isoformat(self):
t = self.theclass(2, 3, 2)
self.assertEqual(t.isoformat(), "0002-03-02")
def test_ctime(self):
t = self.theclass(2002, 3, 2)
self.assertEqual(t.ctime(), "Sat Mar 2 00:00:00 2002")
def test_strftime(self):
t = self.theclass(2005, 3, 2)
self.assertEqual(t.strftime("m:%m d:%d y:%y"), "m:03 d:02 y:05")
self.assertEqual(t.strftime(""), "") # SF bug #761337
self.assertEqual(t.strftime('x'*1000), 'x'*1000) # SF bug #1556784
self.assertRaises(TypeError, t.strftime) # needs an arg
self.assertRaises(TypeError, t.strftime, "one", "two") # too many args
self.assertRaises(TypeError, t.strftime, 42) # arg wrong type
# test that unicode input is allowed (issue 2782)
self.assertEqual(t.strftime(u"%m"), "03")
# A naive object replaces %z and %Z w/ empty strings.
self.assertEqual(t.strftime("'%z' '%Z'"), "'' ''")
# Make sure that invalid format specifiers are handled correctly.
# self.assertRaises(ValueError, t.strftime, "%e")
# self.assertRaises(ValueError, t.strftime, "%")
# self.assertRaises(ValueError, t.strftime, "%#")
# Oh well, some systems just ignore those invalid ones. At least
# exercise them to make sure that no crashes are generated.
for f in ["%e", "%", "%#"]:
try:
t.strftime(f)
except ValueError:
pass
# Check that this standard extension works.
t.strftime("%f")
def test_format(self):
dt = self.theclass(2007, 9, 10)
self.assertEqual(dt.__format__(''), str(dt))
# check that a derived class's __str__() gets called
class A(self.theclass):
def __str__(self):
return 'A'
a = A(2007, 9, 10)
self.assertEqual(a.__format__(''), 'A')
# check that a derived class's strftime gets called
class B(self.theclass):
def strftime(self, format_spec):
return 'B'
b = B(2007, 9, 10)
self.assertEqual(b.__format__(''), str(dt))
for fmt in ["m:%m d:%d y:%y",
"m:%m d:%d y:%y H:%H M:%M S:%S",
"%z %Z",
]:
self.assertEqual(dt.__format__(fmt), dt.strftime(fmt))
self.assertEqual(a.__format__(fmt), dt.strftime(fmt))
self.assertEqual(b.__format__(fmt), 'B')
def test_resolution_info(self):
self.assertIsInstance(self.theclass.min, self.theclass)
self.assertIsInstance(self.theclass.max, self.theclass)
self.assertIsInstance(self.theclass.resolution, timedelta)
self.assertTrue(self.theclass.max > self.theclass.min)
def test_extreme_timedelta(self):
big = self.theclass.max - self.theclass.min
# 3652058 days, 23 hours, 59 minutes, 59 seconds, 999999 microseconds
n = (big.days*24*3600 + big.seconds)*1000000 + big.microseconds
# n == 315537897599999999 ~= 2**58.13
justasbig = timedelta(0, 0, n)
self.assertEqual(big, justasbig)
self.assertEqual(self.theclass.min + big, self.theclass.max)
self.assertEqual(self.theclass.max - big, self.theclass.min)
def test_timetuple(self):
for i in range(7):
# January 2, 1956 is a Monday (0)
d = self.theclass(1956, 1, 2+i)
t = d.timetuple()
self.assertEqual(t, (1956, 1, 2+i, 0, 0, 0, i, 2+i, -1))
# February 1, 1956 is a Wednesday (2)
d = self.theclass(1956, 2, 1+i)
t = d.timetuple()
self.assertEqual(t, (1956, 2, 1+i, 0, 0, 0, (2+i)%7, 32+i, -1))
# March 1, 1956 is a Thursday (3), and is the 31+29+1 = 61st day
# of the year.
d = self.theclass(1956, 3, 1+i)
t = d.timetuple()
self.assertEqual(t, (1956, 3, 1+i, 0, 0, 0, (3+i)%7, 61+i, -1))
self.assertEqual(t.tm_year, 1956)
self.assertEqual(t.tm_mon, 3)
self.assertEqual(t.tm_mday, 1+i)
self.assertEqual(t.tm_hour, 0)
self.assertEqual(t.tm_min, 0)
self.assertEqual(t.tm_sec, 0)
self.assertEqual(t.tm_wday, (3+i)%7)
self.assertEqual(t.tm_yday, 61+i)
self.assertEqual(t.tm_isdst, -1)
def test_pickling(self):
args = 6, 7, 23
orig = self.theclass(*args)
for pickler, unpickler, proto in pickle_choices:
green = pickler.dumps(orig, proto)
derived = unpickler.loads(green)
self.assertEqual(orig, derived)
def test_compare(self):
t1 = self.theclass(2, 3, 4)
t2 = self.theclass(2, 3, 4)
self.assertTrue(t1 == t2)
self.assertTrue(t1 <= t2)
self.assertTrue(t1 >= t2)
self.assertTrue(not t1 != t2)
self.assertTrue(not t1 < t2)
self.assertTrue(not t1 > t2)
self.assertEqual(cmp(t1, t2), 0)
self.assertEqual(cmp(t2, t1), 0)
for args in (3, 3, 3), (2, 4, 4), (2, 3, 5):
t2 = self.theclass(*args) # this is larger than t1
self.assertTrue(t1 < t2)
self.assertTrue(t2 > t1)
self.assertTrue(t1 <= t2)
self.assertTrue(t2 >= t1)
self.assertTrue(t1 != t2)
self.assertTrue(t2 != t1)
self.assertTrue(not t1 == t2)
self.assertTrue(not t2 == t1)
self.assertTrue(not t1 > t2)
self.assertTrue(not t2 < t1)
self.assertTrue(not t1 >= t2)
self.assertTrue(not t2 <= t1)
self.assertEqual(cmp(t1, t2), -1)
self.assertEqual(cmp(t2, t1), 1)
for badarg in OTHERSTUFF:
self.assertEqual(t1 == badarg, False)
self.assertEqual(t1 != badarg, True)
self.assertEqual(badarg == t1, False)
self.assertEqual(badarg != t1, True)
self.assertRaises(TypeError, lambda: t1 < badarg)
self.assertRaises(TypeError, lambda: t1 > badarg)
self.assertRaises(TypeError, lambda: t1 >= badarg)
self.assertRaises(TypeError, lambda: badarg <= t1)
self.assertRaises(TypeError, lambda: badarg < t1)
self.assertRaises(TypeError, lambda: badarg > t1)
self.assertRaises(TypeError, lambda: badarg >= t1)
def test_mixed_compare(self):
our = self.theclass(2000, 4, 5)
self.assertRaises(TypeError, cmp, our, 1)
self.assertRaises(TypeError, cmp, 1, our)
class AnotherDateTimeClass(object):
def __cmp__(self, other):
# Return "equal" so calling this can't be confused with
# compare-by-address (which never says "equal" for distinct
# objects).
return 0
__hash__ = None # Silence Py3k warning
# This still errors, because date and datetime comparison raise
# TypeError instead of NotImplemented when they don't know what to
# do, in order to stop comparison from falling back to the default
# compare-by-address.
their = AnotherDateTimeClass()
self.assertRaises(TypeError, cmp, our, their)
# Oops: The next stab raises TypeError in the C implementation,
# but not in the Python implementation of datetime. The difference
# is that the Python implementation defines __cmp__ while the C
# implementation defines tp_richcompare, and that is more pain to
# fix than it's worth, so the test is commented out.
# self.assertEqual(cmp(their, our), 0)
# But date and datetime comparison return NotImplemented instead if the
# other object has a timetuple attr. This gives the other object a
# chance to do the comparison.
class Comparable(AnotherDateTimeClass):
def timetuple(self):
return ()
their = Comparable()
self.assertEqual(cmp(our, their), 0)
self.assertEqual(cmp(their, our), 0)
self.assertTrue(our == their)
self.assertTrue(their == our)
def test_bool(self):
# All dates are considered true.
self.assertTrue(self.theclass.min)
self.assertTrue(self.theclass.max)
def test_strftime_out_of_range(self):
# For nasty technical reasons, we can't handle years before 1900.
cls = self.theclass
self.assertEqual(cls(1900, 1, 1).strftime("%Y"), "1900")
for y in 1, 49, 51, 99, 100, 1000, 1899:
self.assertRaises(ValueError, cls(y, 1, 1).strftime, "%Y")
def test_replace(self):
cls = self.theclass
args = [1, 2, 3]
base = cls(*args)
self.assertEqual(base, base.replace())
i = 0
for name, newval in (("year", 2),
("month", 3),
("day", 4)):
newargs = args[:]
newargs[i] = newval
expected = cls(*newargs)
got = base.replace(**{name: newval})
self.assertEqual(expected, got)
i += 1
# Out of bounds.
base = cls(2000, 2, 29)
self.assertRaises(ValueError, base.replace, year=2001)
def test_subclass_date(self):
class C(self.theclass):
theAnswer = 42
def __new__(cls, *args, **kws):
temp = kws.copy()
extra = temp.pop('extra')
result = self.theclass.__new__(cls, *args, **temp)
result.extra = extra
return result
def newmeth(self, start):
return start + self.year + self.month
args = 2003, 4, 14
dt1 = self.theclass(*args)
dt2 = C(*args, **{'extra': 7})
self.assertEqual(dt2.__class__, C)
self.assertEqual(dt2.theAnswer, 42)
self.assertEqual(dt2.extra, 7)
self.assertEqual(dt1.toordinal(), dt2.toordinal())
self.assertEqual(dt2.newmeth(-7), dt1.year + dt1.month - 7)
def test_pickling_subclass_date(self):
args = 6, 7, 23
orig = SubclassDate(*args)
for pickler, unpickler, proto in pickle_choices:
green = pickler.dumps(orig, proto)
derived = unpickler.loads(green)
self.assertEqual(orig, derived)
def test_backdoor_resistance(self):
# For fast unpickling, the constructor accepts a pickle string.
# This is a low-overhead backdoor. A user can (by intent or
# mistake) pass a string directly, which (if it's the right length)
# will get treated like a pickle, and bypass the normal sanity
# checks in the constructor. This can create insane objects.
# The constructor doesn't want to burn the time to validate all
# fields, but does check the month field. This stops, e.g.,
# datetime.datetime('1995-03-25') from yielding an insane object.
base = '1995-03-25'
if not issubclass(self.theclass, datetime):
base = base[:4]
for month_byte in '9', chr(0), chr(13), '\xff':
self.assertRaises(TypeError, self.theclass,
base[:2] + month_byte + base[3:])
for ord_byte in range(1, 13):
# This shouldn't blow up because of the month byte alone. If
# the implementation changes to do more-careful checking, it may
# blow up because other fields are insane.
self.theclass(base[:2] + chr(ord_byte) + base[3:])
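# Illustrative sketch, not part of the original tests: the pickle payload
# for a date is four bytes (year_hi, year_lo, month, day), so a well-formed
# string bypasses normal validation entirely:
#
# d = date('\x07\xcb\x03\x19')   # 0x07cb == 1995
# assert (d.year, d.month, d.day) == (1995, 3, 25)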
#############################################################################
# datetime tests
class SubclassDatetime(datetime):
sub_var = 1
class TestDateTime(TestDate):
theclass = datetime
def test_basic_attributes(self):
dt = self.theclass(2002, 3, 1, 12, 0)
self.assertEqual(dt.year, 2002)
self.assertEqual(dt.month, 3)
self.assertEqual(dt.day, 1)
self.assertEqual(dt.hour, 12)
self.assertEqual(dt.minute, 0)
self.assertEqual(dt.second, 0)
self.assertEqual(dt.microsecond, 0)
def test_basic_attributes_nonzero(self):
# Make sure all attributes are non-zero so bugs in
# bit-shifting access show up.
dt = self.theclass(2002, 3, 1, 12, 59, 59, 8000)
self.assertEqual(dt.year, 2002)
self.assertEqual(dt.month, 3)
self.assertEqual(dt.day, 1)
self.assertEqual(dt.hour, 12)
self.assertEqual(dt.minute, 59)
self.assertEqual(dt.second, 59)
self.assertEqual(dt.microsecond, 8000)
def test_roundtrip(self):
for dt in (self.theclass(1, 2, 3, 4, 5, 6, 7),
self.theclass.now()):
# Verify dt -> string -> datetime identity.
s = repr(dt)
self.assertTrue(s.startswith('datetime.'))
s = s[9:]
dt2 = eval(s)
self.assertEqual(dt, dt2)
# Verify identity via reconstructing from pieces.
dt2 = self.theclass(dt.year, dt.month, dt.day,
dt.hour, dt.minute, dt.second,
dt.microsecond)
self.assertEqual(dt, dt2)
def test_isoformat(self):
t = self.theclass(2, 3, 2, 4, 5, 1, 123)
self.assertEqual(t.isoformat(), "0002-03-02T04:05:01.000123")
self.assertEqual(t.isoformat('T'), "0002-03-02T04:05:01.000123")
self.assertEqual(t.isoformat(' '), "0002-03-02 04:05:01.000123")
self.assertEqual(t.isoformat('\x00'), "0002-03-02\x0004:05:01.000123")
# str is ISO format with the separator forced to a blank.
self.assertEqual(str(t), "0002-03-02 04:05:01.000123")
t = self.theclass(2, 3, 2)
self.assertEqual(t.isoformat(), "0002-03-02T00:00:00")
self.assertEqual(t.isoformat('T'), "0002-03-02T00:00:00")
self.assertEqual(t.isoformat(' '), "0002-03-02 00:00:00")
# str is ISO format with the separator forced to a blank.
self.assertEqual(str(t), "0002-03-02 00:00:00")
def test_format(self):
dt = self.theclass(2007, 9, 10, 4, 5, 1, 123)
self.assertEqual(dt.__format__(''), str(dt))
# check that a derived class's __str__() gets called
class A(self.theclass):
def __str__(self):
return 'A'
a = A(2007, 9, 10, 4, 5, 1, 123)
self.assertEqual(a.__format__(''), 'A')
# check that a derived class's strftime gets called
class B(self.theclass):
def strftime(self, format_spec):
return 'B'
b = B(2007, 9, 10, 4, 5, 1, 123)
self.assertEqual(b.__format__(''), str(dt))
for fmt in ["m:%m d:%d y:%y",
"m:%m d:%d y:%y H:%H M:%M S:%S",
"%z %Z",
]:
self.assertEqual(dt.__format__(fmt), dt.strftime(fmt))
self.assertEqual(a.__format__(fmt), dt.strftime(fmt))
self.assertEqual(b.__format__(fmt), 'B')
def test_more_ctime(self):
# Test fields that TestDate doesn't touch.
import time
t = self.theclass(2002, 3, 2, 18, 3, 5, 123)
self.assertEqual(t.ctime(), "Sat Mar 2 18:03:05 2002")
# Oops! The next line fails on Win2K under MSVC 6, so it's commented
# out. The difference is that t.ctime() produces " 2" for the day,
# but platform ctime() produces "02" for the day. According to
# C99, t.ctime() is correct here.
# self.assertEqual(t.ctime(), time.ctime(time.mktime(t.timetuple())))
# So test a case where that difference doesn't matter.
t = self.theclass(2002, 3, 22, 18, 3, 5, 123)
self.assertEqual(t.ctime(), time.ctime(time.mktime(t.timetuple())))
def test_tz_independent_comparing(self):
dt1 = self.theclass(2002, 3, 1, 9, 0, 0)
dt2 = self.theclass(2002, 3, 1, 10, 0, 0)
dt3 = self.theclass(2002, 3, 1, 9, 0, 0)
self.assertEqual(dt1, dt3)
self.assertTrue(dt2 > dt3)
# Make sure comparison doesn't forget microseconds, and isn't done
# via comparing a float timestamp (an IEEE double doesn't have enough
# precision to span microsecond resolution across years 1 thru 9999,
# so comparing via timestamp necessarily calls some distinct values
# equal).
dt1 = self.theclass(MAXYEAR, 12, 31, 23, 59, 59, 999998)
us = timedelta(microseconds=1)
dt2 = dt1 + us
self.assertEqual(dt2 - dt1, us)
self.assertTrue(dt1 < dt2)
def test_strftime_with_bad_tzname_replace(self):
# verify ok if tzinfo.tzname().replace() returns a non-string
class MyTzInfo(FixedOffset):
def tzname(self, dt):
class MyStr(str):
def replace(self, *args):
return None
return MyStr('name')
t = self.theclass(2005, 3, 2, 0, 0, 0, 0, MyTzInfo(3, 'name'))
self.assertRaises(TypeError, t.strftime, '%Z')
def test_bad_constructor_arguments(self):
# bad years
self.theclass(MINYEAR, 1, 1) # no exception
self.theclass(MAXYEAR, 1, 1) # no exception
self.assertRaises(ValueError, self.theclass, MINYEAR-1, 1, 1)
self.assertRaises(ValueError, self.theclass, MAXYEAR+1, 1, 1)
# bad months
self.theclass(2000, 1, 1) # no exception
self.theclass(2000, 12, 1) # no exception
self.assertRaises(ValueError, self.theclass, 2000, 0, 1)
self.assertRaises(ValueError, self.theclass, 2000, 13, 1)
# bad days
self.theclass(2000, 2, 29) # no exception
self.theclass(2004, 2, 29) # no exception
self.theclass(2400, 2, 29) # no exception
self.assertRaises(ValueError, self.theclass, 2000, 2, 30)
self.assertRaises(ValueError, self.theclass, 2001, 2, 29)
self.assertRaises(ValueError, self.theclass, 2100, 2, 29)
self.assertRaises(ValueError, self.theclass, 1900, 2, 29)
self.assertRaises(ValueError, self.theclass, 2000, 1, 0)
self.assertRaises(ValueError, self.theclass, 2000, 1, 32)
# bad hours
self.theclass(2000, 1, 31, 0) # no exception
self.theclass(2000, 1, 31, 23) # no exception
self.assertRaises(ValueError, self.theclass, 2000, 1, 31, -1)
self.assertRaises(ValueError, self.theclass, 2000, 1, 31, 24)
# bad minutes
self.theclass(2000, 1, 31, 23, 0) # no exception
self.theclass(2000, 1, 31, 23, 59) # no exception
self.assertRaises(ValueError, self.theclass, 2000, 1, 31, 23, -1)
self.assertRaises(ValueError, self.theclass, 2000, 1, 31, 23, 60)
# bad seconds
self.theclass(2000, 1, 31, 23, 59, 0) # no exception
self.theclass(2000, 1, 31, 23, 59, 59) # no exception
self.assertRaises(ValueError, self.theclass, 2000, 1, 31, 23, 59, -1)
self.assertRaises(ValueError, self.theclass, 2000, 1, 31, 23, 59, 60)
# bad microseconds
self.theclass(2000, 1, 31, 23, 59, 59, 0) # no exception
self.theclass(2000, 1, 31, 23, 59, 59, 999999) # no exception
self.assertRaises(ValueError, self.theclass,
2000, 1, 31, 23, 59, 59, -1)
self.assertRaises(ValueError, self.theclass,
2000, 1, 31, 23, 59, 59,
1000000)
def test_hash_equality(self):
d = self.theclass(2000, 12, 31, 23, 30, 17)
e = self.theclass(2000, 12, 31, 23, 30, 17)
self.assertEqual(d, e)
self.assertEqual(hash(d), hash(e))
dic = {d: 1}
dic[e] = 2
self.assertEqual(len(dic), 1)
self.assertEqual(dic[d], 2)
self.assertEqual(dic[e], 2)
d = self.theclass(2001, 1, 1, 0, 5, 17)
e = self.theclass(2001, 1, 1, 0, 5, 17)
self.assertEqual(d, e)
self.assertEqual(hash(d), hash(e))
dic = {d: 1}
dic[e] = 2
self.assertEqual(len(dic), 1)
self.assertEqual(dic[d], 2)
self.assertEqual(dic[e], 2)
def test_computations(self):
a = self.theclass(2002, 1, 31)
b = self.theclass(1956, 1, 31)
diff = a-b
self.assertEqual(diff.days, 46*365 + len(range(1956, 2002, 4)))
self.assertEqual(diff.seconds, 0)
self.assertEqual(diff.microseconds, 0)
a = self.theclass(2002, 3, 2, 17, 6)
millisec = timedelta(0, 0, 1000)
hour = timedelta(0, 3600)
day = timedelta(1)
week = timedelta(7)
self.assertEqual(a + hour, self.theclass(2002, 3, 2, 18, 6))
self.assertEqual(hour + a, self.theclass(2002, 3, 2, 18, 6))
self.assertEqual(a + 10*hour, self.theclass(2002, 3, 3, 3, 6))
self.assertEqual(a - hour, self.theclass(2002, 3, 2, 16, 6))
self.assertEqual(-hour + a, self.theclass(2002, 3, 2, 16, 6))
self.assertEqual(a - hour, a + -hour)
self.assertEqual(a - 20*hour, self.theclass(2002, 3, 1, 21, 6))
self.assertEqual(a + day, self.theclass(2002, 3, 3, 17, 6))
self.assertEqual(a - day, self.theclass(2002, 3, 1, 17, 6))
self.assertEqual(a + week, self.theclass(2002, 3, 9, 17, 6))
self.assertEqual(a - week, self.theclass(2002, 2, 23, 17, 6))
self.assertEqual(a + 52*week, self.theclass(2003, 3, 1, 17, 6))
self.assertEqual(a - 52*week, self.theclass(2001, 3, 3, 17, 6))
self.assertEqual((a + week) - a, week)
self.assertEqual((a + day) - a, day)
self.assertEqual((a + hour) - a, hour)
self.assertEqual((a + millisec) - a, millisec)
self.assertEqual((a - week) - a, -week)
self.assertEqual((a - day) - a, -day)
self.assertEqual((a - hour) - a, -hour)
self.assertEqual((a - millisec) - a, -millisec)
self.assertEqual(a - (a + week), -week)
self.assertEqual(a - (a + day), -day)
self.assertEqual(a - (a + hour), -hour)
self.assertEqual(a - (a + millisec), -millisec)
self.assertEqual(a - (a - week), week)
self.assertEqual(a - (a - day), day)
self.assertEqual(a - (a - hour), hour)
self.assertEqual(a - (a - millisec), millisec)
self.assertEqual(a + (week + day + hour + millisec),
self.theclass(2002, 3, 10, 18, 6, 0, 1000))
self.assertEqual(a + (week + day + hour + millisec),
(((a + week) + day) + hour) + millisec)
self.assertEqual(a - (week + day + hour + millisec),
self.theclass(2002, 2, 22, 16, 5, 59, 999000))
self.assertEqual(a - (week + day + hour + millisec),
(((a - week) - day) - hour) - millisec)
# Add/sub ints, longs, floats should be illegal
for i in 1, 1L, 1.0:
self.assertRaises(TypeError, lambda: a+i)
self.assertRaises(TypeError, lambda: a-i)
self.assertRaises(TypeError, lambda: i+a)
self.assertRaises(TypeError, lambda: i-a)
# delta - datetime is senseless.
self.assertRaises(TypeError, lambda: day - a)
# mixing datetime and (delta or datetime) via * or // is senseless
self.assertRaises(TypeError, lambda: day * a)
self.assertRaises(TypeError, lambda: a * day)
self.assertRaises(TypeError, lambda: day // a)
self.assertRaises(TypeError, lambda: a // day)
self.assertRaises(TypeError, lambda: a * a)
self.assertRaises(TypeError, lambda: a // a)
# datetime + datetime is senseless
self.assertRaises(TypeError, lambda: a + a)
def test_pickling(self):
args = 6, 7, 23, 20, 59, 1, 64**2
orig = self.theclass(*args)
for pickler, unpickler, proto in pickle_choices:
green = pickler.dumps(orig, proto)
derived = unpickler.loads(green)
self.assertEqual(orig, derived)
def test_more_pickling(self):
a = self.theclass(2003, 2, 7, 16, 48, 37, 444116)
s = pickle.dumps(a)
b = pickle.loads(s)
self.assertEqual(b.year, 2003)
self.assertEqual(b.month, 2)
self.assertEqual(b.day, 7)
def test_pickling_subclass_datetime(self):
args = 6, 7, 23, 20, 59, 1, 64**2
orig = SubclassDatetime(*args)
for pickler, unpickler, proto in pickle_choices:
green = pickler.dumps(orig, proto)
derived = unpickler.loads(green)
self.assertEqual(orig, derived)
def test_more_compare(self):
# The test_compare() inherited from TestDate covers the error cases.
# We just want to test lexicographic ordering on the members datetime
# has that date lacks.
args = [2000, 11, 29, 20, 58, 16, 999998]
t1 = self.theclass(*args)
t2 = self.theclass(*args)
self.assertTrue(t1 == t2)
self.assertTrue(t1 <= t2)
self.assertTrue(t1 >= t2)
self.assertTrue(not t1 != t2)
self.assertTrue(not t1 < t2)
self.assertTrue(not t1 > t2)
self.assertEqual(cmp(t1, t2), 0)
self.assertEqual(cmp(t2, t1), 0)
for i in range(len(args)):
newargs = args[:]
newargs[i] = args[i] + 1
t2 = self.theclass(*newargs) # this is larger than t1
self.assertTrue(t1 < t2)
self.assertTrue(t2 > t1)
self.assertTrue(t1 <= t2)
self.assertTrue(t2 >= t1)
self.assertTrue(t1 != t2)
self.assertTrue(t2 != t1)
self.assertTrue(not t1 == t2)
self.assertTrue(not t2 == t1)
self.assertTrue(not t1 > t2)
self.assertTrue(not t2 < t1)
self.assertTrue(not t1 >= t2)
self.assertTrue(not t2 <= t1)
self.assertEqual(cmp(t1, t2), -1)
self.assertEqual(cmp(t2, t1), 1)
# A helper for timestamp constructor tests.
def verify_field_equality(self, expected, got):
self.assertEqual(expected.tm_year, got.year)
self.assertEqual(expected.tm_mon, got.month)
self.assertEqual(expected.tm_mday, got.day)
self.assertEqual(expected.tm_hour, got.hour)
self.assertEqual(expected.tm_min, got.minute)
self.assertEqual(expected.tm_sec, got.second)
def test_fromtimestamp(self):
import time
ts = time.time()
expected = time.localtime(ts)
got = self.theclass.fromtimestamp(ts)
self.verify_field_equality(expected, got)
def test_utcfromtimestamp(self):
import time
ts = time.time()
expected = time.gmtime(ts)
got = self.theclass.utcfromtimestamp(ts)
self.verify_field_equality(expected, got)
def test_microsecond_rounding(self):
# Test whether fromtimestamp "rounds up" floats that are less
# than one microsecond smaller than an integer.
self.assertEqual(self.theclass.fromtimestamp(0.9999999),
self.theclass.fromtimestamp(1))
def test_insane_fromtimestamp(self):
# It's possible that some platform maps time_t to double,
# and that this test will fail there. This test should
# exempt such platforms (provided they return reasonable
# results!).
for insane in -1e200, 1e200:
self.assertRaises(ValueError, self.theclass.fromtimestamp,
insane)
def test_insane_utcfromtimestamp(self):
# It's possible that some platform maps time_t to double,
# and that this test will fail there. This test should
# exempt such platforms (provided they return reasonable
# results!).
for insane in -1e200, 1e200:
self.assertRaises(ValueError, self.theclass.utcfromtimestamp,
insane)
@unittest.skipIf(sys.platform == "win32", "Windows doesn't accept negative timestamps")
def test_negative_float_fromtimestamp(self):
# The result is tz-dependent; at least test that this doesn't
# fail (like it did before bug 1646728 was fixed).
self.theclass.fromtimestamp(-1.05)
@unittest.skipIf(sys.platform == "win32", "Windows doesn't accept negative timestamps")
def test_negative_float_utcfromtimestamp(self):
d = self.theclass.utcfromtimestamp(-1.05)
self.assertEqual(d, self.theclass(1969, 12, 31, 23, 59, 58, 950000))
def test_utcnow(self):
import time
# Call it a success if utcnow() and utcfromtimestamp() are within
# a second of each other.
tolerance = timedelta(seconds=1)
for dummy in range(3):
from_now = self.theclass.utcnow()
from_timestamp = self.theclass.utcfromtimestamp(time.time())
if abs(from_timestamp - from_now) <= tolerance:
break
# Else try again a few times.
self.assertTrue(abs(from_timestamp - from_now) <= tolerance)
def test_strptime(self):
import _strptime
string = '2004-12-01 13:02:47.197'
format = '%Y-%m-%d %H:%M:%S.%f'
result, frac = _strptime._strptime(string, format)
expected = self.theclass(*(result[0:6]+(frac,)))
got = self.theclass.strptime(string, format)
self.assertEqual(expected, got)
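# Minimal sketch of %f parsing (hedged, stdlib only): the fractional
# part is right-padded to microseconds, so '.197' parses as 197000.
#
#     from datetime import datetime
#     dt = datetime.strptime('2004-12-01 13:02:47.197',
#                            '%Y-%m-%d %H:%M:%S.%f')
#     assert dt.microsecond == 197000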
def test_more_timetuple(self):
# This tests fields beyond those tested by the TestDate.test_timetuple.
t = self.theclass(2004, 12, 31, 6, 22, 33)
self.assertEqual(t.timetuple(), (2004, 12, 31, 6, 22, 33, 4, 366, -1))
self.assertEqual(t.timetuple(),
(t.year, t.month, t.day,
t.hour, t.minute, t.second,
t.weekday(),
t.toordinal() - date(t.year, 1, 1).toordinal() + 1,
-1))
tt = t.timetuple()
self.assertEqual(tt.tm_year, t.year)
self.assertEqual(tt.tm_mon, t.month)
self.assertEqual(tt.tm_mday, t.day)
self.assertEqual(tt.tm_hour, t.hour)
self.assertEqual(tt.tm_min, t.minute)
self.assertEqual(tt.tm_sec, t.second)
self.assertEqual(tt.tm_wday, t.weekday())
self.assertEqual(tt.tm_yday, t.toordinal() -
date(t.year, 1, 1).toordinal() + 1)
self.assertEqual(tt.tm_isdst, -1)
def test_more_strftime(self):
# This tests fields beyond those tested by the TestDate.test_strftime.
t = self.theclass(2004, 12, 31, 6, 22, 33, 47)
self.assertEqual(t.strftime("%m %d %y %f %S %M %H %j"),
"12 31 04 000047 33 22 06 366")
def test_extract(self):
dt = self.theclass(2002, 3, 4, 18, 45, 3, 1234)
self.assertEqual(dt.date(), date(2002, 3, 4))
self.assertEqual(dt.time(), time(18, 45, 3, 1234))
def test_combine(self):
d = date(2002, 3, 4)
t = time(18, 45, 3, 1234)
expected = self.theclass(2002, 3, 4, 18, 45, 3, 1234)
combine = self.theclass.combine
dt = combine(d, t)
self.assertEqual(dt, expected)
dt = combine(time=t, date=d)
self.assertEqual(dt, expected)
self.assertEqual(d, dt.date())
self.assertEqual(t, dt.time())
self.assertEqual(dt, combine(dt.date(), dt.time()))
self.assertRaises(TypeError, combine) # need an arg
self.assertRaises(TypeError, combine, d) # need two args
self.assertRaises(TypeError, combine, t, d) # args reversed
self.assertRaises(TypeError, combine, d, t, 1) # too many args
self.assertRaises(TypeError, combine, "date", "time") # wrong types
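# Standalone illustration of the combine/extract round trip above
# (a hedged sketch, stdlib only):
#
#     from datetime import date, time, datetime
#     dt = datetime.combine(date(2002, 3, 4), time(18, 45, 3, 1234))
#     assert dt.date() == date(2002, 3, 4)
#     assert dt.time() == time(18, 45, 3, 1234)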
def test_replace(self):
cls = self.theclass
args = [1, 2, 3, 4, 5, 6, 7]
base = cls(*args)
self.assertEqual(base, base.replace())
i = 0
for name, newval in (("year", 2),
("month", 3),
("day", 4),
("hour", 5),
("minute", 6),
("second", 7),
("microsecond", 8)):
newargs = args[:]
newargs[i] = newval
expected = cls(*newargs)
got = base.replace(**{name: newval})
self.assertEqual(expected, got)
i += 1
# Out of bounds.
base = cls(2000, 2, 29)
self.assertRaises(ValueError, base.replace, year=2001)
def test_astimezone(self):
# Pretty boring! The TZ test is more interesting here. astimezone()
# simply can't be applied to a naive object.
dt = self.theclass.now()
f = FixedOffset(44, "")
self.assertRaises(TypeError, dt.astimezone) # not enough args
self.assertRaises(TypeError, dt.astimezone, f, f) # too many args
self.assertRaises(TypeError, dt.astimezone, dt) # arg wrong type
self.assertRaises(ValueError, dt.astimezone, f) # naive
self.assertRaises(ValueError, dt.astimezone, tz=f) # naive
class Bogus(tzinfo):
def utcoffset(self, dt): return None
def dst(self, dt): return timedelta(0)
bog = Bogus()
self.assertRaises(ValueError, dt.astimezone, bog) # naive
class AlsoBogus(tzinfo):
def utcoffset(self, dt): return timedelta(0)
def dst(self, dt): return None
alsobog = AlsoBogus()
self.assertRaises(ValueError, dt.astimezone, alsobog) # also naive
def test_subclass_datetime(self):
class C(self.theclass):
theAnswer = 42
def __new__(cls, *args, **kws):
temp = kws.copy()
extra = temp.pop('extra')
result = self.theclass.__new__(cls, *args, **temp)
result.extra = extra
return result
def newmeth(self, start):
return start + self.year + self.month + self.second
args = 2003, 4, 14, 12, 13, 41
dt1 = self.theclass(*args)
dt2 = C(*args, **{'extra': 7})
self.assertEqual(dt2.__class__, C)
self.assertEqual(dt2.theAnswer, 42)
self.assertEqual(dt2.extra, 7)
self.assertEqual(dt1.toordinal(), dt2.toordinal())
self.assertEqual(dt2.newmeth(-7), dt1.year + dt1.month +
dt1.second - 7)
class SubclassTime(time):
sub_var = 1
class TestTime(HarmlessMixedComparison, unittest.TestCase):
theclass = time
def test_basic_attributes(self):
t = self.theclass(12, 0)
self.assertEqual(t.hour, 12)
self.assertEqual(t.minute, 0)
self.assertEqual(t.second, 0)
self.assertEqual(t.microsecond, 0)
def test_basic_attributes_nonzero(self):
# Make sure all attributes are non-zero so bugs in
# bit-shifting access show up.
t = self.theclass(12, 59, 59, 8000)
self.assertEqual(t.hour, 12)
self.assertEqual(t.minute, 59)
self.assertEqual(t.second, 59)
self.assertEqual(t.microsecond, 8000)
def test_roundtrip(self):
t = self.theclass(1, 2, 3, 4)
# Verify t -> string -> time identity.
s = repr(t)
self.assertTrue(s.startswith('datetime.'))
s = s[9:]
t2 = eval(s)
self.assertEqual(t, t2)
# Verify identity via reconstructing from pieces.
t2 = self.theclass(t.hour, t.minute, t.second,
t.microsecond)
self.assertEqual(t, t2)
def test_comparing(self):
args = [1, 2, 3, 4]
t1 = self.theclass(*args)
t2 = self.theclass(*args)
self.assertTrue(t1 == t2)
self.assertTrue(t1 <= t2)
self.assertTrue(t1 >= t2)
self.assertTrue(not t1 != t2)
self.assertTrue(not t1 < t2)
self.assertTrue(not t1 > t2)
self.assertEqual(cmp(t1, t2), 0)
self.assertEqual(cmp(t2, t1), 0)
for i in range(len(args)):
newargs = args[:]
newargs[i] = args[i] + 1
t2 = self.theclass(*newargs) # this is larger than t1
self.assertTrue(t1 < t2)
self.assertTrue(t2 > t1)
self.assertTrue(t1 <= t2)
self.assertTrue(t2 >= t1)
self.assertTrue(t1 != t2)
self.assertTrue(t2 != t1)
self.assertTrue(not t1 == t2)
self.assertTrue(not t2 == t1)
self.assertTrue(not t1 > t2)
self.assertTrue(not t2 < t1)
self.assertTrue(not t1 >= t2)
self.assertTrue(not t2 <= t1)
self.assertEqual(cmp(t1, t2), -1)
self.assertEqual(cmp(t2, t1), 1)
for badarg in OTHERSTUFF:
self.assertEqual(t1 == badarg, False)
self.assertEqual(t1 != badarg, True)
self.assertEqual(badarg == t1, False)
self.assertEqual(badarg != t1, True)
self.assertRaises(TypeError, lambda: t1 <= badarg)
self.assertRaises(TypeError, lambda: t1 < badarg)
self.assertRaises(TypeError, lambda: t1 > badarg)
self.assertRaises(TypeError, lambda: t1 >= badarg)
self.assertRaises(TypeError, lambda: badarg <= t1)
self.assertRaises(TypeError, lambda: badarg < t1)
self.assertRaises(TypeError, lambda: badarg > t1)
self.assertRaises(TypeError, lambda: badarg >= t1)
def test_bad_constructor_arguments(self):
# bad hours
self.theclass(0, 0) # no exception
self.theclass(23, 0) # no exception
self.assertRaises(ValueError, self.theclass, -1, 0)
self.assertRaises(ValueError, self.theclass, 24, 0)
# bad minutes
self.theclass(23, 0) # no exception
self.theclass(23, 59) # no exception
self.assertRaises(ValueError, self.theclass, 23, -1)
self.assertRaises(ValueError, self.theclass, 23, 60)
# bad seconds
self.theclass(23, 59, 0) # no exception
self.theclass(23, 59, 59) # no exception
self.assertRaises(ValueError, self.theclass, 23, 59, -1)
self.assertRaises(ValueError, self.theclass, 23, 59, 60)
# bad microseconds
self.theclass(23, 59, 59, 0) # no exception
self.theclass(23, 59, 59, 999999) # no exception
self.assertRaises(ValueError, self.theclass, 23, 59, 59, -1)
self.assertRaises(ValueError, self.theclass, 23, 59, 59, 1000000)
def test_hash_equality(self):
d = self.theclass(23, 30, 17)
e = self.theclass(23, 30, 17)
self.assertEqual(d, e)
self.assertEqual(hash(d), hash(e))
dic = {d: 1}
dic[e] = 2
self.assertEqual(len(dic), 1)
self.assertEqual(dic[d], 2)
self.assertEqual(dic[e], 2)
d = self.theclass(0, 5, 17)
e = self.theclass(0, 5, 17)
self.assertEqual(d, e)
self.assertEqual(hash(d), hash(e))
dic = {d: 1}
dic[e] = 2
self.assertEqual(len(dic), 1)
self.assertEqual(dic[d], 2)
self.assertEqual(dic[e], 2)
def test_isoformat(self):
t = self.theclass(4, 5, 1, 123)
self.assertEqual(t.isoformat(), "04:05:01.000123")
self.assertEqual(t.isoformat(), str(t))
t = self.theclass()
self.assertEqual(t.isoformat(), "00:00:00")
self.assertEqual(t.isoformat(), str(t))
t = self.theclass(microsecond=1)
self.assertEqual(t.isoformat(), "00:00:00.000001")
self.assertEqual(t.isoformat(), str(t))
t = self.theclass(microsecond=10)
self.assertEqual(t.isoformat(), "00:00:00.000010")
self.assertEqual(t.isoformat(), str(t))
t = self.theclass(microsecond=100)
self.assertEqual(t.isoformat(), "00:00:00.000100")
self.assertEqual(t.isoformat(), str(t))
t = self.theclass(microsecond=1000)
self.assertEqual(t.isoformat(), "00:00:00.001000")
self.assertEqual(t.isoformat(), str(t))
t = self.theclass(microsecond=10000)
self.assertEqual(t.isoformat(), "00:00:00.010000")
self.assertEqual(t.isoformat(), str(t))
t = self.theclass(microsecond=100000)
self.assertEqual(t.isoformat(), "00:00:00.100000")
self.assertEqual(t.isoformat(), str(t))
def test_1653736(self):
# verify it doesn't accept extra keyword arguments
t = self.theclass(second=1)
self.assertRaises(TypeError, t.isoformat, foo=3)
def test_strftime(self):
t = self.theclass(1, 2, 3, 4)
self.assertEqual(t.strftime('%H %M %S %f'), "01 02 03 000004")
# A naive object replaces %z and %Z with empty strings.
self.assertEqual(t.strftime("'%z' '%Z'"), "'' ''")
def test_format(self):
t = self.theclass(1, 2, 3, 4)
self.assertEqual(t.__format__(''), str(t))
# check that a derived class's __str__() gets called
class A(self.theclass):
def __str__(self):
return 'A'
a = A(1, 2, 3, 4)
self.assertEqual(a.__format__(''), 'A')
# check that a derived class's strftime gets called
class B(self.theclass):
def strftime(self, format_spec):
return 'B'
b = B(1, 2, 3, 4)
self.assertEqual(b.__format__(''), str(t))
for fmt in ['%H %M %S',
]:
self.assertEqual(t.__format__(fmt), t.strftime(fmt))
self.assertEqual(a.__format__(fmt), t.strftime(fmt))
self.assertEqual(b.__format__(fmt), 'B')
def test_str(self):
self.assertEqual(str(self.theclass(1, 2, 3, 4)), "01:02:03.000004")
self.assertEqual(str(self.theclass(10, 2, 3, 4000)), "10:02:03.004000")
self.assertEqual(str(self.theclass(0, 2, 3, 400000)), "00:02:03.400000")
self.assertEqual(str(self.theclass(12, 2, 3, 0)), "12:02:03")
self.assertEqual(str(self.theclass(23, 15, 0, 0)), "23:15:00")
def test_repr(self):
name = 'datetime.' + self.theclass.__name__
self.assertEqual(repr(self.theclass(1, 2, 3, 4)),
"%s(1, 2, 3, 4)" % name)
self.assertEqual(repr(self.theclass(10, 2, 3, 4000)),
"%s(10, 2, 3, 4000)" % name)
self.assertEqual(repr(self.theclass(0, 2, 3, 400000)),
"%s(0, 2, 3, 400000)" % name)
self.assertEqual(repr(self.theclass(12, 2, 3, 0)),
"%s(12, 2, 3)" % name)
self.assertEqual(repr(self.theclass(23, 15, 0, 0)),
"%s(23, 15)" % name)
def test_resolution_info(self):
self.assertIsInstance(self.theclass.min, self.theclass)
self.assertIsInstance(self.theclass.max, self.theclass)
self.assertIsInstance(self.theclass.resolution, timedelta)
self.assertTrue(self.theclass.max > self.theclass.min)
def test_pickling(self):
args = 20, 59, 16, 64**2
orig = self.theclass(*args)
for pickler, unpickler, proto in pickle_choices:
green = pickler.dumps(orig, proto)
derived = unpickler.loads(green)
self.assertEqual(orig, derived)
def test_pickling_subclass_time(self):
args = 20, 59, 16, 64**2
orig = SubclassTime(*args)
for pickler, unpickler, proto in pickle_choices:
green = pickler.dumps(orig, proto)
derived = unpickler.loads(green)
self.assertEqual(orig, derived)
def test_bool(self):
cls = self.theclass
self.assertTrue(cls(1))
self.assertTrue(cls(0, 1))
self.assertTrue(cls(0, 0, 1))
self.assertTrue(cls(0, 0, 0, 1))
self.assertTrue(not cls(0))
self.assertTrue(not cls())
def test_replace(self):
cls = self.theclass
args = [1, 2, 3, 4]
base = cls(*args)
self.assertEqual(base, base.replace())
i = 0
for name, newval in (("hour", 5),
("minute", 6),
("second", 7),
("microsecond", 8)):
newargs = args[:]
newargs[i] = newval
expected = cls(*newargs)
got = base.replace(**{name: newval})
self.assertEqual(expected, got)
i += 1
# Out of bounds.
base = cls(1)
self.assertRaises(ValueError, base.replace, hour=24)
self.assertRaises(ValueError, base.replace, minute=-1)
self.assertRaises(ValueError, base.replace, second=100)
self.assertRaises(ValueError, base.replace, microsecond=1000000)
def test_subclass_time(self):
class C(self.theclass):
theAnswer = 42
def __new__(cls, *args, **kws):
temp = kws.copy()
extra = temp.pop('extra')
result = self.theclass.__new__(cls, *args, **temp)
result.extra = extra
return result
def newmeth(self, start):
return start + self.hour + self.second
args = 4, 5, 6
dt1 = self.theclass(*args)
dt2 = C(*args, **{'extra': 7})
self.assertEqual(dt2.__class__, C)
self.assertEqual(dt2.theAnswer, 42)
self.assertEqual(dt2.extra, 7)
self.assertEqual(dt1.isoformat(), dt2.isoformat())
self.assertEqual(dt2.newmeth(-7), dt1.hour + dt1.second - 7)
def test_backdoor_resistance(self):
# see TestDate.test_backdoor_resistance().
base = '2:59.0'
for hour_byte in ' ', '9', chr(24), '\xff':
self.assertRaises(TypeError, self.theclass,
hour_byte + base[1:])
# A mixin for classes with a tzinfo= argument. Subclasses must define
# theclass as a class attribute, and theclass(1, 1, 1, tzinfo=whatever)
# must be legit (which is true for time and datetime).
class TZInfoBase:
def test_argument_passing(self):
cls = self.theclass
# A datetime passes itself on, a time passes None.
class introspective(tzinfo):
def tzname(self, dt): return dt and "real" or "none"
def utcoffset(self, dt):
return timedelta(minutes = dt and 42 or -42)
dst = utcoffset
obj = cls(1, 2, 3, tzinfo=introspective())
expected = cls is time and "none" or "real"
self.assertEqual(obj.tzname(), expected)
expected = timedelta(minutes=(cls is time and -42 or 42))
self.assertEqual(obj.utcoffset(), expected)
self.assertEqual(obj.dst(), expected)
def test_bad_tzinfo_classes(self):
cls = self.theclass
self.assertRaises(TypeError, cls, 1, 1, 1, tzinfo=12)
class NiceTry(object):
def __init__(self): pass
def utcoffset(self, dt): pass
self.assertRaises(TypeError, cls, 1, 1, 1, tzinfo=NiceTry)
class BetterTry(tzinfo):
def __init__(self): pass
def utcoffset(self, dt): pass
b = BetterTry()
t = cls(1, 1, 1, tzinfo=b)
self.assertTrue(t.tzinfo is b)
def test_utc_offset_out_of_bounds(self):
class Edgy(tzinfo):
def __init__(self, offset):
self.offset = timedelta(minutes=offset)
def utcoffset(self, dt):
return self.offset
cls = self.theclass
for offset, legit in ((-1440, False),
(-1439, True),
(1439, True),
(1440, False)):
if cls is time:
t = cls(1, 2, 3, tzinfo=Edgy(offset))
elif cls is datetime:
t = cls(6, 6, 6, 1, 2, 3, tzinfo=Edgy(offset))
else:
assert 0, "impossible"
if legit:
aofs = abs(offset)
h, m = divmod(aofs, 60)
tag = "%c%02d:%02d" % (offset < 0 and '-' or '+', h, m)
if isinstance(t, datetime):
t = t.timetz()
self.assertEqual(str(t), "01:02:03" + tag)
else:
self.assertRaises(ValueError, str, t)
def test_tzinfo_classes(self):
cls = self.theclass
class C1(tzinfo):
def utcoffset(self, dt): return None
def dst(self, dt): return None
def tzname(self, dt): return None
for t in (cls(1, 1, 1),
cls(1, 1, 1, tzinfo=None),
cls(1, 1, 1, tzinfo=C1())):
self.assertTrue(t.utcoffset() is None)
self.assertTrue(t.dst() is None)
self.assertTrue(t.tzname() is None)
class C3(tzinfo):
def utcoffset(self, dt): return timedelta(minutes=-1439)
def dst(self, dt): return timedelta(minutes=1439)
def tzname(self, dt): return "aname"
t = cls(1, 1, 1, tzinfo=C3())
self.assertEqual(t.utcoffset(), timedelta(minutes=-1439))
self.assertEqual(t.dst(), timedelta(minutes=1439))
self.assertEqual(t.tzname(), "aname")
# Wrong types.
class C4(tzinfo):
def utcoffset(self, dt): return "aname"
def dst(self, dt): return 7
def tzname(self, dt): return 0
t = cls(1, 1, 1, tzinfo=C4())
self.assertRaises(TypeError, t.utcoffset)
self.assertRaises(TypeError, t.dst)
self.assertRaises(TypeError, t.tzname)
# Offset out of range.
class C6(tzinfo):
def utcoffset(self, dt): return timedelta(hours=-24)
def dst(self, dt): return timedelta(hours=24)
t = cls(1, 1, 1, tzinfo=C6())
self.assertRaises(ValueError, t.utcoffset)
self.assertRaises(ValueError, t.dst)
# Not a whole number of minutes.
class C7(tzinfo):
def utcoffset(self, dt): return timedelta(seconds=61)
def dst(self, dt): return timedelta(microseconds=-81)
t = cls(1, 1, 1, tzinfo=C7())
self.assertRaises(ValueError, t.utcoffset)
self.assertRaises(ValueError, t.dst)
def test_aware_compare(self):
cls = self.theclass
# Ensure that utcoffset() gets ignored if the comparands have
# the same tzinfo member.
class OperandDependentOffset(tzinfo):
def utcoffset(self, t):
if t.minute < 10:
# d0 and d1 equal after adjustment
return timedelta(minutes=t.minute)
else:
# d2 off in the weeds
return timedelta(minutes=59)
base = cls(8, 9, 10, tzinfo=OperandDependentOffset())
d0 = base.replace(minute=3)
d1 = base.replace(minute=9)
d2 = base.replace(minute=11)
for x in d0, d1, d2:
for y in d0, d1, d2:
got = cmp(x, y)
expected = cmp(x.minute, y.minute)
self.assertEqual(got, expected)
# However, if they're different members, utcoffset is not ignored.
# Note that a time can't actually have an operand-dependent offset,
# though (and time.utcoffset() passes None to tzinfo.utcoffset()),
# so skip this test for time.
if cls is not time:
d0 = base.replace(minute=3, tzinfo=OperandDependentOffset())
d1 = base.replace(minute=9, tzinfo=OperandDependentOffset())
d2 = base.replace(minute=11, tzinfo=OperandDependentOffset())
for x in d0, d1, d2:
for y in d0, d1, d2:
got = cmp(x, y)
if (x is d0 or x is d1) and (y is d0 or y is d1):
expected = 0
elif x is y is d2:
expected = 0
elif x is d2:
expected = -1
else:
assert y is d2
expected = 1
self.assertEqual(got, expected)
# Testing time objects with a non-None tzinfo.
class TestTimeTZ(TestTime, TZInfoBase, unittest.TestCase):
theclass = time
def test_empty(self):
t = self.theclass()
self.assertEqual(t.hour, 0)
self.assertEqual(t.minute, 0)
self.assertEqual(t.second, 0)
self.assertEqual(t.microsecond, 0)
self.assertTrue(t.tzinfo is None)
def test_zones(self):
est = FixedOffset(-300, "EST", 1)
utc = FixedOffset(0, "UTC", -2)
met = FixedOffset(60, "MET", 3)
t1 = time( 7, 47, tzinfo=est)
t2 = time(12, 47, tzinfo=utc)
t3 = time(13, 47, tzinfo=met)
t4 = time(microsecond=40)
t5 = time(microsecond=40, tzinfo=utc)
self.assertEqual(t1.tzinfo, est)
self.assertEqual(t2.tzinfo, utc)
self.assertEqual(t3.tzinfo, met)
self.assertTrue(t4.tzinfo is None)
self.assertEqual(t5.tzinfo, utc)
self.assertEqual(t1.utcoffset(), timedelta(minutes=-300))
self.assertEqual(t2.utcoffset(), timedelta(minutes=0))
self.assertEqual(t3.utcoffset(), timedelta(minutes=60))
self.assertTrue(t4.utcoffset() is None)
self.assertRaises(TypeError, t1.utcoffset, "no args")
self.assertEqual(t1.tzname(), "EST")
self.assertEqual(t2.tzname(), "UTC")
self.assertEqual(t3.tzname(), "MET")
self.assertTrue(t4.tzname() is None)
self.assertRaises(TypeError, t1.tzname, "no args")
self.assertEqual(t1.dst(), timedelta(minutes=1))
self.assertEqual(t2.dst(), timedelta(minutes=-2))
self.assertEqual(t3.dst(), timedelta(minutes=3))
self.assertTrue(t4.dst() is None)
self.assertRaises(TypeError, t1.dst, "no args")
self.assertEqual(hash(t1), hash(t2))
self.assertEqual(hash(t1), hash(t3))
self.assertEqual(hash(t2), hash(t3))
self.assertEqual(t1, t2)
self.assertEqual(t1, t3)
self.assertEqual(t2, t3)
self.assertRaises(TypeError, lambda: t4 == t5) # mixed tz-aware & naive
self.assertRaises(TypeError, lambda: t4 < t5) # mixed tz-aware & naive
self.assertRaises(TypeError, lambda: t5 < t4) # mixed tz-aware & naive
self.assertEqual(str(t1), "07:47:00-05:00")
self.assertEqual(str(t2), "12:47:00+00:00")
self.assertEqual(str(t3), "13:47:00+01:00")
self.assertEqual(str(t4), "00:00:00.000040")
self.assertEqual(str(t5), "00:00:00.000040+00:00")
self.assertEqual(t1.isoformat(), "07:47:00-05:00")
self.assertEqual(t2.isoformat(), "12:47:00+00:00")
self.assertEqual(t3.isoformat(), "13:47:00+01:00")
self.assertEqual(t4.isoformat(), "00:00:00.000040")
self.assertEqual(t5.isoformat(), "00:00:00.000040+00:00")
d = 'datetime.time'
self.assertEqual(repr(t1), d + "(7, 47, tzinfo=est)")
self.assertEqual(repr(t2), d + "(12, 47, tzinfo=utc)")
self.assertEqual(repr(t3), d + "(13, 47, tzinfo=met)")
self.assertEqual(repr(t4), d + "(0, 0, 0, 40)")
self.assertEqual(repr(t5), d + "(0, 0, 0, 40, tzinfo=utc)")
self.assertEqual(t1.strftime("%H:%M:%S %%Z=%Z %%z=%z"),
"07:47:00 %Z=EST %z=-0500")
self.assertEqual(t2.strftime("%H:%M:%S %Z %z"), "12:47:00 UTC +0000")
self.assertEqual(t3.strftime("%H:%M:%S %Z %z"), "13:47:00 MET +0100")
yuck = FixedOffset(-1439, "%z %Z %%z%%Z")
t1 = time(23, 59, tzinfo=yuck)
self.assertEqual(t1.strftime("%H:%M %%Z='%Z' %%z='%z'"),
"23:59 %Z='%z %Z %%z%%Z' %z='-2359'")
# Check that an invalid tzname result raises an exception.
class Badtzname(tzinfo):
def tzname(self, dt): return 42
t = time(2, 3, 4, tzinfo=Badtzname())
self.assertEqual(t.strftime("%H:%M:%S"), "02:03:04")
self.assertRaises(TypeError, t.strftime, "%Z")
def test_hash_edge_cases(self):
# Offsets that overflow a basic time.
t1 = self.theclass(0, 1, 2, 3, tzinfo=FixedOffset(1439, ""))
t2 = self.theclass(0, 0, 2, 3, tzinfo=FixedOffset(1438, ""))
self.assertEqual(hash(t1), hash(t2))
t1 = self.theclass(23, 58, 6, 100, tzinfo=FixedOffset(-1000, ""))
t2 = self.theclass(23, 48, 6, 100, tzinfo=FixedOffset(-1010, ""))
self.assertEqual(hash(t1), hash(t2))
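# In other words (hedged sketch, using the module's FixedOffset helper):
# aware hashing and comparison work on the UTC-normalized value, so wall
# clocks naming the same instant are interchangeable dict keys.
#
#     t1 = time(12, 0, tzinfo=FixedOffset(60, ""))   # 11:00 UTC
#     t2 = time(11, 0, tzinfo=FixedOffset(0, ""))    # 11:00 UTC
#     assert t1 == t2 and hash(t1) == hash(t2)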
def test_pickling(self):
# Try one without a tzinfo.
args = 20, 59, 16, 64**2
orig = self.theclass(*args)
for pickler, unpickler, proto in pickle_choices:
green = pickler.dumps(orig, proto)
derived = unpickler.loads(green)
self.assertEqual(orig, derived)
# Try one with a tzinfo.
tinfo = PicklableFixedOffset(-300, 'cookie')
orig = self.theclass(5, 6, 7, tzinfo=tinfo)
for pickler, unpickler, proto in pickle_choices:
green = pickler.dumps(orig, proto)
derived = unpickler.loads(green)
self.assertEqual(orig, derived)
self.assertIsInstance(derived.tzinfo, PicklableFixedOffset)
self.assertEqual(derived.utcoffset(), timedelta(minutes=-300))
self.assertEqual(derived.tzname(), 'cookie')
def test_more_bool(self):
# Test cases with non-None tzinfo.
cls = self.theclass
t = cls(0, tzinfo=FixedOffset(-300, ""))
self.assertTrue(t)
t = cls(5, tzinfo=FixedOffset(-300, ""))
self.assertTrue(t)
t = cls(5, tzinfo=FixedOffset(300, ""))
self.assertTrue(not t)
t = cls(23, 59, tzinfo=FixedOffset(23*60 + 59, ""))
self.assertTrue(not t)
# Mostly ensuring this doesn't overflow internally.
t = cls(0, tzinfo=FixedOffset(23*60 + 59, ""))
self.assertTrue(t)
# But this should yield a value error -- the utcoffset is bogus.
t = cls(0, tzinfo=FixedOffset(24*60, ""))
self.assertRaises(ValueError, lambda: bool(t))
# Likewise.
t = cls(0, tzinfo=FixedOffset(-24*60, ""))
self.assertRaises(ValueError, lambda: bool(t))
def test_replace(self):
cls = self.theclass
z100 = FixedOffset(100, "+100")
zm200 = FixedOffset(timedelta(minutes=-200), "-200")
args = [1, 2, 3, 4, z100]
base = cls(*args)
self.assertEqual(base, base.replace())
i = 0
for name, newval in (("hour", 5),
("minute", 6),
("second", 7),
("microsecond", 8),
("tzinfo", zm200)):
newargs = args[:]
newargs[i] = newval
expected = cls(*newargs)
got = base.replace(**{name: newval})
self.assertEqual(expected, got)
i += 1
# Ensure we can get rid of a tzinfo.
self.assertEqual(base.tzname(), "+100")
base2 = base.replace(tzinfo=None)
self.assertTrue(base2.tzinfo is None)
self.assertTrue(base2.tzname() is None)
# Ensure we can add one.
base3 = base2.replace(tzinfo=z100)
self.assertEqual(base, base3)
self.assertTrue(base.tzinfo is base3.tzinfo)
# Out of bounds.
base = cls(1)
self.assertRaises(ValueError, base.replace, hour=24)
self.assertRaises(ValueError, base.replace, minute=-1)
self.assertRaises(ValueError, base.replace, second=100)
self.assertRaises(ValueError, base.replace, microsecond=1000000)
def test_mixed_compare(self):
t1 = time(1, 2, 3)
t2 = time(1, 2, 3)
self.assertEqual(t1, t2)
t2 = t2.replace(tzinfo=None)
self.assertEqual(t1, t2)
t2 = t2.replace(tzinfo=FixedOffset(None, ""))
self.assertEqual(t1, t2)
t2 = t2.replace(tzinfo=FixedOffset(0, ""))
self.assertRaises(TypeError, lambda: t1 == t2)
# In time w/ identical tzinfo objects, utcoffset is ignored.
class Varies(tzinfo):
def __init__(self):
self.offset = timedelta(minutes=22)
def utcoffset(self, t):
self.offset += timedelta(minutes=1)
return self.offset
v = Varies()
t1 = t2.replace(tzinfo=v)
t2 = t2.replace(tzinfo=v)
self.assertEqual(t1.utcoffset(), timedelta(minutes=23))
self.assertEqual(t2.utcoffset(), timedelta(minutes=24))
self.assertEqual(t1, t2)
# But if they're not identical, it isn't ignored.
t2 = t2.replace(tzinfo=Varies())
self.assertTrue(t1 < t2) # t1's offset counter still going up
def test_subclass_timetz(self):
class C(self.theclass):
theAnswer = 42
def __new__(cls, *args, **kws):
temp = kws.copy()
extra = temp.pop('extra')
result = self.theclass.__new__(cls, *args, **temp)
result.extra = extra
return result
def newmeth(self, start):
return start + self.hour + self.second
args = 4, 5, 6, 500, FixedOffset(-300, "EST", 1)
dt1 = self.theclass(*args)
dt2 = C(*args, **{'extra': 7})
self.assertEqual(dt2.__class__, C)
self.assertEqual(dt2.theAnswer, 42)
self.assertEqual(dt2.extra, 7)
self.assertEqual(dt1.utcoffset(), dt2.utcoffset())
self.assertEqual(dt2.newmeth(-7), dt1.hour + dt1.second - 7)
# Testing datetime objects with a non-None tzinfo.
class TestDateTimeTZ(TestDateTime, TZInfoBase, unittest.TestCase):
theclass = datetime
def test_trivial(self):
dt = self.theclass(1, 2, 3, 4, 5, 6, 7)
self.assertEqual(dt.year, 1)
self.assertEqual(dt.month, 2)
self.assertEqual(dt.day, 3)
self.assertEqual(dt.hour, 4)
self.assertEqual(dt.minute, 5)
self.assertEqual(dt.second, 6)
self.assertEqual(dt.microsecond, 7)
self.assertEqual(dt.tzinfo, None)
def test_even_more_compare(self):
# The test_compare() and test_more_compare() inherited from TestDate
# and TestDateTime covered non-tzinfo cases.
# Smallest possible after UTC adjustment.
t1 = self.theclass(1, 1, 1, tzinfo=FixedOffset(1439, ""))
# Largest possible after UTC adjustment.
t2 = self.theclass(MAXYEAR, 12, 31, 23, 59, 59, 999999,
tzinfo=FixedOffset(-1439, ""))
# Make sure those compare correctly, and w/o overflow.
self.assertTrue(t1 < t2)
self.assertTrue(t1 != t2)
self.assertTrue(t2 > t1)
self.assertTrue(t1 == t1)
self.assertTrue(t2 == t2)
# Equal after adjustment.
t1 = self.theclass(1, 12, 31, 23, 59, tzinfo=FixedOffset(1, ""))
t2 = self.theclass(2, 1, 1, 3, 13, tzinfo=FixedOffset(3*60+13+2, ""))
self.assertEqual(t1, t2)
# Change t1 not to subtract a minute, and t1 should be larger.
t1 = self.theclass(1, 12, 31, 23, 59, tzinfo=FixedOffset(0, ""))
self.assertTrue(t1 > t2)
# Change t1 to subtract 2 minutes, and t1 should be smaller.
t1 = self.theclass(1, 12, 31, 23, 59, tzinfo=FixedOffset(2, ""))
self.assertTrue(t1 < t2)
# Back to the original t1, but make seconds resolve it.
t1 = self.theclass(1, 12, 31, 23, 59, tzinfo=FixedOffset(1, ""),
second=1)
self.assertTrue(t1 > t2)
# Likewise, but make microseconds resolve it.
t1 = self.theclass(1, 12, 31, 23, 59, tzinfo=FixedOffset(1, ""),
microsecond=1)
self.assertTrue(t1 > t2)
# Make t2 naive and it should fail.
t2 = self.theclass.min
self.assertRaises(TypeError, lambda: t1 == t2)
self.assertEqual(t2, t2)
# It's also naive if it has tzinfo but tzinfo.utcoffset() is None.
class Naive(tzinfo):
def utcoffset(self, dt): return None
t2 = self.theclass(5, 6, 7, tzinfo=Naive())
self.assertRaises(TypeError, lambda: t1 == t2)
self.assertEqual(t2, t2)
# OTOH, it's OK to compare two of these mixing the two ways of being
# naive.
t1 = self.theclass(5, 6, 7)
self.assertEqual(t1, t2)
# Try a bogus utcoffset.
class Bogus(tzinfo):
def utcoffset(self, dt):
return timedelta(minutes=1440) # out of bounds
t1 = self.theclass(2, 2, 2, tzinfo=Bogus())
t2 = self.theclass(2, 2, 2, tzinfo=FixedOffset(0, ""))
self.assertRaises(ValueError, lambda: t1 == t2)
def test_pickling(self):
# Try one without a tzinfo.
args = 6, 7, 23, 20, 59, 1, 64**2
orig = self.theclass(*args)
for pickler, unpickler, proto in pickle_choices:
green = pickler.dumps(orig, proto)
derived = unpickler.loads(green)
self.assertEqual(orig, derived)
# Try one with a tzinfo.
tinfo = PicklableFixedOffset(-300, 'cookie')
orig = self.theclass(*args, **{'tzinfo': tinfo})
derived = self.theclass(1, 1, 1, tzinfo=FixedOffset(0, "", 0))
for pickler, unpickler, proto in pickle_choices:
green = pickler.dumps(orig, proto)
derived = unpickler.loads(green)
self.assertEqual(orig, derived)
self.assertIsInstance(derived.tzinfo, PicklableFixedOffset)
self.assertEqual(derived.utcoffset(), timedelta(minutes=-300))
self.assertEqual(derived.tzname(), 'cookie')
def test_extreme_hashes(self):
# If an attempt is made to hash these via subtracting the offset
# then hashing a datetime object, OverflowError results. The
# Python implementation used to blow up here.
t = self.theclass(1, 1, 1, tzinfo=FixedOffset(1439, ""))
hash(t)
t = self.theclass(MAXYEAR, 12, 31, 23, 59, 59, 999999,
tzinfo=FixedOffset(-1439, ""))
hash(t)
# OTOH, an OOB offset should blow up.
t = self.theclass(5, 5, 5, tzinfo=FixedOffset(-1440, ""))
self.assertRaises(ValueError, hash, t)
def test_zones(self):
est = FixedOffset(-300, "EST")
utc = FixedOffset(0, "UTC")
met = FixedOffset(60, "MET")
t1 = datetime(2002, 3, 19, 7, 47, tzinfo=est)
t2 = datetime(2002, 3, 19, 12, 47, tzinfo=utc)
t3 = datetime(2002, 3, 19, 13, 47, tzinfo=met)
self.assertEqual(t1.tzinfo, est)
self.assertEqual(t2.tzinfo, utc)
self.assertEqual(t3.tzinfo, met)
self.assertEqual(t1.utcoffset(), timedelta(minutes=-300))
self.assertEqual(t2.utcoffset(), timedelta(minutes=0))
self.assertEqual(t3.utcoffset(), timedelta(minutes=60))
self.assertEqual(t1.tzname(), "EST")
self.assertEqual(t2.tzname(), "UTC")
self.assertEqual(t3.tzname(), "MET")
self.assertEqual(hash(t1), hash(t2))
self.assertEqual(hash(t1), hash(t3))
self.assertEqual(hash(t2), hash(t3))
self.assertEqual(t1, t2)
self.assertEqual(t1, t3)
self.assertEqual(t2, t3)
self.assertEqual(str(t1), "2002-03-19 07:47:00-05:00")
self.assertEqual(str(t2), "2002-03-19 12:47:00+00:00")
self.assertEqual(str(t3), "2002-03-19 13:47:00+01:00")
d = 'datetime.datetime(2002, 3, 19, '
self.assertEqual(repr(t1), d + "7, 47, tzinfo=est)")
self.assertEqual(repr(t2), d + "12, 47, tzinfo=utc)")
self.assertEqual(repr(t3), d + "13, 47, tzinfo=met)")
def test_combine(self):
met = FixedOffset(60, "MET")
d = date(2002, 3, 4)
tz = time(18, 45, 3, 1234, tzinfo=met)
dt = datetime.combine(d, tz)
self.assertEqual(dt, datetime(2002, 3, 4, 18, 45, 3, 1234,
tzinfo=met))
def test_extract(self):
met = FixedOffset(60, "MET")
dt = self.theclass(2002, 3, 4, 18, 45, 3, 1234, tzinfo=met)
self.assertEqual(dt.date(), date(2002, 3, 4))
self.assertEqual(dt.time(), time(18, 45, 3, 1234))
self.assertEqual(dt.timetz(), time(18, 45, 3, 1234, tzinfo=met))
def test_tz_aware_arithmetic(self):
import random
now = self.theclass.now()
tz55 = FixedOffset(-330, "west 5:30")
timeaware = now.time().replace(tzinfo=tz55)
nowaware = self.theclass.combine(now.date(), timeaware)
self.assertTrue(nowaware.tzinfo is tz55)
self.assertEqual(nowaware.timetz(), timeaware)
# Can't mix aware and non-aware.
self.assertRaises(TypeError, lambda: now - nowaware)
self.assertRaises(TypeError, lambda: nowaware - now)
# And adding datetime's doesn't make sense, aware or not.
self.assertRaises(TypeError, lambda: now + nowaware)
self.assertRaises(TypeError, lambda: nowaware + now)
self.assertRaises(TypeError, lambda: nowaware + nowaware)
# Subtracting should yield 0.
self.assertEqual(now - now, timedelta(0))
self.assertEqual(nowaware - nowaware, timedelta(0))
# Adding a delta should preserve tzinfo.
delta = timedelta(weeks=1, minutes=12, microseconds=5678)
nowawareplus = nowaware + delta
self.assertTrue(nowaware.tzinfo is tz55)
nowawareplus2 = delta + nowaware
self.assertTrue(nowawareplus2.tzinfo is tz55)
self.assertEqual(nowawareplus, nowawareplus2)
# that - delta should be what we started with, and that - what we
# started with should be delta.
diff = nowawareplus - delta
self.assertTrue(diff.tzinfo is tz55)
self.assertEqual(nowaware, diff)
self.assertRaises(TypeError, lambda: delta - nowawareplus)
self.assertEqual(nowawareplus - nowaware, delta)
# Make up a random timezone.
tzr = FixedOffset(random.randrange(-1439, 1440), "randomtimezone")
# Attach it to nowawareplus.
nowawareplus = nowawareplus.replace(tzinfo=tzr)
self.assertTrue(nowawareplus.tzinfo is tzr)
# Make sure the difference takes the timezone adjustments into account.
got = nowaware - nowawareplus
# Expected: (nowaware base - nowaware offset) -
# (nowawareplus base - nowawareplus offset) =
# (nowaware base - nowawareplus base) +
# (nowawareplus offset - nowaware offset) =
# -delta + nowawareplus offset - nowaware offset
expected = nowawareplus.utcoffset() - nowaware.utcoffset() - delta
self.assertEqual(got, expected)
# Try max possible difference.
min = self.theclass(1, 1, 1, tzinfo=FixedOffset(1439, "min"))
max = self.theclass(MAXYEAR, 12, 31, 23, 59, 59, 999999,
tzinfo=FixedOffset(-1439, "max"))
maxdiff = max - min
self.assertEqual(maxdiff, self.theclass.max - self.theclass.min +
timedelta(minutes=2*1439))
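# Worked numeric instance of the rule above (hedged sketch, using the
# module's FixedOffset helper): aware subtraction compares the operands
# after UTC normalization, so 12:00 at UTC-5 minus 12:00 at UTC+1 is
# 17:00 - 11:00 = 6 hours.
#
#     a = datetime(2002, 1, 1, 12, tzinfo=FixedOffset(-300, ""))
#     b = datetime(2002, 1, 1, 12, tzinfo=FixedOffset(60, ""))
#     assert a - b == timedelta(hours=6)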
def test_tzinfo_now(self):
meth = self.theclass.now
# Ensure it doesn't require tzinfo (i.e., that this doesn't blow up).
base = meth()
# Try with and without naming the keyword.
off42 = FixedOffset(42, "42")
another = meth(off42)
again = meth(tz=off42)
self.assertTrue(another.tzinfo is again.tzinfo)
self.assertEqual(another.utcoffset(), timedelta(minutes=42))
# Bad argument with and w/o naming the keyword.
self.assertRaises(TypeError, meth, 16)
self.assertRaises(TypeError, meth, tzinfo=16)
# Bad keyword name.
self.assertRaises(TypeError, meth, tinfo=off42)
# Too many args.
self.assertRaises(TypeError, meth, off42, off42)
# We don't know which time zone we're in, and don't have a tzinfo
# class to represent it, so seeing whether a tz argument actually
# does a conversion is tricky.
weirdtz = FixedOffset(timedelta(hours=15, minutes=58), "weirdtz", 0)
utc = FixedOffset(0, "utc", 0)
for dummy in range(3):
now = datetime.now(weirdtz)
self.assertTrue(now.tzinfo is weirdtz)
utcnow = datetime.utcnow().replace(tzinfo=utc)
now2 = utcnow.astimezone(weirdtz)
if abs(now - now2) < timedelta(seconds=30):
break
# Else the code is broken, or more than 30 seconds passed between
# calls; assuming the latter, just try again.
else:
# Three strikes and we're out.
self.fail("utcnow(), now(tz), or astimezone() may be broken")
def test_tzinfo_fromtimestamp(self):
import time
meth = self.theclass.fromtimestamp
ts = time.time()
# Ensure it doesn't require tzinfo (i.e., that this doesn't blow up).
base = meth(ts)
# Try with and without naming the keyword.
off42 = FixedOffset(42, "42")
another = meth(ts, off42)
again = meth(ts, tz=off42)
self.assertTrue(another.tzinfo is again.tzinfo)
self.assertEqual(another.utcoffset(), timedelta(minutes=42))
# Bad argument with and w/o naming the keyword.
self.assertRaises(TypeError, meth, ts, 16)
self.assertRaises(TypeError, meth, ts, tzinfo=16)
# Bad keyword name.
self.assertRaises(TypeError, meth, ts, tinfo=off42)
# Too many args.
self.assertRaises(TypeError, meth, ts, off42, off42)
# Too few args.
self.assertRaises(TypeError, meth)
# Try to make sure tz= actually does some conversion.
timestamp = 1000000000
utcdatetime = datetime.utcfromtimestamp(timestamp)
# In POSIX (epoch 1970), that's 2001-09-09 01:46:40 UTC, give or take.
# But on some flavor of Mac, it's nowhere near that. So we can't have
# any idea here what time that actually is, we can only test that
# relative changes match.
utcoffset = timedelta(hours=-15, minutes=39) # arbitrary, but not zero
tz = FixedOffset(utcoffset, "tz", 0)
expected = utcdatetime + utcoffset
got = datetime.fromtimestamp(timestamp, tz)
self.assertEqual(expected, got.replace(tzinfo=None))
def test_tzinfo_utcnow(self):
meth = self.theclass.utcnow
# Ensure it doesn't require tzinfo (i.e., that this doesn't blow up).
base = meth()
# Try with and without naming the keyword; for whatever reason,
# utcnow() doesn't accept a tzinfo argument.
off42 = FixedOffset(42, "42")
self.assertRaises(TypeError, meth, off42)
self.assertRaises(TypeError, meth, tzinfo=off42)
def test_tzinfo_utcfromtimestamp(self):
import time
meth = self.theclass.utcfromtimestamp
ts = time.time()
# Ensure it doesn't require tzinfo (i.e., that this doesn't blow up).
base = meth(ts)
# Try with and without naming the keyword; for whatever reason,
# utcfromtimestamp() doesn't accept a tzinfo argument.
off42 = FixedOffset(42, "42")
self.assertRaises(TypeError, meth, ts, off42)
self.assertRaises(TypeError, meth, ts, tzinfo=off42)
def test_tzinfo_timetuple(self):
# TestDateTime tested most of this. datetime adds a twist to the
# DST flag.
class DST(tzinfo):
def __init__(self, dstvalue):
if isinstance(dstvalue, int):
dstvalue = timedelta(minutes=dstvalue)
self.dstvalue = dstvalue
def dst(self, dt):
return self.dstvalue
cls = self.theclass
for dstvalue, flag in (-33, 1), (33, 1), (0, 0), (None, -1):
d = cls(1, 1, 1, 10, 20, 30, 40, tzinfo=DST(dstvalue))
t = d.timetuple()
self.assertEqual(1, t.tm_year)
self.assertEqual(1, t.tm_mon)
self.assertEqual(1, t.tm_mday)
self.assertEqual(10, t.tm_hour)
self.assertEqual(20, t.tm_min)
self.assertEqual(30, t.tm_sec)
self.assertEqual(0, t.tm_wday)
self.assertEqual(1, t.tm_yday)
self.assertEqual(flag, t.tm_isdst)
# dst() returns wrong type.
self.assertRaises(TypeError, cls(1, 1, 1, tzinfo=DST("x")).timetuple)
# dst() at the edge.
self.assertEqual(cls(1,1,1, tzinfo=DST(1439)).timetuple().tm_isdst, 1)
self.assertEqual(cls(1,1,1, tzinfo=DST(-1439)).timetuple().tm_isdst, 1)
# dst() out of range.
self.assertRaises(ValueError, cls(1,1,1, tzinfo=DST(1440)).timetuple)
self.assertRaises(ValueError, cls(1,1,1, tzinfo=DST(-1440)).timetuple)
def test_utctimetuple(self):
class DST(tzinfo):
def __init__(self, dstvalue):
if isinstance(dstvalue, int):
dstvalue = timedelta(minutes=dstvalue)
self.dstvalue = dstvalue
def dst(self, dt):
return self.dstvalue
cls = self.theclass
# This can't work: DST didn't implement utcoffset.
self.assertRaises(NotImplementedError,
cls(1, 1, 1, tzinfo=DST(0)).utcoffset)
class UOFS(DST):
def __init__(self, uofs, dofs=None):
DST.__init__(self, dofs)
self.uofs = timedelta(minutes=uofs)
def utcoffset(self, dt):
return self.uofs
# Ensure tm_isdst is 0 regardless of what dst() says: DST is never
# in effect for a UTC time.
for dstvalue in -33, 33, 0, None:
d = cls(1, 2, 3, 10, 20, 30, 40, tzinfo=UOFS(-53, dstvalue))
t = d.utctimetuple()
self.assertEqual(d.year, t.tm_year)
self.assertEqual(d.month, t.tm_mon)
self.assertEqual(d.day, t.tm_mday)
self.assertEqual(11, t.tm_hour) # 20min + 53min = 1h + 13min
self.assertEqual(13, t.tm_min)
self.assertEqual(d.second, t.tm_sec)
self.assertEqual(d.weekday(), t.tm_wday)
self.assertEqual(d.toordinal() - date(1, 1, 1).toordinal() + 1,
t.tm_yday)
self.assertEqual(0, t.tm_isdst)
# At the edges, UTC adjustment can normalize into years out-of-range
# for a datetime object. Ensure that a correct timetuple is
# created anyway.
tiny = cls(MINYEAR, 1, 1, 0, 0, 37, tzinfo=UOFS(1439))
# That goes back 1 minute less than a full day.
t = tiny.utctimetuple()
self.assertEqual(t.tm_year, MINYEAR-1)
self.assertEqual(t.tm_mon, 12)
self.assertEqual(t.tm_mday, 31)
self.assertEqual(t.tm_hour, 0)
self.assertEqual(t.tm_min, 1)
self.assertEqual(t.tm_sec, 37)
self.assertEqual(t.tm_yday, 366) # "year 0" is a leap year
self.assertEqual(t.tm_isdst, 0)
huge = cls(MAXYEAR, 12, 31, 23, 59, 37, 999999, tzinfo=UOFS(-1439))
# That goes forward 1 minute less than a full day.
t = huge.utctimetuple()
self.assertEqual(t.tm_year, MAXYEAR+1)
self.assertEqual(t.tm_mon, 1)
self.assertEqual(t.tm_mday, 1)
self.assertEqual(t.tm_hour, 23)
self.assertEqual(t.tm_min, 58)
self.assertEqual(t.tm_sec, 37)
self.assertEqual(t.tm_yday, 1)
self.assertEqual(t.tm_isdst, 0)
def test_tzinfo_isoformat(self):
zero = FixedOffset(0, "+00:00")
plus = FixedOffset(220, "+03:40")
minus = FixedOffset(-231, "-03:51")
unknown = FixedOffset(None, "")
cls = self.theclass
datestr = '0001-02-03'
for ofs in None, zero, plus, minus, unknown:
for us in 0, 987001:
d = cls(1, 2, 3, 4, 5, 59, us, tzinfo=ofs)
timestr = '04:05:59' + (us and '.987001' or '')
ofsstr = ofs is not None and d.tzname() or ''
tailstr = timestr + ofsstr
iso = d.isoformat()
self.assertEqual(iso, datestr + 'T' + tailstr)
self.assertEqual(iso, d.isoformat('T'))
self.assertEqual(d.isoformat('k'), datestr + 'k' + tailstr)
self.assertEqual(str(d), datestr + ' ' + tailstr)
def test_replace(self):
cls = self.theclass
z100 = FixedOffset(100, "+100")
zm200 = FixedOffset(timedelta(minutes=-200), "-200")
args = [1, 2, 3, 4, 5, 6, 7, z100]
base = cls(*args)
self.assertEqual(base, base.replace())
i = 0
for name, newval in (("year", 2),
("month", 3),
("day", 4),
("hour", 5),
("minute", 6),
("second", 7),
("microsecond", 8),
("tzinfo", zm200)):
newargs = args[:]
newargs[i] = newval
expected = cls(*newargs)
got = base.replace(**{name: newval})
self.assertEqual(expected, got)
i += 1
# Ensure we can get rid of a tzinfo.
self.assertEqual(base.tzname(), "+100")
base2 = base.replace(tzinfo=None)
self.assertTrue(base2.tzinfo is None)
self.assertTrue(base2.tzname() is None)
# Ensure we can add one.
base3 = base2.replace(tzinfo=z100)
self.assertEqual(base, base3)
self.assertTrue(base.tzinfo is base3.tzinfo)
# Out of bounds.
base = cls(2000, 2, 29)
self.assertRaises(ValueError, base.replace, year=2001)
def test_more_astimezone(self):
# The inherited test_astimezone covered some trivial and error cases.
fnone = FixedOffset(None, "None")
f44m = FixedOffset(44, "44")
fm5h = FixedOffset(-timedelta(hours=5), "m300")
dt = self.theclass.now(tz=f44m)
self.assertTrue(dt.tzinfo is f44m)
# Replacing with degenerate tzinfo raises an exception.
self.assertRaises(ValueError, dt.astimezone, fnone)
# Ditto with None tz.
self.assertRaises(TypeError, dt.astimezone, None)
# Replacing with same tzinfo makes no change.
x = dt.astimezone(dt.tzinfo)
self.assertTrue(x.tzinfo is f44m)
self.assertEqual(x.date(), dt.date())
self.assertEqual(x.time(), dt.time())
# Replacing with different tzinfo does adjust.
got = dt.astimezone(fm5h)
self.assertTrue(got.tzinfo is fm5h)
self.assertEqual(got.utcoffset(), timedelta(hours=-5))
expected = dt - dt.utcoffset() # in effect, convert to UTC
expected += fm5h.utcoffset(dt) # and from there to local time
expected = expected.replace(tzinfo=fm5h) # and attach new tzinfo
self.assertEqual(got.date(), expected.date())
self.assertEqual(got.time(), expected.time())
self.assertEqual(got.timetz(), expected.timetz())
self.assertTrue(got.tzinfo is expected.tzinfo)
self.assertEqual(got, expected)
def test_aware_subtract(self):
cls = self.theclass
# Ensure that utcoffset() is ignored when the operands have the
# same tzinfo member.
class OperandDependentOffset(tzinfo):
def utcoffset(self, t):
if t.minute < 10:
# d0 and d1 equal after adjustment
return timedelta(minutes=t.minute)
else:
# d2 off in the weeds
return timedelta(minutes=59)
base = cls(8, 9, 10, 11, 12, 13, 14, tzinfo=OperandDependentOffset())
d0 = base.replace(minute=3)
d1 = base.replace(minute=9)
d2 = base.replace(minute=11)
for x in d0, d1, d2:
for y in d0, d1, d2:
got = x - y
expected = timedelta(minutes=x.minute - y.minute)
self.assertEqual(got, expected)
# OTOH, if the tzinfo members are distinct, utcoffsets aren't
# ignored.
base = cls(8, 9, 10, 11, 12, 13, 14)
d0 = base.replace(minute=3, tzinfo=OperandDependentOffset())
d1 = base.replace(minute=9, tzinfo=OperandDependentOffset())
d2 = base.replace(minute=11, tzinfo=OperandDependentOffset())
for x in d0, d1, d2:
for y in d0, d1, d2:
got = x - y
if (x is d0 or x is d1) and (y is d0 or y is d1):
expected = timedelta(0)
elif x is y is d2:
expected = timedelta(0)
elif x is d2:
expected = timedelta(minutes=(11-59)-0)
else:
assert y is d2
expected = timedelta(minutes=0-(11-59))
self.assertEqual(got, expected)
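# (Worked check of the expectations above: with distinct tzinfo members
# each operand normalizes to minute - offset, i.e. 3-3=0, 9-9=0 and
# 11-59=-48, so d0/d1 coincide while d2 lands 48 minutes earlier.)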
def test_mixed_compare(self):
t1 = datetime(1, 2, 3, 4, 5, 6, 7)
t2 = datetime(1, 2, 3, 4, 5, 6, 7)
self.assertEqual(t1, t2)
t2 = t2.replace(tzinfo=None)
self.assertEqual(t1, t2)
t2 = t2.replace(tzinfo=FixedOffset(None, ""))
self.assertEqual(t1, t2)
t2 = t2.replace(tzinfo=FixedOffset(0, ""))
self.assertRaises(TypeError, lambda: t1 == t2)
# In datetime w/ identical tzinfo objects, utcoffset is ignored.
class Varies(tzinfo):
def __init__(self):
self.offset = timedelta(minutes=22)
def utcoffset(self, t):
self.offset += timedelta(minutes=1)
return self.offset
v = Varies()
t1 = t2.replace(tzinfo=v)
t2 = t2.replace(tzinfo=v)
self.assertEqual(t1.utcoffset(), timedelta(minutes=23))
self.assertEqual(t2.utcoffset(), timedelta(minutes=24))
self.assertEqual(t1, t2)
# But if they're not identical, it isn't ignored.
t2 = t2.replace(tzinfo=Varies())
self.assertTrue(t1 < t2) # t1's offset counter still going up
def test_subclass_datetimetz(self):
class C(self.theclass):
theAnswer = 42
def __new__(cls, *args, **kws):
temp = kws.copy()
extra = temp.pop('extra')
result = self.theclass.__new__(cls, *args, **temp)
result.extra = extra
return result
def newmeth(self, start):
return start + self.hour + self.year
args = 2002, 12, 31, 4, 5, 6, 500, FixedOffset(-300, "EST", 1)
dt1 = self.theclass(*args)
dt2 = C(*args, **{'extra': 7})
self.assertEqual(dt2.__class__, C)
self.assertEqual(dt2.theAnswer, 42)
self.assertEqual(dt2.extra, 7)
self.assertEqual(dt1.utcoffset(), dt2.utcoffset())
self.assertEqual(dt2.newmeth(-7), dt1.hour + dt1.year - 7)
# Pain to set up DST-aware tzinfo classes.
def first_sunday_on_or_after(dt):
days_to_go = 6 - dt.weekday()
if days_to_go:
dt += timedelta(days_to_go)
return dt
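# Hedged sanity check of the helper above: April 1, 2002 was a Monday,
# so the first Sunday on or after it is April 7 -- the 2002 DST-start
# date used by TestTimezoneConversions below.
assert first_sunday_on_or_after(datetime(2002, 4, 1)) == datetime(2002, 4, 7)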
ZERO = timedelta(0)
HOUR = timedelta(hours=1)
DAY = timedelta(days=1)
# In the US, DST starts at 2am (standard time) on the first Sunday in April.
DSTSTART = datetime(1, 4, 1, 2)
# and ends at 2am (DST time; 1am standard time) on the last Sunday of Oct,
# which is the first Sunday on or after Oct 25. Because we view 1:MM as
# being standard time on that day, there is no spelling in local time of
# the last hour of DST (that's 1:MM DST, but 1:MM is taken as standard time).
DSTEND = datetime(1, 10, 25, 1)
class USTimeZone(tzinfo):
def __init__(self, hours, reprname, stdname, dstname):
self.stdoffset = timedelta(hours=hours)
self.reprname = reprname
self.stdname = stdname
self.dstname = dstname
def __repr__(self):
return self.reprname
def tzname(self, dt):
if self.dst(dt):
return self.dstname
else:
return self.stdname
def utcoffset(self, dt):
return self.stdoffset + self.dst(dt)
def dst(self, dt):
if dt is None or dt.tzinfo is None:
# An exception instead may be sensible here, in one or more of
# the cases.
return ZERO
assert dt.tzinfo is self
# Find first Sunday in April.
start = first_sunday_on_or_after(DSTSTART.replace(year=dt.year))
assert start.weekday() == 6 and start.month == 4 and start.day <= 7
# Find last Sunday in October.
end = first_sunday_on_or_after(DSTEND.replace(year=dt.year))
assert end.weekday() == 6 and end.month == 10 and end.day >= 25
# Can't compare naive to aware objects, so strip the timezone from
# dt first.
if start <= dt.replace(tzinfo=None) < end:
return HOUR
else:
return ZERO
Eastern = USTimeZone(-5, "Eastern", "EST", "EDT")
Central = USTimeZone(-6, "Central", "CST", "CDT")
Mountain = USTimeZone(-7, "Mountain", "MST", "MDT")
Pacific = USTimeZone(-8, "Pacific", "PST", "PDT")
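# Illustrative use of the DST-aware zones above (a hedged sketch, not
# part of the suite): July 4, 2002 falls inside US DST, so Eastern
# reports EDT and a net offset of UTC-4 rather than the EST UTC-5.
assert datetime(2002, 7, 4, 12, tzinfo=Eastern).tzname() == "EDT"
assert datetime(2002, 7, 4, 12, tzinfo=Eastern).utcoffset() == timedelta(hours=-4)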
utc_real = FixedOffset(0, "UTC", 0)
# For better test coverage, we want another flavor of UTC that's west of
# the Eastern and Pacific timezones.
utc_fake = FixedOffset(-12*60, "UTCfake", 0)
class TestTimezoneConversions(unittest.TestCase):
# The DST switch times for 2002, in std time.
dston = datetime(2002, 4, 7, 2)
dstoff = datetime(2002, 10, 27, 1)
theclass = datetime
# Check a time that's inside DST.
def checkinside(self, dt, tz, utc, dston, dstoff):
self.assertEqual(dt.dst(), HOUR)
# Conversion to our own timezone is always an identity.
self.assertEqual(dt.astimezone(tz), dt)
asutc = dt.astimezone(utc)
there_and_back = asutc.astimezone(tz)
# Conversion to UTC and back isn't always an identity here,
# because there are redundant spellings (in local time) of
# UTC time when DST begins: the clock jumps from 1:59:59
# to 3:00:00, and a local time of 2:MM:SS doesn't really
# make sense then. The classes above treat 2:MM:SS as
# daylight time then (it's "after 2am"), really an alias
# for 1:MM:SS standard time. The latter form is what
# conversion back from UTC produces.
if dt.date() == dston.date() and dt.hour == 2:
# We're in the redundant hour, and coming back from
# UTC gives the 1:MM:SS standard-time spelling.
self.assertEqual(there_and_back + HOUR, dt)
# Although dt was considered to be in daylight
# time, there_and_back is not.
self.assertEqual(there_and_back.dst(), ZERO)
# They're the same times in UTC.
self.assertEqual(there_and_back.astimezone(utc),
dt.astimezone(utc))
else:
# We're not in the redundant hour.
self.assertEqual(dt, there_and_back)
# Because we have a redundant spelling when DST begins, there is
# (unfortunately) an hour when DST ends that can't be spelled at all in
# local time. When DST ends, the clock jumps from 1:59 back to 1:00
# again. The hour 1:MM DST has no spelling then: 1:MM is taken to be
# standard time. 1:MM DST == 0:MM EST, but 0:MM is taken to be
# daylight time. The hour 1:MM daylight == 0:MM standard can't be
# expressed in local time. Nevertheless, we want conversion back
# from UTC to mimic the local clock's "repeat an hour" behavior.
nexthour_utc = asutc + HOUR
nexthour_tz = nexthour_utc.astimezone(tz)
if dt.date() == dstoff.date() and dt.hour == 0:
# We're in the hour before the last DST hour. The last DST hour
# is ineffable. We want the conversion back to repeat 1:MM.
self.assertEqual(nexthour_tz, dt.replace(hour=1))
nexthour_utc += HOUR
nexthour_tz = nexthour_utc.astimezone(tz)
self.assertEqual(nexthour_tz, dt.replace(hour=1))
else:
self.assertEqual(nexthour_tz - dt, HOUR)
# Check a time that's outside DST.
def checkoutside(self, dt, tz, utc):
self.assertEqual(dt.dst(), ZERO)
# Conversion to our own timezone is always an identity.
self.assertEqual(dt.astimezone(tz), dt)
# Converting to UTC and back is an identity too.
asutc = dt.astimezone(utc)
there_and_back = asutc.astimezone(tz)
self.assertEqual(dt, there_and_back)
def convert_between_tz_and_utc(self, tz, utc):
dston = self.dston.replace(tzinfo=tz)
# Because 1:MM on the day DST ends is taken as being standard time,
# there is no spelling in tz for the last hour of daylight time.
# For purposes of the test, the last hour of DST is 0:MM, which is
# taken as being daylight time (and 1:MM is taken as being standard
# time).
dstoff = self.dstoff.replace(tzinfo=tz)
for delta in (timedelta(weeks=13),
DAY,
HOUR,
timedelta(minutes=1),
timedelta(microseconds=1)):
self.checkinside(dston, tz, utc, dston, dstoff)
for during in dston + delta, dstoff - delta:
self.checkinside(during, tz, utc, dston, dstoff)
self.checkoutside(dstoff, tz, utc)
for outside in dston - delta, dstoff + delta:
self.checkoutside(outside, tz, utc)
def test_easy(self):
# Despite the name of this test, the endcases are excruciating.
self.convert_between_tz_and_utc(Eastern, utc_real)
self.convert_between_tz_and_utc(Pacific, utc_real)
self.convert_between_tz_and_utc(Eastern, utc_fake)
self.convert_between_tz_and_utc(Pacific, utc_fake)
# The next is really dancing near the edge. It works because
# Pacific and Eastern are far enough apart that their "problem
# hours" don't overlap.
self.convert_between_tz_and_utc(Eastern, Pacific)
self.convert_between_tz_and_utc(Pacific, Eastern)
# OTOH, these fail! Don't enable them. The difficulty is that
# the edge case tests assume that every hour is representable in
# the "utc" class. This is always true for a fixed-offset tzinfo
# class (like utc_real and utc_fake), but not for Eastern or Central.
# For these adjacent DST-aware time zones, the range of time offsets
# tested ends up creating hours in the one that aren't representable
# in the other. For the same reason, we would see failures in the
# Eastern vs Pacific tests too if we added 3*HOUR to the list of
# offset deltas in convert_between_tz_and_utc().
#
# self.convert_between_tz_and_utc(Eastern, Central) # can't work
# self.convert_between_tz_and_utc(Central, Eastern) # can't work
def test_tricky(self):
# 22:00 on day before daylight starts.
fourback = self.dston - timedelta(hours=4)
ninewest = FixedOffset(-9*60, "-0900", 0)
fourback = fourback.replace(tzinfo=ninewest)
# 22:00-0900 is 7:00 UTC == 2:00 EST == 3:00 DST. Since it's "after
# 2", we should get the 3 spelling.
# If we plug 22:00 the day before into Eastern, it "looks like std
# time", so its offset is returned as -5, and -5 - -9 = 4. Adding 4
# to 22:00 lands on 2:00, which makes no sense in local time (the
# local clock jumps from 1 to 3). The point here is to make sure we
# get the 3 spelling.
expected = self.dston.replace(hour=3)
got = fourback.astimezone(Eastern).replace(tzinfo=None)
self.assertEqual(expected, got)
# Similar, but map to 6:00 UTC == 1:00 EST == 2:00 DST. In that
# case we want the 1:00 spelling.
sixutc = self.dston.replace(hour=6, tzinfo=utc_real)
# Now 6:00 "looks like daylight", so the offset wrt Eastern is -4,
# and adding -4-0 == -4 gives the 2:00 spelling. We want the 1:00 EST
# spelling.
expected = self.dston.replace(hour=1)
got = sixutc.astimezone(Eastern).replace(tzinfo=None)
self.assertEqual(expected, got)
# Now on the day DST ends, we want "repeat an hour" behavior.
# UTC 4:MM 5:MM 6:MM 7:MM checking these
# EST 23:MM 0:MM 1:MM 2:MM
# EDT 0:MM 1:MM 2:MM 3:MM
# wall 0:MM 1:MM 1:MM 2:MM against these
for utc in utc_real, utc_fake:
for tz in Eastern, Pacific:
first_std_hour = self.dstoff - timedelta(hours=2) # 23:MM
# Convert that to UTC.
first_std_hour -= tz.utcoffset(None)
# Adjust for possibly fake UTC.
asutc = first_std_hour + utc.utcoffset(None)
# First UTC hour to convert; this is 4:00 when utc=utc_real &
# tz=Eastern.
asutcbase = asutc.replace(tzinfo=utc)
for tzhour in (0, 1, 1, 2):
expectedbase = self.dstoff.replace(hour=tzhour)
for minute in 0, 30, 59:
expected = expectedbase.replace(minute=minute)
asutc = asutcbase.replace(minute=minute)
astz = asutc.astimezone(tz)
self.assertEqual(astz.replace(tzinfo=None), expected)
asutcbase += HOUR
def test_bogus_dst(self):
class ok(tzinfo):
def utcoffset(self, dt): return HOUR
def dst(self, dt): return HOUR
now = self.theclass.now().replace(tzinfo=utc_real)
# Doesn't blow up.
now.astimezone(ok())
# Does blow up.
class notok(ok):
def dst(self, dt): return None
self.assertRaises(ValueError, now.astimezone, notok())
def test_fromutc(self):
self.assertRaises(TypeError, Eastern.fromutc) # not enough args
now = datetime.utcnow().replace(tzinfo=utc_real)
self.assertRaises(ValueError, Eastern.fromutc, now) # wrong tzinfo
now = now.replace(tzinfo=Eastern) # insert correct tzinfo
enow = Eastern.fromutc(now) # doesn't blow up
self.assertEqual(enow.tzinfo, Eastern) # has right tzinfo member
self.assertRaises(TypeError, Eastern.fromutc, now, now) # too many args
self.assertRaises(TypeError, Eastern.fromutc, date.today()) # wrong type
# Always converts UTC to standard time.
class FauxUSTimeZone(USTimeZone):
def fromutc(self, dt):
return dt + self.stdoffset
FEastern = FauxUSTimeZone(-5, "FEastern", "FEST", "FEDT")
# UTC 4:MM 5:MM 6:MM 7:MM 8:MM 9:MM
# EST 23:MM 0:MM 1:MM 2:MM 3:MM 4:MM
# EDT 0:MM 1:MM 2:MM 3:MM 4:MM 5:MM
# Check around DST start.
start = self.dston.replace(hour=4, tzinfo=Eastern)
fstart = start.replace(tzinfo=FEastern)
for wall in 23, 0, 1, 3, 4, 5:
expected = start.replace(hour=wall)
if wall == 23:
expected -= timedelta(days=1)
got = Eastern.fromutc(start)
self.assertEqual(expected, got)
expected = fstart + FEastern.stdoffset
got = FEastern.fromutc(fstart)
self.assertEqual(expected, got)
# Ensure astimezone() calls fromutc() too.
got = fstart.replace(tzinfo=utc_real).astimezone(FEastern)
self.assertEqual(expected, got)
start += HOUR
fstart += HOUR
# Check around DST end.
start = self.dstoff.replace(hour=4, tzinfo=Eastern)
fstart = start.replace(tzinfo=FEastern)
for wall in 0, 1, 1, 2, 3, 4:
expected = start.replace(hour=wall)
got = Eastern.fromutc(start)
self.assertEqual(expected, got)
expected = fstart + FEastern.stdoffset
got = FEastern.fromutc(fstart)
self.assertEqual(expected, got)
# Ensure astimezone() calls fromutc() too.
got = fstart.replace(tzinfo=utc_real).astimezone(FEastern)
self.assertEqual(expected, got)
start += HOUR
fstart += HOUR
#############################################################################
# oddballs
class Oddballs(unittest.TestCase):
def test_bug_1028306(self):
# Trying to compare a date to a datetime should act like a mixed-
        # type comparison, even though datetime is a subclass of date.
as_date = date.today()
as_datetime = datetime.combine(as_date, time())
self.assertTrue(as_date != as_datetime)
self.assertTrue(as_datetime != as_date)
        self.assertFalse(as_date == as_datetime)
        self.assertFalse(as_datetime == as_date)
self.assertRaises(TypeError, lambda: as_date < as_datetime)
self.assertRaises(TypeError, lambda: as_datetime < as_date)
self.assertRaises(TypeError, lambda: as_date <= as_datetime)
self.assertRaises(TypeError, lambda: as_datetime <= as_date)
self.assertRaises(TypeError, lambda: as_date > as_datetime)
self.assertRaises(TypeError, lambda: as_datetime > as_date)
self.assertRaises(TypeError, lambda: as_date >= as_datetime)
self.assertRaises(TypeError, lambda: as_datetime >= as_date)
        # Nevertheless, comparison should work with the base-class (date)
# projection if use of a date method is forced.
self.assertTrue(as_date.__eq__(as_datetime))
different_day = (as_date.day + 1) % 20 + 1
        self.assertFalse(
            as_date.__eq__(as_datetime.replace(day=different_day)))
# And date should compare with other subclasses of date. If a
# subclass wants to stop this, it's up to the subclass to do so.
date_sc = SubclassDate(as_date.year, as_date.month, as_date.day)
self.assertEqual(as_date, date_sc)
self.assertEqual(date_sc, as_date)
# Ditto for datetimes.
datetime_sc = SubclassDatetime(as_datetime.year, as_datetime.month,
as_date.day, 0, 0, 0)
self.assertEqual(as_datetime, datetime_sc)
self.assertEqual(datetime_sc, as_datetime)
def test_main():
test_support.run_unittest(__name__)
if __name__ == "__main__":
test_main()
|
mit
|
adelina-t/neutron
|
neutron/api/v2/base.py
|
9
|
32455
|
# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import netaddr
from oslo_config import cfg
from oslo_log import log as logging
from oslo_policy import policy as oslo_policy
from oslo_utils import excutils
import six
import webob.exc
from neutron.api import api_common
from neutron.api.rpc.agentnotifiers import dhcp_rpc_agent_api
from neutron.api.v2 import attributes
from neutron.api.v2 import resource as wsgi_resource
from neutron.common import constants as const
from neutron.common import exceptions
from neutron.common import rpc as n_rpc
from neutron.db import api as db_api
from neutron.i18n import _LE, _LI
from neutron import policy
from neutron import quota
from neutron.quota import resource_registry
LOG = logging.getLogger(__name__)
FAULT_MAP = {exceptions.NotFound: webob.exc.HTTPNotFound,
exceptions.Conflict: webob.exc.HTTPConflict,
exceptions.InUse: webob.exc.HTTPConflict,
exceptions.BadRequest: webob.exc.HTTPBadRequest,
exceptions.ServiceUnavailable: webob.exc.HTTPServiceUnavailable,
exceptions.NotAuthorized: webob.exc.HTTPForbidden,
netaddr.AddrFormatError: webob.exc.HTTPBadRequest,
oslo_policy.PolicyNotAuthorized: webob.exc.HTTPForbidden
}
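# Illustrative note (not part of the original module): wsgi_resource wraps
# controller methods so that a plugin exception listed in FAULT_MAP surfaces
# to the client as the mapped HTTP error, e.g.:
#
#     FAULT_MAP[exceptions.NotFound]  # -> webob.exc.HTTPNotFound (404)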
class Controller(object):
LIST = 'list'
SHOW = 'show'
CREATE = 'create'
UPDATE = 'update'
DELETE = 'delete'
def __init__(self, plugin, collection, resource, attr_info,
allow_bulk=False, member_actions=None, parent=None,
allow_pagination=False, allow_sorting=False):
if member_actions is None:
member_actions = []
self._plugin = plugin
self._collection = collection.replace('-', '_')
self._resource = resource.replace('-', '_')
self._attr_info = attr_info
self._allow_bulk = allow_bulk
self._allow_pagination = allow_pagination
self._allow_sorting = allow_sorting
self._native_bulk = self._is_native_bulk_supported()
self._native_pagination = self._is_native_pagination_supported()
self._native_sorting = self._is_native_sorting_supported()
self._policy_attrs = [name for (name, info) in self._attr_info.items()
if info.get('required_by_policy')]
self._notifier = n_rpc.get_notifier('network')
        # Use the plugin's DHCP notifier, if one has already been instantiated.
agent_notifiers = getattr(plugin, 'agent_notifiers', {})
self._dhcp_agent_notifier = (
agent_notifiers.get(const.AGENT_TYPE_DHCP) or
dhcp_rpc_agent_api.DhcpAgentNotifyAPI()
)
if cfg.CONF.notify_nova_on_port_data_changes:
from neutron.notifiers import nova
self._nova_notifier = nova.Notifier()
self._member_actions = member_actions
self._primary_key = self._get_primary_key()
if self._allow_pagination and self._native_pagination:
            # Native pagination requires native sorting support.
if not self._native_sorting:
raise exceptions.Invalid(
_("Native pagination depend on native sorting")
)
if not self._allow_sorting:
LOG.info(_LI("Allow sorting is enabled because native "
"pagination requires native sorting"))
self._allow_sorting = True
if parent:
self._parent_id_name = '%s_id' % parent['member_name']
parent_part = '_%s' % parent['member_name']
else:
self._parent_id_name = None
parent_part = ''
self._plugin_handlers = {
self.LIST: 'get%s_%s' % (parent_part, self._collection),
self.SHOW: 'get%s_%s' % (parent_part, self._resource)
}
for action in [self.CREATE, self.UPDATE, self.DELETE]:
self._plugin_handlers[action] = '%s%s_%s' % (action, parent_part,
self._resource)
def _get_primary_key(self, default_primary_key='id'):
for key, value in six.iteritems(self._attr_info):
if value.get('primary_key', False):
return key
return default_primary_key
def _is_native_bulk_supported(self):
native_bulk_attr_name = ("_%s__native_bulk_support"
% self._plugin.__class__.__name__)
return getattr(self._plugin, native_bulk_attr_name, False)
def _is_native_pagination_supported(self):
native_pagination_attr_name = ("_%s__native_pagination_support"
% self._plugin.__class__.__name__)
return getattr(self._plugin, native_pagination_attr_name, False)
def _is_native_sorting_supported(self):
native_sorting_attr_name = ("_%s__native_sorting_support"
% self._plugin.__class__.__name__)
return getattr(self._plugin, native_sorting_attr_name, False)
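    # A minimal sketch (not part of the original module) of the convention
    # the three feature detectors above rely on: a plugin opts in through a
    # class-private attribute, which Python name-mangles to
    # _<ClassName>__<attr>. `FakePlugin` is purely hypothetical:
    #
    #     class FakePlugin(object):
    #         __native_bulk_support = True
    #
    #     getattr(FakePlugin(), '_FakePlugin__native_bulk_support', False)
    #     # -> True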
def _exclude_attributes_by_policy(self, context, data):
"""Identifies attributes to exclude according to authZ policies.
        Returns a list of attribute names which should be stripped from the
response returned to the user because the user is not authorized
to see them.
"""
attributes_to_exclude = []
for attr_name in data.keys():
attr_data = self._attr_info.get(attr_name)
if attr_data and attr_data['is_visible']:
if policy.check(
context,
'%s:%s' % (self._plugin_handlers[self.SHOW], attr_name),
data,
might_not_exist=True,
pluralized=self._collection):
# this attribute is visible, check next one
continue
# if the code reaches this point then either the policy check
# failed or the attribute was not visible in the first place
attributes_to_exclude.append(attr_name)
return attributes_to_exclude
def _view(self, context, data, fields_to_strip=None):
"""Build a view of an API resource.
:param context: the neutron context
:param data: the object for which a view is being created
:param fields_to_strip: attributes to remove from the view
:returns: a view of the object which includes only attributes
visible according to API resource declaration and authZ policies.
"""
fields_to_strip = ((fields_to_strip or []) +
self._exclude_attributes_by_policy(context, data))
return self._filter_attributes(context, data, fields_to_strip)
def _filter_attributes(self, context, data, fields_to_strip=None):
if not fields_to_strip:
return data
return dict(item for item in six.iteritems(data)
if (item[0] not in fields_to_strip))
def _do_field_list(self, original_fields):
fields_to_add = None
# don't do anything if fields were not specified in the request
if original_fields:
fields_to_add = [attr for attr in self._policy_attrs
if attr not in original_fields]
original_fields.extend(self._policy_attrs)
return original_fields, fields_to_add
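    # Illustrative sketch (hypothetical values, not part of the original
    # module): if the request asked for fields=['name'] and 'tenant_id' is
    # the only policy attribute, _do_field_list returns
    # (['name', 'tenant_id'], ['tenant_id']); the second list records what
    # must be stripped from the response again after the policy checks.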
def __getattr__(self, name):
if name in self._member_actions:
@db_api.retry_db_errors
def _handle_action(request, id, **kwargs):
arg_list = [request.context, id]
# Ensure policy engine is initialized
policy.init()
# Fetch the resource and verify if the user can access it
try:
parent_id = kwargs.get(self._parent_id_name)
resource = self._item(request,
id,
do_authz=True,
field_list=None,
parent_id=parent_id)
except oslo_policy.PolicyNotAuthorized:
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
body = copy.deepcopy(kwargs.pop('body', None))
# Explicit comparison with None to distinguish from {}
if body is not None:
arg_list.append(body)
                # It is OK to raise a 403 because access to the object was
                # checked earlier in this method.
policy.enforce(request.context,
name,
resource,
pluralized=self._collection)
ret_value = getattr(self._plugin, name)(*arg_list, **kwargs)
                # It is simply impossible to predict whether one of these
                # actions alters resource usage. For instance, a tenant port
                # is created when a router interface is added. Therefore it
                # is important to mark as dirty any resources whose counters
                # have been altered by this operation.
resource_registry.set_resources_dirty(request.context)
return ret_value
return _handle_action
else:
raise AttributeError()
def _get_pagination_helper(self, request):
if self._allow_pagination and self._native_pagination:
return api_common.PaginationNativeHelper(request,
self._primary_key)
elif self._allow_pagination:
return api_common.PaginationEmulatedHelper(request,
self._primary_key)
return api_common.NoPaginationHelper(request, self._primary_key)
def _get_sorting_helper(self, request):
if self._allow_sorting and self._native_sorting:
return api_common.SortingNativeHelper(request, self._attr_info)
elif self._allow_sorting:
return api_common.SortingEmulatedHelper(request, self._attr_info)
return api_common.NoSortingHelper(request, self._attr_info)
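    # Summary sketch (not part of the original module) of the choice made by
    # the two helper factories above, keyed on (allow_X, native_X):
    #
    #     (True, True)   -> native helper (the plugin does the work)
    #     (True, False)  -> emulated helper (the API layer does the work)
    #     (False, _)     -> no-op helper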
def _items(self, request, do_authz=False, parent_id=None):
"""Retrieves and formats a list of elements of the requested entity."""
# NOTE(salvatore-orlando): The following ensures that fields which
# are needed for authZ policy validation are not stripped away by the
# plugin before returning.
original_fields, fields_to_add = self._do_field_list(
api_common.list_args(request, 'fields'))
filters = api_common.get_filters(request, self._attr_info,
['fields', 'sort_key', 'sort_dir',
'limit', 'marker', 'page_reverse'])
kwargs = {'filters': filters,
'fields': original_fields}
sorting_helper = self._get_sorting_helper(request)
pagination_helper = self._get_pagination_helper(request)
sorting_helper.update_args(kwargs)
sorting_helper.update_fields(original_fields, fields_to_add)
pagination_helper.update_args(kwargs)
pagination_helper.update_fields(original_fields, fields_to_add)
if parent_id:
kwargs[self._parent_id_name] = parent_id
obj_getter = getattr(self._plugin, self._plugin_handlers[self.LIST])
obj_list = obj_getter(request.context, **kwargs)
obj_list = sorting_helper.sort(obj_list)
obj_list = pagination_helper.paginate(obj_list)
# Check authz
if do_authz:
# FIXME(salvatore-orlando): obj_getter might return references to
# other resources. Must check authZ on them too.
# Omit items from list that should not be visible
obj_list = [obj for obj in obj_list
if policy.check(request.context,
self._plugin_handlers[self.SHOW],
obj,
plugin=self._plugin,
pluralized=self._collection)]
        # Use the first element in the list to determine which attributes
        # should be filtered out because of authZ policies.
        # fields_to_add contains the attributes added for request policy
        # checks that were not requested by the user; they should therefore
        # be stripped from the response.
fields_to_strip = fields_to_add or []
if obj_list:
fields_to_strip += self._exclude_attributes_by_policy(
request.context, obj_list[0])
collection = {self._collection:
[self._filter_attributes(
request.context, obj,
fields_to_strip=fields_to_strip)
for obj in obj_list]}
pagination_links = pagination_helper.get_links(obj_list)
if pagination_links:
collection[self._collection + "_links"] = pagination_links
# Synchronize usage trackers, if needed
resource_registry.resync_resource(
request.context, self._resource, request.context.tenant_id)
return collection
def _item(self, request, id, do_authz=False, field_list=None,
parent_id=None):
"""Retrieves and formats a single element of the requested entity."""
kwargs = {'fields': field_list}
action = self._plugin_handlers[self.SHOW]
if parent_id:
kwargs[self._parent_id_name] = parent_id
obj_getter = getattr(self._plugin, action)
obj = obj_getter(request.context, id, **kwargs)
# Check authz
# FIXME(salvatore-orlando): obj_getter might return references to
# other resources. Must check authZ on them too.
if do_authz:
policy.enforce(request.context,
action,
obj,
pluralized=self._collection)
return obj
def _send_dhcp_notification(self, context, data, methodname):
if cfg.CONF.dhcp_agent_notification:
if self._collection in data:
for body in data[self._collection]:
item = {self._resource: body}
self._dhcp_agent_notifier.notify(context, item, methodname)
else:
self._dhcp_agent_notifier.notify(context, data, methodname)
def _send_nova_notification(self, action, orig, returned):
if hasattr(self, '_nova_notifier'):
self._nova_notifier.send_network_change(action, orig, returned)
def index(self, request, **kwargs):
"""Returns a list of the requested entity."""
parent_id = kwargs.get(self._parent_id_name)
# Ensure policy engine is initialized
policy.init()
return self._items(request, True, parent_id)
def show(self, request, id, **kwargs):
"""Returns detailed information about the requested entity."""
try:
# NOTE(salvatore-orlando): The following ensures that fields
# which are needed for authZ policy validation are not stripped
# away by the plugin before returning.
field_list, added_fields = self._do_field_list(
api_common.list_args(request, "fields"))
parent_id = kwargs.get(self._parent_id_name)
# Ensure policy engine is initialized
policy.init()
return {self._resource:
self._view(request.context,
self._item(request,
id,
do_authz=True,
field_list=field_list,
parent_id=parent_id),
fields_to_strip=added_fields)}
except oslo_policy.PolicyNotAuthorized:
# To avoid giving away information, pretend that it
# doesn't exist
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
def _emulate_bulk_create(self, obj_creator, request, body, parent_id=None):
objs = []
try:
for item in body[self._collection]:
kwargs = {self._resource: item}
if parent_id:
kwargs[self._parent_id_name] = parent_id
fields_to_strip = self._exclude_attributes_by_policy(
request.context, item)
objs.append(self._filter_attributes(
request.context,
obj_creator(request.context, **kwargs),
fields_to_strip=fields_to_strip))
return objs
# Note(salvatore-orlando): broad catch as in theory a plugin
# could raise any kind of exception
except Exception:
with excutils.save_and_reraise_exception():
for obj in objs:
obj_deleter = getattr(self._plugin,
self._plugin_handlers[self.DELETE])
try:
kwargs = ({self._parent_id_name: parent_id}
if parent_id else {})
obj_deleter(request.context, obj['id'], **kwargs)
except Exception:
# broad catch as our only purpose is to log the
# exception
LOG.exception(_LE("Unable to undo add for "
"%(resource)s %(id)s"),
{'resource': self._resource,
'id': obj['id']})
                # TODO(salvatore-orlando): The object being processed when the
                # plugin raised might or might not have been created in the
                # db. We need a way of ensuring that if it has been created,
                # it is then deleted.
@db_api.retry_db_errors
def create(self, request, body=None, **kwargs):
"""Creates a new instance of the requested entity."""
parent_id = kwargs.get(self._parent_id_name)
self._notifier.info(request.context,
self._resource + '.create.start',
body)
body = Controller.prepare_request_body(request.context,
copy.deepcopy(body), True,
self._resource, self._attr_info,
allow_bulk=self._allow_bulk)
action = self._plugin_handlers[self.CREATE]
# Check authz
if self._collection in body:
# Have to account for bulk create
items = body[self._collection]
deltas = {}
bulk = True
else:
items = [body]
bulk = False
# Ensure policy engine is initialized
policy.init()
for item in items:
self._validate_network_tenant_ownership(request,
item[self._resource])
policy.enforce(request.context,
action,
item[self._resource],
pluralized=self._collection)
if 'tenant_id' not in item[self._resource]:
# no tenant_id - no quota check
continue
try:
tenant_id = item[self._resource]['tenant_id']
count = quota.QUOTAS.count(request.context, self._resource,
self._plugin, tenant_id)
if bulk:
delta = deltas.get(tenant_id, 0) + 1
deltas[tenant_id] = delta
else:
delta = 1
kwargs = {self._resource: count + delta}
except exceptions.QuotaResourceUnknown as e:
# We don't want to quota this resource
LOG.debug(e)
else:
quota.QUOTAS.limit_check(request.context,
item[self._resource]['tenant_id'],
**kwargs)
def notify(create_result):
# Ensure usage trackers for all resources affected by this API
# operation are marked as dirty
# TODO(salv-orlando): This operation will happen in a single
# transaction with reservation commit once that is implemented
resource_registry.set_resources_dirty(request.context)
notifier_method = self._resource + '.create.end'
self._notifier.info(request.context,
notifier_method,
create_result)
self._send_dhcp_notification(request.context,
create_result,
notifier_method)
return create_result
kwargs = {self._parent_id_name: parent_id} if parent_id else {}
if self._collection in body and self._native_bulk:
# plugin does atomic bulk create operations
obj_creator = getattr(self._plugin, "%s_bulk" % action)
objs = obj_creator(request.context, body, **kwargs)
            # Use the first element of the list to determine which attributes
            # should be removed because of authZ policies.
fields_to_strip = self._exclude_attributes_by_policy(
request.context, objs[0])
return notify({self._collection: [self._filter_attributes(
request.context, obj, fields_to_strip=fields_to_strip)
for obj in objs]})
else:
obj_creator = getattr(self._plugin, action)
if self._collection in body:
# Emulate atomic bulk behavior
objs = self._emulate_bulk_create(obj_creator, request,
body, parent_id)
return notify({self._collection: objs})
else:
kwargs.update({self._resource: body})
obj = obj_creator(request.context, **kwargs)
self._send_nova_notification(action, {},
{self._resource: obj})
return notify({self._resource: self._view(request.context,
obj)})
@db_api.retry_db_errors
def delete(self, request, id, **kwargs):
"""Deletes the specified entity."""
self._notifier.info(request.context,
self._resource + '.delete.start',
{self._resource + '_id': id})
action = self._plugin_handlers[self.DELETE]
# Check authz
policy.init()
parent_id = kwargs.get(self._parent_id_name)
obj = self._item(request, id, parent_id=parent_id)
try:
policy.enforce(request.context,
action,
obj,
pluralized=self._collection)
except oslo_policy.PolicyNotAuthorized:
# To avoid giving away information, pretend that it
# doesn't exist
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
obj_deleter = getattr(self._plugin, action)
obj_deleter(request.context, id, **kwargs)
# A delete operation usually alters resource usage, so mark affected
# usage trackers as dirty
resource_registry.set_resources_dirty(request.context)
notifier_method = self._resource + '.delete.end'
self._notifier.info(request.context,
notifier_method,
{self._resource + '_id': id})
result = {self._resource: self._view(request.context, obj)}
self._send_nova_notification(action, {}, result)
self._send_dhcp_notification(request.context,
result,
notifier_method)
@db_api.retry_db_errors
def update(self, request, id, body=None, **kwargs):
"""Updates the specified entity's attributes."""
parent_id = kwargs.get(self._parent_id_name)
try:
payload = body.copy()
except AttributeError:
msg = _("Invalid format: %s") % request.body
raise exceptions.BadRequest(resource='body', msg=msg)
payload['id'] = id
self._notifier.info(request.context,
self._resource + '.update.start',
payload)
body = Controller.prepare_request_body(request.context, body, False,
self._resource, self._attr_info,
allow_bulk=self._allow_bulk)
action = self._plugin_handlers[self.UPDATE]
# Load object to check authz
# but pass only attributes in the original body and required
# by the policy engine to the policy 'brain'
field_list = [name for (name, value) in six.iteritems(self._attr_info)
if (value.get('required_by_policy') or
value.get('primary_key') or
'default' not in value)]
# Ensure policy engine is initialized
policy.init()
orig_obj = self._item(request, id, field_list=field_list,
parent_id=parent_id)
orig_object_copy = copy.copy(orig_obj)
orig_obj.update(body[self._resource])
# Make a list of attributes to be updated to inform the policy engine
# which attributes are set explicitly so that it can distinguish them
# from the ones that are set to their default values.
orig_obj[const.ATTRIBUTES_TO_UPDATE] = body[self._resource].keys()
try:
policy.enforce(request.context,
action,
orig_obj,
pluralized=self._collection)
except oslo_policy.PolicyNotAuthorized:
with excutils.save_and_reraise_exception() as ctxt:
                # If a tenant is modifying its own object, it's safe to return
# a 403. Otherwise, pretend that it doesn't exist to avoid
# giving away information.
if request.context.tenant_id != orig_obj['tenant_id']:
ctxt.reraise = False
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
obj_updater = getattr(self._plugin, action)
kwargs = {self._resource: body}
if parent_id:
kwargs[self._parent_id_name] = parent_id
obj = obj_updater(request.context, id, **kwargs)
        # Usually an update operation does not alter resource usage, but
        # since there might be side effects it is worth checking for changes
        # in resource usage here as well (e.g. a tenant port is created when
        # a router interface is added).
resource_registry.set_resources_dirty(request.context)
result = {self._resource: self._view(request.context, obj)}
notifier_method = self._resource + '.update.end'
self._notifier.info(request.context, notifier_method, result)
self._send_dhcp_notification(request.context,
result,
notifier_method)
self._send_nova_notification(action, orig_object_copy, result)
return result
@staticmethod
def prepare_request_body(context, body, is_create, resource, attr_info,
allow_bulk=False):
"""Verifies required attributes are in request body.
Also checking that an attribute is only specified if it is allowed
for the given operation (create/update).
Attribute with default values are considered to be optional.
body argument must be the deserialized body.
"""
collection = resource + "s"
if not body:
raise webob.exc.HTTPBadRequest(_("Resource body required"))
LOG.debug("Request body: %(body)s", {'body': body})
try:
if collection in body:
if not allow_bulk:
raise webob.exc.HTTPBadRequest(_("Bulk operation "
"not supported"))
if not body[collection]:
raise webob.exc.HTTPBadRequest(_("Resources required"))
bulk_body = [
Controller.prepare_request_body(
context, item if resource in item
else {resource: item}, is_create, resource, attr_info,
allow_bulk) for item in body[collection]
]
return {collection: bulk_body}
res_dict = body.get(resource)
except (AttributeError, TypeError):
msg = _("Body contains invalid data")
raise webob.exc.HTTPBadRequest(msg)
if res_dict is None:
msg = _("Unable to find '%s' in request body") % resource
raise webob.exc.HTTPBadRequest(msg)
attributes.populate_tenant_id(context, res_dict, attr_info, is_create)
attributes.verify_attributes(res_dict, attr_info)
if is_create: # POST
attributes.fill_default_value(attr_info, res_dict,
webob.exc.HTTPBadRequest)
else: # PUT
for attr, attr_vals in six.iteritems(attr_info):
if attr in res_dict and not attr_vals['allow_put']:
msg = _("Cannot update read-only attribute %s") % attr
raise webob.exc.HTTPBadRequest(msg)
attributes.convert_value(attr_info, res_dict, webob.exc.HTTPBadRequest)
return body
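    # A minimal sketch (hypothetical payloads, not part of the original
    # module) of the bulk normalization performed above: each bare item in
    # the collection is wrapped under the resource key before validation.
    #
    #     body = {'ports': [{'name': 'a'}, {'port': {'name': 'b'}}]}
    #     # prepare_request_body(ctx, body, True, 'port', attr_info,
    #     #                      allow_bulk=True) recurses with
    #     # {'port': {'name': 'a'}} and {'port': {'name': 'b'}} and returns
    #     # {'ports': [...]} with defaults filled in.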
def _validate_network_tenant_ownership(self, request, resource_item):
        # TODO(salvatore-orlando): consider whether this check can be folded
        # into the policy engine
if (request.context.is_admin or request.context.is_advsvc or
self._resource not in ('port', 'subnet')):
return
network = self._plugin.get_network(
request.context,
resource_item['network_id'])
# do not perform the check on shared networks
if network.get('shared'):
return
network_owner = network['tenant_id']
if network_owner != resource_item['tenant_id']:
msg = _("Tenant %(tenant_id)s not allowed to "
"create %(resource)s on this network")
raise webob.exc.HTTPForbidden(msg % {
"tenant_id": resource_item['tenant_id'],
"resource": self._resource,
})
def create_resource(collection, resource, plugin, params, allow_bulk=False,
member_actions=None, parent=None, allow_pagination=False,
allow_sorting=False):
controller = Controller(plugin, collection, resource, params, allow_bulk,
member_actions=member_actions, parent=parent,
allow_pagination=allow_pagination,
allow_sorting=allow_sorting)
return wsgi_resource.Resource(controller, FAULT_MAP)
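# A minimal usage sketch (not part of the original module); the plugin,
# attribute map and resource names below are hypothetical:
#
#     resource = create_resource('widgets', 'widget', my_plugin,
#                                WIDGET_ATTR_MAP, allow_bulk=True,
#                                allow_pagination=True, allow_sorting=True)
#     # `resource` is a wsgi_resource.Resource whose controller translates
#     # plugin exceptions to HTTP errors via FAULT_MAP.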
|
apache-2.0
|
kevinastone/sentry
|
tests/integration/tests.py
|
9
|
10414
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function
import datetime
import json
import logging
import mock
import zlib
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from django.utils import timezone
from gzip import GzipFile
from exam import fixture
from raven import Client
from sentry.models import Group, Event
from sentry.testutils import TestCase, TransactionTestCase
from sentry.testutils.helpers import get_auth_header
from sentry.utils.compat import StringIO
from sentry.utils.settings import (
validate_settings, ConfigurationError, import_string)
DEPENDENCY_TEST_DATA = {
"postgresql": ('DATABASES', 'psycopg2.extensions', "database engine", "django.db.backends.postgresql_psycopg2", {
'default': {
'ENGINE': "django.db.backends.postgresql_psycopg2",
'NAME': 'test',
'USER': 'root',
'PASSWORD': '',
'HOST': 'localhost',
'PORT': ''
}
}),
"mysql": ('DATABASES', 'MySQLdb', "database engine", "django.db.backends.mysql", {
'default': {
'ENGINE': "django.db.backends.mysql",
'NAME': 'test',
'USER': 'root',
'PASSWORD': '',
'HOST': 'localhost',
'PORT': ''
}
}),
"oracle": ('DATABASES', 'cx_Oracle', "database engine", "django.db.backends.oracle", {
'default': {
'ENGINE': "django.db.backends.oracle",
'NAME': 'test',
'USER': 'root',
'PASSWORD': '',
'HOST': 'localhost',
'PORT': ''
}
}),
"memcache": ('CACHES', 'memcache', "caching backend", "django.core.cache.backends.memcached.MemcachedCache", {
'default': {
'BACKEND': "django.core.cache.backends.memcached.MemcachedCache",
'LOCATION': '127.0.0.1:11211',
}
}),
"pylibmc": ('CACHES', 'pylibmc', "caching backend", "django.core.cache.backends.memcached.PyLibMCCache", {
'default': {
'BACKEND': "django.core.cache.backends.memcached.PyLibMCCache",
'LOCATION': '127.0.0.1:11211',
}
}),
}
class AssertHandler(logging.Handler):
def emit(self, entry):
raise AssertionError(entry.message)
class RavenIntegrationTest(TransactionTestCase):
"""
This mocks the test server and specifically tests behavior that would
happen between Raven <--> Sentry over HTTP communication.
"""
def setUp(self):
self.user = self.create_user('[email protected]')
self.project = self.create_project()
self.pm = self.project.team.member_set.get_or_create(user=self.user)[0]
self.pk = self.project.key_set.get_or_create()[0]
self.configure_sentry_errors()
def configure_sentry_errors(self):
assert_handler = AssertHandler()
sentry_errors = logging.getLogger('sentry.errors')
sentry_errors.addHandler(assert_handler)
sentry_errors.setLevel(logging.DEBUG)
def remove_handler():
sentry_errors.handlers.pop(sentry_errors.handlers.index(assert_handler))
self.addCleanup(remove_handler)
    def sendRemote(self, url, data, headers=None):
        headers = dict(headers or {})  # avoid a shared mutable default dict
        content_type = headers.pop('Content-Type', None)
        headers = dict(('HTTP_' + k.replace('-', '_').upper(), v)
                       for k, v in headers.iteritems())
resp = self.client.post(
reverse('sentry-api-store', args=[self.pk.project_id]),
data=data,
content_type=content_type,
**headers)
self.assertEquals(resp.status_code, 200, resp.content)
@mock.patch('raven.base.Client.send_remote')
def test_basic(self, send_remote):
send_remote.side_effect = self.sendRemote
client = Client(
dsn='http://%s:%s@localhost:8000/%s' % (
self.pk.public_key, self.pk.secret_key, self.pk.project_id)
)
with self.tasks():
client.capture('Message', message='foo')
        # assert_called_once() was a silent no-op on older mock versions;
        # check the call count explicitly instead.
        self.assertEqual(send_remote.call_count, 1)
self.assertEquals(Group.objects.count(), 1)
group = Group.objects.get()
self.assertEquals(group.event_set.count(), 1)
instance = group.event_set.get()
self.assertEquals(instance.message, 'foo')
class SentryRemoteTest(TestCase):
@fixture
def path(self):
return reverse('sentry-api-store')
def test_minimal(self):
kwargs = {'message': 'hello'}
resp = self._postWithHeader(kwargs)
assert resp.status_code == 200, resp.content
event_id = json.loads(resp.content)['id']
instance = Event.objects.get(event_id=event_id)
assert instance.message == 'hello'
def test_timestamp(self):
timestamp = timezone.now().replace(microsecond=0, tzinfo=timezone.utc) - datetime.timedelta(hours=1)
kwargs = {u'message': 'hello', 'timestamp': timestamp.strftime('%s.%f')}
resp = self._postWithSignature(kwargs)
self.assertEquals(resp.status_code, 200, resp.content)
instance = Event.objects.get()
self.assertEquals(instance.message, 'hello')
self.assertEquals(instance.datetime, timestamp)
group = instance.group
self.assertEquals(group.first_seen, timestamp)
self.assertEquals(group.last_seen, timestamp)
def test_timestamp_as_iso(self):
timestamp = timezone.now().replace(microsecond=0, tzinfo=timezone.utc) - datetime.timedelta(hours=1)
kwargs = {u'message': 'hello', 'timestamp': timestamp.strftime('%Y-%m-%dT%H:%M:%S.%f')}
resp = self._postWithSignature(kwargs)
self.assertEquals(resp.status_code, 200, resp.content)
instance = Event.objects.get()
self.assertEquals(instance.message, 'hello')
self.assertEquals(instance.datetime, timestamp)
group = instance.group
self.assertEquals(group.first_seen, timestamp)
self.assertEquals(group.last_seen, timestamp)
def test_ungzipped_data(self):
kwargs = {'message': 'hello'}
resp = self._postWithSignature(kwargs)
self.assertEquals(resp.status_code, 200)
instance = Event.objects.get()
self.assertEquals(instance.message, 'hello')
@override_settings(SENTRY_ALLOW_ORIGIN='getsentry.com')
def test_correct_data_with_get(self):
kwargs = {'message': 'hello'}
resp = self._getWithReferer(kwargs)
self.assertEquals(resp.status_code, 200, resp.content)
instance = Event.objects.get()
self.assertEquals(instance.message, 'hello')
@override_settings(SENTRY_ALLOW_ORIGIN='getsentry.com')
def test_get_without_referer(self):
kwargs = {'message': 'hello'}
resp = self._getWithReferer(kwargs, referer=None, protocol='4')
self.assertEquals(resp.status_code, 400, resp.content)
@override_settings(SENTRY_ALLOW_ORIGIN='*')
def test_get_without_referer_allowed(self):
kwargs = {'message': 'hello'}
resp = self._getWithReferer(kwargs, referer=None, protocol='4')
self.assertEquals(resp.status_code, 200, resp.content)
def test_signature(self):
kwargs = {'message': 'hello'}
resp = self._postWithSignature(kwargs)
self.assertEquals(resp.status_code, 200, resp.content)
instance = Event.objects.get()
self.assertEquals(instance.message, 'hello')
def test_content_encoding_deflate(self):
kwargs = {'message': 'hello'}
message = zlib.compress(json.dumps(kwargs))
key = self.projectkey.public_key
secret = self.projectkey.secret_key
with self.tasks():
resp = self.client.post(
self.path, message,
content_type='application/octet-stream',
HTTP_CONTENT_ENCODING='deflate',
HTTP_X_SENTRY_AUTH=get_auth_header('_postWithHeader', key, secret),
)
assert resp.status_code == 200, resp.content
event_id = json.loads(resp.content)['id']
instance = Event.objects.get(event_id=event_id)
assert instance.message == 'hello'
def test_content_encoding_gzip(self):
kwargs = {'message': 'hello'}
message = json.dumps(kwargs)
fp = StringIO()
        # Create the GzipFile outside the try block so `f` is always bound
        # when the finally clause runs.
        f = GzipFile(fileobj=fp, mode='w')
        try:
            f.write(message)
        finally:
            f.close()
key = self.projectkey.public_key
secret = self.projectkey.secret_key
with self.tasks():
resp = self.client.post(
self.path, fp.getvalue(),
content_type='application/octet-stream',
HTTP_CONTENT_ENCODING='gzip',
HTTP_X_SENTRY_AUTH=get_auth_header('_postWithHeader', key, secret),
)
assert resp.status_code == 200, resp.content
event_id = json.loads(resp.content)['id']
instance = Event.objects.get(event_id=event_id)
assert instance.message == 'hello'
class DependencyTest(TestCase):
def raise_import_error(self, package):
def callable(package_name):
if package_name != package:
return import_string(package_name)
raise ImportError("No module named %s" % (package,))
return callable
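    # Illustrative sketch (not part of the original tests): the helper above
    # builds a fake import_string that fails only for the named package.
    #
    #     fake = self.raise_import_error('psycopg2.extensions')
    #     fake('psycopg2.extensions')  # raises ImportError
    #     fake('anything.else')        # delegates to the real import_string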
@mock.patch('django.conf.settings', mock.Mock())
@mock.patch('sentry.utils.settings.import_string')
def validate_dependency(self, key, package, dependency_type, dependency,
setting_value, import_string):
import_string.side_effect = self.raise_import_error(package)
with self.settings(**{key: setting_value}):
with self.assertRaises(ConfigurationError):
validate_settings(settings)
def test_validate_fails_on_postgres(self):
self.validate_dependency(*DEPENDENCY_TEST_DATA['postgresql'])
def test_validate_fails_on_mysql(self):
self.validate_dependency(*DEPENDENCY_TEST_DATA['mysql'])
def test_validate_fails_on_oracle(self):
self.validate_dependency(*DEPENDENCY_TEST_DATA['oracle'])
def test_validate_fails_on_memcache(self):
self.validate_dependency(*DEPENDENCY_TEST_DATA['memcache'])
def test_validate_fails_on_pylibmc(self):
self.validate_dependency(*DEPENDENCY_TEST_DATA['pylibmc'])
|
bsd-3-clause
|
TeachAtTUM/edx-platform
|
cms/djangoapps/contentstore/tests/test_import_pure_xblock.py
|
25
|
3002
|
"""
Integration tests for importing courses containing pure XBlocks.
"""
from django.conf import settings
from xblock.core import XBlock
from xblock.fields import String
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.mongo.draft import as_draft
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.xml_importer import import_course_from_xml
TEST_DATA_DIR = settings.COMMON_TEST_DATA_ROOT
class StubXBlock(XBlock):
"""
Stub XBlock to use in tests.
The default XBlock implementation will load this XBlock
    from XML, using the lowercase version of the class name
    as the element name ("stubxblock") and the field names
as attributes of that element.
Example:
<stubxblock test_field="this is only a test" />
"""
test_field = String(default="default")
class XBlockImportTest(ModuleStoreTestCase):
@XBlock.register_temp_plugin(StubXBlock)
def test_import_public(self):
self._assert_import(
'pure_xblock_public',
'set by xml'
)
@XBlock.register_temp_plugin(StubXBlock)
def test_import_draft(self):
self._assert_import(
'pure_xblock_draft',
'set by xml',
has_draft=True
)
def _assert_import(self, course_dir, expected_field_val, has_draft=False):
"""
Import a course from XML, then verify that the XBlock was loaded
with the correct field value.
Args:
course_dir (str): The name of the course directory (relative to the test data directory)
expected_field_val (str): The expected value of the XBlock's test field.
Kwargs:
has_draft (bool): If true, check that a draft of the XBlock exists with
the expected field value set.
"""
# It is necessary to use the "old mongo" modulestore because split doesn't work
# with the "has_draft" logic below.
store = modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.mongo) # pylint: disable=protected-access
courses = import_course_from_xml(
store, self.user.id, TEST_DATA_DIR, [course_dir], create_if_not_present=True
)
xblock_location = courses[0].id.make_usage_key('stubxblock', 'xblock_test')
if has_draft:
xblock_location = as_draft(xblock_location)
xblock = store.get_item(xblock_location)
self.assertTrue(isinstance(xblock, StubXBlock))
self.assertEqual(xblock.test_field, expected_field_val)
if has_draft:
draft_xblock = store.get_item(xblock_location)
self.assertTrue(getattr(draft_xblock, 'is_draft', False))
self.assertTrue(isinstance(draft_xblock, StubXBlock))
self.assertEqual(draft_xblock.test_field, expected_field_val)
|
agpl-3.0
|
pleaseproject/python-for-android
|
python3-alpha/python3-src/Lib/test/pydocfodder.py
|
203
|
6332
|
"""Something just to look at via pydoc."""
import types
class A_classic:
"A classic class."
def A_method(self):
"Method defined in A."
def AB_method(self):
"Method defined in A and B."
def AC_method(self):
"Method defined in A and C."
def AD_method(self):
"Method defined in A and D."
def ABC_method(self):
"Method defined in A, B and C."
def ABD_method(self):
"Method defined in A, B and D."
def ACD_method(self):
"Method defined in A, C and D."
def ABCD_method(self):
"Method defined in A, B, C and D."
class B_classic(A_classic):
"A classic class, derived from A_classic."
def AB_method(self):
"Method defined in A and B."
def ABC_method(self):
"Method defined in A, B and C."
def ABD_method(self):
"Method defined in A, B and D."
def ABCD_method(self):
"Method defined in A, B, C and D."
def B_method(self):
"Method defined in B."
def BC_method(self):
"Method defined in B and C."
def BD_method(self):
"Method defined in B and D."
def BCD_method(self):
"Method defined in B, C and D."
class C_classic(A_classic):
"A classic class, derived from A_classic."
def AC_method(self):
"Method defined in A and C."
def ABC_method(self):
"Method defined in A, B and C."
def ACD_method(self):
"Method defined in A, C and D."
def ABCD_method(self):
"Method defined in A, B, C and D."
def BC_method(self):
"Method defined in B and C."
def BCD_method(self):
"Method defined in B, C and D."
def C_method(self):
"Method defined in C."
def CD_method(self):
"Method defined in C and D."
class D_classic(B_classic, C_classic):
"A classic class, derived from B_classic and C_classic."
def AD_method(self):
"Method defined in A and D."
def ABD_method(self):
"Method defined in A, B and D."
def ACD_method(self):
"Method defined in A, C and D."
def ABCD_method(self):
"Method defined in A, B, C and D."
def BD_method(self):
"Method defined in B and D."
def BCD_method(self):
"Method defined in B, C and D."
def CD_method(self):
"Method defined in C and D."
def D_method(self):
"Method defined in D."
class A_new(object):
"A new-style class."
def A_method(self):
"Method defined in A."
def AB_method(self):
"Method defined in A and B."
def AC_method(self):
"Method defined in A and C."
def AD_method(self):
"Method defined in A and D."
def ABC_method(self):
"Method defined in A, B and C."
def ABD_method(self):
"Method defined in A, B and D."
def ACD_method(self):
"Method defined in A, C and D."
def ABCD_method(self):
"Method defined in A, B, C and D."
def A_classmethod(cls, x):
"A class method defined in A."
A_classmethod = classmethod(A_classmethod)
def A_staticmethod():
"A static method defined in A."
A_staticmethod = staticmethod(A_staticmethod)
def _getx(self):
"A property getter function."
def _setx(self, value):
"A property setter function."
def _delx(self):
"A property deleter function."
A_property = property(fdel=_delx, fget=_getx, fset=_setx,
doc="A sample property defined in A.")
A_int_alias = int
class B_new(A_new):
"A new-style class, derived from A_new."
def AB_method(self):
"Method defined in A and B."
def ABC_method(self):
"Method defined in A, B and C."
def ABD_method(self):
"Method defined in A, B and D."
def ABCD_method(self):
"Method defined in A, B, C and D."
def B_method(self):
"Method defined in B."
def BC_method(self):
"Method defined in B and C."
def BD_method(self):
"Method defined in B and D."
def BCD_method(self):
"Method defined in B, C and D."
class C_new(A_new):
"A new-style class, derived from A_new."
def AC_method(self):
"Method defined in A and C."
def ABC_method(self):
"Method defined in A, B and C."
def ACD_method(self):
"Method defined in A, C and D."
def ABCD_method(self):
"Method defined in A, B, C and D."
def BC_method(self):
"Method defined in B and C."
def BCD_method(self):
"Method defined in B, C and D."
def C_method(self):
"Method defined in C."
def CD_method(self):
"Method defined in C and D."
class D_new(B_new, C_new):
"""A new-style class, derived from B_new and C_new.
"""
def AD_method(self):
"Method defined in A and D."
def ABD_method(self):
"Method defined in A, B and D."
def ACD_method(self):
"Method defined in A, C and D."
def ABCD_method(self):
"Method defined in A, B, C and D."
def BD_method(self):
"Method defined in B and D."
def BCD_method(self):
"Method defined in B, C and D."
def CD_method(self):
"Method defined in C and D."
def D_method(self):
"Method defined in D."
class FunkyProperties(object):
"""From SF bug 472347, by Roeland Rengelink.
Property getters etc may not be vanilla functions or methods,
and this used to make GUI pydoc blow up.
"""
def __init__(self):
self.desc = {'x':0}
class get_desc:
def __init__(self, attr):
self.attr = attr
def __call__(self, inst):
print('Get called', self, inst)
return inst.desc[self.attr]
class set_desc:
def __init__(self, attr):
self.attr = attr
def __call__(self, inst, val):
print('Set called', self, inst, val)
inst.desc[self.attr] = val
class del_desc:
def __init__(self, attr):
self.attr = attr
def __call__(self, inst):
print('Del called', self, inst)
del inst.desc[self.attr]
x = property(get_desc('x'), set_desc('x'), del_desc('x'), 'prop x')
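# A minimal usage sketch (not part of the original module) for the class
# above; each descriptor call also prints a trace line:
#
#     fp = FunkyProperties()
#     fp.x        # -> 0 (prints 'Get called ...')
#     fp.x = 5    # prints 'Set called ...'
#     del fp.x    # prints 'Del called ...'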
submodule = types.ModuleType(__name__ + '.submodule',
"""A submodule, which should appear in its parent's summary""")
|
apache-2.0
|
asgard-lab/neutron
|
neutron/tests/unit/objects/qos/test_rule.py
|
10
|
3451
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import constants
from neutron.objects.qos import policy
from neutron.objects.qos import rule
from neutron.services.qos import qos_consts
from neutron.tests import base as neutron_test_base
from neutron.tests.unit.objects import test_base
from neutron.tests.unit import testlib_api
POLICY_ID_A = 'policy-id-a'
POLICY_ID_B = 'policy-id-b'
DEVICE_OWNER_COMPUTE = 'compute:None'
class QosRuleObjectTestCase(neutron_test_base.BaseTestCase):
def _test_should_apply_to_port(self, rule_policy_id, port_policy_id,
device_owner, expected_result):
test_rule = rule.QosRule(qos_policy_id=rule_policy_id)
port = {qos_consts.QOS_POLICY_ID: port_policy_id,
'device_owner': device_owner}
self.assertEqual(expected_result, test_rule.should_apply_to_port(port))
def test_should_apply_to_port_with_network_port_and_net_policy(self):
self._test_should_apply_to_port(
rule_policy_id=POLICY_ID_B,
port_policy_id=POLICY_ID_A,
device_owner=constants.DEVICE_OWNER_ROUTER_INTF,
expected_result=False)
def test_should_apply_to_port_with_network_port_and_port_policy(self):
self._test_should_apply_to_port(
rule_policy_id=POLICY_ID_A,
port_policy_id=POLICY_ID_A,
device_owner=constants.DEVICE_OWNER_ROUTER_INTF,
expected_result=True)
def test_should_apply_to_port_with_compute_port_and_net_policy(self):
self._test_should_apply_to_port(
rule_policy_id=POLICY_ID_B,
port_policy_id=POLICY_ID_A,
device_owner=DEVICE_OWNER_COMPUTE,
expected_result=True)
def test_should_apply_to_port_with_compute_port_and_port_policy(self):
self._test_should_apply_to_port(
rule_policy_id=POLICY_ID_A,
port_policy_id=POLICY_ID_A,
device_owner=DEVICE_OWNER_COMPUTE,
expected_result=True)
class QosBandwidthLimitRuleObjectTestCase(test_base.BaseObjectIfaceTestCase):
_test_class = rule.QosBandwidthLimitRule
def test_to_dict_returns_type(self):
obj = rule.QosBandwidthLimitRule(self.context, **self.db_obj)
dict_ = obj.to_dict()
self.assertEqual(qos_consts.RULE_TYPE_BANDWIDTH_LIMIT, dict_['type'])
class QosBandwidthLimitRuleDbObjectTestCase(test_base.BaseDbObjectTestCase,
testlib_api.SqlTestCase):
_test_class = rule.QosBandwidthLimitRule
def setUp(self):
super(QosBandwidthLimitRuleDbObjectTestCase, self).setUp()
# Prepare policy to be able to insert a rule
generated_qos_policy_id = self.db_obj['qos_policy_id']
policy_obj = policy.QosPolicy(self.context,
id=generated_qos_policy_id)
policy_obj.create()
|
apache-2.0
|
mm1ke/portage
|
pym/_emerge/post_emerge.py
|
3
|
5021
|
# Copyright 1999-2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from __future__ import print_function
import logging
import textwrap
import portage
from portage import os
from portage.emaint.modules.logs.logs import CleanLogs
from portage.news import count_unread_news, display_news_notifications
from portage.output import colorize
from portage.util._dyn_libs.display_preserved_libs import \
display_preserved_libs
from portage.util._info_files import chk_updated_info_files
from .chk_updated_cfg_files import chk_updated_cfg_files
from .emergelog import emergelog
from ._flush_elog_mod_echo import _flush_elog_mod_echo
def clean_logs(settings):
if "clean-logs" not in settings.features:
return
logdir = settings.get("PORT_LOGDIR")
if logdir is None or not os.path.isdir(logdir):
return
cleanlogs = CleanLogs()
returncode, msgs = cleanlogs.clean(settings=settings)
if not returncode:
out = portage.output.EOutput()
for msg in msgs:
out.eerror(msg)
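# Illustrative note (not part of the original module): clean_logs is a no-op
# unless FEATURES contains "clean-logs" and PORT_LOGDIR names an existing
# directory, so it is safe to call unconditionally after every merge.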
def display_news_notification(root_config, myopts):
if "news" not in root_config.settings.features:
return False
portdb = root_config.trees["porttree"].dbapi
vardb = root_config.trees["vartree"].dbapi
news_counts = count_unread_news(portdb, vardb)
if all(v == 0 for v in news_counts.values()):
return False
display_news_notifications(news_counts)
return True
def show_depclean_suggestion():
out = portage.output.EOutput()
msg = "After world updates, it is important to remove " + \
"obsolete packages with emerge --depclean. Refer " + \
"to `man emerge` for more information."
for line in textwrap.wrap(msg, 72):
out.ewarn(line)
def post_emerge(myaction, myopts, myfiles,
target_root, trees, mtimedb, retval):
"""
Misc. things to run at the end of a merge session.
Update Info Files
Update Config Files
Update News Items
Commit mtimeDB
Display preserved libs warnings
@param myaction: The action returned from parse_opts()
@type myaction: String
@param myopts: emerge options
@type myopts: dict
@param myfiles: emerge arguments
@type myfiles: list
@param target_root: The target EROOT for myaction
@type target_root: String
	@param trees: A dictionary mapping each ROOT to its package databases
@type trees: dict
@param mtimedb: The mtimeDB to store data needed across merge invocations
@type mtimedb: MtimeDB class instance
@param retval: Emerge's return value
@type retval: Int
"""
root_config = trees[target_root]["root_config"]
vardbapi = trees[target_root]['vartree'].dbapi
settings = vardbapi.settings
info_mtimes = mtimedb["info"]
# Load the most current variables from ${ROOT}/etc/profile.env
settings.unlock()
settings.reload()
settings.regenerate()
settings.lock()
config_protect = portage.util.shlex_split(
settings.get("CONFIG_PROTECT", ""))
infodirs = settings.get("INFOPATH","").split(":") + \
settings.get("INFODIR","").split(":")
os.chdir("/")
if retval == os.EX_OK:
exit_msg = " *** exiting successfully."
else:
exit_msg = " *** exiting unsuccessfully with status '%s'." % retval
emergelog("notitles" not in settings.features, exit_msg)
_flush_elog_mod_echo()
if not vardbapi._pkgs_changed:
# GLEP 42 says to display news *after* an emerge --pretend
if "--pretend" in myopts:
display_news_notification(root_config, myopts)
# If vdb state has not changed then there's nothing else to do.
return
vdb_path = os.path.join(root_config.settings['EROOT'], portage.VDB_PATH)
portage.util.ensure_dirs(vdb_path)
vdb_lock = None
	if os.access(vdb_path, os.W_OK) and "--pretend" not in myopts:
vardbapi.lock()
vdb_lock = True
if vdb_lock:
try:
if "noinfo" not in settings.features:
chk_updated_info_files(target_root,
infodirs, info_mtimes)
mtimedb.commit()
finally:
if vdb_lock:
vardbapi.unlock()
# Explicitly load and prune the PreservedLibsRegistry in order
# to ensure that we do not display stale data.
vardbapi._plib_registry.load()
if vardbapi._plib_registry.hasEntries():
if "--quiet" in myopts:
print()
print(colorize("WARN", "!!!") + " existing preserved libs found")
else:
print()
print(colorize("WARN", "!!!") + " existing preserved libs:")
display_preserved_libs(vardbapi)
print("Use " + colorize("GOOD", "emerge @preserved-rebuild") +
" to rebuild packages using these libraries")
chk_updated_cfg_files(settings['EROOT'], config_protect)
display_news_notification(root_config, myopts)
postemerge = os.path.join(settings["PORTAGE_CONFIGROOT"],
portage.USER_CONFIG_PATH, "bin", "post_emerge")
if os.access(postemerge, os.X_OK):
hook_retval = portage.process.spawn(
[postemerge], env=settings.environ())
if hook_retval != os.EX_OK:
portage.util.writemsg_level(
" %s spawn failed of %s\n" %
(colorize("BAD", "*"), postemerge,),
level=logging.ERROR, noiselevel=-1)
clean_logs(settings)
if "--quiet" not in myopts and \
myaction is None and "@world" in myfiles:
show_depclean_suggestion()
|
gpl-2.0
|
stijnvanhoey/defence
|
node_modules/grunt-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
|
1524
|
22178
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import gyp.input
import optparse
import os.path
import re
import shlex
import sys
import traceback
from gyp.common import GypError
# Default debug modes for GYP
debug = {}
# List of "official" debug modes, but you can use anything you like.
DEBUG_GENERAL = 'general'
DEBUG_VARIABLES = 'variables'
DEBUG_INCLUDES = 'includes'
def DebugOutput(mode, message, *args):
if 'all' in gyp.debug or mode in gyp.debug:
ctx = ('unknown', 0, 'unknown')
try:
f = traceback.extract_stack(limit=2)
if f:
ctx = f[0][:3]
    except Exception:
      pass
if args:
message %= args
print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
ctx[1], ctx[2], message)
def FindBuildFiles():
extension = '.gyp'
files = os.listdir(os.getcwd())
build_files = []
for file in files:
if file.endswith(extension):
build_files.append(file)
return build_files
def Load(build_files, format, default_variables={},
includes=[], depth='.', params=None, check=False,
circular_check=True, duplicate_basename_check=True):
"""
Loads one or more specified build files.
default_variables and includes will be copied before use.
Returns the generator for the specified format and the
data returned by loading the specified build files.
"""
if params is None:
params = {}
if '-' in format:
format, params['flavor'] = format.split('-', 1)
default_variables = copy.copy(default_variables)
# Default variables provided by this program and its modules should be
# named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
# avoiding collisions with user and automatic variables.
default_variables['GENERATOR'] = format
default_variables['GENERATOR_FLAVOR'] = params.get('flavor', '')
# Format can be a custom python file, or by default the name of a module
# within gyp.generator.
if format.endswith('.py'):
generator_name = os.path.splitext(format)[0]
path, generator_name = os.path.split(generator_name)
# Make sure the path to the custom generator is in sys.path
# Don't worry about removing it once we are done. Keeping the path
# to each generator that is used in sys.path is likely harmless and
# arguably a good idea.
path = os.path.abspath(path)
if path not in sys.path:
sys.path.insert(0, path)
else:
generator_name = 'gyp.generator.' + format
# These parameters are passed in order (as opposed to by key)
# because ActivePython cannot handle key parameters to __import__.
generator = __import__(generator_name, globals(), locals(), generator_name)
for (key, val) in generator.generator_default_variables.items():
default_variables.setdefault(key, val)
# Give the generator the opportunity to set additional variables based on
# the params it will receive in the output phase.
if getattr(generator, 'CalculateVariables', None):
generator.CalculateVariables(default_variables, params)
# Give the generator the opportunity to set generator_input_info based on
# the params it will receive in the output phase.
if getattr(generator, 'CalculateGeneratorInputInfo', None):
generator.CalculateGeneratorInputInfo(params)
# Fetch the generator specific info that gets fed to input, we use getattr
# so we can default things and the generators only have to provide what
# they need.
generator_input_info = {
'non_configuration_keys':
getattr(generator, 'generator_additional_non_configuration_keys', []),
'path_sections':
getattr(generator, 'generator_additional_path_sections', []),
'extra_sources_for_rules':
getattr(generator, 'generator_extra_sources_for_rules', []),
'generator_supports_multiple_toolsets':
getattr(generator, 'generator_supports_multiple_toolsets', False),
'generator_wants_static_library_dependencies_adjusted':
getattr(generator,
'generator_wants_static_library_dependencies_adjusted', True),
'generator_wants_sorted_dependencies':
getattr(generator, 'generator_wants_sorted_dependencies', False),
'generator_filelist_paths':
getattr(generator, 'generator_filelist_paths', None),
}
# Process the input specific to this generator.
result = gyp.input.Load(build_files, default_variables, includes[:],
depth, generator_input_info, check, circular_check,
duplicate_basename_check,
params['parallel'], params['root_targets'])
return [generator] + result
def NameValueListToDict(name_value_list):
"""
Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
of the pairs. If a string is simply NAME, then the value in the dictionary
is set to True. If VALUE can be converted to an integer, it is.
"""
result = { }
for item in name_value_list:
tokens = item.split('=', 1)
if len(tokens) == 2:
# If we can make it an int, use that, otherwise, use the string.
try:
token_value = int(tokens[1])
except ValueError:
token_value = tokens[1]
# Set the variable to the supplied value.
result[tokens[0]] = token_value
else:
# No value supplied, treat it as a boolean and set it.
result[tokens[0]] = True
return result
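# Illustrative sketch (not part of the original module):
#
#   NameValueListToDict(['GYP_MSVS_VERSION=2012', 'component', 'depth=1'])
#   # -> {'GYP_MSVS_VERSION': 2012, 'component': True, 'depth': 1}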
def ShlexEnv(env_name):
flags = os.environ.get(env_name, [])
if flags:
flags = shlex.split(flags)
return flags
def FormatOpt(opt, value):
if opt.startswith('--'):
return '%s=%s' % (opt, value)
return opt + value
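# For example (not part of the original module):
#
#   FormatOpt('--depth', 'src')  # -> '--depth=src'
#   FormatOpt('-D', 'OS=linux')  # -> '-DOS=linux'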
def RegenerateAppendFlag(flag, values, predicate, env_name, options):
"""Regenerate a list of command line flags, for an option of action='append'.
The |env_name|, if given, is checked in the environment and used to generate
an initial list of options, then the options that were specified on the
command line (given in |values|) are appended. This matches the handling of
environment variables and command line flags where command line flags override
the environment, while not requiring the environment to be set when the flags
are used again.
"""
flags = []
if options.use_environment and env_name:
for flag_value in ShlexEnv(env_name):
value = FormatOpt(flag, predicate(flag_value))
if value in flags:
flags.remove(value)
flags.append(value)
if values:
for flag_value in values:
flags.append(FormatOpt(flag, predicate(flag_value)))
return flags
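# Illustrative sketch (hypothetical values, not part of the original module):
# assuming options.use_environment is true, GYP_DEFINES='OS=linux' is set in
# the environment, and -Dchromeos=1 was passed on the command line,
#
#   RegenerateAppendFlag('-D', ['chromeos=1'], Noop, 'GYP_DEFINES', options)
#   # -> ['-DOS=linux', '-Dchromeos=1']
#
# i.e. environment-derived values first, then the command-line values
# appended (Noop is the identity predicate defined in RegenerateFlags below).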
def RegenerateFlags(options):
"""Given a parsed options object, and taking the environment variables into
account, returns a list of flags that should regenerate an equivalent options
  object (even in the absence of the environment variables).
Any path options will be normalized relative to depth.
The format flag is not included, as it is assumed the calling generator will
set that as appropriate.
"""
def FixPath(path):
path = gyp.common.FixIfRelativePath(path, options.depth)
if not path:
return os.path.curdir
return path
def Noop(value):
return value
# We always want to ignore the environment when regenerating, to avoid
# duplicate or changed flags in the environment at the time of regeneration.
flags = ['--ignore-environment']
for name, metadata in options._regeneration_metadata.iteritems():
opt = metadata['opt']
value = getattr(options, name)
    value_predicate = FixPath if metadata['type'] == 'path' else Noop
action = metadata['action']
env_name = metadata['env_name']
if action == 'append':
flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
env_name, options))
elif action in ('store', None): # None is a synonym for 'store'.
if value:
flags.append(FormatOpt(opt, value_predicate(value)))
elif options.use_environment and env_name and os.environ.get(env_name):
flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
elif action in ('store_true', 'store_false'):
if ((action == 'store_true' and value) or
(action == 'store_false' and not value)):
flags.append(opt)
elif options.use_environment and env_name:
print >>sys.stderr, ('Warning: environment regeneration unimplemented '
'for %s flag %r env_name %r' % (action, opt,
env_name))
else:
print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
'flag %r' % (action, opt))
return flags
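# For a parse of something like 'gyp -D OS=linux --depth=. foo.gyp', the
# regenerated list would contain (illustrative; order depends on the options
# metadata dict): ['--ignore-environment', '-DOS=linux', '--depth=.']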
class RegeneratableOptionParser(optparse.OptionParser):
def __init__(self):
self.__regeneratable_options = {}
optparse.OptionParser.__init__(self)
def add_option(self, *args, **kw):
"""Add an option to the parser.
This accepts the same arguments as OptionParser.add_option, plus the
following:
regenerate: can be set to False to prevent this option from being included
in regeneration.
env_name: name of environment variable that additional values for this
option come from.
type: adds type='path', to tell the regenerator that the values of
this option need to be made relative to options.depth
"""
env_name = kw.pop('env_name', None)
if 'dest' in kw and kw.pop('regenerate', True):
dest = kw['dest']
      # The path type is needed for regenerating; for optparse we can just
      # treat it as a string.
type = kw.get('type')
if type == 'path':
kw['type'] = 'string'
self.__regeneratable_options[dest] = {
'action': kw.get('action'),
'type': type,
'env_name': env_name,
'opt': args[0],
}
optparse.OptionParser.add_option(self, *args, **kw)
def parse_args(self, *args):
values, args = optparse.OptionParser.parse_args(self, *args)
values._regeneration_metadata = self.__regeneratable_options
return values, args
def gyp_main(args):
my_name = os.path.basename(sys.argv[0])
parser = RegeneratableOptionParser()
usage = 'usage: %s [options ...] [build_file ...]'
parser.set_usage(usage.replace('%s', '%prog'))
parser.add_option('--build', dest='configs', action='append',
help='configuration for build after project generation')
parser.add_option('--check', dest='check', action='store_true',
help='check format of gyp files')
parser.add_option('--config-dir', dest='config_dir', action='store',
env_name='GYP_CONFIG_DIR', default=None,
help='The location for configuration files like '
'include.gypi.')
parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
action='append', default=[], help='turn on a debugging '
'mode for debugging GYP. Supported modes are "variables", '
'"includes" and "general" or "all" for all of them.')
parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
env_name='GYP_DEFINES',
help='sets variable VAR to value VAL')
parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
help='set DEPTH gyp variable to a relative path to PATH')
parser.add_option('-f', '--format', dest='formats', action='append',
env_name='GYP_GENERATORS', regenerate=False,
help='output formats to generate')
parser.add_option('-G', dest='generator_flags', action='append', default=[],
metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
help='sets generator flag FLAG to VAL')
parser.add_option('--generator-output', dest='generator_output',
action='store', default=None, metavar='DIR', type='path',
env_name='GYP_GENERATOR_OUTPUT',
help='puts generated build files under DIR')
parser.add_option('--ignore-environment', dest='use_environment',
action='store_false', default=True, regenerate=False,
help='do not read options from environment variables')
parser.add_option('-I', '--include', dest='includes', action='append',
metavar='INCLUDE', type='path',
help='files to include in all loaded .gyp files')
# --no-circular-check disables the check for circular relationships between
# .gyp files. These relationships should not exist, but they've only been
# observed to be harmful with the Xcode generator. Chromium's .gyp files
# currently have some circular relationships on non-Mac platforms, so this
# option allows the strict behavior to be used on Macs and the lenient
# behavior to be used elsewhere.
# TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
parser.add_option('--no-circular-check', dest='circular_check',
action='store_false', default=True, regenerate=False,
help="don't check for circular relationships between files")
# --no-duplicate-basename-check disables the check for duplicate basenames
# in a static_library/shared_library project. Visual C++ 2008 generator
# doesn't support this configuration. Libtool on Mac also generates warnings
# when duplicate basenames are passed into Make generator on Mac.
# TODO(yukawa): Remove this option when these legacy generators are
# deprecated.
parser.add_option('--no-duplicate-basename-check',
dest='duplicate_basename_check', action='store_false',
default=True, regenerate=False,
help="don't check for duplicate basenames")
parser.add_option('--no-parallel', action='store_true', default=False,
help='Disable multiprocessing')
parser.add_option('-S', '--suffix', dest='suffix', default='',
help='suffix to add to generated files')
parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
default=None, metavar='DIR', type='path',
help='directory to use as the root of the source tree')
parser.add_option('-R', '--root-target', dest='root_targets',
action='append', metavar='TARGET',
help='include only TARGET and its deep dependencies')
options, build_files_arg = parser.parse_args(args)
build_files = build_files_arg
# Set up the configuration directory (defaults to ~/.gyp)
if not options.config_dir:
home = None
home_dot_gyp = None
if options.use_environment:
home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
if home_dot_gyp:
home_dot_gyp = os.path.expanduser(home_dot_gyp)
if not home_dot_gyp:
home_vars = ['HOME']
if sys.platform in ('cygwin', 'win32'):
home_vars.append('USERPROFILE')
for home_var in home_vars:
home = os.getenv(home_var)
        if home is not None:
home_dot_gyp = os.path.join(home, '.gyp')
if not os.path.exists(home_dot_gyp):
home_dot_gyp = None
else:
break
else:
home_dot_gyp = os.path.expanduser(options.config_dir)
if home_dot_gyp and not os.path.exists(home_dot_gyp):
home_dot_gyp = None
if not options.formats:
# If no format was given on the command line, then check the env variable.
generate_formats = []
if options.use_environment:
generate_formats = os.environ.get('GYP_GENERATORS', [])
if generate_formats:
generate_formats = re.split(r'[\s,]', generate_formats)
if generate_formats:
options.formats = generate_formats
else:
# Nothing in the variable, default based on platform.
if sys.platform == 'darwin':
options.formats = ['xcode']
elif sys.platform in ('win32', 'cygwin'):
options.formats = ['msvs']
else:
options.formats = ['make']
if not options.generator_output and options.use_environment:
g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
if g_o:
options.generator_output = g_o
options.parallel = not options.no_parallel
for mode in options.debug:
gyp.debug[mode] = 1
# Do an extra check to avoid work when we're not debugging.
if DEBUG_GENERAL in gyp.debug:
DebugOutput(DEBUG_GENERAL, 'running with these options:')
for option, value in sorted(options.__dict__.items()):
if option[0] == '_':
continue
if isinstance(value, basestring):
DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value)
else:
DebugOutput(DEBUG_GENERAL, " %s: %s", option, value)
if not build_files:
build_files = FindBuildFiles()
if not build_files:
raise GypError((usage + '\n\n%s: error: no build_file') %
(my_name, my_name))
# TODO(mark): Chromium-specific hack!
# For Chromium, the gyp "depth" variable should always be a relative path
# to Chromium's top-level "src" directory. If no depth variable was set
# on the command line, try to find a "src" directory by looking at the
# absolute path to each build file's directory. The first "src" component
# found will be treated as though it were the path used for --depth.
if not options.depth:
for build_file in build_files:
build_file_dir = os.path.abspath(os.path.dirname(build_file))
build_file_dir_components = build_file_dir.split(os.path.sep)
components_len = len(build_file_dir_components)
for index in xrange(components_len - 1, -1, -1):
if build_file_dir_components[index] == 'src':
options.depth = os.path.sep.join(build_file_dir_components)
break
del build_file_dir_components[index]
# If the inner loop found something, break without advancing to another
# build file.
if options.depth:
break
if not options.depth:
    raise GypError('Could not automatically locate src directory.  This is '
                   'a temporary Chromium feature that will be removed.  Use '
                   '--depth as a workaround.')
# If toplevel-dir is not set, we assume that depth is the root of our source
# tree.
if not options.toplevel_dir:
options.toplevel_dir = options.depth
# -D on the command line sets variable defaults - D isn't just for define,
# it's for default. Perhaps there should be a way to force (-F?) a
# variable's value so that it can't be overridden by anything else.
cmdline_default_variables = {}
defines = []
if options.use_environment:
defines += ShlexEnv('GYP_DEFINES')
if options.defines:
defines += options.defines
cmdline_default_variables = NameValueListToDict(defines)
if DEBUG_GENERAL in gyp.debug:
DebugOutput(DEBUG_GENERAL,
"cmdline_default_variables: %s", cmdline_default_variables)
# Set up includes.
includes = []
# If ~/.gyp/include.gypi exists, it'll be forcibly included into every
# .gyp file that's loaded, before anything else is included.
if home_dot_gyp != None:
default_include = os.path.join(home_dot_gyp, 'include.gypi')
if os.path.exists(default_include):
print 'Using overrides found in ' + default_include
includes.append(default_include)
# Command-line --include files come after the default include.
if options.includes:
includes.extend(options.includes)
# Generator flags should be prefixed with the target generator since they
# are global across all generator runs.
gen_flags = []
if options.use_environment:
gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
if options.generator_flags:
gen_flags += options.generator_flags
generator_flags = NameValueListToDict(gen_flags)
  if DEBUG_GENERAL in gyp.debug:
DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)
# Generate all requested formats (use a set in case we got one format request
# twice)
for format in set(options.formats):
params = {'options': options,
'build_files': build_files,
'generator_flags': generator_flags,
'cwd': os.getcwd(),
'build_files_arg': build_files_arg,
'gyp_binary': sys.argv[0],
'home_dot_gyp': home_dot_gyp,
'parallel': options.parallel,
'root_targets': options.root_targets,
'target_arch': cmdline_default_variables.get('target_arch', '')}
# Start with the default variables from the command line.
[generator, flat_list, targets, data] = Load(
build_files, format, cmdline_default_variables, includes, options.depth,
params, options.check, options.circular_check,
options.duplicate_basename_check)
# TODO(mark): Pass |data| for now because the generator needs a list of
# build files that came in. In the future, maybe it should just accept
# a list, and not the whole data dict.
# NOTE: flat_list is the flattened dependency graph specifying the order
# that targets may be built. Build systems that operate serially or that
# need to have dependencies defined before dependents reference them should
# generate targets in the order specified in flat_list.
generator.GenerateOutput(flat_list, targets, data, params)
if options.configs:
valid_configs = targets[flat_list[0]]['configurations'].keys()
for conf in options.configs:
if conf not in valid_configs:
raise GypError('Invalid config specified via --build: %s' % conf)
generator.PerformBuild(data, options.configs, params)
# Done
return 0
def main(args):
try:
return gyp_main(args)
except GypError, e:
sys.stderr.write("gyp: %s\n" % e)
return 1
# NOTE: setuptools generated console_scripts calls function with no arguments
def script_main():
return main(sys.argv[1:])
if __name__ == '__main__':
sys.exit(script_main())
|
mit
|
sxjscience/mxnet
|
python/mxnet/gluon/contrib/estimator/batch_processor.py
|
12
|
3925
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=wildcard-import, unused-argument, too-many-ancestors
"""Gluon Batch Processor for Estimators"""
from ...utils import split_and_load
from .... import autograd
__all__ = ['BatchProcessor']
class BatchProcessor(object):
"""BatchProcessor Class for plug and play fit_batch & evaluate_batch
During training or validation, data are divided into minibatches for processing. This
class aims at providing hooks of training or validating on a minibatch of data. Users
may provide customized fit_batch() and evaluate_batch() methods by inheriting from
this class and overriding class methods.
:py:class:`BatchProcessor` can be used to replace fit_batch() and evaluate_batch()
in the base estimator class
"""
def __init__(self):
pass
def _get_data_and_label(self, batch, ctx, batch_axis=0):
data = batch[0]
label = batch[1]
data = split_and_load(data, ctx_list=ctx, batch_axis=batch_axis)
label = split_and_load(label, ctx_list=ctx, batch_axis=batch_axis)
return data, label
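    # Illustrative sharding example (hypothetical shapes and contexts): for a
    # batch whose data has shape (64, 3, 224, 224) and ctx=[mx.gpu(0), mx.gpu(1)],
    # _get_data_and_label returns two data shards of shape (32, 3, 224, 224),
    # one per device, along with the matching label shards.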
def evaluate_batch(self, estimator,
val_batch,
batch_axis=0):
"""Evaluate the estimator model on a batch of validation data.
Parameters
----------
estimator : Estimator
Reference to the estimator
val_batch : tuple
Data and label of a batch from the validation data loader.
batch_axis : int, default 0
Batch axis to split the validation data into devices.
"""
data, label = self._get_data_and_label(val_batch, estimator.context, batch_axis)
pred = [estimator.val_net(x) for x in data]
loss = [estimator.val_loss(y_hat, y) for y_hat, y in zip(pred, label)]
return data, label, pred, loss
def fit_batch(self, estimator,
train_batch,
batch_axis=0):
"""Trains the estimator model on a batch of training data.
Parameters
----------
estimator : Estimator
Reference to the estimator
train_batch : tuple
Data and label of a batch from the training data loader.
batch_axis : int, default 0
Batch axis to split the training data into devices.
Returns
-------
data: List of NDArray
Sharded data from the batch. Data is sharded with
`gluon.split_and_load`.
label: List of NDArray
Sharded label from the batch. Labels are sharded with
`gluon.split_and_load`.
pred: List of NDArray
Prediction on each of the sharded inputs.
loss: List of NDArray
Loss on each of the sharded inputs.
"""
data, label = self._get_data_and_label(train_batch, estimator.context, batch_axis)
with autograd.record():
pred = [estimator.net(x) for x in data]
loss = [estimator.loss(y_hat, y) for y_hat, y in zip(pred, label)]
for l in loss:
l.backward()
return data, label, pred, loss
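# A minimal usage sketch (hypothetical net, loss, and dataloader names): the
# default processor is passed to the estimator, which then calls fit_batch
# once per minibatch during training:
#   processor = BatchProcessor()
#   est = Estimator(net=net, loss=loss, batch_processor=processor)
#   est.fit(train_data=train_loader, epochs=2)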
|
apache-2.0
|
schreiberx/sweet
|
benchmarks_sphere/rexi_mass_energy_galewsky_martinium/pp_plot_csv.py
|
2
|
2918
|
#! /usr/bin/python2
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import sys
first = True
s = 2e-5
eta_contour_levels = np.append(np.arange(-1e-4, 0, s), np.arange(s, 1e-4, s))
zoom_lat = True
zoom_lat = False
zoom_lat = 'eta' in sys.argv[1]
fontsize=8
figsize=(9, 3)
filename = sys.argv[1]
ref_filename = sys.argv[2]
if True:
print(filename)
data = np.loadtxt(filename, skiprows=3)
labelsx = data[0,1:]
labelsy = data[1:,0]
data = data[1:,1:]
if np.isnan(data).any():
print("Skipping "+filename+" because of NaN")
sys.exit(1)
if zoom_lat:
while labelsy[1] < 10:
labelsy = labelsy[1:]
data = data[1:]
while labelsy[-2] > 80:
labelsy = labelsy[0:-2]
data = data[0:-2]
# while labelsx[1] < 90:
# tmplabelsx = labelsx[0]
# labelsx[0:-1] = labelsx[1:]
# labelsx[-1] = tmplabelsx
#
# tmpdata = data[:,0]
# data[:,0:-1] = data[:,1:]
# data[:,-1] = tmpdata
# Reference
if True:
refdata = np.loadtxt(ref_filename, skiprows=3)
refdata = refdata[1:,1:]
if zoom_lat:
while labelsy[1] < 10:
labelsy = labelsy[1:]
refdata = refdata[1:]
while labelsy[-2] > 80:
labelsy = labelsy[0:-2]
refdata = refdata[0:-2]
if first:
lon_min = labelsx[0]
lon_max = labelsx[-1]
lat_min = labelsy[0]
lat_max = labelsy[-1]
new_labelsx = np.linspace(lon_min, lon_max, 7)
new_labelsy = np.linspace(lat_min, lat_max, 7)
labelsx = np.interp(new_labelsx, labelsx, labelsx)
labelsy = np.interp(new_labelsy, labelsy, labelsy)
if first:
cmin = np.amin(data)
cmax = np.amax(data)
if 'eta' in filename:
cmin *= 1.2
cmax *= 1.2
extent = (labelsx[0], labelsx[-1], labelsy[0], labelsy[-1])
plt.figure(figsize=figsize)
plt.imshow(data, interpolation='nearest', extent=extent, origin='lower', aspect='auto')
plt.clim(cmin, cmax)
cbar = plt.colorbar()
cbar.ax.tick_params(labelsize=fontsize)
plt.title(filename, fontsize=fontsize)
if 'eta' in filename:
plt.contour(data, colors="black", origin='lower', extent=extent, vmin=cmin, vmax=cmax, levels=eta_contour_levels, linewidths=0.5)
plt.contour(refdata, colors="black", origin='lower', extent=extent, vmin=cmin, vmax=cmax, levels=eta_contour_levels, linewidths=0.5, linestyles='dashed')
else:
if cmin != cmax:
plt.contour(data, colors="black", origin='lower', extent=extent, vmin=cmin, vmax=cmax, linewidths=0.5)
plt.contour(refdata, colors="black", origin='lower', extent=extent, vmin=cmin, vmax=cmax, linewidths=0.5, linestyles='dashed')
ax = plt.gca()
ax.xaxis.set_label_coords(0.5, -0.075)
plt.xticks(labelsx, fontsize=fontsize)
plt.xlabel("Longitude", fontsize=fontsize)
plt.yticks(labelsy, fontsize=fontsize)
plt.ylabel("Latitude", fontsize=fontsize)
#plt.show()
outfilename = filename.replace('.csv', '.png')
print(outfilename)
plt.savefig(outfilename, dpi=200)
plt.close()
first = False
|
mit
|
dmilith/SublimeText3-dmilith
|
Packages/pygments/all/pygments/styles/algol_nu.py
|
37
|
2278
|
# -*- coding: utf-8 -*-
"""
pygments.styles.algol_nu
~~~~~~~~~~~~~~~~~~~~~~~~
Algol publication style without underlining of keywords.
This style renders source code for publication of algorithms in
scientific papers and academic texts, where its format is frequently used.
It is based on the style of the revised Algol-60 language report[1].
o No colours, only black, white and shades of grey are used.
o Keywords are rendered in lowercase boldface.
o Builtins are rendered in lowercase boldface italic.
o Docstrings and pragmas are rendered in dark grey boldface.
o Library identifiers are rendered in dark grey boldface italic.
o Comments are rendered in grey italic.
To render keywords with underlining, refer to the `Algol` style.
For lowercase conversion of keywords and builtins in languages where
these are not or might not be lowercase, a supporting lexer is required.
The Algol and Modula-2 lexers automatically convert to lowercase whenever
this style is selected.
[1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, Operator
class Algol_NuStyle(Style):
background_color = "#ffffff"
default_style = ""
styles = {
Comment: "italic #888",
Comment.Preproc: "bold noitalic #888",
Comment.Special: "bold noitalic #888",
Keyword: "bold",
Keyword.Declaration: "italic",
Name.Builtin: "bold italic",
Name.Builtin.Pseudo: "bold italic",
Name.Namespace: "bold italic #666",
Name.Class: "bold italic #666",
Name.Function: "bold italic #666",
Name.Variable: "bold italic #666",
Name.Constant: "bold italic #666",
Operator.Word: "bold",
String: "italic #666",
Error: "border:#FF0000"
}
|
mit
|
ovilab/atomify-lammps
|
libs/lammps/tools/moltemplate/moltemplate/postprocess_input_script.py
|
8
|
6687
|
#!/usr/bin/env python
"""
Reorder the integer arguments to the commands in a LAMMPS input
file if these arguments violate LAMMPS order requirements.
We have to do this because the moltemplate.sh script will automatically
assign these integers in a way which may violate these restrictions
and the user has little control over this.
This script:
swaps the I and J integers in "pair_coeff I J ..." commands when I > J
Other features may be added later
"""
import sys
def main():
lines_orig = []
f = None
fname = None
num_lines_ignore = 0
# Lines from files passed as arguments are read and processed silently.
# (Why? Sometimes it's necessary to read the contents of previous input scripts
# in order to be able to understand a script command which appears later.
# I'm assuming these files will be processed by lammps in the same order. So I
    # must ensure that moltemplate.sh passes them to this program in that order.
# I'm too lazy to read the "include" commands in input scripts correctly.)
if len(sys.argv) > 1:
for fname in sys.argv[1:]:
f = open(fname, 'r')
in_stream = f
lines_orig += in_stream.readlines()
num_lines_ignore += len(lines_orig)
f.close()
# Lines read from the standard input are read, processed, and printed to stdout
in_stream = sys.stdin
lines_orig += in_stream.readlines()
pair_style_list = []
swap_occured = False
warn_wildcard = False
i = 0
while i < len(lines_orig):
# Read the next logical line
# Any lines ending in '&' should be merged with the next line before
# breaking
line_orig = ''
while i < len(lines_orig):
line_counter = 1 + i - num_lines_ignore
line_orig += lines_orig[i]
if ((len(line_orig) < 2) or (line_orig[-2:] != '&\n')):
break
i += 1
line = line_orig.replace('&\n', '\n').rstrip('\n')
comment = ''
if '#' in line_orig:
ic = line.find('#')
line = line_orig[:ic]
# keep track of comments (put them back later)
comment = line_orig[ic:].rstrip()
tokens = line.strip().split()
if ((len(tokens) >= 2) and (tokens[0] == 'pair_style')):
pair_style_list = tokens[1:]
if ((len(tokens) >= 3) and (tokens[0] == 'pair_coeff')):
if ((tokens[1].isdigit() and (tokens[2].isdigit())) and
(int(tokens[1]) > int(tokens[2]))):
swap_occured = True
tmp = tokens[2]
tokens[2] = tokens[1]
tokens[1] = tmp
if i >= num_lines_ignore:
# polite warning:
sys.stderr.write(
'swapped pair_coeff order on line ' + str(line_counter))
# if (fname != None):
# sys.stderr.write(' of file \"'+fname+'\"')
sys.stderr.write('\n')
# Deal with the "hbond/" pair coeffs.
#
# The hbond/dreiding pair style designates one of the two atom types
            # as a donor, and the other as an acceptor (using the 'i','j' flags).
            # If we swapped atom types earlier, we also need to swap 'i' with 'j'.
#
# If "hbond/dreiding.." pair style is used with "hybrid" or
# "hybrid/overlay" then tokens[3] is the name of the pair style
# and tokens[5] is either 'i' or 'j'.
if len(pair_style_list) > 0:
if ((pair_style_list[0] == 'hybrid') or
(pair_style_list[0] == 'hybrid/overlay')):
if ((len(tokens) > 5) and (tokens[5] == 'i') and (tokens[3][0:6] == 'hbond/')):
tokens[5] = 'j'
sys.stderr.write(
' (and replaced \"i\" with \"j\")\n')
elif ((len(tokens) > 5) and (tokens[5] == 'j') and (tokens[3][0:6] == 'hbond/')):
tokens[5] = 'i'
sys.stderr.write(
' (and replaced \"j\" with \"i\")\n')
elif (pair_style_list[0][0:6] == 'hbond/'):
if ((len(tokens) > 4) and (tokens[4] == 'i')):
tokens[4] = 'j'
sys.stderr.write(
' (and replaced \"i\" with \"j\")\n')
elif ((len(tokens) > 4) and (tokens[4] == 'j')):
tokens[4] = 'i'
sys.stderr.write(
' (and replaced \"j\" with \"i\")\n')
sys.stdout.write(
(' '.join(tokens) + comment).replace('\n', '&\n') + '\n')
else:
if ((('*' in tokens[1]) or ('*' in tokens[2]))
and
(not (('*' == tokens[1]) and ('*' == tokens[2])))):
warn_wildcard = True
if i >= num_lines_ignore:
sys.stdout.write(line_orig)
else:
if i >= num_lines_ignore:
sys.stdout.write(line_orig)
i += 1
if swap_occured:
sys.stderr.write('\n'
' WARNING: Atom order in some pair_coeff commands was swapped to pacify LAMMPS.\n'
' For some exotic pair_styles such as hbond/dreiding, this is not enough. If you\n'
' use exotic pair_styles, please verify the \"pair_coeff\" commands are correct.\n')
if warn_wildcard:
sys.stderr.write('\n'
' WARNING: The use of wildcard characters (\"*\") in your \"pair_coeff\"\n'
' commands is not recommended.\n'
' (It is safer to specify each interaction pair manually.\n'
' Check every pair_coeff command. Make sure that every atom type in\n'
' the first group is <= atom types in the second group.\n'
' Moltemplate does NOT do this when wildcards are used.)\n'
' If you are using a many-body pair style then ignore this warning.\n')
return
if __name__ == '__main__':
main()
|
gpl-3.0
|
chubiei/ycmd
|
ycmd/extra_conf_store.py
|
13
|
7343
|
#!/usr/bin/env python
#
# Copyright (C) 2011, 2012 Google Inc.
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
# NOTE: This module is used as a Singleton
import os
import imp
import random
import string
import sys
import logging
from threading import Lock
from ycmd import user_options_store
from ycmd.responses import UnknownExtraConf, YCM_EXTRA_CONF_FILENAME
from fnmatch import fnmatch
# Singleton variables
_module_for_module_file = {}
_module_for_module_file_lock = Lock()
_module_file_for_source_file = {}
_module_file_for_source_file_lock = Lock()
def Reset():
global _module_for_module_file, _module_file_for_source_file
_module_for_module_file = {}
_module_file_for_source_file = {}
def ModuleForSourceFile( filename ):
return Load( ModuleFileForSourceFile( filename ) )
def ModuleFileForSourceFile( filename ):
"""This will try all files returned by _ExtraConfModuleSourceFilesForFile in
order and return the filename of the first module that was allowed to load.
If no module was found or allowed to load, None is returned."""
with _module_file_for_source_file_lock:
    if filename not in _module_file_for_source_file:
for module_file in _ExtraConfModuleSourceFilesForFile( filename ):
if Load( module_file ):
_module_file_for_source_file[ filename ] = module_file
break
return _module_file_for_source_file.setdefault( filename )
def CallGlobalExtraConfYcmCorePreloadIfExists():
_CallGlobalExtraConfMethod( 'YcmCorePreload' )
def Shutdown():
# VimClose is for the sake of backwards compatibility; it's a no-op when it
# doesn't exist.
_CallGlobalExtraConfMethod( 'VimClose' )
_CallGlobalExtraConfMethod( 'Shutdown' )
def _CallGlobalExtraConfMethod( function_name ):
logger = _Logger()
global_ycm_extra_conf = _GlobalYcmExtraConfFileLocation()
if not ( global_ycm_extra_conf and
os.path.exists( global_ycm_extra_conf ) ):
logger.debug( 'No global extra conf, not calling method ' + function_name )
return
module = Load( global_ycm_extra_conf, force = True )
if not module or not hasattr( module, function_name ):
logger.debug( 'Global extra conf not loaded or no function ' +
function_name )
return
logger.info( 'Calling global extra conf method {0} on conf file {1}'.format(
function_name, global_ycm_extra_conf ) )
getattr( module, function_name )()
def Disable( module_file ):
"""Disables the loading of a module for the current session."""
with _module_for_module_file_lock:
_module_for_module_file[ module_file ] = None
def _ShouldLoad( module_file ):
"""Checks if a module is safe to be loaded. By default this will try to
decide using a white-/blacklist and ask the user for confirmation as a
fallback."""
if ( module_file == _GlobalYcmExtraConfFileLocation() or
not user_options_store.Value( 'confirm_extra_conf' ) ):
return True
globlist = user_options_store.Value( 'extra_conf_globlist' )
for glob in globlist:
is_blacklisted = glob[0] == '!'
if _MatchesGlobPattern( module_file, glob.lstrip('!') ):
return not is_blacklisted
raise UnknownExtraConf( module_file )
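# Sketch of how the globlist is interpreted (hypothetical patterns): entries
# are tried in order and the first match wins, with a leading '!' marking a
# blacklist entry. With extra_conf_globlist set to
# ['!~/dev/vendor/*', '~/dev/*'], a module under ~/dev/vendor/ is rejected,
# any other module under ~/dev/ is accepted, and anything unmatched raises
# UnknownExtraConf.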
def Load( module_file, force = False ):
"""Load and return the module contained in a file.
Using force = True the module will be loaded regardless
of the criteria in _ShouldLoad.
This will return None if the module was not allowed to be loaded."""
if not module_file:
return None
if not force:
with _module_for_module_file_lock:
if module_file in _module_for_module_file:
return _module_for_module_file[ module_file ]
if not _ShouldLoad( module_file ):
Disable( module_file )
return None
# This has to be here because a long time ago, the ycm_extra_conf.py files
# used to import clang_helpers.py from the cpp folder. This is not needed
# anymore, but there are a lot of old ycm_extra_conf.py files that we don't
# want to break.
sys.path.insert( 0, _PathToCppCompleterFolder() )
module = imp.load_source( _RandomName(), module_file )
del sys.path[ 0 ]
with _module_for_module_file_lock:
_module_for_module_file[ module_file ] = module
return module
def _MatchesGlobPattern( filename, glob ):
"""Returns true if a filename matches a given pattern. A '~' in glob will be
expanded to the home directory and checking will be performed using absolute
paths. See the documentation of fnmatch for the supported patterns."""
abspath = os.path.abspath( filename )
return fnmatch( abspath, os.path.abspath( os.path.expanduser( glob ) ) )
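# For example (illustrative paths): _MatchesGlobPattern( '/home/u/proj/x.py',
# '~/proj/*' ) is True when the user's home directory is /home/u, since both
# sides are expanded to absolute paths before fnmatch runs.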
def _ExtraConfModuleSourceFilesForFile( filename ):
"""For a given filename, search all parent folders for YCM_EXTRA_CONF_FILENAME
files that will compute the flags necessary to compile the file.
If _GlobalYcmExtraConfFileLocation() exists it is returned as a fallback."""
for folder in _PathsToAllParentFolders( filename ):
candidate = os.path.join( folder, YCM_EXTRA_CONF_FILENAME )
if os.path.exists( candidate ):
yield candidate
global_ycm_extra_conf = _GlobalYcmExtraConfFileLocation()
if ( global_ycm_extra_conf
and os.path.exists( global_ycm_extra_conf ) ):
yield global_ycm_extra_conf
def _PathsToAllParentFolders( filename ):
"""Build a list of all parent folders of a file.
The nearest folders will be returned first.
Example: _PathsToAllParentFolders( '/home/user/projects/test.c' )
[ '/home/user/projects', '/home/user', '/home', '/' ]"""
def PathFolderComponents( filename ):
folders = []
path = os.path.normpath( os.path.dirname( filename ) )
while True:
path, folder = os.path.split( path )
if folder:
folders.append( folder )
else:
if path:
folders.append( path )
break
return list( reversed( folders ) )
parent_folders = PathFolderComponents( filename )
parent_folders = [ os.path.join( *parent_folders[:i + 1] )
for i in xrange( len( parent_folders ) ) ]
return reversed( parent_folders )
def _PathToCppCompleterFolder():
"""Returns the path to the 'cpp' completer folder. This is necessary
because ycm_extra_conf files need it on the path."""
return os.path.join( _DirectoryOfThisScript(), 'completers', 'cpp' )
def _DirectoryOfThisScript():
return os.path.dirname( os.path.abspath( __file__ ) )
def _RandomName():
"""Generates a random module name."""
return ''.join( random.choice( string.ascii_lowercase ) for x in range( 15 ) )
def _GlobalYcmExtraConfFileLocation():
return os.path.expanduser(
user_options_store.Value( 'global_ycm_extra_conf' ) )
def _Logger():
return logging.getLogger( __name__ )
|
gpl-3.0
|
astroumd/GradMap
|
notebooks/check_imports/draw_dolphins.py
|
1
|
3779
|
"""
Draws dolphins using matplotlib features.
From matplotlib documentation:
https://matplotlib.org/gallery/shapes_and_collections/dolphin.html#sphx-glr-gallery-shapes-and-collections-dolphin-py
"""
# Fixing random state for reproducibility
import matplotlib.cm as cm
import matplotlib.pyplot as plt
from matplotlib.patches import Circle, PathPatch
from matplotlib.path import Path
from matplotlib.transforms import Affine2D
import numpy as np
np.random.seed(19680801)
r = np.random.rand(50)
t = np.random.rand(50) * np.pi * 2.0
x = r * np.cos(t)
y = r * np.sin(t)
fig, ax = plt.subplots(figsize=(6, 6))
circle = Circle((0, 0), 1, facecolor='none',
edgecolor=(0, 0.8, 0.8), linewidth=3, alpha=0.5)
ax.add_patch(circle)
im = plt.imshow(np.random.random((100, 100)),
origin='lower', cmap=cm.winter,
interpolation='spline36',
extent=([-1, 1, -1, 1]))
im.set_clip_path(circle)
plt.plot(x, y, 'o', color=(0.9, 0.9, 1.0), alpha=0.8)
# Dolphin from OpenClipart library by Andy Fitzsimon
# <cc:License rdf:about="http://web.resource.org/cc/PublicDomain">
# <cc:permits rdf:resource="http://web.resource.org/cc/Reproduction"/>
# <cc:permits rdf:resource="http://web.resource.org/cc/Distribution"/>
# <cc:permits rdf:resource="http://web.resource.org/cc/DerivativeWorks"/>
# </cc:License>
dolphin = """
M -0.59739425,160.18173 C -0.62740401,160.18885 -0.57867129,160.11183
-0.57867129,160.11183 C -0.57867129,160.11183 -0.5438361,159.89315
-0.39514638,159.81496 C -0.24645668,159.73678 -0.18316813,159.71981
-0.18316813,159.71981 C -0.18316813,159.71981 -0.10322971,159.58124
-0.057804323,159.58725 C -0.029723983,159.58913 -0.061841603,159.60356
-0.071265813,159.62815 C -0.080250183,159.65325 -0.082918513,159.70554
-0.061841203,159.71248 C -0.040763903,159.7194 -0.0066711426,159.71091
0.077336307,159.73612 C 0.16879567,159.76377 0.28380306,159.86448
0.31516668,159.91533 C 0.3465303,159.96618 0.5011127,160.1771
0.5011127,160.1771 C 0.63668998,160.19238 0.67763022,160.31259
0.66556395,160.32668 C 0.65339985,160.34212 0.66350443,160.33642
0.64907098,160.33088 C 0.63463742,160.32533 0.61309688,160.297
0.5789627,160.29339 C 0.54348657,160.28968 0.52329693,160.27674
0.50728856,160.27737 C 0.49060916,160.27795 0.48965803,160.31565
0.46114204,160.33673 C 0.43329696,160.35786 0.4570711,160.39871
0.43309565,160.40685 C 0.4105108,160.41442 0.39416631,160.33027
0.3954995,160.2935 C 0.39683269,160.25672 0.43807996,160.21522
0.44567915,160.19734 C 0.45327833,160.17946 0.27946869,159.9424
-0.061852613,159.99845 C -0.083965233,160.0427 -0.26176109,160.06683
-0.26176109,160.06683 C -0.30127962,160.07028 -0.21167141,160.09731
-0.24649368,160.1011 C -0.32642366,160.11569 -0.34521187,160.06895
-0.40622293,160.0819 C -0.467234,160.09485 -0.56738444,160.17461
-0.59739425,160.18173
"""
vertices = []
codes = []
parts = dolphin.split()
i = 0
code_map = {
'M': Path.MOVETO,
'C': Path.CURVE4,
'L': Path.LINETO,
}
while i < len(parts):
path_code = code_map[parts[i]]
npoints = Path.NUM_VERTICES_FOR_CODE[path_code]
codes.extend([path_code] * npoints)
vertices.extend([[*map(float, y.split(','))]
for y in parts[i + 1:][:npoints]])
i += npoints + 1
vertices = np.array(vertices)
vertices[:, 1] -= 160
dolphin_path = Path(vertices, codes)
dolphin_patch = PathPatch(dolphin_path, facecolor=(0.6, 0.6, 0.6),
edgecolor=(0.0, 0.0, 0.0))
ax.add_patch(dolphin_patch)
vertices = Affine2D().rotate_deg(60).transform(vertices)
dolphin_path2 = Path(vertices, codes)
dolphin_patch2 = PathPatch(dolphin_path2, facecolor=(0.5, 0.5, 0.5),
edgecolor=(0.0, 0.0, 0.0))
ax.add_patch(dolphin_patch2)
plt.show()
|
gpl-3.0
|
jianpingye/linux
|
tools/perf/scripts/python/event_analyzing_sample.py
|
4719
|
7393
|
# event_analyzing_sample.py: general event handler in python
#
# Current perf report is already very powerful with the annotation integrated,
# and this script is not trying to be as powerful as perf report, but
# providing end user/developer a flexible way to analyze the events other
# than trace points.
#
# The 2 database related functions in this script just show how to gather
# the basic information, and users can modify and write their own functions
# according to their specific requirements.
#
# The first function "show_general_events" just does a basic grouping for all
# generic events with the help of sqlite, and the 2nd one "show_pebs_ll" is
# for a x86 HW PMU event: PEBS with load latency data.
#
import os
import sys
import math
import struct
import sqlite3
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from EventClass import *
#
# If the perf.data has a big number of samples, then the insert operation
# will be very time consuming (about 10+ minutes for 10000 samples) if the
# .db database is on disk. Move the .db file to RAM based FS to speedup
# the handling, which will cut the time down to several seconds.
#
con = sqlite3.connect("/dev/shm/perf.db")
con.isolation_level = None
def trace_begin():
print "In trace_begin:\n"
#
# Will create several tables at the start, pebs_ll is for PEBS data with
# load latency info, while gen_events is for general event.
#
con.execute("""
create table if not exists gen_events (
name text,
symbol text,
comm text,
dso text
);""")
con.execute("""
create table if not exists pebs_ll (
name text,
symbol text,
comm text,
dso text,
flags integer,
ip integer,
status integer,
dse integer,
dla integer,
lat integer
);""")
#
# Create and insert event object to a database so that user could
# do more analysis with simple database commands.
#
def process_event(param_dict):
event_attr = param_dict["attr"]
sample = param_dict["sample"]
raw_buf = param_dict["raw_buf"]
comm = param_dict["comm"]
name = param_dict["ev_name"]
# Symbol and dso info are not always resolved
if (param_dict.has_key("dso")):
dso = param_dict["dso"]
else:
dso = "Unknown_dso"
if (param_dict.has_key("symbol")):
symbol = param_dict["symbol"]
else:
symbol = "Unknown_symbol"
# Create the event object and insert it to the right table in database
event = create_event(name, comm, dso, symbol, raw_buf)
insert_db(event)
def insert_db(event):
if event.ev_type == EVTYPE_GENERIC:
con.execute("insert into gen_events values(?, ?, ?, ?)",
(event.name, event.symbol, event.comm, event.dso))
elif event.ev_type == EVTYPE_PEBS_LL:
event.ip &= 0x7fffffffffffffff
event.dla &= 0x7fffffffffffffff
con.execute("insert into pebs_ll values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(event.name, event.symbol, event.comm, event.dso, event.flags,
event.ip, event.status, event.dse, event.dla, event.lat))
def trace_end():
print "In trace_end:\n"
# We show the basic info for the 2 type of event classes
show_general_events()
show_pebs_ll()
con.close()
#
# As the event count may be very big, we can't show the histogram on a
# linear scale, so we use a log2 algorithm instead.
#
def num2sym(num):
# Each number will have at least one '#'
    snum = '#' * int(math.log(num, 2) + 1)
return snum
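# num2sym sketch (illustrative counts): num2sym(1) => '#' and num2sym(8) =>
# '####' (int(log2(8)) + 1 = 4), so each doubling of the count adds one '#'.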
def show_general_events():
# Check the total record number in the table
count = con.execute("select count(*) from gen_events")
for t in count:
print "There is %d records in gen_events table" % t[0]
if t[0] == 0:
return
print "Statistics about the general events grouped by thread/symbol/dso: \n"
# Group by thread
commq = con.execute("select comm, count(comm) from gen_events group by comm order by -count(comm)")
print "\n%16s %8s %16s\n%s" % ("comm", "number", "histogram", "="*42)
for row in commq:
print "%16s %8d %s" % (row[0], row[1], num2sym(row[1]))
# Group by symbol
print "\n%32s %8s %16s\n%s" % ("symbol", "number", "histogram", "="*58)
symbolq = con.execute("select symbol, count(symbol) from gen_events group by symbol order by -count(symbol)")
for row in symbolq:
print "%32s %8d %s" % (row[0], row[1], num2sym(row[1]))
# Group by dso
print "\n%40s %8s %16s\n%s" % ("dso", "number", "histogram", "="*74)
dsoq = con.execute("select dso, count(dso) from gen_events group by dso order by -count(dso)")
for row in dsoq:
print "%40s %8d %s" % (row[0], row[1], num2sym(row[1]))
#
# This function just shows the basic info, and we could do more with the
# data in the tables, like checking the function parameters when some
# big latency events happen.
#
def show_pebs_ll():
count = con.execute("select count(*) from pebs_ll")
for t in count:
print "There is %d records in pebs_ll table" % t[0]
if t[0] == 0:
return
print "Statistics about the PEBS Load Latency events grouped by thread/symbol/dse/latency: \n"
# Group by thread
commq = con.execute("select comm, count(comm) from pebs_ll group by comm order by -count(comm)")
print "\n%16s %8s %16s\n%s" % ("comm", "number", "histogram", "="*42)
for row in commq:
print "%16s %8d %s" % (row[0], row[1], num2sym(row[1]))
# Group by symbol
print "\n%32s %8s %16s\n%s" % ("symbol", "number", "histogram", "="*58)
symbolq = con.execute("select symbol, count(symbol) from pebs_ll group by symbol order by -count(symbol)")
for row in symbolq:
print "%32s %8d %s" % (row[0], row[1], num2sym(row[1]))
# Group by dse
dseq = con.execute("select dse, count(dse) from pebs_ll group by dse order by -count(dse)")
print "\n%32s %8s %16s\n%s" % ("dse", "number", "histogram", "="*58)
for row in dseq:
print "%32s %8d %s" % (row[0], row[1], num2sym(row[1]))
# Group by latency
latq = con.execute("select lat, count(lat) from pebs_ll group by lat order by lat")
print "\n%32s %8s %16s\n%s" % ("latency", "number", "histogram", "="*58)
for row in latq:
print "%32s %8d %s" % (row[0], row[1], num2sym(row[1]))
def trace_unhandled(event_name, context, event_fields_dict):
print ' '.join(['%s=%s'%(k,str(v))for k,v in sorted(event_fields_dict.items())])
|
gpl-2.0
|
zhoulingjun/django
|
django/utils/deprecation.py
|
199
|
2627
|
from __future__ import absolute_import
import inspect
import warnings
class RemovedInDjango20Warning(PendingDeprecationWarning):
pass
class RemovedInDjango110Warning(DeprecationWarning):
pass
RemovedInNextVersionWarning = RemovedInDjango110Warning
class warn_about_renamed_method(object):
def __init__(self, class_name, old_method_name, new_method_name, deprecation_warning):
self.class_name = class_name
self.old_method_name = old_method_name
self.new_method_name = new_method_name
self.deprecation_warning = deprecation_warning
def __call__(self, f):
def wrapped(*args, **kwargs):
warnings.warn(
"`%s.%s` is deprecated, use `%s` instead." %
(self.class_name, self.old_method_name, self.new_method_name),
self.deprecation_warning, 2)
return f(*args, **kwargs)
return wrapped
class RenameMethodsBase(type):
"""
Handles the deprecation paths when renaming a method.
It does the following:
1) Define the new method if missing and complain about it.
2) Define the old method if missing.
3) Complain whenever an old method is called.
See #15363 for more details.
"""
renamed_methods = ()
def __new__(cls, name, bases, attrs):
new_class = super(RenameMethodsBase, cls).__new__(cls, name, bases, attrs)
for base in inspect.getmro(new_class):
class_name = base.__name__
for renamed_method in cls.renamed_methods:
old_method_name = renamed_method[0]
old_method = base.__dict__.get(old_method_name)
new_method_name = renamed_method[1]
new_method = base.__dict__.get(new_method_name)
deprecation_warning = renamed_method[2]
wrapper = warn_about_renamed_method(class_name, *renamed_method)
# Define the new method if missing and complain about it
if not new_method and old_method:
warnings.warn(
"`%s.%s` method should be renamed `%s`." %
(class_name, old_method_name, new_method_name),
deprecation_warning, 2)
setattr(base, new_method_name, old_method)
setattr(base, old_method_name, wrapper(old_method))
# Define the old method as a wrapped call to the new method.
if not old_method and new_method:
setattr(base, old_method_name, wrapper(new_method))
return new_class
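# A minimal usage sketch (hypothetical class and method names), mirroring how
# Django wires this up for managers, and assuming six is available as it is
# elsewhere in Django: subclass the metaclass, list the renames, and apply it
# to the class whose method was renamed:
#   class RenameWidgetMethods(RenameMethodsBase):
#       renamed_methods = (
#           ('old_render', 'render', RemovedInDjango110Warning),
#       )
#   class Widget(six.with_metaclass(RenameWidgetMethods)):
#       def render(self):
#           ...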
|
bsd-3-clause
|
kitsunde/ansible
|
lib/ansible/plugins/action/fail.py
|
172
|
1228
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
# (c) 2012, Dag Wieers <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
''' Fail with custom message '''
TRANSFERS_FILES = False
def run(self, tmp=None, task_vars=dict()):
msg = 'Failed as requested from task'
if self._task.args and 'msg' in self._task.args:
msg = self._task.args.get('msg')
return dict(failed=True, msg=msg)
|
gpl-3.0
|
pkimber/old_cms
|
cms/tests/test_service.py
|
1
|
1778
|
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from cms.models import Page
from cms.service import init_page
class TestService(TestCase):
def setUp(self):
self.SLUG = 'home'
self.HOME = 'Home'
def test_init_not(self):
try:
Page.objects.get(slug=self.SLUG)
self.fail("'{}' page exists, but hasn't been "
"created yet".format(self.SLUG))
except Page.DoesNotExist:
pass
def test_init(self):
init_page(self.HOME, 0)
try:
Page.objects.get(slug=self.SLUG)
except Page.DoesNotExist:
self.fail("'{}' page was not initialised".format(self.SLUG))
def test_init_change_order(self):
# create page (order 1)
init_page(self.HOME, 1)
page = Page.objects.get(slug=self.SLUG)
self.assertEqual(1, page.order)
# update page (order 3)
init_page(self.HOME, 3)
page = Page.objects.get(slug=self.SLUG)
self.assertEqual(3, page.order)
def test_init_is_home(self):
init_page(self.HOME, 0, is_home=True)
page = Page.objects.get(slug=self.SLUG)
self.assertTrue(page.is_home)
def test_init_is_not_home(self):
init_page(self.HOME, 0)
page = Page.objects.get(slug=self.SLUG)
self.assertFalse(page.is_home)
def test_init_set_home(self):
# create page (is not a home page)
init_page(self.HOME, 0)
page = Page.objects.get(slug=self.SLUG)
self.assertFalse(page.is_home)
# update page (is now a home page)
init_page(self.HOME, 0, is_home=True)
page = Page.objects.get(slug=self.SLUG)
self.assertTrue(page.is_home)
|
apache-2.0
|
zenners/angular-contacts
|
node_modules/firebase/node_modules/faye-websocket/node_modules/websocket-driver/node_modules/websocket-extensions/node_modules/jstest/node_modules/nopt/node_modules/tap/node_modules/readable-stream/node_modules/string_decoder/node_modules/tap/node_modules/yamlish/yamlish-py/test/test_input.py
|
157
|
1764
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import test
try:
import unittest2 as unittest
except ImportError:
import unittest
import yamlish
test_data_list = [
{
"name": "Input test",
"in": r"""---
bill-to:
address:
city: "Royal Oak"
lines: "458 Walkman Dr.\nSuite #292\n"
postal: 48046
state: MI
family: Dumars
given: Chris
comments: "Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338\n"
date: 2001-01-23
invoice: 34843
product:
-
description: Basketball
price: 450.00
quantity: 4
sku: BL394D
-
description: "Super Hoop"
price: 2392.00
quantity: 1
sku: BL4438H
tax: 251.42
total: 4443.52
...
""",
'out': {
'bill-to': {
'given': 'Chris',
'address': {
'city': 'Royal Oak',
'postal': 48046,
'lines': "458 Walkman Dr.\nSuite #292\n",
'state': 'MI'
},
'family': 'Dumars'
},
'invoice': 34843,
'date': '2001-01-23',
'tax': 251.42,
'product': [
{
'sku': 'BL394D',
'quantity': 4,
'price': 450.00,
'description': 'Basketball'
},
{
'sku': 'BL4438H',
'quantity': 1,
'price': 2392.00,
'description': 'Super Hoop'
}
],
'comments':
"Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338\n",
'total': 4443.52
}
}
]
class TestInput(unittest.TestCase): # IGNORE:C0111
pass
test.generate_testsuite(test_data_list, TestInput, yamlish.load)
if __name__ == "__main__":
unittest.main()
|
mit
|
nozuono/calibre-webserver
|
src/calibre/ebooks/rtf2xml/convert_to_tags.py
|
19
|
10742
|
import os, sys
from codecs import EncodedFile
from calibre.ebooks.rtf2xml import copy, check_encoding
from calibre.ptempfile import better_mktemp
public_dtd = 'rtf2xml1.0.dtd'
class ConvertToTags:
"""
Convert file to XML
"""
def __init__(self,
in_file,
bug_handler,
dtd_path,
no_dtd,
encoding,
indent = None,
copy = None,
run_level = 1,
):
"""
Required:
'file'
Optional:
            'copy' -- whether to make a copy of result for debugging
            'temp_dir' -- where to output temporary results (default is the
            directory from which the script is run).
Returns:
nothing
"""
self.__file = in_file
self.__bug_handler = bug_handler
self.__copy = copy
self.__dtd_path = dtd_path
self.__no_dtd = no_dtd
self.__encoding = 'cp' + encoding
# if encoding == 'mac_roman':
# self.__encoding = 'mac_roman'
self.__indent = indent
self.__run_level = run_level
self.__write_to = better_mktemp()
self.__convert_utf = False
self.__bad_encoding = False
def __initiate_values(self):
"""
Set values, including those for the dictionary.
"""
self.__state = 'default'
self.__new_line = 0
self.__block = ('doc', 'preamble', 'rtf-definition', 'font-table',
'font-in-table', 'color-table', 'color-in-table', 'style-sheet',
'paragraph-styles', 'paragraph-style-in-table', 'character-styles',
'character-style-in-table', 'list-table', 'doc-information', 'title',
'author', 'operator', 'creation-time', 'revision-time',
'editing-time', 'time', 'number-of-pages', 'number-of-words',
'number-of-characters', 'page-definition', 'section-definition',
'headers-and-footers', 'section', 'para', 'body',
'paragraph-definition', 'cell', 'row', 'table', 'revision-table',
'style-group', 'border-group','styles-in-body', 'paragraph-style-in-body',
'list-in-table', 'level-in-table', 'override-table','override-list',
)
        self.__two_new_line = ('section', 'body', 'table', 'row', 'list-table')
self.__state_dict = {
'default' : self.__default_func,
'mi<tg<open______' : self.__open_func,
'mi<tg<close_____' : self.__close_func,
'mi<tg<open-att__' : self.__open_att_func,
'mi<tg<empty-att_' : self.__empty_att_func,
'tx<nu<__________' : self.__text_func,
'tx<ut<__________' : self.__text_func,
'mi<tg<empty_____' : self.__empty_func,
}
def __open_func(self, line):
"""
Print the opening tag and newlines when needed.
"""
#mi<tg<open______<style-sheet
info = line[17:-1]
self.__new_line = 0
if info in self.__block:
self.__write_new_line()
if info in self.__two_new_line:
self.__write_extra_new_line()
self.__write_obj.write('<%s>' % info)
def __empty_func(self, line):
"""
Print out empty tag and newlines when needed.
"""
info = line[17:-1]
self.__write_obj.write(
'<%s/>' % info)
self.__new_line = 0
if info in self.__block:
self.__write_new_line()
if info in self.__two_new_line:
self.__write_extra_new_line()
def __open_att_func(self, line):
"""
Process lines for open tags that have attributes.
        The important info is between [17:-1]. Take this info and split it
        with the delimiter '<'. The first token in this group is the element
        name. The rest are attributes, separated from their values by '>'. So
        read each token one at a time, and split them by '>'.
"""
#mi<tg<open-att__<footnote<num>
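        # Worked example (illustrative, with a hypothetical value '1'): for
        # info = 'footnote<num>1', tokens become ['footnote', 'num>1'];
        # element_name is 'footnote', and the 'num>1' token splits into
        # val='num', att='1', so the method writes <footnote num="1">.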
info = line[17:-1]
tokens = info.split("<")
element_name = tokens[0]
tokens = tokens[1:]
self.__write_obj.write('<%s' % element_name)
for token in tokens:
groups = token.split('>')
try:
val = groups[0]
att = groups[1]
att = att.replace('"', '"')
att = att.replace("'", '"')
self.__write_obj.write(
' %s="%s"' % (val, att)
)
except:
if self.__run_level > 3:
msg = 'index out of range\n'
raise self.__bug_handler, msg
self.__write_obj.write('>')
self.__new_line = 0
if element_name in self.__block:
self.__write_new_line()
if element_name in self.__two_new_line:
self.__write_extra_new_line()
def __empty_att_func(self, line):
"""
Same as the __open_att_func, except a '/' is placed at the end of the tag.
"""
#mi<tg<open-att__<footnote<num>
info = line[17:-1]
tokens = info.split("<")
element_name = tokens[0]
tokens = tokens[1:]
self.__write_obj.write('<%s' % element_name)
for token in tokens:
groups = token.split('>')
val = groups[0]
att = groups[1]
att = att.replace('"', '"')
att = att.replace("'", '"')
self.__write_obj.write(
' %s="%s"' % (val, att))
self.__write_obj.write('/>')
self.__new_line = 0
if element_name in self.__block:
self.__write_new_line()
if element_name in self.__two_new_line:
self.__write_extra_new_line()
def __close_func(self, line):
"""
Print out the closed tag and new lines, if appropriate.
"""
#mi<tg<close_____<style-sheet\n
info = line[17:-1]
self.__write_obj.write(
'</%s>' % info)
self.__new_line = 0
if info in self.__block:
self.__write_new_line()
if info in self.__two_new_line:
self.__write_extra_new_line()
def __text_func(self, line):
"""
Simply print out the information between [17:-1]
"""
#tx<nu<__________<Normal;
# change this!
self.__write_obj.write(line[17:-1])
def __write_extra_new_line(self):
"""
        Print an extra new line, but only if fewer than two new lines have
        already been printed; otherwise do nothing.
"""
if not self.__indent:
return
if self.__new_line < 2:
self.__write_obj.write('\n')
def __default_func(self, line):
pass
def __write_new_line(self):
"""
Print out a new line if a new line has not already been printed out.
"""
if not self.__indent:
return
if not self.__new_line:
self.__write_obj.write('\n')
self.__new_line += 1
def __write_dec(self):
"""
Write the XML declaration at the top of the document.
"""
#keep maximum compatibility with previous version
check_encoding_obj = check_encoding.CheckEncoding(
bug_handler=self.__bug_handler)
if not check_encoding_obj.check_encoding(self.__file, verbose=False):
self.__write_obj.write('<?xml version="1.0" encoding="US-ASCII" ?>')
elif not check_encoding_obj.check_encoding(self.__file, self.__encoding, verbose=False):
self.__write_obj.write('<?xml version="1.0" encoding="UTF-8" ?>')
self.__convert_utf = True
else:
self.__write_obj.write('<?xml version="1.0" encoding="US-ASCII" ?>')
            sys.stderr.write('Bad RTF encoding, reverting to US-ASCII chars and'
                             ' hoping for the best')
self.__bad_encoding = True
self.__new_line = 0
self.__write_new_line()
if self.__no_dtd:
pass
elif self.__dtd_path:
self.__write_obj.write(
'<!DOCTYPE doc SYSTEM "%s">' % self.__dtd_path
)
elif self.__dtd_path == '':
# don't print dtd if further transformations are going to take
# place
pass
else:
self.__write_obj.write(
'<!DOCTYPE doc PUBLIC "publicID" '
'"http://rtf2xml.sourceforge.net/dtd/%s">' % public_dtd
)
self.__new_line = 0
self.__write_new_line()
def convert_to_tags(self):
"""
Read in the file one line at a time. Get the important info, between
[:16]. Check if this info matches a dictionary entry. If it does, call
the appropriate function.
The functions that are called:
a text function for text
an open function for open tags
an open with attribute function for tags with attributes
an empty with attribute function for tags that are empty but have
            attributes.
a closed function for closed tags.
an empty tag function.
"""
self.__initiate_values()
with open(self.__write_to, 'w') as self.__write_obj:
self.__write_dec()
with open(self.__file, 'r') as read_obj:
for line in read_obj:
self.__token_info = line[:16]
action = self.__state_dict.get(self.__token_info)
if action is not None:
action(line)
#convert all encodings to UTF8 or ASCII to avoid unsupported encodings in lxml
if self.__convert_utf or self.__bad_encoding:
copy_obj = copy.Copy(bug_handler = self.__bug_handler)
copy_obj.rename(self.__write_to, self.__file)
file_encoding = "utf-8"
if self.__bad_encoding:
file_encoding = "us-ascii"
with open(self.__file, 'r') as read_obj:
with open(self.__write_to, 'w') as write_obj:
write_objenc = EncodedFile(write_obj, self.__encoding,
file_encoding, 'replace')
for line in read_obj:
write_objenc.write(line)
copy_obj = copy.Copy(bug_handler = self.__bug_handler)
if self.__copy:
copy_obj.copy_file(self.__write_to, "convert_to_tags.data")
copy_obj.rename(self.__write_to, self.__file)
os.remove(self.__write_to)
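# A minimal sketch of the dispatch convention used by convert_to_tags()
# (tokens taken from the handler comments above): every line of the
# intermediate file starts with a fixed-width 16-character token, and the
# token alone selects the handler, e.g.
#
#     state_dict = {
#         'mi<tg<close_____': self.__close_func,   # writes '</style-sheet>' etc.
#         'tx<nu<__________': self.__text_func,    # raw text payload
#     }
#     action = state_dict.get(line[:16])
#     if action is not None:
#         action(line)                              # handler reads line[17:-1]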
|
gpl-3.0
|
pixelgremlins/ztruck
|
dj/lib/python2.7/site-packages/django/core/files/utils.py
|
901
|
1230
|
class FileProxyMixin(object):
"""
    A mixin class used to forward file methods to an underlying file
object. The internal file object has to be called "file"::
class FileProxy(FileProxyMixin):
def __init__(self, file):
self.file = file
"""
encoding = property(lambda self: self.file.encoding)
fileno = property(lambda self: self.file.fileno)
flush = property(lambda self: self.file.flush)
isatty = property(lambda self: self.file.isatty)
newlines = property(lambda self: self.file.newlines)
read = property(lambda self: self.file.read)
readinto = property(lambda self: self.file.readinto)
readline = property(lambda self: self.file.readline)
readlines = property(lambda self: self.file.readlines)
seek = property(lambda self: self.file.seek)
softspace = property(lambda self: self.file.softspace)
tell = property(lambda self: self.file.tell)
truncate = property(lambda self: self.file.truncate)
write = property(lambda self: self.file.write)
writelines = property(lambda self: self.file.writelines)
xreadlines = property(lambda self: self.file.xreadlines)
def __iter__(self):
return iter(self.file)
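# A minimal usage sketch of the mixin, following the FileProxy example in the
# docstring above (BytesIO stands in for a real file object):
#
#     from io import BytesIO
#
#     class FileProxy(FileProxyMixin):
#         def __init__(self, file):
#             self.file = file
#
#     proxy = FileProxy(BytesIO(b'hello'))
#     proxy.read()   # -> b'hello', forwarded to the underlying BytesIO
#     proxy.seek(0)  # forwarded as well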
|
apache-2.0
|
elventear/ansible-modules-core
|
system/group.py
|
25
|
14205
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Stephen Fromm <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'core',
'version': '1.0'}
DOCUMENTATION = '''
---
module: group
author: "Stephen Fromm (@sfromm)"
version_added: "0.0.2"
short_description: Add or remove groups
requirements: [ groupadd, groupdel, groupmod ]
description:
- Manage presence of groups on a host.
options:
name:
required: true
description:
- Name of the group to manage.
gid:
required: false
description:
- Optional I(GID) to set for the group.
state:
required: false
default: "present"
choices: [ present, absent ]
description:
- Whether the group should be present or not on the remote host.
system:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- If I(yes), indicates that the group created is a system group.
'''
EXAMPLES = '''
# Example group command from Ansible Playbooks
- group:
name: somegroup
state: present
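
# Example creating a system group with a fixed GID (illustrative values)
- group:
    name: somesystemgroup
    gid: 999
    system: yes
    state: present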
'''
import grp
import platform
class Group(object):
"""
This is a generic Group manipulation class that is subclassed
based on platform.
A subclass may wish to override the following action methods:-
- group_del()
- group_add()
- group_mod()
All subclasses MUST define platform and distribution (which may be None).
"""
platform = 'Generic'
distribution = None
GROUPFILE = '/etc/group'
def __new__(cls, *args, **kwargs):
return load_platform_subclass(Group, args, kwargs)
def __init__(self, module):
self.module = module
self.state = module.params['state']
self.name = module.params['name']
self.gid = module.params['gid']
self.system = module.params['system']
def execute_command(self, cmd):
return self.module.run_command(cmd)
def group_del(self):
cmd = [self.module.get_bin_path('groupdel', True), self.name]
return self.execute_command(cmd)
def group_add(self, **kwargs):
cmd = [self.module.get_bin_path('groupadd', True)]
for key in kwargs:
if key == 'gid' and kwargs[key] is not None:
cmd.append('-g')
cmd.append(kwargs[key])
elif key == 'system' and kwargs[key] == True:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def group_mod(self, **kwargs):
cmd = [self.module.get_bin_path('groupmod', True)]
info = self.group_info()
for key in kwargs:
if key == 'gid':
if kwargs[key] is not None and info[2] != int(kwargs[key]):
cmd.append('-g')
cmd.append(kwargs[key])
if len(cmd) == 1:
return (None, '', '')
if self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
def group_exists(self):
try:
if grp.getgrnam(self.name):
return True
except KeyError:
return False
def group_info(self):
if not self.group_exists():
return False
try:
info = list(grp.getgrnam(self.name))
except KeyError:
return False
return info
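    # Note: grp.getgrnam() returns a struct such as ('somegroup', 'x', 1001, []),
    # so info[2] used by group_mod() above (and by the platform subclasses
    # below) is the group's numeric GID.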
# ===========================================
class SunOS(Group):
"""
This is a SunOS Group manipulation class. Solaris doesn't have
the 'system' group concept.
This overrides the following methods from the generic class:-
- group_add()
"""
platform = 'SunOS'
distribution = None
GROUPFILE = '/etc/group'
def group_add(self, **kwargs):
cmd = [self.module.get_bin_path('groupadd', True)]
for key in kwargs:
if key == 'gid' and kwargs[key] is not None:
cmd.append('-g')
cmd.append(kwargs[key])
cmd.append(self.name)
return self.execute_command(cmd)
# ===========================================
class AIX(Group):
"""
    This is an AIX Group manipulation class.
This overrides the following methods from the generic class:-
- group_del()
- group_add()
- group_mod()
"""
platform = 'AIX'
distribution = None
GROUPFILE = '/etc/group'
def group_del(self):
cmd = [self.module.get_bin_path('rmgroup', True), self.name]
return self.execute_command(cmd)
def group_add(self, **kwargs):
cmd = [self.module.get_bin_path('mkgroup', True)]
for key in kwargs:
if key == 'gid' and kwargs[key] is not None:
cmd.append('id='+kwargs[key])
elif key == 'system' and kwargs[key] == True:
cmd.append('-a')
cmd.append(self.name)
return self.execute_command(cmd)
def group_mod(self, **kwargs):
cmd = [self.module.get_bin_path('chgroup', True)]
info = self.group_info()
for key in kwargs:
if key == 'gid':
if kwargs[key] is not None and info[2] != int(kwargs[key]):
cmd.append('id='+kwargs[key])
if len(cmd) == 1:
return (None, '', '')
if self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
# ===========================================
class FreeBsdGroup(Group):
"""
This is a FreeBSD Group manipulation class.
This overrides the following methods from the generic class:-
- group_del()
- group_add()
- group_mod()
"""
platform = 'FreeBSD'
distribution = None
GROUPFILE = '/etc/group'
def group_del(self):
cmd = [self.module.get_bin_path('pw', True), 'groupdel', self.name]
return self.execute_command(cmd)
def group_add(self, **kwargs):
cmd = [self.module.get_bin_path('pw', True), 'groupadd', self.name]
if self.gid is not None:
cmd.append('-g')
cmd.append('%d' % int(self.gid))
return self.execute_command(cmd)
def group_mod(self, **kwargs):
cmd = [self.module.get_bin_path('pw', True), 'groupmod', self.name]
info = self.group_info()
cmd_len = len(cmd)
if self.gid is not None and int(self.gid) != info[2]:
cmd.append('-g')
cmd.append('%d' % int(self.gid))
# modify the group if cmd will do anything
if cmd_len != len(cmd):
if self.module.check_mode:
return (0, '', '')
return self.execute_command(cmd)
return (None, '', '')
# ===========================================
class DarwinGroup(Group):
"""
This is a Mac OS X Darwin Group manipulation class.
This overrides the following methods from the generic class:-
- group_del()
- group_add()
- group_mod()
    Group manipulation is done using dseditgroup(1).
"""
platform = 'Darwin'
distribution = None
def group_add(self, **kwargs):
cmd = [self.module.get_bin_path('dseditgroup', True)]
cmd += [ '-o', 'create' ]
if self.gid is not None:
cmd += [ '-i', self.gid ]
elif 'system' in kwargs and kwargs['system'] == True:
gid = self.get_lowest_available_system_gid()
            if gid is not False:
self.gid = str(gid)
cmd += [ '-i', self.gid ]
cmd += [ '-L', self.name ]
(rc, out, err) = self.execute_command(cmd)
return (rc, out, err)
def group_del(self):
cmd = [self.module.get_bin_path('dseditgroup', True)]
cmd += [ '-o', 'delete' ]
cmd += [ '-L', self.name ]
(rc, out, err) = self.execute_command(cmd)
return (rc, out, err)
def group_mod(self, gid=None):
info = self.group_info()
if self.gid is not None and int(self.gid) != info[2]:
cmd = [self.module.get_bin_path('dseditgroup', True)]
cmd += [ '-o', 'edit' ]
if gid is not None:
cmd += [ '-i', gid ]
cmd += [ '-L', self.name ]
(rc, out, err) = self.execute_command(cmd)
return (rc, out, err)
return (None, '', '')
def get_lowest_available_system_gid(self):
# check for lowest available system gid (< 500)
try:
cmd = [self.module.get_bin_path('dscl', True)]
cmd += [ '/Local/Default', '-list', '/Groups', 'PrimaryGroupID']
(rc, out, err) = self.execute_command(cmd)
lines = out.splitlines()
highest = 0
for group_info in lines:
parts = group_info.split(' ')
if len(parts) > 1:
gid = int(parts[-1])
if gid > highest and gid < 500:
highest = gid
if highest == 0 or highest == 499:
return False
return (highest + 1)
        except Exception:
return False
class OpenBsdGroup(Group):
"""
    This is an OpenBSD Group manipulation class.
This overrides the following methods from the generic class:-
- group_del()
- group_add()
- group_mod()
"""
platform = 'OpenBSD'
distribution = None
GROUPFILE = '/etc/group'
def group_del(self):
cmd = [self.module.get_bin_path('groupdel', True), self.name]
return self.execute_command(cmd)
def group_add(self, **kwargs):
cmd = [self.module.get_bin_path('groupadd', True)]
if self.gid is not None:
cmd.append('-g')
cmd.append('%d' % int(self.gid))
cmd.append(self.name)
return self.execute_command(cmd)
def group_mod(self, **kwargs):
cmd = [self.module.get_bin_path('groupmod', True)]
info = self.group_info()
cmd_len = len(cmd)
if self.gid is not None and int(self.gid) != info[2]:
cmd.append('-g')
cmd.append('%d' % int(self.gid))
if len(cmd) == 1:
return (None, '', '')
if self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
# ===========================================
class NetBsdGroup(Group):
"""
This is a NetBSD Group manipulation class.
This overrides the following methods from the generic class:-
- group_del()
- group_add()
- group_mod()
"""
platform = 'NetBSD'
distribution = None
GROUPFILE = '/etc/group'
def group_del(self):
cmd = [self.module.get_bin_path('groupdel', True), self.name]
return self.execute_command(cmd)
def group_add(self, **kwargs):
cmd = [self.module.get_bin_path('groupadd', True)]
if self.gid is not None:
cmd.append('-g')
cmd.append('%d' % int(self.gid))
cmd.append(self.name)
return self.execute_command(cmd)
def group_mod(self, **kwargs):
cmd = [self.module.get_bin_path('groupmod', True)]
info = self.group_info()
cmd_len = len(cmd)
if self.gid is not None and int(self.gid) != info[2]:
cmd.append('-g')
cmd.append('%d' % int(self.gid))
if len(cmd) == 1:
return (None, '', '')
if self.module.check_mode:
return (0, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
# ===========================================
def main():
module = AnsibleModule(
argument_spec = dict(
state=dict(default='present', choices=['present', 'absent'], type='str'),
name=dict(required=True, type='str'),
gid=dict(default=None, type='str'),
system=dict(default=False, type='bool'),
),
supports_check_mode=True
)
group = Group(module)
module.debug('Group instantiated - platform %s' % group.platform)
if group.distribution:
module.debug('Group instantiated - distribution %s' % group.distribution)
rc = None
out = ''
err = ''
result = {}
result['name'] = group.name
result['state'] = group.state
if group.state == 'absent':
if group.group_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = group.group_del()
if rc != 0:
module.fail_json(name=group.name, msg=err)
elif group.state == 'present':
if not group.group_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = group.group_add(gid=group.gid, system=group.system)
else:
(rc, out, err) = group.group_mod(gid=group.gid)
if rc is not None and rc != 0:
module.fail_json(name=group.name, msg=err)
if rc is None:
result['changed'] = False
else:
result['changed'] = True
if out:
result['stdout'] = out
if err:
result['stderr'] = err
if group.group_exists():
info = group.group_info()
result['system'] = group.system
result['gid'] = info[2]
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
avocado-framework/avocado-vt
|
virttest/libvirt_xml/devices/base.py
|
8
|
10527
|
"""
Common base classes for devices
"""
import logging
from six import StringIO
from virttest import xml_utils
from virttest.libvirt_xml import base, xcepts, accessors
from virttest.xml_utils import ElementTree
class UntypedDeviceBase(base.LibvirtXMLBase):
"""
Base class implementing common functions for all device XML w/o a type attr.
"""
__slots__ = ('device_tag',)
# Subclasses are expected to hide device_tag
def __init__(self, device_tag, virsh_instance=base.virsh):
"""
Initialize untyped device instance's basic XML with device_tag
"""
super(UntypedDeviceBase, self).__init__(virsh_instance=virsh_instance)
# Just a regular dictionary value
# (Using a property to change element tag won't work)
self['device_tag'] = device_tag
# setup bare-bones XML
self.xml = u"<%s/>" % device_tag
def from_element(self, element):
"""
        Stateful helper for new_from_element(): load this instance's XML
        from an ElementTree element.
"""
class_name = self.__class__.__name__
if element.tag != class_name.lower():
            raise xcepts.LibvirtXMLError('Refusing to create %s instance '
                                         'from %s tagged element'
                                         % (class_name, element.tag))
# XMLTreeFile only supports element trees
etree = xml_utils.ElementTree.ElementTree(element)
# ET only writes to open file-like objects
xmlstr = StringIO()
# Need element tree string value to initialize LibvirtXMLBase.xml
etree.write(xmlstr, xml_utils.ENCODING)
# Create a new XMLTreeFile object based on string input
self.xml = xmlstr.getvalue()
@classmethod
def new_from_element(cls, element, virsh_instance=base.virsh):
"""
        Create a new device XML instance from a single ElementTree element
"""
# subclasses __init__ only takes virsh_instance parameter
instance = cls(virsh_instance=virsh_instance)
instance.from_element(element)
return instance
@classmethod
def new_from_dict(cls, properties, virsh_instance=base.virsh):
"""
Create a new device XML instance from a dict-like object
"""
instance = cls(virsh_instance=virsh_instance)
for key, value in list(properties.items()):
setattr(instance, key, value)
return instance
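    # Usage sketch (hypothetical subclass, matching the stub example further
    # down): given a Disk device class, Disk.new_from_dict({'type_name': 'file'})
    # builds the bare-bones XML and then sets each key as an accessor-backed
    # property on the instance.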
# Add accessors here to be used by any elements
def _get_list(self, tag_filter):
"""
Return a list of dictionaries containing element's attributes.
"""
dict_list = []
elements = self.xmltreefile.findall(tag_filter)
for element in elements:
dict_list.append(dict(list(element.items())))
return dict_list
def _set_list(self, tag_name, value):
"""
Set all elements to the value list of dictionaries of element's
attributes.
"""
xcept = xcepts.LibvirtXMLError("Must set %s child %s elements from"
" a list of dictionary"
% (self.device_tag, tag_name))
if not isinstance(value, list):
raise xcept
# Start with clean slate
self._del_list(tag_name)
for dict_item in value:
if not isinstance(dict_item, dict):
raise xcept
ElementTree.SubElement(self.xmltreefile.getroot(),
tag_name, dict_item)
self.xmltreefile.write()
def _del_list(self, tag_filter):
"""
Remove the list of dictionaries containing each element's attributes.
"""
element = self.xmltreefile.find(tag_filter)
while element is not None:
self.xmltreefile.getroot().remove(element)
element = self.xmltreefile.find(tag_filter)
self.xmltreefile.write()
def _add_item(self, prop_name, **attributes):
"""
Convenience method for appending an element from dictionary of
attributes.
"""
items = self[prop_name] # xml element name
items.append(attributes)
self[prop_name] = items
def _update_item(self, prop_name, index, **attributes):
"""
Convenience method for merging values into an element's attributes
"""
items = self[prop_name] # xml element name
item = items[index]
item.update(attributes)
self[prop_name] = items
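    # Usage sketch (hypothetical 'address' list property): a subclass could
    # append a child element with
    #     self._add_item('address', type='pci', bus='0x00')
    # and later tweak it in place with
    #     self._update_item('address', 0, bus='0x01')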
class TypedDeviceBase(UntypedDeviceBase):
"""
    Base class implementing common functions for all device XML with a type attr.
"""
__slots__ = ('type_name',)
# Subclasses are expected to hide device_tag
def __init__(self, device_tag, type_name, virsh_instance=base.virsh):
"""
Initialize Typed device instance's basic XML with type_name & device_tag
"""
# generate getter, setter, deleter for 'type_name' property
accessors.XMLAttribute('type_name', self,
                               # each device is its own XML "document"
# because python 2.6 ElementPath is broken
parent_xpath='/',
tag_name=device_tag,
attribute='type')
super(TypedDeviceBase, self).__init__(device_tag=device_tag,
virsh_instance=virsh_instance)
# Calls accessor to modify xml
self.type_name = type_name
@classmethod
def new_from_element(cls, element, virsh_instance=base.virsh):
"""
Hides type_name from superclass new_from_element().
"""
type_name = element.get('type', None)
# subclasses must hide device_tag parameter
instance = cls(type_name=type_name,
virsh_instance=virsh_instance)
instance.from_element(element)
return instance
# Metaclass is a type-of-types or a class-generating class.
# Using it here to avoid copy-pasting very similar class
# definitions into every unwritten device module.
#
# Example usage for stub disk device:
#
# class Disk(base.TypedDeviceBase):
# __metaclass__ = base.StubDeviceMeta
# _device_tag = 'disk'
# _def_type_name = 'block'
#
# will become defined as:
#
# class Disk(base.TypedDeviceBase):
# def __init__(self, type_name='block', virsh_instance=base.virsh):
# issue_warning()
# super(Disk, self).__init__(device_tag='disk'),
# type_name=type_name,
# virsh_instance=virsh_instance)
#
class StubDeviceMeta(type):
"""
    Metaclass for generating stub Device classes for devices not yet fully implemented
"""
warning_issued = False
    # mcs is the class object being generated, name is its name, bases
# is tuple of all baseclasses, and dct is what will become mcs's
# __dict__ after super(...).__init__() is called.
def __init__(mcs, name, bases, dct):
"""
Configuration for new class
"""
# Keep pylint happy
dct = dict(dct)
# Call type() to setup new class and store it as 'mcs'
super(StubDeviceMeta, mcs).__init__(name, bases, dct)
# Needed for UntypedDeviceBase __init__'s default argument value
# i.e. device_tag='disk' as specified by specific device class
if not hasattr(mcs, '_device_tag'):
raise ValueError(
"Class %s requires a _device_tag attribute" % name)
# Same message for both TypedDeviceBase & UntypedDeviceBase subclasses
message = ("Detected use of a stub device XML for a %s class. These "
"only implement a minimal interface that is very likely to "
"change in future versions. This warning will only be "
" logged once." % name)
def issue_warning():
"""
Closure for created __init__ to only print message once.
"""
# Examine the CLASS variable
if not StubDeviceMeta.warning_issued:
# Set the CLASS variable
StubDeviceMeta.warning_issued = True
logging.warning(message)
else:
pass # do nothing
# Create the proper init function for subclass type
if TypedDeviceBase in bases:
# Needed for TypedDeviceBase __init__'s default argument value
# i.e. type_name='pci' as specified by specific device class.
if not hasattr(mcs, '_def_type_name'):
raise ValueError("TypedDevice sub-Class %s must define a "
"_def_type_name attribute" % name)
        # form __init__() and its arguments for the generated class
def stub_init(self, type_name=getattr(mcs, '_def_type_name'),
virsh_instance=base.virsh):
"""
Initialize stub typed device instance
"""
                # issue warning only when some code instantiates
                # an object from the generated class
issue_warning()
# Created class __init__ still needs to call superclass
# __init__ (i.e. UntypedDeviceBase or TypedDeviceBase)
TypedDeviceBase.__init__(self, device_tag=getattr(mcs,
'_device_tag'),
type_name=type_name,
virsh_instance=virsh_instance)
elif UntypedDeviceBase in bases:
# generate __init__() for untyped devices (similar to above)
def stub_init(self, virsh_instance=base.virsh):
"""
Initialize stub un-typed device instance
"""
issue_warning()
UntypedDeviceBase.__init__(self, device_tag=getattr(mcs,
'_device_tag'),
virsh_instance=virsh_instance)
else:
# unexpected usage
raise TypeError("Class %s is not a subclass of TypedDeviceBase or "
"UntypedDeviceBase")
# Point the generated class's __init__ at the generated function above
setattr(mcs, '__init__', stub_init)
|
gpl-2.0
|
VasLem/KinectPainting
|
palm_detection_alg.py
|
1
|
18445
|
import numpy as np
import math
import cv2
import itertools as it
import sys
import time
import helping_functs as hf
import class_objects as co
def detect_corners():
    '''function to detect intersection limits of mask with calib_edges'''
calib_set = set([tuple(i) for i in np.transpose(
np.fliplr(np.nonzero(co.edges.calib_edges))).tolist()])
contour_tuple = [tuple(i) for i in co.contours.arm_contour.tolist()]
contour_dict = dict((k, i) for i, k in enumerate(contour_tuple))
inter = set(contour_tuple).intersection(calib_set)
co.contours.edges_inds = [contour_dict[x] for x in inter]
co.contours.edges = [co.contours.arm_contour.tolist()[ind]
for ind in co.contours.edges_inds]
if co.contours.edges:
x_coord, y_coord, width, height = cv2.boundingRect(
np.swapaxes(np.array([co.contours.edges]), 0, 1))
contour_corners = np.reshape(np.array(co.contours.edges)
[np.array(np.tile(np.array
(np.transpose(np.matrix(np.any(
((co.contours.edges ==
np.array(x_coord)) +
(co.contours.edges == np.array(y_coord)) +
(co.contours.edges ==
np.array(x_coord + width)) +
(co.contours.edges ==
np.array(y_coord +
height))) > 0, axis=1)))),
(1, 2)))], (-1, 2))
contour_corners = cv2.convexHull(np.swapaxes(
np.array([contour_corners]), 0, 1)).squeeze()
if contour_corners.shape[0] != 2 or len(contour_corners.shape) != 2:
            return 'Object wrongly identified (too many entry points found)', []
corn_ind = []
for corner in contour_corners:
corn_ind += [i for (j, i) in enumerate(co.contours.edges_inds)
if np.all(co.contours.edges[j] == corner)]
if not corn_ind:
return ("Warning:Detected object not touching image edges(Probably" +
" misidentification)", [])
return contour_corners, sorted(corn_ind)
else:
return "Warning:Detected object not touching image edges(Probably misidentification)", []
def interpolate(points, winsize):
    '''interpolate points of a contour using a window of winsize'''
interpolated_data = []
for i in range(0, points.shape[0], (winsize - 1) / 2):
interpolated_data.append(np.mean(points[max(
0, i - (winsize - 1) / 2):(min(i + (winsize - 1) / 2,
points.shape[0] - 1) + 1)], axis=0))
return np.array(interpolated_data)
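# Worked example of interpolate() (illustrative values; note the Python 2
# integer division, and winsize must be odd):
#     pts = np.array([[0, 0], [2, 0], [4, 0], [6, 0]])
#     interpolate(pts, 3)
# steps through indices 0..3 with step (3 - 1) / 2 = 1, averaging a window of
# up to 3 points each time, giving [[1, 0], [2, 0], [4, 0], [5, 0]].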
def perform_initial_grouping():
'''group adjacent vectors with same angle'''
tmp = [list(same_angle_vecs) for _, same_angle_vecs in
it.groupby(co.meas.interpolated_contour_angles)]
co.meas.segment_angle = [i[0] for i in tmp]
co.meas.segment_points_num = [len(i) for i in tmp]
# co.meas.segment_angle,meas.segment_points_num=[list(el) for el in
# zip(*[(list(same_angle_vecs)[0],len(list(same_angle_vecs)))
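    # Example of the grouping above (illustrative): angles
    # [0.1, 0.1, 0.5, 0.5, 0.5] yield segment_angle == [0.1, 0.5] and
    # segment_points_num == [2, 3].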
def compute_contour_indices(winsize):
'''find starting and ending contour indices of the grouped segments'''
co.interpolated.vecs_starting_ind = (np.maximum(0,
(np.cumsum(np.array(co.meas.segment_points_num))
- np.array(co.meas.segment_points_num)
/ 2 - 1)
* (winsize - 1) / 2 -
(np.array(co.meas.segment_points_num)
+ 1) * (winsize - 1) / 4)).tolist()
co.interpolated.vecs_ending_ind = (np.minimum(co.contours.hand_centered_contour.shape[0] - 1,
(np.cumsum(np.array(co.meas.segment_points_num))
+
np.array(
co.meas.segment_points_num)
/ 2 - 1) *
(winsize - 1) / 2 +
(np.array(co.meas.segment_points_num)
+ 1) *
(winsize - 1) / 4)).tolist()
def find_largest_line_segments():
'''Find largest line segments with some angle tolerance specified by
    cutoff_angle_ratio and then join them'''
perform_initial_grouping()
compute_contour_indices(co.CONST['interp_window'])
angle_thres = math.pi / co.CONST['cutoff_angle_ratio']
line_window_size = 1
segment = []
    # The interpolated arm contour is traversed multiple times, in order to
    # gather every possible candidate segment; the result is then filtered so
    # that the largest candidates that do not intersect each other are
    # selected
candidate_segments = []
for ind in range(0, len(co.meas.segment_points_num), line_window_size):
forward_count = 1
backward_count = 1
keep_counting_forward = 1
keep_counting_backward = 1
freq = co.meas.segment_points_num[ind]
goon_flag1 = 1
goon_flag2 = 1
candidate_segment = [0, 0]
while goon_flag1 or goon_flag2:
if keep_counting_forward and ind + forward_count < len(co.meas.segment_points_num):
if abs(co.meas.segment_angle[ind] -
co.meas.segment_angle[ind + forward_count]) < angle_thres:
co.meas.segment_angle[ind] = (freq * co.meas.segment_angle[ind] +
co.meas.segment_angle[ind + forward_count]) / (freq + 1)
freq += co.meas.segment_points_num[ind + forward_count]
else:
keep_counting_forward = 0
forward_count += 1
else:
goon_flag1 = 0
keep_counting_forward = 0
candidate_segment[1] = co.interpolated.vecs_ending_ind[
ind + forward_count - 1]
if keep_counting_backward and ind - backward_count > -1:
if abs(co.meas.segment_angle[ind]
- co.meas.segment_angle[ind - backward_count]) < angle_thres:
co.meas.segment_angle[ind] = (freq * co.meas.segment_angle[ind] +
co.meas.segment_angle[ind
- backward_count]) / (freq + 1)
freq += co.meas.segment_points_num[ind - backward_count]
else:
keep_counting_backward = 0
backward_count += 1
else:
goon_flag2 = 0
keep_counting_backward = 0
candidate_segment[0] = co.interpolated.vecs_starting_ind[
ind - (backward_count - 1)]
candidate_segment += [co.meas.segment_angle[ind], freq]
candidate_segments.append(candidate_segment)
candidate_segments_im = 255 * np.ones((co.meas.imy, co.meas.imx))
for segment in candidate_segments:
cv2.line(candidate_segments_im, tuple(co.contours.hand_centered_contour[
segment[0]]), tuple(co.contours.hand_centered_contour[segment[1]]), 0)
co.im_results.images.append(candidate_segments_im)
sorted_segments = sorted(
candidate_segments, key=lambda segment: segment[3], reverse=True)
final_segments = [sorted_segments[0]]
held_inds = range(final_segments[0][0], final_segments[0][1] + 1)
for segment in list(sorted_segments):
if segment[0] not in held_inds and segment[1] not in held_inds:
held_inds += range(segment[0], segment[1] + 1)
final_segments.append(segment)
co.contours.final_segments = sorted(
final_segments, key=lambda segment: segment[0])
def detect_wrist():
'''Detect wrist points'''
    # Find hand entry points into image (corners)
contour_corners, corn_ind = detect_corners()
if not isinstance(contour_corners, str):
hand_centered_contour_ind = np.roll(
np.array(range(co.contours.arm_contour.shape[0])), -corn_ind[0] - 1)
co.contours.hand_centered_contour = co.contours.arm_contour[
hand_centered_contour_ind]
corn_ind[0] = co.contours.hand_centered_contour.shape[0] - 1
else:
return contour_corners, [] # returns warning
co.contours.hand_centered_contour = co.contours.hand_centered_contour[
corn_ind[1]:corn_ind[0] + 1]
# Interpolate contour so as to get polygonal approximation
winsize = co.CONST['interp_window']
co.interpolated.points = interpolate(
co.contours.hand_centered_contour, winsize)
if co.interpolated.points.shape[0] <= 1:
return "Object too small (misidentification)", []
co.meas.interpolated_contour_angles = hf.compute_angles(
co.interpolated.points)
if len(co.meas.interpolated_contour_angles) == 0:
print co.interpolated.points
print "check helping_functs for errors in compute_angles"
find_largest_line_segments()
# Print found segments result.
interpolated_contour_im = 255 * np.ones((co.meas.imy, co.meas.imx))
# print 'final_segments found:',co.contours.final_segments
for segment in co.contours.final_segments:
cv2.line(interpolated_contour_im, tuple(co.contours.hand_centered_contour[
segment[0]]), tuple(co.contours.hand_centered_contour[segment[1]]), 0, 1, 0)
co.im_results.images.append(interpolated_contour_im)
    # The main part of the algorithm follows. A measure is constructed based
    # on lambda, the length ratio and the maximum length, so as to find the
    # best segments describing the forearm. Lambda is a parameter describing
    # whether two segments can effectively form a quadrilateral
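    # Concretely, lambda1 below is the projection parameter of segment2's
    # midpoint onto segment1, dot(st1 - en1, st1 - mid2) / |en1 - st1|**2:
    # values in [0, 1] mean the midpoint projects inside the segment, hence
    # the (lower_b, upper_b) = (-0.3, 1.3) tolerance band checked further down.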
co.meas.lam = []
co.meas.len = []
total_meas = []
approved_segments = []
wearing_par1 = 1
lengths = []
wearing_rate = co.CONST['wearing_dist_rate']
lam_weight = co.CONST['lambda_power_weight']
len_rat_weight = co.CONST['length_ratio_power_weight']
max_length_weight = co.CONST['length_power_weight']
check_segments_num = co.CONST['num_of_checked_segments']
for count1, segment1 in enumerate(co.contours.final_segments[0:check_segments_num - 1]):
wearing_par2 = 1
st1_ind, en1_ind, _, _ = segment1
if len(co.contours.final_segments) == 1:
break
st1 = co.contours.hand_centered_contour[st1_ind]
en1 = co.contours.hand_centered_contour[en1_ind]
length1 = np.linalg.norm(en1 - st1)
mid1 = (st1 + en1) / 2
count2 = len(co.contours.final_segments) - 1
if np.all(en1 == st1):
continue
for segment2 in co.contours.final_segments[:max(len(co.contours.final_segments) -
check_segments_num, count1 + 1):-1]:
st2_ind, en2_ind, _, _ = segment2
st2 = co.contours.hand_centered_contour[st2_ind]
en2 = co.contours.hand_centered_contour[en2_ind]
length2 = np.linalg.norm(en2 - st2)
mid2 = (st2 + en2) / 2
if np.all(en2 == st2):
continue
lambda1 = np.dot((st1 - en1), (st1 - mid2)) / \
(float(np.linalg.norm((en1 - st1)))**2)
lambda2 = np.dot((st2 - en2), (st2 - mid1)) / \
(float(np.linalg.norm((en2 - st2)))**2)
lower_b = -0.3
upper_b = 1.3
middle_b = (lower_b + upper_b) / 2
            if (lambda1 < upper_b) and (lambda1 > lower_b):
                if (lambda2 < upper_b) and (lambda2 > lower_b):
co.meas.lam.append(
abs(1 / np.sqrt((middle_b - lambda1)**2 + (middle_b - lambda2)**2)))
co.meas.len.append(
min(length1 / float(length2), length2 / float(length1)))
total_meas.append(co.meas.lam[-1]**lam_weight *
co.meas.len[-1]**len_rat_weight *
wearing_par1 * wearing_par2 *
max(length1, length2)**max_length_weight)
approved_segments.append([segment1, segment2])
lengths.append(max(length1, length2))
# Uncomment to view procedure. You have to add found flag above
# for this to work
            '''line_segments_im=np.ones((co.meas.imy, co.meas.imx))
            for st_ind, en_ind, _, _ in line_segments:
                cv2.line(line_segments_im, tuple(co.contours.hand_centered_contour[st_ind]), tuple(co.contours.hand_centered_contour[en_ind]), 0, 2)
            cv2.imshow('line_segments_im', line_segments_im)
            im_tmp_result=np.empty((co.meas.imy, co.meas.imx)+(3, ), dtype=float)
im_tmp_result[:, :, 0]=line_segments_im
im_tmp_result[:, :, 1]=line_segments_im
im_tmp_result[:, :, 2]=line_segments_im
cv2.line(im_tmp_result, tuple(st1), tuple(en1), [0, 0, 1], 2)
if found==1:
cv2.line(im_tmp_result, tuple(st2), tuple(en2), [0, 1, 0], 2)
else:
cv2.line(im_tmp_result, tuple(st2), tuple(en2), [1, 0, 0], 2)
cv2.imshow('im_tmp_result', im_tmp_result)
cv2.waitKey(1000/co.CONST['framerate'])
'''
wearing_par2 += -wearing_rate
count2 = count2 - 1
wearing_par1 += -wearing_rate
total_meas = [m / max(lengths) for m in total_meas]
if not approved_segments:
return "Warning: Wrist not found", []
segment1, segment2 = approved_segments[total_meas.index(max(total_meas))]
#lam = co.meas.lam[total_meas.index(max(total_meas))]
#leng = co.meas.len[total_meas.index(max(total_meas))]
st1_ind, en1_ind, _, _ = segment1
st2_ind, en2_ind, _, _ = segment2
st1 = co.contours.hand_centered_contour[st1_ind]
en1 = co.contours.hand_centered_contour[en1_ind]
st2 = co.contours.hand_centered_contour[st2_ind]
en2 = co.contours.hand_centered_contour[en2_ind]
    # The best segment set is selected. Then, it is known that the found
    # segments have opposite directions. We make a first estimate of the wrist
    # points by calculating the points of the segments that are farthest from
    # the corners.
co.points.wristpoints = np.zeros((2, 2), np.int64)
dist_st = np.linalg.norm(st1 - contour_corners[0])
dist_en = np.linalg.norm(en1 - contour_corners[0])
if dist_st > dist_en:
co.points.wristpoints[0] = st1
co.points.wristpoints[1] = en2
else:
co.points.wristpoints[0] = en1
co.points.wristpoints[1] = st2
    # The next estimation is made by supposing that the wrist point
    # closer to the image edge belongs to the wrist
wristpoint_ind = [np.asscalar(np.where(
np.all(co.contours.hand_centered_contour ==
point, axis=1))[0][0]) for point in co.points.wristpoints]
hand_shape = np.zeros((co.meas.imy, co.meas.imx))
cv2.drawContours(hand_shape, [np.swapaxes(np.array([co.contours.hand_centered_contour[
min(wristpoint_ind):max(wristpoint_ind) + 1]]), 0, 1)], 0, 255, 1)
#nzeroelems = np.transpose(np.array(np.nonzero(hand_shape)))
    # (starting from the wrist point closer to the image edge, find a better second wrist point)
wristpoints_dist_from_corn = [[np.linalg.norm(co.points.wristpoints[0] - corner)
for corner in contour_corners],
[np.linalg.norm(co.points.wristpoints[1] - corner)
for corner in contour_corners]]
wristpoint1 = co.points.wristpoints[
np.argmin(np.array(wristpoints_dist_from_corn)) / 2]
    # To find the other wrist point, two bounds are identified, within which
    # it must lie: the opposite corner from the first wrist point and the
    # 2nd estimated wrist point found above
if np.all(wristpoint1 == co.points.wristpoints[0]):
w_bound1 = wristpoint_ind[1]
wristpoint_ind1 = wristpoint_ind[0]
else:
w_bound1 = wristpoint_ind[0]
wristpoint_ind1 = wristpoint_ind[1]
w_bound2 = -1
for ind in corn_ind:
if not (wristpoint_ind1 > min(w_bound1, ind) and wristpoint_ind1 < max(w_bound1, ind)):
w_bound2 = ind
break
if w_bound2 == -1:
return "no point found between the two bounds", []
    # The second wrist point is found to be the one that lies between the
    # bounds and is closest to the first point
wristpoint_ind2 = min(w_bound1,
w_bound2) + np.argmin(
np.array(
[np.linalg.norm(
point - wristpoint1)
for point in
co.contours.hand_centered_contour[
min(w_bound1, w_bound2):max(w_bound1, w_bound2)]]))
new_wristpoint = [wristpoint1,
co.contours.hand_centered_contour[wristpoint_ind2]]
new_wristpoint_ind = [np.asscalar(np.where(np.all(
co.contours.hand_centered_contour == point, axis=1))[0][0]) for point in new_wristpoint]
if abs(new_wristpoint_ind[0] - new_wristpoint_ind[1]) <= 5:
return 'too small hand region found, probably false positive', []
hand_contour = np.swapaxes(np.array([co.contours.hand_centered_contour[min(
new_wristpoint_ind):max(new_wristpoint_ind) + 1]]), 0, 1)
return new_wristpoint, hand_contour
|
bsd-3-clause
|
framon/samba
|
python/samba/tests/registry.py
|
49
|
1772
|
# Unix SMB/CIFS implementation.
# Copyright (C) Jelmer Vernooij <[email protected]> 2007
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Tests for samba.registry."""
import os
from samba import registry
import samba.tests
class HelperTests(samba.tests.TestCase):
def test_predef_to_name(self):
self.assertEquals("HKEY_LOCAL_MACHINE",
registry.get_predef_name(0x80000002))
def test_str_regtype(self):
self.assertEquals("REG_DWORD", registry.str_regtype(4))
class HiveTests(samba.tests.TestCaseInTempDir):
def setUp(self):
super(HiveTests, self).setUp()
self.hive_path = os.path.join(self.tempdir, "ldb_new.ldb")
self.hive = registry.open_ldb(self.hive_path)
def tearDown(self):
del self.hive
os.unlink(self.hive_path)
super(HiveTests, self).tearDown()
def test_ldb_new(self):
self.assertTrue(self.hive is not None)
#def test_flush(self):
# self.hive.flush()
#def test_del_value(self):
# self.hive.del_value("FOO")
class RegistryTests(samba.tests.TestCase):
def test_new(self):
self.registry = registry.Registry()
|
gpl-3.0
|
openfun/edx-platform
|
common/test/acceptance/tests/studio/test_studio_settings_certificates.py
|
20
|
8185
|
"""
Acceptance tests for Studio's Settings pages
"""
from .base_studio_test import StudioCourseTest
from ...pages.studio.settings_certificates import CertificatesPage
class CertificatesTest(StudioCourseTest):
"""
Tests for settings/certificates Page.
"""
def setUp(self, is_staff=False):
super(CertificatesTest, self).setUp(is_staff)
self.certificates_page = CertificatesPage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
def make_signatory_data(self, prefix='First'):
"""
        Makes a signatory dict which can be used in the tests to create certificates
"""
return {
'name': '{prefix} Signatory Name'.format(prefix=prefix),
'title': '{prefix} Signatory Title'.format(prefix=prefix),
'organization': '{prefix} Signatory Organization'.format(prefix=prefix),
}
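    # For example, make_signatory_data('First') returns
    # {'name': 'First Signatory Name', 'title': 'First Signatory Title',
    #  'organization': 'First Signatory Organization'}.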
def create_and_verify_certificate(self, course_title_override, existing_certs, signatories):
"""
Creates a new certificate and verifies that it was properly created.
"""
self.assertEqual(existing_certs, len(self.certificates_page.certificates))
if existing_certs == 0:
self.certificates_page.wait_for_first_certificate_button()
self.certificates_page.click_first_certificate_button()
else:
self.certificates_page.wait_for_add_certificate_button()
self.certificates_page.click_add_certificate_button()
certificate = self.certificates_page.certificates[existing_certs]
# Set the certificate properties
certificate.course_title = course_title_override
# add signatories
added_signatories = 0
for idx, signatory in enumerate(signatories):
certificate.signatories[idx].name = signatory['name']
certificate.signatories[idx].title = signatory['title']
certificate.signatories[idx].organization = signatory['organization']
certificate.signatories[idx].upload_signature_image('Signature-{}.png'.format(idx))
added_signatories += 1
if len(signatories) > added_signatories:
certificate.click_add_signatory_button()
# Save the certificate
self.assertEqual(certificate.get_text('.action-primary'), "Create")
certificate.click_create_certificate_button()
self.assertIn(course_title_override, certificate.course_title)
return certificate
def test_no_certificates_by_default(self):
"""
        Scenario: Ensure that the message telling me to create a new certificate
        is shown when no certificates exist.
Given I have a course without certificates
When I go to the Certificates page in Studio
Then I see "You have not created any certificates yet." message
"""
self.certificates_page.visit()
self.assertTrue(self.certificates_page.no_certificates_message_shown)
self.assertIn(
"You have not created any certificates yet.",
self.certificates_page.no_certificates_message_text
)
    def test_can_create_and_edit_certificate(self):
"""
Scenario: Ensure that the certificates can be created and edited correctly.
Given I have a course without certificates
When I click button 'Add your first Certificate'
        And I set the new course title override and signatory and click the button 'Create'
Then I see the new certificate is added and has correct data
When I edit the certificate
And I change the name and click the button 'Save'
Then I see the certificate is saved successfully and has the new name
"""
self.certificates_page.visit()
self.certificates_page.wait_for_first_certificate_button()
certificate = self.create_and_verify_certificate(
"Course Title Override",
0,
[self.make_signatory_data('first'), self.make_signatory_data('second')]
)
# Edit the certificate
certificate.click_edit_certificate_button()
certificate.course_title = "Updated Course Title Override 2"
self.assertEqual(certificate.get_text('.action-primary'), "Save")
certificate.click_save_certificate_button()
self.assertIn("Updated Course Title Override 2", certificate.course_title)
def test_can_delete_certificate(self):
"""
        Scenario: Ensure that the user can delete a certificate.
Given I have a course with 1 certificate
And I go to the Certificates page
When I delete the Certificate with name "New Certificate"
Then I see that there is no certificate
When I refresh the page
Then I see that the certificate has been deleted
"""
self.certificates_page.visit()
certificate = self.create_and_verify_certificate(
"Course Title Override",
0,
[self.make_signatory_data('first'), self.make_signatory_data('second')]
)
certificate.wait_for_certificate_delete_button()
self.assertEqual(len(self.certificates_page.certificates), 1)
# Delete certificate
certificate.delete_certificate()
self.certificates_page.visit()
self.assertEqual(len(self.certificates_page.certificates), 0)
    def test_can_create_and_edit_signatories_of_certificate(self):
"""
Scenario: Ensure that the certificates can be created with signatories and edited correctly.
Given I have a course without certificates
When I click button 'Add your first Certificate'
        And I set the new course title override and signatory and click the button 'Create'
Then I see the new certificate is added and has one signatory inside it
When I click 'Edit' button of signatory panel
And I set the name and click the button 'Save' icon
Then I see the signatory name updated with newly set name
When I refresh the certificates page
Then I can see course has one certificate with new signatory name
When I click 'Edit' button of signatory panel
And click on 'Close' button
Then I can see no change in signatory detail
"""
self.certificates_page.visit()
certificate = self.create_and_verify_certificate(
"Course Title Override",
0,
[self.make_signatory_data('first')]
)
self.assertEqual(len(self.certificates_page.certificates), 1)
# Edit the signatory in certificate
signatory = certificate.signatories[0]
signatory.edit()
signatory.name = 'Updated signatory name'
signatory.title = 'Update signatory title'
signatory.organization = 'Updated signatory organization'
signatory.save()
self.assertEqual(len(self.certificates_page.certificates), 1)
signatory = self.certificates_page.certificates[0].signatories[0]
self.assertIn("Updated signatory name", signatory.name)
self.assertIn("Update signatory title", signatory.title)
self.assertIn("Updated signatory organization", signatory.organization)
signatory.edit()
signatory.close()
self.assertIn("Updated signatory name", signatory.name)
def test_can_cancel_creation_of_certificate(self):
"""
Scenario: Ensure that creation of a certificate can be canceled correctly.
Given I have a course without certificates
When I click button 'Add your first Certificate'
And I set name of certificate and click the button 'Cancel'
Then I see that there is no certificates in the course
"""
self.certificates_page.visit()
self.certificates_page.click_first_certificate_button()
certificate = self.certificates_page.certificates[0]
certificate.course_title = "Title Override"
certificate.click_cancel_edit_certificate()
self.assertEqual(len(self.certificates_page.certificates), 0)
|
agpl-3.0
|
Erotemic/dtool
|
dtool_ibeis/experimental_features.py
|
1
|
3573
|
# @ut.accepts_scalar_input2(argx_list=[1])
# def get_obj(depc, tablename, root_rowids, config=None, ensure=True):
# """ Convinience function. Gets data in `tablename` as a list of
# objects. """
# print('WARNING EXPERIMENTAL')
# try:
# if tablename == depc.root:
# obj_list = list(depc._root_asobject(root_rowids))
# else:
# def make_property_getter(rowid, colname):
# def wrapper():
# return depc.get_property(
# tablename, rowid, colnames=colname, config=config,
# ensure=ensure)
# return wrapper
# colnames = depc[tablename].data_colnames
# obj_list = [
# ut.LazyDict({colname: make_property_getter(rowid, colname)
# for colname in colnames})
# for rowid in root_rowids
# ]
# return obj_list
# # data_list = depc.get_property(tablename, root_rowids, config)
# # # TODO: lazy dict
# # return [dict(zip(colnames, data)) for data in data_list]
# except Exception as ex:
# ut.printex(ex, 'error in getobj', keys=['tablename', 'root_rowids',
# 'colnames'])
# raise
# def root_asobject(aid_list):
# """ Convinience for writing preproc funcs """
# for aid in aid_list:
# gpath = gpath_list[aid]
# root_obj = ut.LazyDict({
# 'aid': aid,
# 'gpath': gpath,
# 'image': lambda: vt.imread(gpath)
# })
# yield root_obj
# @depc.register_preproc(
# tablename='chip', parents=[dummy_root], colnames=['size', 'chip'],
# coltypes=[(int, int), vt.imread], configclass=DummyChipConfig,
# asobject=True)
# def dummy_preproc_chip(depc, annot_list, config=None):
# """
# TODO: Infer properties from docstr
# Args:
# annot_list (list): list of annot objects
# config (dict): config dictionary
# Returns:
# tuple : ((int, int), ('extern', vt.imread))
# """
# if config is None:
# config = {}
#     # Demonstrates using asobject to get input to function as a dictionary
# # of properties
# for annot in annot_list:
# print('[preproc] Computing chips of annot=%r' % (annot,))
# chip_fpath = annot['gpath']
# w, h = vt.image.open_image_size(chip_fpath)
# size = (w, h)
# print('* chip_fpath = %r' % (chip_fpath,))
# print('* size = %r' % (size,))
# yield size, chip_fpath
# #config_hashid = config.get('feat_cfgstr')
# #assert config_hashid is not None
# # TODO store config_rowid in qparams
# #else:
# # config_hashid = db.cfg.feat_cfg.get_cfgstr()
# if False:
# if config is not None:
# try:
# #config_hashid = 'none'
# config_hashid = config.get(table.tablename + '_hashid')
# except KeyError:
# try:
# subconfig = config.get(table.tablename + '_config')
# config_hashid = ut.hashstr27(ut.to_json(subconfig))
# except KeyError:
#                 print('[deptbl.config] Warning: Config must either '
#                     'contain a string <tablename>_hashid or a dict '
#                     '<tablename>_config')
# raise
# else:
# config_hashid = 'none'
|
apache-2.0
|
bogdal/django-filer
|
filer/tests/permissions.py
|
1
|
14540
|
#-*- coding: utf-8 -*-
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except ImportError:
from django.contrib.auth.models import User, Permission # NOQA
from django.contrib.auth.models import Group
from django.core.files import File as DjangoFile
from django.conf import settings
from django.test.testcases import TestCase
from filer import settings as filer_settings
from filer.models.clipboardmodels import Clipboard
from filer.models.foldermodels import Folder, FolderPermission
from filer.models.imagemodels import Image
from filer.tests.utils import Mock
from filer.tests.helpers import create_image, create_superuser
import os
class FolderPermissionsTestCase(TestCase):
def setUp(self):
self.superuser = create_superuser()
self.client.login(username='admin', password='secret')
self.unauth_user = User.objects.create(username='unauth_user')
self.owner = User.objects.create(username='owner')
self.test_user1 = User.objects.create(username='test1', password='secret')
self.test_user2 = User.objects.create(username='test2', password='secret')
self.group1 = Group.objects.create(name='name1')
self.group2 = Group.objects.create(name='name2')
self.test_user1.groups.add(self.group1)
self.test_user2.groups.add(self.group2)
self.img = create_image()
self.image_name = 'test_file.jpg'
self.filename = os.path.join(settings.FILE_UPLOAD_TEMP_DIR, self.image_name)
self.img.save(self.filename, 'JPEG')
self.file = DjangoFile(open(self.filename), name=self.image_name)
# This is actually a "file" for filer considerations
self.image = Image.objects.create(owner=self.superuser,
original_filename=self.image_name,
file=self.file)
self.clipboard = Clipboard.objects.create(user=self.superuser)
self.clipboard.append_file(self.image)
self.folder = Folder.objects.create(name='test_folder')
self.folder_perm = Folder.objects.create(name='test_folder2')
self.subfolder = Folder.objects.create(name='test_subfolder', parent=self.folder_perm)
self.subsubfolder = Folder.objects.create(name='test_subsubfolder', parent=self.subfolder)
def tearDown(self):
self.image.delete()
def test_superuser_has_rights(self):
request = Mock()
setattr(request, 'user', self.superuser)
result = self.folder.has_read_permission(request)
self.assertEqual(result, True)
def test_unlogged_user_has_no_rights(self):
old_setting = filer_settings.FILER_ENABLE_PERMISSIONS
try:
filer_settings.FILER_ENABLE_PERMISSIONS = True
request = Mock()
setattr(request, 'user', self.unauth_user)
result = self.folder.has_read_permission(request)
self.assertEqual(result, False)
finally:
filer_settings.FILER_ENABLE_PERMISSIONS = old_setting
def test_unlogged_user_has_rights_when_permissions_disabled(self):
request = Mock()
setattr(request, 'user', self.unauth_user)
result = self.folder.has_read_permission(request)
self.assertEqual(result, True)
def test_owner_user_has_rights(self):
# Set owner as the owner of the folder.
self.folder.owner = self.owner
request = Mock()
setattr(request, 'user', self.owner)
result = self.folder.has_read_permission(request)
self.assertEqual(result, True)
def test_combined_groups(self):
request1 = Mock()
setattr(request1, 'user', self.test_user1)
request2 = Mock()
setattr(request2, 'user', self.test_user2)
old_setting = filer_settings.FILER_ENABLE_PERMISSIONS
try:
filer_settings.FILER_ENABLE_PERMISSIONS = True
self.assertEqual(self.folder.has_read_permission(request1), False)
self.assertEqual(self.folder.has_read_permission(request2), False)
self.assertEqual(self.folder_perm.has_read_permission(request1), False)
self.assertEqual(self.folder_perm.has_read_permission(request2), False)
self.assertEqual(self.subsubfolder.has_read_permission(request1), False)
self.assertEqual(self.subsubfolder.has_read_permission(request2), False)
self.assertEqual(FolderPermission.objects.count(), 0)
FolderPermission.objects.create(folder=self.folder, type=FolderPermission.CHILDREN, group=self.group1, can_edit=FolderPermission.DENY, can_read=FolderPermission.ALLOW, can_add_children=FolderPermission.DENY)
FolderPermission.objects.create(folder=self.folder_perm, type=FolderPermission.CHILDREN, group=self.group2, can_edit=FolderPermission.DENY, can_read=FolderPermission.ALLOW, can_add_children=FolderPermission.DENY)
self.assertEqual(FolderPermission.objects.count(), 2)
# We have to invalidate cache
delattr(self.folder, 'permission_cache')
delattr(self.folder_perm, 'permission_cache')
self.assertEqual(self.folder.has_read_permission(request1), True)
self.assertEqual(self.folder.has_read_permission(request2), False)
self.assertEqual(self.folder_perm.has_read_permission(request1), False)
self.assertEqual(self.folder_perm.has_read_permission(request2), True)
self.assertEqual(self.subsubfolder.has_read_permission(request1), False)
self.assertEqual(self.subsubfolder.has_read_permission(request2), True)
self.test_user1.groups.add(self.group2)
self.test_user2.groups.add(self.group1)
# We have to invalidate cache
delattr(self.folder, 'permission_cache')
delattr(self.folder_perm, 'permission_cache')
self.assertEqual(self.folder.has_read_permission(request1), True)
self.assertEqual(self.folder.has_read_permission(request2), True)
self.assertEqual(self.folder_perm.has_read_permission(request1), True)
self.assertEqual(self.folder_perm.has_read_permission(request2), True)
self.assertEqual(self.subsubfolder.has_read_permission(request1), True)
self.assertEqual(self.subsubfolder.has_read_permission(request2), True)
finally:
filer_settings.FILER_ENABLE_PERMISSIONS = old_setting
def test_overlapped_groups_deny1(self):
# Tests overlapped groups with explicit deny
request1 = Mock()
setattr(request1, 'user', self.test_user1)
old_setting = filer_settings.FILER_ENABLE_PERMISSIONS
try:
filer_settings.FILER_ENABLE_PERMISSIONS = True
            self.assertEqual(self.folder.has_read_permission(request1), False)
            self.assertEqual(self.folder_perm.has_read_permission(request1), False)
            self.assertEqual(FolderPermission.objects.count(), 0)
            FolderPermission.objects.create(folder=self.folder, type=FolderPermission.CHILDREN, group=self.group1, can_edit=FolderPermission.DENY, can_read=FolderPermission.ALLOW, can_add_children=FolderPermission.DENY)
            FolderPermission.objects.create(folder=self.folder, type=FolderPermission.CHILDREN, group=self.group2, can_edit=FolderPermission.ALLOW, can_read=FolderPermission.ALLOW, can_add_children=FolderPermission.ALLOW)
self.assertEqual(FolderPermission.objects.count(), 2)
# We have to invalidate cache
delattr(self.folder, 'permission_cache')
self.assertEqual(self.test_user1.groups.filter(pk=self.group1.pk).exists(), True)
self.assertEqual(self.test_user1.groups.filter(pk=self.group2.pk).exists(), False)
            self.assertEqual(self.folder.has_read_permission(request1), True)
            self.assertEqual(self.folder.has_edit_permission(request1), False)
self.assertEqual(self.test_user1.groups.count(), 1)
self.test_user1.groups.add(self.group2)
self.assertEqual(self.test_user1.groups.count(), 2)
# We have to invalidate cache
delattr(self.folder, 'permission_cache')
            self.assertEqual(self.folder.has_read_permission(request1), True)
            self.assertEqual(self.folder.has_edit_permission(request1), False)
finally:
filer_settings.FILER_ENABLE_PERMISSIONS = old_setting
def test_overlapped_groups_deny2(self):
# Tests overlapped groups with explicit deny
# Similar test to test_overlapped_groups_deny1, only order of groups is different
request2 = Mock()
setattr(request2, 'user', self.test_user2)
old_setting = filer_settings.FILER_ENABLE_PERMISSIONS
try:
filer_settings.FILER_ENABLE_PERMISSIONS = True
self.assertEqual(self.folder.has_read_permission(request2), False)
self.assertEqual(self.folder_perm.has_read_permission(request2), False)
self.assertEqual(FolderPermission.objects.count(), 0)
FolderPermission.objects.create(folder=self.folder_perm, type=FolderPermission.CHILDREN, group=self.group2, can_edit=FolderPermission.DENY, can_read=FolderPermission.ALLOW, can_add_children=FolderPermission.DENY)
FolderPermission.objects.create(folder=self.folder_perm, type=FolderPermission.CHILDREN, group=self.group1, can_edit=FolderPermission.ALLOW, can_read=FolderPermission.ALLOW, can_add_children=FolderPermission.ALLOW)
self.assertEqual(FolderPermission.objects.count(), 2)
# We have to invalidate cache
delattr(self.folder_perm, 'permission_cache')
self.assertEqual(self.test_user2.groups.filter(pk=self.group2.pk).exists(), True)
self.assertEqual(self.test_user2.groups.filter(pk=self.group1.pk).exists(), False)
self.assertEqual(self.folder_perm.has_read_permission(request2), True)
self.assertEqual(self.folder_perm.has_edit_permission(request2), False)
self.assertEqual(self.test_user2.groups.count(), 1)
self.test_user2.groups.add(self.group1)
self.assertEqual(self.test_user2.groups.count(), 2)
# We have to invalidate cache
delattr(self.folder_perm, 'permission_cache')
self.assertEqual(self.folder_perm.has_read_permission(request2), True)
self.assertEqual(self.folder_perm.has_edit_permission(request2), False)
finally:
filer_settings.FILER_ENABLE_PERMISSIONS = old_setting
def test_overlapped_groups1(self):
# Tests overlapped groups without explicit deny
request1 = Mock()
setattr(request1, 'user', self.test_user1)
old_setting = filer_settings.FILER_ENABLE_PERMISSIONS
try:
filer_settings.FILER_ENABLE_PERMISSIONS = True
self.assertEqual(self.folder.has_read_permission(request1), False)
self.assertEqual(self.folder_perm.has_read_permission(request1), False)
self.assertEqual(FolderPermission.objects.count(), 0)
FolderPermission.objects.create(folder=self.folder, type=FolderPermission.CHILDREN, group=self.group1, can_edit=None, can_read=FolderPermission.ALLOW, can_add_children=None)
FolderPermission.objects.create(folder=self.folder, type=FolderPermission.CHILDREN, group=self.group2, can_edit=FolderPermission.ALLOW, can_read=FolderPermission.ALLOW, can_add_children=FolderPermission.ALLOW)
self.assertEqual(FolderPermission.objects.count(), 2)
# We have to invalidate cache
delattr(self.folder, 'permission_cache')
self.assertEqual(self.test_user1.groups.filter(pk=self.group1.pk).exists(), True)
self.assertEqual(self.test_user1.groups.filter(pk=self.group2.pk).exists(), False)
self.assertEqual(self.folder.has_read_permission(request1), True)
self.assertEqual(self.folder.has_edit_permission(request1), False)
self.assertEqual(self.test_user1.groups.count(), 1)
self.test_user1.groups.add(self.group2)
self.assertEqual(self.test_user1.groups.count(), 2)
# We have to invalidate cache
delattr(self.folder, 'permission_cache')
self.assertEqual(self.folder.has_read_permission(request1), True)
self.assertEqual(self.folder.has_edit_permission(request1), True)
finally:
filer_settings.FILER_ENABLE_PERMISSIONS = old_setting
def test_overlapped_groups2(self):
# Tests overlapped groups without explicit deny
# Similar test to test_overlapped_groups1, only order of groups is different
request2 = Mock()
setattr(request2, 'user', self.test_user2)
old_setting = filer_settings.FILER_ENABLE_PERMISSIONS
try:
filer_settings.FILER_ENABLE_PERMISSIONS = True
self.assertEqual(self.folder.has_read_permission(request2), False)
self.assertEqual(self.folder_perm.has_read_permission(request2), False)
self.assertEqual(FolderPermission.objects.count(), 0)
            FolderPermission.objects.create(
                folder=self.folder_perm, type=FolderPermission.CHILDREN,
                group=self.group2, can_edit=None,
                can_read=FolderPermission.ALLOW, can_add_children=None)
            FolderPermission.objects.create(
                folder=self.folder_perm, type=FolderPermission.CHILDREN,
                group=self.group1, can_edit=FolderPermission.ALLOW,
                can_read=FolderPermission.ALLOW,
                can_add_children=FolderPermission.ALLOW)
self.assertEqual(FolderPermission.objects.count(), 2)
# We have to invalidate cache
delattr(self.folder_perm, 'permission_cache')
self.assertEqual(self.test_user2.groups.filter(pk=self.group2.pk).exists(), True)
self.assertEqual(self.test_user2.groups.filter(pk=self.group1.pk).exists(), False)
self.assertEqual(self.folder_perm.has_read_permission(request2), True)
self.assertEqual(self.folder_perm.has_edit_permission(request2), False)
self.assertEqual(self.test_user2.groups.count(), 1)
self.test_user2.groups.add(self.group1)
self.assertEqual(self.test_user2.groups.count(), 2)
# We have to invalidate cache
delattr(self.folder_perm, 'permission_cache')
self.assertEqual(self.folder_perm.has_read_permission(request2), True)
self.assertEqual(self.folder_perm.has_edit_permission(request2), True)
finally:
filer_settings.FILER_ENABLE_PERMISSIONS = old_setting
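# A minimal sketch (not part of the original suite) of the invalidation step
# the tests above repeat: has_*_permission() memoizes its verdict on the
# folder instance under `permission_cache`, so the attribute must be cleared
# after every FolderPermission change. The helper name is hypothetical.
def _invalidate_permission_cache(folder):
    if hasattr(folder, 'permission_cache'):
        delattr(folder, 'permission_cache')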
|
bsd-3-clause
|
evancich/apm_motor
|
modules/waf/waflib/Utils.py
|
1
|
18328
|
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
"""
Utilities and platform-specific fixes
The portability fixes try to provide a consistent behavior of the Waf API
through Python versions 2.3 to 3.X and across different platforms (win32, linux, etc)
"""
import os, sys, errno, traceback, inspect, re, shutil, datetime, gc, platform
import subprocess # <- leave this!
from collections import deque, defaultdict
try:
import _winreg as winreg
except ImportError:
try:
import winreg
except ImportError:
winreg = None
from waflib import Errors
try:
from collections import UserDict
except ImportError:
from UserDict import UserDict
try:
from hashlib import md5
except ImportError:
try:
from md5 import md5
except ImportError:
# never fail to enable fixes from another module
pass
try:
import threading
except ImportError:
if not 'JOBS' in os.environ:
# no threading :-(
os.environ['JOBS'] = '1'
class threading(object):
"""
A fake threading class for platforms lacking the threading module.
Use ``waf -j1`` on those platforms
"""
pass
class Lock(object):
"""Fake Lock class"""
def acquire(self):
pass
def release(self):
pass
threading.Lock = threading.Thread = Lock
else:
run_old = threading.Thread.run
def run(*args, **kwargs):
try:
run_old(*args, **kwargs)
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
sys.excepthook(*sys.exc_info())
threading.Thread.run = run
SIG_NIL = 'iluvcuteoverload'.encode()
"""Arbitrary null value for a md5 hash. This value must be changed when the hash value is replaced (size)"""
O644 = 420
"""Constant representing the permissions for regular files (0644 raises a syntax error on python 3)"""
O755 = 493
"""Constant representing the permissions for executable files (0755 raises a syntax error on python 3)"""
rot_chr = ['\\', '|', '/', '-']
"List of characters to use when displaying the throbber (progress bar)"
rot_idx = 0
"Index of the current throbber character (progress bar)"
try:
from collections import OrderedDict as ordered_iter_dict
except ImportError:
class ordered_iter_dict(dict):
def __init__(self, *k, **kw):
self.lst = []
dict.__init__(self, *k, **kw)
def clear(self):
dict.clear(self)
self.lst = []
def __setitem__(self, key, value):
dict.__setitem__(self, key, value)
try:
self.lst.remove(key)
except ValueError:
pass
self.lst.append(key)
def __delitem__(self, key):
dict.__delitem__(self, key)
try:
self.lst.remove(key)
except ValueError:
pass
def __iter__(self):
for x in self.lst:
yield x
def keys(self):
return self.lst
is_win32 = os.sep == '\\' or sys.platform == 'win32' # msys2
def readf(fname, m='r', encoding='ISO8859-1'):
"""
	Read an entire file into a string; use this function instead of os.open() whenever possible.
In practice the wrapper node.read(..) should be preferred to this function::
def build(ctx):
from waflib import Utils
			txt = Utils.readf(ctx.path.find_node('wscript').abspath())
txt = ctx.path.find_node('wscript').read()
:type fname: string
:param fname: Path to file
:type m: string
:param m: Open mode
:type encoding: string
:param encoding: encoding value, only used for python 3
:rtype: string
:return: Content of the file
"""
if sys.hexversion > 0x3000000 and not 'b' in m:
m += 'b'
f = open(fname, m)
try:
txt = f.read()
finally:
f.close()
if encoding:
txt = txt.decode(encoding)
else:
txt = txt.decode()
else:
f = open(fname, m)
try:
txt = f.read()
finally:
f.close()
return txt
def writef(fname, data, m='w', encoding='ISO8859-1'):
"""
	Write an entire file from a string; use this function instead of os.open() whenever possible.
In practice the wrapper node.write(..) should be preferred to this function::
def build(ctx):
from waflib import Utils
			txt = Utils.writef(ctx.path.make_node('i_like_kittens').abspath(), 'some data')
self.path.make_node('i_like_kittens').write('some data')
:type fname: string
:param fname: Path to file
:type data: string
:param data: The contents to write to the file
:type m: string
:param m: Open mode
:type encoding: string
:param encoding: encoding value, only used for python 3
"""
if sys.hexversion > 0x3000000 and not 'b' in m:
data = data.encode(encoding)
m += 'b'
f = open(fname, m)
try:
f.write(data)
finally:
f.close()
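# Hedged round-trip sketch for readf/writef above; the path is hypothetical and
# the helper is never called, so importing the module stays side-effect free.
def _demo_readf_writef(tmp='/tmp/waf_io_demo.txt'):
	writef(tmp, 'hello', encoding='ISO8859-1')
	assert readf(tmp, encoding='ISO8859-1') == 'hello'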
def h_file(fname):
"""
Compute a hash value for a file by using md5. This method may be replaced by
a faster version if necessary. The following uses the file size and the timestamp value::
import stat
from waflib import Utils
def h_file(fname):
st = os.stat(fname)
if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
m = Utils.md5()
m.update(str(st.st_mtime))
m.update(str(st.st_size))
m.update(fname)
return m.digest()
Utils.h_file = h_file
:type fname: string
:param fname: path to the file to hash
:return: hash of the file contents
"""
f = open(fname, 'rb')
m = md5()
try:
while fname:
fname = f.read(200000)
m.update(fname)
finally:
f.close()
return m.digest()
def readf_win32(f, m='r', encoding='ISO8859-1'):
flags = os.O_NOINHERIT | os.O_RDONLY
if 'b' in m:
flags |= os.O_BINARY
if '+' in m:
flags |= os.O_RDWR
try:
fd = os.open(f, flags)
except OSError:
raise IOError('Cannot read from %r' % f)
if sys.hexversion > 0x3000000 and not 'b' in m:
m += 'b'
f = os.fdopen(fd, m)
try:
txt = f.read()
finally:
f.close()
if encoding:
txt = txt.decode(encoding)
else:
txt = txt.decode()
else:
f = os.fdopen(fd, m)
try:
txt = f.read()
finally:
f.close()
return txt
def writef_win32(f, data, m='w', encoding='ISO8859-1'):
if sys.hexversion > 0x3000000 and not 'b' in m:
data = data.encode(encoding)
m += 'b'
flags = os.O_CREAT | os.O_TRUNC | os.O_WRONLY | os.O_NOINHERIT
if 'b' in m:
flags |= os.O_BINARY
if '+' in m:
flags |= os.O_RDWR
try:
fd = os.open(f, flags)
except OSError:
raise IOError('Cannot write to %r' % f)
f = os.fdopen(fd, m)
try:
f.write(data)
finally:
f.close()
def h_file_win32(fname):
try:
fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT)
except OSError:
raise IOError('Cannot read from %r' % fname)
f = os.fdopen(fd, 'rb')
m = md5()
try:
while fname:
fname = f.read(200000)
m.update(fname)
finally:
f.close()
return m.digest()
# always save these
readf_unix = readf
writef_unix = writef
h_file_unix = h_file
if hasattr(os, 'O_NOINHERIT') and sys.hexversion < 0x3040000:
# replace the default functions
readf = readf_win32
writef = writef_win32
h_file = h_file_win32
try:
x = ''.encode('hex')
except LookupError:
import binascii
def to_hex(s):
ret = binascii.hexlify(s)
if not isinstance(ret, str):
ret = ret.decode('utf-8')
return ret
else:
def to_hex(s):
return s.encode('hex')
to_hex.__doc__ = """
Return the hexadecimal representation of a string
:param s: string to convert
:type s: string
"""
def listdir_win32(s):
"""
List the contents of a folder in a portable manner.
On Win32, return the list of drive letters: ['C:', 'X:', 'Z:']
:type s: string
:param s: a string, which can be empty on Windows
"""
if not s:
try:
import ctypes
except ImportError:
# there is nothing much we can do
return [x + ':\\' for x in list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')]
else:
dlen = 4 # length of "?:\\x00"
maxdrives = 26
buf = ctypes.create_string_buffer(maxdrives * dlen)
ndrives = ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen, ctypes.byref(buf))
return [ str(buf.raw[4*i:4*i+2].decode('ascii')) for i in range(int(ndrives/dlen)) ]
if len(s) == 2 and s[1] == ":":
s += os.sep
if not os.path.isdir(s):
e = OSError('%s is not a directory' % s)
e.errno = errno.ENOENT
raise e
return os.listdir(s)
listdir = os.listdir
if is_win32:
listdir = listdir_win32
def num2ver(ver):
"""
	Convert a string, tuple or version number into an integer. The version is expected to have at most 4 components::
from waflib.Utils import num2ver
num2ver('1.3.2') == num2ver((1,3,2)) == num2ver((1,3,2,0))
:type ver: string or tuple of numbers
:param ver: a version number
"""
if isinstance(ver, str):
ver = tuple(ver.split('.'))
if isinstance(ver, tuple):
ret = 0
for i in range(4):
if i < len(ver):
ret += 256**(3 - i) * int(ver[i])
return ret
return ver
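# Worked example of the base-256 packing above:
#     num2ver('1.3.2') == 1*256**3 + 3*256**2 + 2*256 == 16974336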
def ex_stack():
"""
Extract the stack to display exceptions
	:return: a string representing the last exception
"""
exc_type, exc_value, tb = sys.exc_info()
exc_lines = traceback.format_exception(exc_type, exc_value, tb)
return ''.join(exc_lines)
def to_list(sth):
"""
Convert a string argument to a list by splitting on spaces, and pass
through a list argument unchanged::
from waflib.Utils import to_list
lst = to_list("a b c d")
:param sth: List or a string of items separated by spaces
:rtype: list
:return: Argument converted to list
"""
if isinstance(sth, str):
return sth.split()
else:
return sth
def split_path_unix(path):
return path.split('/')
def split_path_cygwin(path):
if path.startswith('//'):
ret = path.split('/')[2:]
ret[0] = '/' + ret[0]
return ret
return path.split('/')
re_sp = re.compile('[/\\\\]')
def split_path_win32(path):
if path.startswith('\\\\'):
ret = re.split(re_sp, path)[2:]
ret[0] = '\\' + ret[0]
return ret
return re.split(re_sp, path)
msysroot = None
def split_path_msys(path):
if (path.startswith('/') or path.startswith('\\')) and not path.startswith('//') and not path.startswith('\\\\'):
# msys paths can be in the form /usr/bin
global msysroot
if not msysroot:
# msys has python 2.7 or 3, so we can use this
msysroot = subprocess.check_output(['cygpath', '-w', '/']).decode(sys.stdout.encoding or 'iso8859-1')
msysroot = msysroot.strip()
path = os.path.normpath(msysroot + os.sep + path)
return split_path_win32(path)
if sys.platform == 'cygwin':
split_path = split_path_cygwin
elif is_win32:
if os.environ.get('MSYSTEM', None):
split_path = split_path_msys
else:
split_path = split_path_win32
else:
split_path = split_path_unix
split_path.__doc__ = """
Split a path by / or \\. This function is not like os.path.split
:type path: string
:param path: path to split
:return: list of strings
"""
def check_dir(path):
"""
Ensure that a directory exists (similar to ``mkdir -p``).
:type path: string
:param path: Path to directory
"""
if not os.path.isdir(path):
try:
os.makedirs(path)
except OSError as e:
if not os.path.isdir(path):
raise Errors.WafError('Cannot create the folder %r' % path, ex=e)
def check_exe(name, env=None):
"""
Ensure that a program exists
:type name: string
:param name: name or path to program
:return: path of the program or None
"""
if not name:
raise ValueError('Cannot execute an empty string!')
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(name)
if fpath and is_exe(name):
return os.path.abspath(name)
else:
env = env or os.environ
for path in env["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, name)
if is_exe(exe_file):
return os.path.abspath(exe_file)
return None
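# Hedged usage sketch ('python' is only an example program name):
#     check_exe('python')   -> e.g. '/usr/bin/python', or None when not found
#     check_exe('/bin/sh')  -> '/bin/sh' when it exists and is executable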
def def_attrs(cls, **kw):
"""
Set default attributes on a class instance
:type cls: class
:param cls: the class to update the given attributes in.
:type kw: dict
:param kw: dictionary of attributes names and values.
"""
for k, v in kw.items():
if not hasattr(cls, k):
setattr(cls, k, v)
def quote_define_name(s):
"""
Convert a string to an identifier suitable for C defines.
:type s: string
:param s: String to convert
:rtype: string
:return: Identifier suitable for C defines
"""
fu = re.sub('[^a-zA-Z0-9]', '_', s)
fu = re.sub('_+', '_', fu)
fu = fu.upper()
return fu
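# Worked example of the substitutions above:
#     quote_define_name('waf-1.8/config.h') -> 'WAF_1_8_CONFIG_H'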
def h_list(lst):
"""
Hash lists. For tuples, using hash(tup) is much more efficient,
except on python >= 3.3 where hash randomization assumes everybody is running a web application.
:param lst: list to hash
:type lst: list of strings
:return: hash of the list
"""
m = md5()
m.update(str(lst).encode())
return m.digest()
def h_fun(fun):
"""
Hash functions
:param fun: function to hash
:type fun: function
:return: hash of the function
"""
try:
return fun.code
except AttributeError:
try:
h = inspect.getsource(fun)
except IOError:
h = "nocode"
try:
fun.code = h
except AttributeError:
pass
return h
def h_cmd(ins):
"""
Task command hashes are calculated by calling this function. The inputs can be
strings, functions, tuples/lists containing strings/functions
"""
# this function is not meant to be particularly fast
if isinstance(ins, str):
# a command is either a string
ret = ins
elif isinstance(ins, list) or isinstance(ins, tuple):
# or a list of functions/strings
ret = str([h_cmd(x) for x in ins])
else:
# or just a python function
ret = str(h_fun(ins))
if sys.hexversion > 0x3000000:
ret = ret.encode('iso8859-1', 'xmlcharrefreplace')
return ret
reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
def subst_vars(expr, params):
"""
Replace ${VAR} with the value of VAR taken from a dict or a config set::
from waflib import Utils
s = Utils.subst_vars('${PREFIX}/bin', env)
:type expr: string
:param expr: String to perform substitution on
:param params: Dictionary or config set to look up variable values.
"""
def repl_var(m):
if m.group(1):
return '\\'
if m.group(2):
return '$'
try:
# ConfigSet instances may contain lists
return params.get_flat(m.group(3))
except AttributeError:
return params[m.group(3)]
# if you get a TypeError, it means that 'expr' is not a string...
# Utils.subst_vars(None, env) will not work
return reg_subst.sub(repl_var, expr)
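# Worked example of the escape handling in repl_var above:
#     subst_vars('$${PREFIX} is ${PREFIX}', {'PREFIX': '/usr'}) -> '${PREFIX} is /usr'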
def destos_to_binfmt(key):
"""
Return the binary format based on the unversioned platform name.
:param key: platform name
:type key: string
:return: string representing the binary format
"""
if key == 'darwin':
return 'mac-o'
elif key in ('win32', 'cygwin', 'uwin', 'msys'):
return 'pe'
return 'elf'
def unversioned_sys_platform():
"""
Return the unversioned platform name.
	Some Python platform names contain versions that depend on
the build environment, e.g. linux2, freebsd6, etc.
This returns the name without the version number. Exceptions are
os2 and win32, which are returned verbatim.
:rtype: string
:return: Unversioned platform name
"""
s = sys.platform
if s.startswith('java'):
# The real OS is hidden under the JVM.
from java.lang import System
s = System.getProperty('os.name')
# see http://lopica.sourceforge.net/os.html for a list of possible values
if s == 'Mac OS X':
return 'darwin'
elif s.startswith('Windows '):
return 'win32'
elif s == 'OS/2':
return 'os2'
elif s == 'HP-UX':
return 'hp-ux'
elif s in ('SunOS', 'Solaris'):
return 'sunos'
else: s = s.lower()
# powerpc == darwin for our purposes
if s == 'powerpc':
return 'darwin'
if s == 'win32' or s == 'os2':
return s
if s == 'cli' and os.name == 'nt':
# ironpython is only on windows as far as we know
return 'win32'
	return re.split(r'\d+$', s)[0]
def nada(*k, **kw):
"""
A function that does nothing
:return: None
"""
pass
class Timer(object):
"""
Simple object for timing the execution of commands.
Its string representation is the current time::
from waflib.Utils import Timer
timer = Timer()
a_few_operations()
s = str(timer)
"""
def __init__(self):
self.start_time = datetime.datetime.utcnow()
def __str__(self):
delta = datetime.datetime.utcnow() - self.start_time
days = delta.days
hours, rem = divmod(delta.seconds, 3600)
minutes, seconds = divmod(rem, 60)
seconds += delta.microseconds * 1e-6
result = ''
if days:
result += '%dd' % days
if days or hours:
result += '%dh' % hours
if days or hours or minutes:
result += '%dm' % minutes
return '%s%.3fs' % (result, seconds)
if is_win32:
old = shutil.copy2
def copy2(src, dst):
"""
shutil.copy2 does not copy the file attributes on windows, so we
hack into the shutil module to fix the problem
"""
old(src, dst)
shutil.copystat(src, dst)
setattr(shutil, 'copy2', copy2)
if os.name == 'java':
# Jython cannot disable the gc but they can enable it ... wtf?
try:
gc.disable()
gc.enable()
except NotImplementedError:
gc.disable = gc.enable
def read_la_file(path):
"""
Read property files, used by msvc.py
:param path: file to read
:type path: string
"""
sp = re.compile(r'^([^=]+)=\'(.*)\'$')
dc = {}
for line in readf(path).splitlines():
try:
_, left, right, _ = sp.split(line.strip())
dc[left] = right
except ValueError:
pass
return dc
def nogc(fun):
"""
Decorator: let a function disable the garbage collector during its execution.
It is used in the build context when storing/loading the build cache file (pickle)
:param fun: function to execute
:type fun: function
:return: the return value of the function executed
"""
def f(*k, **kw):
try:
gc.disable()
ret = fun(*k, **kw)
finally:
gc.enable()
return ret
f.__doc__ = fun.__doc__
return f
def run_once(fun):
"""
Decorator: let a function cache its results, use like this::
@run_once
def foo(k):
return 345*2343
:param fun: function to execute
:type fun: function
:return: the return value of the function executed
"""
cache = {}
def wrap(k):
try:
return cache[k]
except KeyError:
ret = fun(k)
cache[k] = ret
return ret
wrap.__cache__ = cache
return wrap
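# Behaviour sketch: the second call below is served from wrap.__cache__,
# so the wrapped function body runs only once per distinct key.
#     @run_once
#     def double(k):
#         return 2 * k
#     double(3); double(3)   # both return 6, `double` executes once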
def get_registry_app_path(key, filename):
if not winreg:
return None
try:
result = winreg.QueryValue(key, "Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe" % filename[0])
except WindowsError:
pass
else:
if os.path.isfile(result):
return result
def lib64():
# default settings for /usr/lib
if os.sep == '/':
if platform.architecture()[0] == '64bit':
if os.path.exists('/usr/lib64') and not os.path.exists('/usr/lib32'):
return '64'
return ''
def sane_path(p):
# private function for the time being!
return os.path.abspath(os.path.expanduser(p))
|
gpl-3.0
|
kidaa/encoded
|
src/contentbase/util.py
|
1
|
1415
|
from past.builtins import basestring
from pyramid.threadlocal import manager as threadlocal_manager
def get_root_request():
if threadlocal_manager.stack:
return threadlocal_manager.stack[0]['request']
def ensurelist(value):
if isinstance(value, basestring):
return [value]
return value
def simple_path_ids(obj, path):
if isinstance(path, basestring):
path = path.split('.')
if not path:
yield obj
return
name = path[0]
remaining = path[1:]
value = obj.get(name, None)
if value is None:
return
if not isinstance(value, list):
value = [value]
for member in value:
for result in simple_path_ids(member, remaining):
yield result
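# Hedged usage sketch with made-up data:
#     list(simple_path_ids({'a': {'b': [1, 2]}}, 'a.b')) -> [1, 2]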
def expand_path(request, obj, path):
if isinstance(path, basestring):
path = path.split('.')
if not path:
return
name = path[0]
remaining = path[1:]
value = obj.get(name, None)
if value is None:
return
if isinstance(value, list):
for index, member in enumerate(value):
if not isinstance(member, dict):
member = value[index] = request.embed(member, '@@object')
expand_path(request, member, remaining)
else:
if not isinstance(value, dict):
value = obj[name] = request.embed(value, '@@object')
expand_path(request, value, remaining)
|
mit
|
Denisolt/Tensorflow_Chat_Bot
|
local/lib/python2.7/site-packages/tensorflow/contrib/learn/python/learn/dataframe/queues/feeding_functions.py
|
12
|
12480
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Helper functions for enqueuing data from arrays and pandas `DataFrame`s."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import random
import numpy as np
from tensorflow.contrib.learn.python.learn.dataframe.queues import feeding_queue_runner as fqr
from tensorflow.python import summary
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import queue_runner
# pylint: disable=g-import-not-at-top
try:
import pandas as pd
HAS_PANDAS = True
except ImportError:
HAS_PANDAS = False
class _ArrayFeedFn(object):
"""Creates feed dictionaries from numpy arrays."""
def __init__(self,
placeholders,
array,
batch_size,
random_start=False,
seed=None,
num_epochs=None):
if len(placeholders) != 2:
raise ValueError("_array_feed_fn expects 2 placeholders; got {}.".format(
len(placeholders)))
self._placeholders = placeholders
self._array = array
self._max = len(array)
self._batch_size = batch_size
self._num_epochs = num_epochs
self._epoch = 0
random.seed(seed)
self._trav = random.randrange(self._max) if random_start else 0
self._epoch_end = (self._trav - 1) % self._max
def __call__(self):
if self._num_epochs and self._epoch >= self._num_epochs:
raise errors.OutOfRangeError(None, None,
"Already emitted %s epochs." % self._epoch)
integer_indexes = [j % self._max
for j in range(self._trav, self._trav + self._batch_size)
]
if self._epoch_end in integer_indexes:
# after this batch we will have processed self._epoch epochs, possibly
# overshooting a bit to fill out a batch.
self._epoch += 1
self._trav = (integer_indexes[-1] + 1) % self._max
return {self._placeholders[0]: integer_indexes,
self._placeholders[1]: self._array[integer_indexes]}
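# Worked trace of the wrap-around bookkeeping in __call__ above, assuming
# _max == 5, _batch_size == 3 and random_start == False (so _trav == 0 and
# _epoch_end == 4): the first call yields indexes [0, 1, 2]; the second
# yields [3, 4, 0], which contains _epoch_end, so _epoch is bumped to 1 even
# though the batch overshot the epoch boundary by one row.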
class _OrderedDictNumpyFeedFn(object):
"""Creates feed dictionaries from `OrderedDict`s of numpy arrays."""
def __init__(self,
placeholders,
ordered_dict_of_arrays,
batch_size,
random_start=False,
seed=None,
num_epochs=None):
    if len(placeholders) != len(ordered_dict_of_arrays) + 1:
      raise ValueError("Expected {} placeholders; got {}.".format(
          len(ordered_dict_of_arrays) + 1, len(placeholders)))
self._index_placeholder = placeholders[0]
self._col_placeholders = placeholders[1:]
self._ordered_dict_of_arrays = ordered_dict_of_arrays
self._max = len(ordered_dict_of_arrays.values()[0])
for _, v in ordered_dict_of_arrays.items():
if len(v) != self._max:
raise ValueError("Array lengths must match.")
self._batch_size = batch_size
self._num_epochs = num_epochs
self._epoch = 0
random.seed(seed)
self._trav = random.randrange(self._max) if random_start else 0
self._epoch_end = (self._trav - 1) % self._max
def __call__(self):
if self._num_epochs and self._epoch >= self._num_epochs:
raise errors.OutOfRangeError(None, None,
"Already emitted %s epochs." % self._epoch)
integer_indexes = [j % self._max
for j in range(self._trav, self._trav + self._batch_size)
]
if self._epoch_end in integer_indexes:
# after this batch we will have processed self._epoch epochs, possibly
# overshooting a bit to fill out a batch.
self._epoch += 1
self._trav = (integer_indexes[-1] + 1) % self._max
feed_dict = {self._index_placeholder: integer_indexes}
cols = [column[integer_indexes]
for column in self._ordered_dict_of_arrays.values()]
feed_dict.update(dict(zip(self._col_placeholders, cols)))
return feed_dict
class _PandasFeedFn(object):
"""Creates feed dictionaries from pandas `DataFrames`."""
def __init__(self,
placeholders,
dataframe,
batch_size,
random_start=False,
seed=None,
num_epochs=None):
    if len(placeholders) != len(dataframe.columns) + 1:
      raise ValueError("Expected {} placeholders; got {}.".format(
          len(dataframe.columns) + 1, len(placeholders)))
self._index_placeholder = placeholders[0]
self._col_placeholders = placeholders[1:]
self._dataframe = dataframe
self._max = len(dataframe)
self._batch_size = batch_size
self._num_epochs = num_epochs
self._epoch = 0
random.seed(seed)
self._trav = random.randrange(self._max) if random_start else 0
self._epoch_end = (self._trav - 1) % self._max
def __call__(self):
if self._num_epochs and self._epoch >= self._num_epochs:
raise errors.OutOfRangeError(None, None,
"Already emitted %s epochs." % self._epoch)
integer_indexes = [j % self._max
for j in range(self._trav, self._trav + self._batch_size)
]
if self._epoch_end in integer_indexes:
# after this batch we will have processed self._epoch epochs, possibly
# overshooting a bit to fill out a batch.
self._epoch += 1
if self._epoch == self._num_epochs:
# trim this batch, so as not to overshoot the last epoch.
batch_end_inclusive = integer_indexes.index(self._epoch_end)
integer_indexes = integer_indexes[:(batch_end_inclusive+1)]
self._trav = (integer_indexes[-1] + 1) % self._max
result = self._dataframe.iloc[integer_indexes]
cols = [result[col].values for col in result.columns]
feed_dict = dict(zip(self._col_placeholders, cols))
feed_dict[self._index_placeholder] = result.index.values
return feed_dict
def enqueue_data(data,
capacity,
shuffle=False,
min_after_dequeue=None,
num_threads=1,
seed=None,
name="enqueue_input",
enqueue_size=1,
num_epochs=None):
"""Creates a queue filled from a numpy array or pandas `DataFrame`.
Returns a queue filled with the rows of the given array or `DataFrame`. In
the case of a pandas `DataFrame`, the first enqueued `Tensor` corresponds to
the index of the `DataFrame`. For numpy arrays, the first enqueued `Tensor`
contains the row number.
Args:
    data: a numpy `ndarray` or pandas `DataFrame` that will be read into the
queue.
capacity: the capacity of the queue.
shuffle: whether or not to shuffle the rows of the array.
min_after_dequeue: minimum number of elements that can remain in the queue
after a dequeue operation. Only used when `shuffle` is true. If not set,
defaults to `capacity` / 4.
num_threads: number of threads used for reading and enqueueing.
seed: used to seed shuffling and reader starting points.
name: a scope name identifying the data.
enqueue_size: the number of rows to enqueue per step.
num_epochs: limit enqueuing to a specified number of epochs, if provided.
Returns:
A queue filled with the rows of the given array or `DataFrame`.
Raises:
TypeError: `data` is not a Pandas `DataFrame` or a numpy `ndarray`.
"""
with ops.name_scope(name):
if isinstance(data, np.ndarray):
types = [dtypes.int64, dtypes.as_dtype(data.dtype)]
queue_shapes = [(), data.shape[1:]]
get_feed_fn = _ArrayFeedFn
elif isinstance(data, collections.OrderedDict):
types = [dtypes.int64] + [dtypes.as_dtype(col.dtype)
for col in data.values()]
queue_shapes = [()] + [col.shape[1:] for col in data.values()]
get_feed_fn = _OrderedDictNumpyFeedFn
elif HAS_PANDAS and isinstance(data, pd.DataFrame):
types = [dtypes.as_dtype(dt)
for dt in [data.index.dtype] + list(data.dtypes)]
queue_shapes = [() for _ in types]
get_feed_fn = _PandasFeedFn
else:
raise TypeError(
"data must be either a numpy array or pandas DataFrame if pandas is "
"installed; got {}".format(type(data).__name__))
# TODO(jamieas): TensorBoard warnings for all warnings below once available.
if num_threads > 1 and num_epochs is not None:
logging.warning(
"enqueue_data was called with num_epochs and num_threads > 1. "
"num_epochs is applied per thread, so this will produce more "
"epochs than you probably intend. "
"If you want to limit epochs, use one thread.")
if shuffle and num_threads > 1 and num_epochs is not None:
logging.warning(
"enqueue_data was called with shuffle=True, num_threads > 1, and "
"num_epochs. This will create multiple threads, all reading the "
"array/dataframe in order adding to the same shuffling queue; the "
"results will likely not be sufficiently shuffled.")
if not shuffle and num_threads > 1:
logging.warning(
"enqueue_data was called with shuffle=False and num_threads > 1. "
"This will create multiple threads, all reading the "
"array/dataframe in order. If you want examples read in order, use"
" one thread; if you want multiple threads, enable shuffling.")
if shuffle:
min_after_dequeue = int(capacity / 4 if min_after_dequeue is None else
min_after_dequeue)
queue = data_flow_ops.RandomShuffleQueue(capacity,
min_after_dequeue,
dtypes=types,
shapes=queue_shapes,
seed=seed)
else:
min_after_dequeue = 0 # just for the summary text
queue = data_flow_ops.FIFOQueue(capacity,
dtypes=types,
shapes=queue_shapes)
enqueue_ops = []
feed_fns = []
for i in range(num_threads):
# Note the placeholders have no shapes, so they will accept any
# enqueue_size. enqueue_many below will break them up.
placeholders = [array_ops.placeholder(t) for t in types]
enqueue_ops.append(queue.enqueue_many(placeholders))
seed_i = None if seed is None else (i + 1) * seed
feed_fns.append(get_feed_fn(placeholders,
data,
enqueue_size,
random_start=shuffle,
seed=seed_i,
num_epochs=num_epochs))
runner = fqr.FeedingQueueRunner(queue=queue,
enqueue_ops=enqueue_ops,
feed_fns=feed_fns)
queue_runner.add_queue_runner(runner)
full = (math_ops.cast(
math_ops.maximum(0, queue.size() - min_after_dequeue),
dtypes.float32) * (1. / (capacity - min_after_dequeue)))
# Note that name contains a '/' at the end so we intentionally do not place
# a '/' after %s below.
summary_name = ("queue/%sfraction_over_%d_of_%d_full" %
(queue.name, min_after_dequeue,
capacity - min_after_dequeue))
summary.scalar(summary_name, full)
return queue
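# Hedged usage sketch (shapes and capacities are made up; the queue is meant
# to be consumed in a session with queue runners started):
#     data = np.arange(10).reshape(5, 2)
#     q = enqueue_data(data, capacity=32, shuffle=True, min_after_dequeue=4)
#     index, rows = q.dequeue_many(3)   # the first tensor is the row number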
|
gpl-3.0
|
y-higuchi/ramcloud
|
scripts/upload.py
|
10
|
64911
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Modified by Diego Ongaro <[email protected]> 2010-06-28:
# - Set rccodereview.appspot.com as the default code review server.
"""Tool for uploading diffs from a version control system to the codereview app.
Usage summary: upload.py [options] [-- diff_options]
Diff options are passed to the diff command of the underlying system.
Supported version control systems:
Git
Mercurial
Subversion
It is important for Git/Mercurial users to specify a tree/node/branch to diff
against by using the '--rev' option.
"""
# This code is derived from appcfg.py in the App Engine SDK (open source),
# and from ASPN recipe #146306.
import ConfigParser
import cookielib
import fnmatch
import getpass
import logging
import mimetypes
import optparse
import os
import re
import socket
import subprocess
import sys
import urllib
import urllib2
import urlparse
# The md5 module was deprecated in Python 2.5.
try:
from hashlib import md5
except ImportError:
from md5 import md5
try:
import readline
except ImportError:
pass
# The logging verbosity:
# 0: Errors only.
# 1: Status messages.
# 2: Info logs.
# 3: Debug logs.
verbosity = 1
# The account type used for authentication.
# This line could be changed by the review server (see handler for
# upload.py).
AUTH_ACCOUNT_TYPE = "GOOGLE"
# URL of the default review server. As for AUTH_ACCOUNT_TYPE, this line could be
# changed by the review server (see handler for upload.py).
DEFAULT_REVIEW_SERVER = "rccodereview.appspot.com"
# Max size of patch or base file.
MAX_UPLOAD_SIZE = 900 * 1024
# Constants for version control names. Used by GuessVCSName.
VCS_GIT = "Git"
VCS_MERCURIAL = "Mercurial"
VCS_SUBVERSION = "Subversion"
VCS_UNKNOWN = "Unknown"
# whitelist for non-binary filetypes which do not start with "text/"
# .mm (Objective-C) shows up as application/x-freemind on my Linux box.
TEXT_MIMETYPES = ['application/javascript', 'application/x-javascript',
'application/xml', 'application/x-freemind']
VCS_ABBREVIATIONS = {
VCS_MERCURIAL.lower(): VCS_MERCURIAL,
"hg": VCS_MERCURIAL,
VCS_SUBVERSION.lower(): VCS_SUBVERSION,
"svn": VCS_SUBVERSION,
VCS_GIT.lower(): VCS_GIT,
}
# The result of parsing Subversion's [auto-props] setting.
svn_auto_props_map = None
def GetEmail(prompt):
"""Prompts the user for their email address and returns it.
The last used email address is saved to a file and offered up as a suggestion
to the user. If the user presses enter without typing in anything the last
used email address is used. If the user enters a new address, it is saved
for next time we prompt.
"""
last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
last_email = ""
if os.path.exists(last_email_file_name):
try:
last_email_file = open(last_email_file_name, "r")
last_email = last_email_file.readline().strip("\n")
last_email_file.close()
prompt += " [%s]" % last_email
except IOError, e:
pass
email = raw_input(prompt + ": ").strip()
if email:
try:
last_email_file = open(last_email_file_name, "w")
last_email_file.write(email)
last_email_file.close()
except IOError, e:
pass
else:
email = last_email
return email
def StatusUpdate(msg):
"""Print a status message to stdout.
If 'verbosity' is greater than 0, print the message.
Args:
msg: The string to print.
"""
if verbosity > 0:
print msg
def ErrorExit(msg):
"""Print an error message to stderr and exit."""
print >>sys.stderr, msg
sys.exit(1)
class ClientLoginError(urllib2.HTTPError):
"""Raised to indicate there was an error authenticating with ClientLogin."""
def __init__(self, url, code, msg, headers, args):
urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
self.args = args
self.reason = args["Error"]
class AbstractRpcServer(object):
"""Provides a common interface for a simple RPC server."""
def __init__(self, host, auth_function, host_override=None, extra_headers={},
save_cookies=False, account_type=AUTH_ACCOUNT_TYPE):
"""Creates a new HttpRpcServer.
Args:
host: The host to send requests to.
auth_function: A function that takes no arguments and returns an
(email, password) tuple when called. Will be called if authentication
is required.
host_override: The host header to send to the server (defaults to host).
extra_headers: A dict of extra headers to append to every request.
save_cookies: If True, save the authentication cookies to local disk.
If False, use an in-memory cookiejar instead. Subclasses must
implement this functionality. Defaults to False.
account_type: Account type used for authentication. Defaults to
AUTH_ACCOUNT_TYPE.
"""
self.host = host
if (not self.host.startswith("http://") and
not self.host.startswith("https://")):
self.host = "http://" + self.host
self.host_override = host_override
self.auth_function = auth_function
self.authenticated = False
self.extra_headers = extra_headers
self.save_cookies = save_cookies
self.account_type = account_type
self.opener = self._GetOpener()
if self.host_override:
logging.info("Server: %s; Host: %s", self.host, self.host_override)
else:
logging.info("Server: %s", self.host)
def _GetOpener(self):
"""Returns an OpenerDirector for making HTTP requests.
Returns:
A urllib2.OpenerDirector object.
"""
raise NotImplementedError()
def _CreateRequest(self, url, data=None):
"""Creates a new urllib request."""
logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
req = urllib2.Request(url, data=data)
if self.host_override:
req.add_header("Host", self.host_override)
for key, value in self.extra_headers.iteritems():
req.add_header(key, value)
return req
def _GetAuthToken(self, email, password):
"""Uses ClientLogin to authenticate the user, returning an auth token.
Args:
email: The user's email address
password: The user's password
Raises:
ClientLoginError: If there was an error authenticating with ClientLogin.
HTTPError: If there was some other form of HTTP error.
Returns:
The authentication token returned by ClientLogin.
"""
account_type = self.account_type
if self.host.endswith(".google.com"):
# Needed for use inside Google.
account_type = "HOSTED"
req = self._CreateRequest(
url="https://www.google.com/accounts/ClientLogin",
data=urllib.urlencode({
"Email": email,
"Passwd": password,
"service": "ah",
"source": "rietveld-codereview-upload",
"accountType": account_type,
}),
)
try:
response = self.opener.open(req)
response_body = response.read()
response_dict = dict(x.split("=")
for x in response_body.split("\n") if x)
return response_dict["Auth"]
except urllib2.HTTPError, e:
if e.code == 403:
body = e.read()
response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
raise ClientLoginError(req.get_full_url(), e.code, e.msg,
e.headers, response_dict)
else:
raise
def _GetAuthCookie(self, auth_token):
"""Fetches authentication cookies for an authentication token.
Args:
auth_token: The authentication token returned by ClientLogin.
Raises:
HTTPError: If there was an error fetching the authentication cookies.
"""
# This is a dummy value to allow us to identify when we're successful.
continue_location = "http://localhost/"
args = {"continue": continue_location, "auth": auth_token}
req = self._CreateRequest("%s/_ah/login?%s" %
(self.host, urllib.urlencode(args)))
try:
response = self.opener.open(req)
except urllib2.HTTPError, e:
response = e
if (response.code != 302 or
response.info()["location"] != continue_location):
raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
response.headers, response.fp)
self.authenticated = True
def _Authenticate(self):
"""Authenticates the user.
The authentication process works as follows:
1) We get a username and password from the user
2) We use ClientLogin to obtain an AUTH token for the user
(see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
3) We pass the auth token to /_ah/login on the server to obtain an
authentication cookie. If login was successful, it tries to redirect
us to the URL we provided.
If we attempt to access the upload API without first obtaining an
authentication cookie, it returns a 401 response (or a 302) and
directs us to authenticate ourselves with ClientLogin.
"""
for i in range(3):
credentials = self.auth_function()
try:
auth_token = self._GetAuthToken(credentials[0], credentials[1])
except ClientLoginError, e:
if e.reason == "BadAuthentication":
print >>sys.stderr, "Invalid username or password."
continue
if e.reason == "CaptchaRequired":
print >>sys.stderr, (
"Please go to\n"
"https://www.google.com/accounts/DisplayUnlockCaptcha\n"
"and verify you are a human. Then try again.\n"
"If you are using a Google Apps account the URL is:\n"
"https://www.google.com/a/yourdomain.com/UnlockCaptcha")
break
if e.reason == "NotVerified":
print >>sys.stderr, "Account not verified."
break
if e.reason == "TermsNotAgreed":
print >>sys.stderr, "User has not agreed to TOS."
break
if e.reason == "AccountDeleted":
print >>sys.stderr, "The user account has been deleted."
break
if e.reason == "AccountDisabled":
print >>sys.stderr, "The user account has been disabled."
break
if e.reason == "ServiceDisabled":
print >>sys.stderr, ("The user's access to the service has been "
"disabled.")
break
if e.reason == "ServiceUnavailable":
print >>sys.stderr, "The service is not available; try again later."
break
raise
self._GetAuthCookie(auth_token)
return
def Send(self, request_path, payload=None,
content_type="application/octet-stream",
timeout=None,
extra_headers=None,
**kwargs):
"""Sends an RPC and returns the response.
Args:
request_path: The path to send the request to, eg /api/appversion/create.
payload: The body of the request, or None to send an empty request.
content_type: The Content-Type header to use.
timeout: timeout in seconds; default None i.e. no timeout.
(Note: for large requests on OS X, the timeout doesn't work right.)
extra_headers: Dict containing additional HTTP headers that should be
included in the request (string header names mapped to their values),
or None to not include any additional headers.
kwargs: Any keyword arguments are converted into query string parameters.
Returns:
The response body, as a string.
"""
# TODO: Don't require authentication. Let the server say
# whether it is necessary.
if not self.authenticated:
self._Authenticate()
old_timeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(timeout)
try:
tries = 0
while True:
tries += 1
args = dict(kwargs)
url = "%s%s" % (self.host, request_path)
if args:
url += "?" + urllib.urlencode(args)
req = self._CreateRequest(url=url, data=payload)
req.add_header("Content-Type", content_type)
if extra_headers:
for header, value in extra_headers.items():
req.add_header(header, value)
try:
f = self.opener.open(req)
response = f.read()
f.close()
return response
except urllib2.HTTPError, e:
if tries > 3:
raise
elif e.code == 401 or e.code == 302:
self._Authenticate()
## elif e.code >= 500 and e.code < 600:
## # Server Error - try again.
## continue
else:
raise
finally:
socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
"""Provides a simplified RPC-style interface for HTTP requests."""
def _Authenticate(self):
"""Save the cookie jar after authentication."""
super(HttpRpcServer, self)._Authenticate()
if self.save_cookies:
StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
self.cookie_jar.save()
def _GetOpener(self):
"""Returns an OpenerDirector that supports cookies and ignores redirects.
Returns:
A urllib2.OpenerDirector object.
"""
opener = urllib2.OpenerDirector()
opener.add_handler(urllib2.ProxyHandler())
opener.add_handler(urllib2.UnknownHandler())
opener.add_handler(urllib2.HTTPHandler())
opener.add_handler(urllib2.HTTPDefaultErrorHandler())
opener.add_handler(urllib2.HTTPSHandler())
opener.add_handler(urllib2.HTTPErrorProcessor())
if self.save_cookies:
self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
if os.path.exists(self.cookie_file):
try:
self.cookie_jar.load()
self.authenticated = True
StatusUpdate("Loaded authentication cookies from %s" %
self.cookie_file)
except (cookielib.LoadError, IOError):
# Failed to load cookies - just ignore them.
pass
else:
# Create an empty cookie file with mode 600
fd = os.open(self.cookie_file, os.O_CREAT, 0600)
os.close(fd)
# Always chmod the cookie file
os.chmod(self.cookie_file, 0600)
else:
      # Don't save cookies across runs of upload.py.
self.cookie_jar = cookielib.CookieJar()
opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
return opener
parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
parser.add_option("-y", "--assume_yes", action="store_true",
dest="assume_yes", default=False,
help="Assume that the answer to yes/no questions is 'yes'.")
# Logging
group = parser.add_option_group("Logging options")
group.add_option("-q", "--quiet", action="store_const", const=0,
dest="verbose", help="Print errors only.")
group.add_option("-v", "--verbose", action="store_const", const=2,
dest="verbose", default=1,
help="Print info level logs (default).")
group.add_option("--noisy", action="store_const", const=3,
dest="verbose", help="Print all logs.")
# Review server
group = parser.add_option_group("Review server options")
group.add_option("-s", "--server", action="store", dest="server",
default=DEFAULT_REVIEW_SERVER,
metavar="SERVER",
help=("The server to upload to. The format is host[:port]. "
"Defaults to '%default'."))
group.add_option("-e", "--email", action="store", dest="email",
metavar="EMAIL", default=None,
help="The username to use. Will prompt if omitted.")
group.add_option("-H", "--host", action="store", dest="host",
metavar="HOST", default=None,
help="Overrides the Host header sent with all RPCs.")
group.add_option("--no_cookies", action="store_false",
dest="save_cookies", default=True,
help="Do not save authentication cookies to local disk.")
group.add_option("--account_type", action="store", dest="account_type",
metavar="TYPE", default=AUTH_ACCOUNT_TYPE,
choices=["GOOGLE", "HOSTED"],
help=("Override the default account type "
"(defaults to '%default', "
"valid choices are 'GOOGLE' and 'HOSTED')."))
# Issue
group = parser.add_option_group("Issue options")
group.add_option("-d", "--description", action="store", dest="description",
metavar="DESCRIPTION", default=None,
help="Optional description when creating an issue.")
group.add_option("-f", "--description_file", action="store",
dest="description_file", metavar="DESCRIPTION_FILE",
default=None,
help="Optional path of a file that contains "
"the description when creating an issue.")
group.add_option("-r", "--reviewers", action="store", dest="reviewers",
metavar="REVIEWERS", default=None,
help="Add reviewers (comma separated email addresses).")
group.add_option("--cc", action="store", dest="cc",
metavar="CC", default=None,
help="Add CC (comma separated email addresses).")
group.add_option("--private", action="store_true", dest="private",
default=False,
help="Make the issue restricted to reviewers and those CCed")
# Upload options
group = parser.add_option_group("Patch options")
group.add_option("-m", "--message", action="store", dest="message",
metavar="MESSAGE", default=None,
help="A message to identify the patch. "
"Will prompt if omitted.")
group.add_option("-i", "--issue", type="int", action="store",
metavar="ISSUE", default=None,
help="Issue number to which to add. Defaults to new issue.")
group.add_option("--base_url", action="store", dest="base_url", default=None,
help="Base repository URL (listed as \"Base URL\" when "
"viewing issue). If omitted, will be guessed automatically "
"for SVN repos and left blank for others.")
group.add_option("--download_base", action="store_true",
dest="download_base", default=False,
help="Base files will be downloaded by the server "
"(side-by-side diffs may not work on files with CRs).")
group.add_option("--rev", action="store", dest="revision",
metavar="REV", default=None,
help="Base revision/branch/tree to diff against. Use "
"rev1:rev2 range to review already committed changeset.")
group.add_option("--send_mail", action="store_true",
dest="send_mail", default=False,
help="Send notification email to reviewers.")
group.add_option("--vcs", action="store", dest="vcs",
metavar="VCS", default=None,
help=("Version control system (optional, usually upload.py "
"already guesses the right VCS)."))
group.add_option("--emulate_svn_auto_props", action="store_true",
dest="emulate_svn_auto_props", default=False,
help=("Emulate Subversion's auto properties feature."))
def GetRpcServer(server, email=None, host_override=None, save_cookies=True,
account_type=AUTH_ACCOUNT_TYPE):
"""Returns an instance of an AbstractRpcServer.
Args:
server: String containing the review server URL.
email: String containing user's email address.
host_override: If not None, string containing an alternate hostname to use
in the host header.
save_cookies: Whether authentication cookies should be saved to disk.
account_type: Account type for authentication, either 'GOOGLE'
or 'HOSTED'. Defaults to AUTH_ACCOUNT_TYPE.
Returns:
A new AbstractRpcServer, on which RPC calls can be made.
"""
rpc_server_class = HttpRpcServer
# If this is the dev_appserver, use fake authentication.
host = (host_override or server).lower()
if host == "localhost" or host.startswith("localhost:"):
if email is None:
email = "[email protected]"
logging.info("Using debug user %s. Override with --email" % email)
server = rpc_server_class(
server,
lambda: (email, "password"),
host_override=host_override,
extra_headers={"Cookie":
'dev_appserver_login="%s:False"' % email},
save_cookies=save_cookies,
account_type=account_type)
# Don't try to talk to ClientLogin.
server.authenticated = True
return server
def GetUserCredentials():
"""Prompts the user for a username and password."""
# Create a local alias to the email variable to avoid Python's crazy
# scoping rules.
local_email = email
if local_email is None:
local_email = GetEmail("Email (login for uploading to %s)" % server)
password = getpass.getpass("Password for %s: " % local_email)
return (local_email, password)
return rpc_server_class(server,
GetUserCredentials,
host_override=host_override,
save_cookies=save_cookies)
def EncodeMultipartFormData(fields, files):
"""Encode form fields for multipart/form-data.
Args:
fields: A sequence of (name, value) elements for regular form fields.
files: A sequence of (name, filename, value) elements for data to be
uploaded as files.
Returns:
(content_type, body) ready for httplib.HTTP instance.
Source:
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
"""
BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
CRLF = '\r\n'
lines = []
for (key, value) in fields:
lines.append('--' + BOUNDARY)
lines.append('Content-Disposition: form-data; name="%s"' % key)
lines.append('')
if isinstance(value, unicode):
value = value.encode('utf-8')
lines.append(value)
for (key, filename, value) in files:
lines.append('--' + BOUNDARY)
lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' %
(key, filename))
lines.append('Content-Type: %s' % GetContentType(filename))
lines.append('')
if isinstance(value, unicode):
value = value.encode('utf-8')
lines.append(value)
lines.append('--' + BOUNDARY + '--')
lines.append('')
body = CRLF.join(lines)
content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
return content_type, body
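# Hedged usage sketch (field and file values are made up):
#     ctype, body = EncodeMultipartFormData(
#         [("subject", "demo")], [("data", "a.txt", "hello")])
#     ctype == 'multipart/form-data; boundary=-M-A-G-I-C---B-O-U-N-D-A-R-Y-'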
def GetContentType(filename):
"""Helper to guess the content-type from the filename."""
return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
# Use a shell for subcommands on Windows to get a PATH search.
use_shell = sys.platform.startswith("win")
def RunShellWithReturnCode(command, print_output=False,
universal_newlines=True,
env=os.environ):
"""Executes a command and returns the output from stdout and the return code.
Args:
command: Command to execute.
print_output: If True, the output is printed to stdout.
If False, both stdout and stderr are ignored.
universal_newlines: Use universal_newlines flag (default: True).
Returns:
Tuple (output, return code)
"""
logging.info("Running %s", command)
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=use_shell, universal_newlines=universal_newlines,
env=env)
if print_output:
output_array = []
while True:
line = p.stdout.readline()
if not line:
break
print line.strip("\n")
output_array.append(line)
output = "".join(output_array)
else:
output = p.stdout.read()
p.wait()
errout = p.stderr.read()
if print_output and errout:
print >>sys.stderr, errout
p.stdout.close()
p.stderr.close()
return output, p.returncode
def RunShell(command, silent_ok=False, universal_newlines=True,
print_output=False, env=os.environ):
data, retcode = RunShellWithReturnCode(command, print_output,
universal_newlines, env)
if retcode:
ErrorExit("Got error status from %s:\n%s" % (command, data))
if not silent_ok and not data:
ErrorExit("No output from %s" % command)
return data
class VersionControlSystem(object):
"""Abstract base class providing an interface to the VCS."""
def __init__(self, options):
"""Constructor.
Args:
options: Command line options.
"""
self.options = options
def PostProcessDiff(self, diff):
"""Return the diff with any special post processing this VCS needs, e.g.
to include an svn-style "Index:"."""
return diff
def GenerateDiff(self, args):
"""Return the current diff as a string.
Args:
args: Extra arguments to pass to the diff command.
"""
raise NotImplementedError(
"abstract method -- subclass %s must override" % self.__class__)
def GetUnknownFiles(self):
"""Return a list of files unknown to the VCS."""
raise NotImplementedError(
"abstract method -- subclass %s must override" % self.__class__)
def CheckForUnknownFiles(self):
"""Show an "are you sure?" prompt if there are unknown files."""
unknown_files = self.GetUnknownFiles()
if unknown_files:
print "The following files are not added to version control:"
for line in unknown_files:
print line
prompt = "Are you sure to continue?(y/N) "
answer = raw_input(prompt).strip()
if answer != "y":
ErrorExit("User aborted")
def GetBaseFile(self, filename):
"""Get the content of the upstream version of a file.
Returns:
A tuple (base_content, new_content, is_binary, status)
base_content: The contents of the base file.
new_content: For text files, this is empty. For binary files, this is
the contents of the new file, since the diff output won't contain
information to reconstruct the current file.
is_binary: True iff the file is binary.
status: The status of the file.
"""
raise NotImplementedError(
"abstract method -- subclass %s must override" % self.__class__)
def GetBaseFiles(self, diff):
"""Helper that calls GetBase file for each file in the patch.
Returns:
A dictionary that maps from filename to GetBaseFile's tuple. Filenames
are retrieved based on lines that start with "Index:" or
"Property changes on:".
"""
files = {}
for line in diff.splitlines(True):
if line.startswith('Index:') or line.startswith('Property changes on:'):
unused, filename = line.split(':', 1)
# On Windows if a file has property changes its filename uses '\'
# instead of '/'.
filename = filename.strip().replace('\\', '/')
files[filename] = self.GetBaseFile(filename)
return files
def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
files):
"""Uploads the base files (and if necessary, the current ones as well)."""
def UploadFile(filename, file_id, content, is_binary, status, is_base):
"""Uploads a file to the server."""
file_too_large = False
if is_base:
type = "base"
else:
type = "current"
if len(content) > MAX_UPLOAD_SIZE:
print ("Not uploading the %s file for %s because it's too large." %
(type, filename))
file_too_large = True
content = ""
checksum = md5(content).hexdigest()
if options.verbose > 0 and not file_too_large:
print "Uploading %s file for %s" % (type, filename)
url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
form_fields = [("filename", filename),
("status", status),
("checksum", checksum),
("is_binary", str(is_binary)),
("is_current", str(not is_base)),
]
if file_too_large:
form_fields.append(("file_too_large", "1"))
if options.email:
form_fields.append(("user", options.email))
ctype, body = EncodeMultipartFormData(form_fields,
[("data", filename, content)])
response_body = rpc_server.Send(url, body,
content_type=ctype)
if not response_body.startswith("OK"):
StatusUpdate(" --> %s" % response_body)
sys.exit(1)
patches = dict()
[patches.setdefault(v, k) for k, v in patch_list]
for filename in patches.keys():
base_content, new_content, is_binary, status = files[filename]
file_id_str = patches.get(filename)
if file_id_str.find("nobase") != -1:
base_content = None
file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
file_id = int(file_id_str)
if base_content != None:
UploadFile(filename, file_id, base_content, is_binary, status, True)
if new_content != None:
UploadFile(filename, file_id, new_content, is_binary, status, False)
def IsImage(self, filename):
"""Returns true if the filename has an image extension."""
mimetype = mimetypes.guess_type(filename)[0]
if not mimetype:
return False
return mimetype.startswith("image/")
def IsBinary(self, filename):
"""Returns true if the guessed mimetyped isnt't in text group."""
mimetype = mimetypes.guess_type(filename)[0]
if not mimetype:
return False # e.g. README, "real" binaries usually have an extension
# special case for text files which don't start with text/
if mimetype in TEXT_MIMETYPES:
return False
return not mimetype.startswith("text/")
class SubversionVCS(VersionControlSystem):
"""Implementation of the VersionControlSystem interface for Subversion."""
def __init__(self, options):
super(SubversionVCS, self).__init__(options)
if self.options.revision:
match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
if not match:
ErrorExit("Invalid Subversion revision %s." % self.options.revision)
self.rev_start = match.group(1)
self.rev_end = match.group(3)
else:
self.rev_start = self.rev_end = None
    # Cache output from "svn list -r REVNO dirname".
    # Keys: dirname, Values: 2-tuple (output for start rev and end rev).
self.svnls_cache = {}
    # Base URL is required to fetch files deleted in an older revision.
    # The result is cached so we don't guess it over and over in GetBaseFile().
required = self.options.download_base or self.options.revision is not None
self.svn_base = self._GuessBase(required)
def GuessBase(self, required):
"""Wrapper for _GuessBase."""
return self.svn_base
def _GuessBase(self, required):
"""Returns the SVN base URL.
Args:
      required: If true, exits if the URL can't be guessed; otherwise None is
        returned.
"""
info = RunShell(["svn", "info"])
for line in info.splitlines():
words = line.split()
if len(words) == 2 and words[0] == "URL:":
url = words[1]
scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
username, netloc = urllib.splituser(netloc)
if username:
logging.info("Removed username from base URL")
if netloc.endswith("svn.python.org"):
if netloc == "svn.python.org":
if path.startswith("/projects/"):
path = path[9:]
elif netloc != "[email protected]":
ErrorExit("Unrecognized Python URL: %s" % url)
base = "http://svn.python.org/view/*checkout*%s/" % path
logging.info("Guessed Python base = %s", base)
elif netloc.endswith("svn.collab.net"):
if path.startswith("/repos/"):
path = path[6:]
base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
logging.info("Guessed CollabNet base = %s", base)
elif netloc.endswith(".googlecode.com"):
path = path + "/"
base = urlparse.urlunparse(("http", netloc, path, params,
query, fragment))
logging.info("Guessed Google Code base = %s", base)
else:
path = path + "/"
base = urlparse.urlunparse((scheme, netloc, path, params,
query, fragment))
logging.info("Guessed base = %s", base)
return base
if required:
ErrorExit("Can't find URL in output from svn info")
return None
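  # Illustrative walk-through of the guessing above (an assumed example, not
  # produced by the script): a checkout URL like
  #   svn+ssh://pythondev@svn.python.org/projects/python/trunk
  # loses its username, hits the svn.python.org special case, and becomes
  #   http://svn.python.org/view/*checkout*/python/trunk/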
def GenerateDiff(self, args):
cmd = ["svn", "diff"]
if self.options.revision:
cmd += ["-r", self.options.revision]
cmd.extend(args)
data = RunShell(cmd)
count = 0
for line in data.splitlines():
if line.startswith("Index:") or line.startswith("Property changes on:"):
count += 1
logging.info(line)
if not count:
ErrorExit("No valid patches found in output from svn diff")
return data
def _CollapseKeywords(self, content, keyword_str):
"""Collapses SVN keywords."""
    # svn cat translates keywords but svn diff doesn't. As a result of this
    # behavior, patching.PatchChunks() fails with a chunk mismatch error.
# This part was originally written by the Review Board development team
# who had the same problem (http://reviews.review-board.org/r/276/).
# Mapping of keywords to known aliases
svn_keywords = {
# Standard keywords
'Date': ['Date', 'LastChangedDate'],
'Revision': ['Revision', 'LastChangedRevision', 'Rev'],
'Author': ['Author', 'LastChangedBy'],
'HeadURL': ['HeadURL', 'URL'],
'Id': ['Id'],
# Aliases
'LastChangedDate': ['LastChangedDate', 'Date'],
'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
'LastChangedBy': ['LastChangedBy', 'Author'],
'URL': ['URL', 'HeadURL'],
}
def repl(m):
if m.group(2):
return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
return "$%s$" % m.group(1)
keywords = [keyword
for name in keyword_str.split(" ")
for keyword in svn_keywords.get(name, [])]
return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)
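  # A minimal standalone sketch of the substitution above (illustrative; the
  # real call passes the file's actual svn:keywords property as keyword_str):
  #
  #   >>> import re
  #   >>> def repl(m):
  #   ...   if m.group(2):
  #   ...     return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
  #   ...   return "$%s$" % m.group(1)
  #   >>> re.sub(r"\$(Id):(:?)([^\$]+)\$", repl, "$Id: upload.py 123 x $")
  #   '$Id$'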
def GetUnknownFiles(self):
status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
unknown_files = []
for line in status.split("\n"):
if line and line[0] == "?":
unknown_files.append(line)
return unknown_files
def ReadFile(self, filename):
"""Returns the contents of a file."""
    f = open(filename, 'rb')
    result = ""
    try:
      result = f.read()
    finally:
      f.close()
return result
def GetStatus(self, filename):
"""Returns the status of a file."""
if not self.options.revision:
status = RunShell(["svn", "status", "--ignore-externals", filename])
if not status:
ErrorExit("svn status returned no output for %s" % filename)
status_lines = status.splitlines()
# If file is in a cl, the output will begin with
# "\n--- Changelist 'cl_name':\n". See
# http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
if (len(status_lines) == 3 and
not status_lines[0] and
status_lines[1].startswith("--- Changelist")):
status = status_lines[2]
else:
status = status_lines[0]
# If we have a revision to diff against we need to run "svn list"
# for the old and the new revision and compare the results to get
# the correct status for a file.
else:
dirname, relfilename = os.path.split(filename)
if dirname not in self.svnls_cache:
cmd = ["svn", "list", "-r", self.rev_start, dirname or "."]
out, returncode = RunShellWithReturnCode(cmd)
if returncode:
ErrorExit("Failed to get status for %s." % filename)
old_files = out.splitlines()
args = ["svn", "list"]
if self.rev_end:
args += ["-r", self.rev_end]
cmd = args + [dirname or "."]
out, returncode = RunShellWithReturnCode(cmd)
if returncode:
ErrorExit("Failed to run command %s" % cmd)
self.svnls_cache[dirname] = (old_files, out.splitlines())
old_files, new_files = self.svnls_cache[dirname]
if relfilename in old_files and relfilename not in new_files:
status = "D "
elif relfilename in old_files and relfilename in new_files:
status = "M "
else:
status = "A "
return status
def GetBaseFile(self, filename):
status = self.GetStatus(filename)
base_content = None
new_content = None
# If a file is copied its status will be "A +", which signifies
# "addition-with-history". See "svn st" for more information. We need to
# upload the original file or else diff parsing will fail if the file was
# edited.
if status[0] == "A" and status[3] != "+":
# We'll need to upload the new content if we're adding a binary file
# since diff's output won't contain it.
mimetype = RunShell(["svn", "propget", "svn:mime-type", filename],
silent_ok=True)
base_content = ""
is_binary = bool(mimetype) and not mimetype.startswith("text/")
if is_binary and self.IsImage(filename):
new_content = self.ReadFile(filename)
elif (status[0] in ("M", "D", "R") or
(status[0] == "A" and status[3] == "+") or # Copied file.
(status[0] == " " and status[1] == "M")): # Property change.
args = []
if self.options.revision:
url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
else:
# Don't change filename, it's needed later.
url = filename
args += ["-r", "BASE"]
cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
mimetype, returncode = RunShellWithReturnCode(cmd)
if returncode:
# File does not exist in the requested revision.
# Reset mimetype, it contains an error message.
mimetype = ""
get_base = False
is_binary = bool(mimetype) and not mimetype.startswith("text/")
if status[0] == " ":
# Empty base content just to force an upload.
base_content = ""
elif is_binary:
if self.IsImage(filename):
get_base = True
if status[0] == "M":
if not self.rev_end:
new_content = self.ReadFile(filename)
else:
url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
new_content = RunShell(["svn", "cat", url],
universal_newlines=True, silent_ok=True)
else:
base_content = ""
else:
get_base = True
if get_base:
if is_binary:
universal_newlines = False
else:
universal_newlines = True
if self.rev_start:
# "svn cat -r REV delete_file.txt" doesn't work. cat requires
# the full URL with "@REV" appended instead of using "-r" option.
url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
base_content = RunShell(["svn", "cat", url],
universal_newlines=universal_newlines,
silent_ok=True)
else:
base_content = RunShell(["svn", "cat", filename],
universal_newlines=universal_newlines,
silent_ok=True)
if not is_binary:
args = []
if self.rev_start:
url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
else:
url = filename
args += ["-r", "BASE"]
cmd = ["svn"] + args + ["propget", "svn:keywords", url]
keywords, returncode = RunShellWithReturnCode(cmd)
if keywords and not returncode:
base_content = self._CollapseKeywords(base_content, keywords)
else:
StatusUpdate("svn status returned unexpected output: %s" % status)
sys.exit(1)
return base_content, new_content, is_binary, status[0:5]
class GitVCS(VersionControlSystem):
"""Implementation of the VersionControlSystem interface for Git."""
def __init__(self, options):
super(GitVCS, self).__init__(options)
# Map of filename -> (hash before, hash after) of base file.
# Hashes for "no such file" are represented as None.
self.hashes = {}
# Map of new filename -> old filename for renames.
self.renames = {}
def PostProcessDiff(self, gitdiff):
"""Converts the diff output to include an svn-style "Index:" line as well
as record the hashes of the files, so we can upload them along with our
diff."""
    # Special hash used by git to indicate "no such content".
NULL_HASH = "0"*40
def IsFileNew(filename):
return filename in self.hashes and self.hashes[filename][0] is None
def AddSubversionPropertyChange(filename):
"""Add svn's property change information into the patch if given file is
new file.
We use Subversion's auto-props setting to retrieve its property.
See http://svnbook.red-bean.com/en/1.1/ch07.html#svn-ch-7-sect-1.3.2 for
Subversion's [auto-props] setting.
"""
if self.options.emulate_svn_auto_props and IsFileNew(filename):
svnprops = GetSubversionPropertyChanges(filename)
if svnprops:
svndiff.append("\n" + svnprops + "\n")
svndiff = []
filecount = 0
filename = None
for line in gitdiff.splitlines():
match = re.match(r"diff --git a/(.*) b/(.*)$", line)
if match:
# Add auto property here for previously seen file.
if filename is not None:
AddSubversionPropertyChange(filename)
filecount += 1
# Intentionally use the "after" filename so we can show renames.
filename = match.group(2)
svndiff.append("Index: %s\n" % filename)
if match.group(1) != match.group(2):
self.renames[match.group(2)] = match.group(1)
else:
# The "index" line in a git diff looks like this (long hashes elided):
# index 82c0d44..b2cee3f 100755
# We want to save the left hash, as that identifies the base file.
match = re.match(r"index (\w+)\.\.(\w+)", line)
if match:
before, after = (match.group(1), match.group(2))
if before == NULL_HASH:
before = None
if after == NULL_HASH:
after = None
self.hashes[filename] = (before, after)
svndiff.append(line + "\n")
if not filecount:
ErrorExit("No valid patches found in output from git diff")
# Add auto property for the last seen file.
assert filename is not None
AddSubversionPropertyChange(filename)
return "".join(svndiff)
def GenerateDiff(self, extra_args):
extra_args = extra_args[:]
if self.options.revision:
extra_args = [self.options.revision] + extra_args
# --no-ext-diff is broken in some versions of Git, so try to work around
# this by overriding the environment (but there is still a problem if the
# git config key "diff.external" is used).
env = os.environ.copy()
    if 'GIT_EXTERNAL_DIFF' in env:
      del env['GIT_EXTERNAL_DIFF']
return RunShell(["git", "diff", "--no-ext-diff", "--full-index", "-M"]
+ extra_args, env=env)
def GetUnknownFiles(self):
status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
silent_ok=True)
return status.splitlines()
def GetFileContent(self, file_hash, is_binary):
"""Returns the content of a file identified by its git hash."""
data, retcode = RunShellWithReturnCode(["git", "show", file_hash],
universal_newlines=not is_binary)
if retcode:
ErrorExit("Got error status from 'git show %s'" % file_hash)
return data
def GetBaseFile(self, filename):
    hash_before, hash_after = self.hashes.get(filename, (None, None))
base_content = None
new_content = None
is_binary = self.IsBinary(filename)
status = None
if filename in self.renames:
status = "A +" # Match svn attribute name for renames.
if filename not in self.hashes:
# If a rename doesn't change the content, we never get a hash.
base_content = RunShell(["git", "show", "HEAD:" + filename])
elif not hash_before:
status = "A"
base_content = ""
elif not hash_after:
status = "D"
else:
status = "M"
is_image = self.IsImage(filename)
# Grab the before/after content if we need it.
# We should include file contents if it's text or it's an image.
if not is_binary or is_image:
# Grab the base content if we don't have it already.
if base_content is None and hash_before:
base_content = self.GetFileContent(hash_before, is_binary)
# Only include the "after" file if it's an image; otherwise it
# it is reconstructed from the diff.
if is_image and hash_after:
new_content = self.GetFileContent(hash_after, is_binary)
return (base_content, new_content, is_binary, status)
class MercurialVCS(VersionControlSystem):
"""Implementation of the VersionControlSystem interface for Mercurial."""
def __init__(self, options, repo_dir):
super(MercurialVCS, self).__init__(options)
# Absolute path to repository (we can be in a subdir)
self.repo_dir = os.path.normpath(repo_dir)
# Compute the subdir
cwd = os.path.normpath(os.getcwd())
assert cwd.startswith(self.repo_dir)
self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
if self.options.revision:
self.base_rev = self.options.revision
else:
self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()
def _GetRelPath(self, filename):
"""Get relative path of a file according to the current directory,
given its logical path in the repo."""
assert filename.startswith(self.subdir), (filename, self.subdir)
return filename[len(self.subdir):].lstrip(r"\/")
def GenerateDiff(self, extra_args):
# If no file specified, restrict to the current subdir
extra_args = extra_args or ["."]
cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
data = RunShell(cmd, silent_ok=True)
svndiff = []
filecount = 0
for line in data.splitlines():
m = re.match("diff --git a/(\S+) b/(\S+)", line)
if m:
        # Modify the line to make it look as if it came from svn diff.
        # With this modification no changes on the server side are required
        # to make upload.py work with Mercurial repos.
# NOTE: for proper handling of moved/copied files, we have to use
# the second filename.
filename = m.group(2)
svndiff.append("Index: %s" % filename)
svndiff.append("=" * 67)
filecount += 1
logging.info(line)
else:
svndiff.append(line)
if not filecount:
ErrorExit("No valid patches found in output from hg diff")
return "\n".join(svndiff) + "\n"
def GetUnknownFiles(self):
"""Return a list of files unknown to the VCS."""
status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
silent_ok=True)
unknown_files = []
for line in status.splitlines():
st, fn = line.split(" ", 1)
if st == "?":
unknown_files.append(fn)
return unknown_files
def GetBaseFile(self, filename):
# "hg status" and "hg cat" both take a path relative to the current subdir
# rather than to the repo root, but "hg diff" has given us the full path
# to the repo root.
base_content = ""
new_content = None
is_binary = False
oldrelpath = relpath = self._GetRelPath(filename)
# "hg status -C" returns two lines for moved/copied files, one otherwise
out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
out = out.splitlines()
# HACK: strip error message about missing file/directory if it isn't in
# the working copy
if out[0].startswith('%s: ' % relpath):
out = out[1:]
if len(out) > 1:
# Moved/copied => considered as modified, use old filename to
# retrieve base contents
oldrelpath = out[1].strip()
status = "M"
else:
status, _ = out[0].split(' ', 1)
if ":" in self.base_rev:
base_rev = self.base_rev.split(":", 1)[0]
else:
base_rev = self.base_rev
if status != "A":
base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
silent_ok=True)
is_binary = "\0" in base_content # Mercurial's heuristic
if status != "R":
new_content = open(relpath, "rb").read()
is_binary = is_binary or "\0" in new_content
if is_binary and base_content:
# Fetch again without converting newlines
base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
silent_ok=True, universal_newlines=False)
if not is_binary or not self.IsImage(relpath):
new_content = None
return base_content, new_content, is_binary, status
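  # Illustrative "hg status -C" output consumed by GetBaseFile above for a
  # copied file -- two lines, the new name followed by its source:
  #
  #   A new_name.py
  #     old_name.py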
# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
"""Splits a patch into separate pieces for each file.
Args:
data: A string containing the output of svn diff.
Returns:
A list of 2-tuple (filename, text) where text is the svn diff output
pertaining to filename.
"""
patches = []
filename = None
diff = []
for line in data.splitlines(True):
new_filename = None
if line.startswith('Index:'):
unused, new_filename = line.split(':', 1)
new_filename = new_filename.strip()
elif line.startswith('Property changes on:'):
unused, temp_filename = line.split(':', 1)
      # When a file is modified, paths use '/' between directories; however,
      # when a property is modified, '\' is used on Windows. Make them the
      # same, otherwise the file shows up twice.
temp_filename = temp_filename.strip().replace('\\', '/')
if temp_filename != filename:
# File has property changes but no modifications, create a new diff.
new_filename = temp_filename
if new_filename:
if filename and diff:
patches.append((filename, ''.join(diff)))
filename = new_filename
diff = [line]
continue
if diff is not None:
diff.append(line)
if filename and diff:
patches.append((filename, ''.join(diff)))
return patches
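# Illustrative example (assumed input, not from the original script): for a
# diff containing
#
#   Index: foo.py
#   <foo.py hunks>
#   Index: bar.py
#   <bar.py hunks>
#
# SplitPatch returns [('foo.py', "Index: foo.py\n<foo.py hunks>\n"),
# ('bar.py', "Index: bar.py\n<bar.py hunks>\n")], i.e. each file's text
# starts with its own "Index:" line.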
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
"""Uploads a separate patch for each file in the diff output.
Returns a list of [patch_key, filename] for each file.
"""
patches = SplitPatch(data)
rv = []
for patch in patches:
if len(patch[1]) > MAX_UPLOAD_SIZE:
print ("Not uploading the patch for " + patch[0] +
" because the file is too large.")
continue
form_fields = [("filename", patch[0])]
if not options.download_base:
form_fields.append(("content_upload", "1"))
files = [("data", "data.diff", patch[1])]
ctype, body = EncodeMultipartFormData(form_fields, files)
url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
print "Uploading patch for " + patch[0]
response_body = rpc_server.Send(url, body, content_type=ctype)
lines = response_body.splitlines()
if not lines or lines[0] != "OK":
StatusUpdate(" --> %s" % response_body)
sys.exit(1)
rv.append([lines[1], patch[0]])
return rv
def GuessVCSName():
"""Helper to guess the version control system.
This examines the current directory, guesses which VersionControlSystem
  we're using, and returns a string indicating which VCS is detected.
Returns:
A pair (vcs, output). vcs is a string indicating which VCS was detected
and is one of VCS_GIT, VCS_MERCURIAL, VCS_SUBVERSION, or VCS_UNKNOWN.
output is a string containing any interesting output from the vcs
detection routine, or None if there is nothing interesting.
"""
# Mercurial has a command to get the base directory of a repository
# Try running it, but don't die if we don't have hg installed.
# NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
try:
out, returncode = RunShellWithReturnCode(["hg", "root"])
if returncode == 0:
return (VCS_MERCURIAL, out.strip())
except OSError, (errno, message):
if errno != 2: # ENOENT -- they don't have hg installed.
raise
# Subversion has a .svn in all working directories.
if os.path.isdir('.svn'):
logging.info("Guessed VCS = Subversion")
return (VCS_SUBVERSION, None)
# Git has a command to test if you're in a git tree.
# Try running it, but don't die if we don't have git installed.
try:
out, returncode = RunShellWithReturnCode(["git", "rev-parse",
"--is-inside-work-tree"])
if returncode == 0:
return (VCS_GIT, None)
except OSError, (errno, message):
if errno != 2: # ENOENT -- they don't have git installed.
raise
return (VCS_UNKNOWN, None)
def GuessVCS(options):
"""Helper to guess the version control system.
This verifies any user-specified VersionControlSystem (by command line
or environment variable). If the user didn't specify one, this examines
the current directory, guesses which VersionControlSystem we're using,
and returns an instance of the appropriate class. Exit with an error
if we can't figure it out.
Returns:
A VersionControlSystem instance. Exits if the VCS can't be guessed.
"""
vcs = options.vcs
if not vcs:
vcs = os.environ.get("CODEREVIEW_VCS")
if vcs:
v = VCS_ABBREVIATIONS.get(vcs.lower())
if v is None:
ErrorExit("Unknown version control system %r specified." % vcs)
(vcs, extra_output) = (v, None)
else:
(vcs, extra_output) = GuessVCSName()
if vcs == VCS_MERCURIAL:
if extra_output is None:
extra_output = RunShell(["hg", "root"]).strip()
return MercurialVCS(options, extra_output)
elif vcs == VCS_SUBVERSION:
return SubversionVCS(options)
elif vcs == VCS_GIT:
return GitVCS(options)
ErrorExit(("Could not guess version control system. "
"Are you in a working copy directory?"))
def CheckReviewer(reviewer):
"""Validate a reviewer -- either a nickname or an email addres.
Args:
reviewer: A nickname or an email address.
Calls ErrorExit() if it is an invalid email address.
"""
if "@" not in reviewer:
return # Assume nickname
parts = reviewer.split("@")
if len(parts) > 2:
ErrorExit("Invalid email address: %r" % reviewer)
assert len(parts) == 2
if "." not in parts[1]:
ErrorExit("Invalid email address: %r" % reviewer)
def LoadSubversionAutoProperties():
"""Returns the content of [auto-props] section of Subversion's config file as
a dictionary.
Returns:
A dictionary whose key-value pair corresponds the [auto-props] section's
key-value pair.
In following cases, returns empty dictionary:
- config file doesn't exist, or
- 'enable-auto-props' is not set to 'true-like-value' in [miscellany].
"""
  # TODO(hayato): Windows users might use a different path for the config file.
subversion_config = os.path.expanduser("~/.subversion/config")
if not os.path.exists(subversion_config):
return {}
config = ConfigParser.ConfigParser()
config.read(subversion_config)
if (config.has_section("miscellany") and
config.has_option("miscellany", "enable-auto-props") and
config.getboolean("miscellany", "enable-auto-props") and
config.has_section("auto-props")):
props = {}
for file_pattern in config.options("auto-props"):
props[file_pattern] = ParseSubversionPropertyValues(
config.get("auto-props", file_pattern))
return props
else:
return {}
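# The Subversion config consumed above looks like the following (an assumed
# example):
#
#   [miscellany]
#   enable-auto-props = yes
#
#   [auto-props]
#   *.py = svn:eol-style=LF
#   *.jpg = svn:mime-type=image/jpeg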
def ParseSubversionPropertyValues(props):
"""Parse the given property value which comes from [auto-props] section and
returns a list whose element is a (svn_prop_key, svn_prop_value) pair.
See the following doctest for example.
>>> ParseSubversionPropertyValues('svn:eol-style=LF')
[('svn:eol-style', 'LF')]
>>> ParseSubversionPropertyValues('svn:mime-type=image/jpeg')
[('svn:mime-type', 'image/jpeg')]
>>> ParseSubversionPropertyValues('svn:eol-style=LF;svn:executable')
[('svn:eol-style', 'LF'), ('svn:executable', '*')]
"""
key_value_pairs = []
for prop in props.split(";"):
key_value = prop.split("=")
assert len(key_value) <= 2
if len(key_value) == 1:
      # If the value is not given, use '*', per Subversion's convention.
key_value_pairs.append((key_value[0], "*"))
else:
key_value_pairs.append((key_value[0], key_value[1]))
return key_value_pairs
def GetSubversionPropertyChanges(filename):
"""Return a Subversion's 'Property changes on ...' string, which is used in
the patch file.
Args:
filename: filename whose property might be set by [auto-props] config.
Returns:
A string like 'Property changes on |filename| ...' if given |filename|
matches any entries in [auto-props] section. None, otherwise.
"""
global svn_auto_props_map
if svn_auto_props_map is None:
svn_auto_props_map = LoadSubversionAutoProperties()
all_props = []
for file_pattern, props in svn_auto_props_map.items():
if fnmatch.fnmatch(filename, file_pattern):
all_props.extend(props)
if all_props:
return FormatSubversionPropertyChanges(filename, all_props)
return None
def FormatSubversionPropertyChanges(filename, props):
"""Returns Subversion's 'Property changes on ...' strings using given filename
and properties.
Args:
filename: filename
props: A list whose element is a (svn_prop_key, svn_prop_value) pair.
Returns:
A string which can be used in the patch file for Subversion.
See the following doctest for example.
>>> print FormatSubversionPropertyChanges('foo.cc', [('svn:eol-style', 'LF')])
Property changes on: foo.cc
___________________________________________________________________
Added: svn:eol-style
+ LF
<BLANKLINE>
"""
prop_changes_lines = [
"Property changes on: %s" % filename,
"___________________________________________________________________"]
for key, value in props:
prop_changes_lines.append("Added: " + key)
prop_changes_lines.append(" + " + value)
return "\n".join(prop_changes_lines) + "\n"
def RealMain(argv, data=None):
"""The real main function.
Args:
argv: Command line arguments.
data: Diff contents. If None (default) the diff is generated by
the VersionControlSystem implementation returned by GuessVCS().
Returns:
A 2-tuple (issue id, patchset id).
The patchset id is None if the base files are not uploaded by this
script (applies only to SVN checkouts).
"""
logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
"%(lineno)s %(message)s "))
os.environ['LC_ALL'] = 'C'
options, args = parser.parse_args(argv[1:])
global verbosity
verbosity = options.verbose
if verbosity >= 3:
logging.getLogger().setLevel(logging.DEBUG)
elif verbosity >= 2:
logging.getLogger().setLevel(logging.INFO)
vcs = GuessVCS(options)
base = options.base_url
if isinstance(vcs, SubversionVCS):
# Guessing the base field is only supported for Subversion.
# Note: Fetching base files may become deprecated in future releases.
guessed_base = vcs.GuessBase(options.download_base)
if base:
if guessed_base and base != guessed_base:
print "Using base URL \"%s\" from --base_url instead of \"%s\"" % \
(base, guessed_base)
else:
base = guessed_base
if not base and options.download_base:
    options.download_base = False
logging.info("Enabled upload of base file")
if not options.assume_yes:
vcs.CheckForUnknownFiles()
if data is None:
data = vcs.GenerateDiff(args)
data = vcs.PostProcessDiff(data)
files = vcs.GetBaseFiles(data)
if verbosity >= 1:
print "Upload server:", options.server, "(change with -s/--server)"
if options.issue:
prompt = "Message describing this patch set: "
else:
prompt = "New issue subject: "
message = options.message or raw_input(prompt).strip()
if not message:
ErrorExit("A non-empty message is required")
rpc_server = GetRpcServer(options.server,
options.email,
options.host,
options.save_cookies,
options.account_type)
form_fields = [("subject", message)]
if base:
form_fields.append(("base", base))
if options.issue:
form_fields.append(("issue", str(options.issue)))
if options.email:
form_fields.append(("user", options.email))
if options.reviewers:
for reviewer in options.reviewers.split(','):
CheckReviewer(reviewer)
form_fields.append(("reviewers", options.reviewers))
if options.cc:
for cc in options.cc.split(','):
CheckReviewer(cc)
form_fields.append(("cc", options.cc))
description = options.description
if options.description_file:
if options.description:
ErrorExit("Can't specify description and description_file")
    f = open(options.description_file, 'r')
    description = f.read()
    f.close()
if description:
form_fields.append(("description", description))
  # Send a hash of all the base files so the server can determine if a copy
  # already exists in an earlier patchset.
base_hashes = ""
  for filename, info in files.iteritems():
    if info[0] is not None:
      checksum = md5(info[0]).hexdigest()
      if base_hashes:
        base_hashes += "|"
      base_hashes += checksum + ":" + filename
form_fields.append(("base_hashes", base_hashes))
if options.private:
if options.issue:
print "Warning: Private flag ignored when updating an existing issue."
else:
form_fields.append(("private", "1"))
# If we're uploading base files, don't send the email before the uploads, so
# that it contains the file status.
if options.send_mail and options.download_base:
form_fields.append(("send_mail", "1"))
if not options.download_base:
form_fields.append(("content_upload", "1"))
if len(data) > MAX_UPLOAD_SIZE:
print "Patch is large, so uploading file patches separately."
uploaded_diff_file = []
form_fields.append(("separate_patches", "1"))
else:
uploaded_diff_file = [("data", "data.diff", data)]
ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
response_body = rpc_server.Send("/upload", body, content_type=ctype)
patchset = None
if not options.download_base or not uploaded_diff_file:
lines = response_body.splitlines()
if len(lines) >= 2:
msg = lines[0]
patchset = lines[1].strip()
patches = [x.split(" ", 1) for x in lines[2:]]
else:
msg = response_body
else:
msg = response_body
StatusUpdate(msg)
if not response_body.startswith("Issue created.") and \
not response_body.startswith("Issue updated."):
sys.exit(0)
issue = msg[msg.rfind("/")+1:]
if not uploaded_diff_file:
result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
if not options.download_base:
patches = result
if not options.download_base:
vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
if options.send_mail:
rpc_server.Send("/" + issue + "/mail", payload="")
return issue, patchset
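# Typical invocation (illustrative; -s is the server flag mentioned in the
# verbose output above, and -m is assumed to be the message flag defined on
# the global parser):
#   upload.py -s codereview.appspot.com -m "Fix frobnicator" path/to/file.py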
def main():
try:
RealMain(sys.argv)
except KeyboardInterrupt:
print
StatusUpdate("Interrupted.")
sys.exit(1)
if __name__ == "__main__":
main()
|
isc
|
raphaelmerx/django
|
django/core/management/commands/sendtestemail.py
|
349
|
1449
|
import socket
from django.core.mail import mail_admins, mail_managers, send_mail
from django.core.management.base import BaseCommand
from django.utils import timezone
class Command(BaseCommand):
help = "Sends a test email to the email addresses specified as arguments."
    missing_args_message = "You must specify some email recipients, or pass the --managers or --admins options."
def add_arguments(self, parser):
parser.add_argument('email', nargs='*',
help='One or more email addresses to send a test email to.')
parser.add_argument('--managers', action='store_true', dest='managers', default=False,
help='Send a test email to the addresses specified in settings.MANAGERS.')
parser.add_argument('--admins', action='store_true', dest='admins', default=False,
help='Send a test email to the addresses specified in settings.ADMINS.')
def handle(self, *args, **kwargs):
subject = 'Test email from %s on %s' % (socket.gethostname(), timezone.now())
send_mail(
subject=subject,
message="If you\'re reading this, it was successful.",
from_email=None,
recipient_list=kwargs['email'],
)
if kwargs['managers']:
mail_managers(subject, "This email was sent to the site managers.")
if kwargs['admins']:
mail_admins(subject, "This email was sent to the site admins.")
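# Example invocations (assuming a configured Django project with working
# EMAIL_* settings):
#   ./manage.py sendtestemail someone@example.com other@example.com
#   ./manage.py sendtestemail --managers --admins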
|
bsd-3-clause
|
HanyuWorm/volatility
|
volatility/plugins/gui/vtypes/win7_sp0_x64_vtypes_gui.py
|
62
|
137575
|
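# These structure definitions follow volatility's vtype convention:
#   'StructName': [total_size, {'member_name': [offset, ['type', ...]], ...}]
# For example, reading from the first entry below, the owner pointer of a
# USER handle-table entry lives at offset 8:
#   offset, datatype = win32k_types['_HANDLEENTRY'][1]['pOwner']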
win32k_types = {
'_HANDLEENTRY': [0x18, {
'pOwner': [8, ['pointer64', ['void']]],
'phead': [0, ['pointer64', ['_HEAD']]],
'bFlags': [17, ['unsigned char']],
'wUniq': [18, ['unsigned short']],
'bType': [16, ['unsigned char']],
}],
'tagTOUCHINPUTINFO': [0x50, {
'dwcInputs': [24, ['unsigned long']],
'head': [0, ['_THROBJHEAD']],
'uFlags': [28, ['unsigned long']],
'TouchInput': [32, ['array', 1, ['tagTOUCHINPUT']]],
}],
'tagHOOK': [0x60, {
'head': [0, ['_THRDESKHEAD']],
'offPfn': [56, ['unsigned long long']],
'flags': [64, ['unsigned long']],
'fLastHookHung': [88, ['BitField', {'end_bit': 8, 'start_bit': 7, 'native_type': 'long'}]],
'nTimeout': [88, ['BitField', {'end_bit': 7, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'ihmod': [68, ['long']],
'iHook': [48, ['long']],
'ptiHooked': [72, ['pointer64', ['tagTHREADINFO']]],
'phkNext': [40, ['pointer64', ['tagHOOK']]],
'rpdesk': [80, ['pointer64', ['tagDESKTOP']]],
}],
'DEADKEY': [0x8, {
'wchComposed': [4, ['wchar']],
'dwBoth': [0, ['unsigned long']],
'uFlags': [6, ['unsigned short']],
}],
'_W32THREAD': [0x150, {
'pRBRecursionCount': [96, ['unsigned long']],
'iVisRgnUniqueness': [328, ['unsigned long']],
'RefCount': [8, ['unsigned long']],
'pDevHTInfo': [280, ['pointer64', ['void']]],
'pUMPDHeap': [48, ['pointer64', ['void']]],
'pgdiBrushAttr': [32, ['pointer64', ['void']]],
'ulWindowSystemRendering': [324, ['unsigned long']],
'tlSpriteState': [104, ['_TLSPRITESTATE']],
'pdcoRender': [304, ['pointer64', ['void']]],
'bEnableEngUpdateDeviceSurface': [320, ['unsigned char']],
'pdcoAA': [296, ['pointer64', ['void']]],
'pNonRBRecursionCount': [100, ['unsigned long']],
'ptlW32': [16, ['pointer64', ['_TL']]],
'GdiTmpTgoList': [80, ['_LIST_ENTRY']],
'pUMPDObjs': [40, ['pointer64', ['void']]],
'pgdiDcattr': [24, ['pointer64', ['void']]],
'bIncludeSprites': [321, ['unsigned char']],
'pEThread': [0, ['pointer64', ['_ETHREAD']]],
'pSpriteState': [272, ['pointer64', ['void']]],
'pProxyPort': [64, ['pointer64', ['void']]],
'ulDevHTInfoUniqueness': [288, ['unsigned long']],
'pdcoSrc': [312, ['pointer64', ['void']]],
'pUMPDObj': [56, ['pointer64', ['void']]],
'pClientID': [72, ['pointer64', ['void']]],
}],
'tagPROPLIST': [0x18, {
'aprop': [8, ['array', 1, ['tagPROP']]],
'cEntries': [0, ['unsigned long']],
'iFirstFree': [4, ['unsigned long']],
}],
'tagSVR_INSTANCE_INFO': [0x40, {
'head': [0, ['_THROBJHEAD']],
'next': [24, ['pointer64', ['tagSVR_INSTANCE_INFO']]],
'nextInThisThread': [32, ['pointer64', ['tagSVR_INSTANCE_INFO']]],
'spwndEvent': [48, ['pointer64', ['tagWND']]],
'afCmd': [40, ['unsigned long']],
'pcii': [56, ['pointer64', ['void']]],
}],
'tagDESKTOPINFO': [0xf0, {
'spwndProgman': [192, ['pointer64', ['tagWND']]],
'pvwplMessagePPHandler': [224, ['pointer64', ['VWPL']]],
'pvDesktopLimit': [8, ['pointer64', ['void']]],
'fComposited': [232, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'spwndGestureEngine': [216, ['pointer64', ['tagWND']]],
'pvDesktopBase': [0, ['pointer64', ['void']]],
'spwndShell': [160, ['pointer64', ['tagWND']]],
'ppiShellProcess': [168, ['pointer64', ['tagPROCESSINFO']]],
'pvwplShellHook': [200, ['pointer64', ['VWPL']]],
'fIsDwmDesktop': [232, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'unsigned long'}]],
'spwndTaskman': [184, ['pointer64', ['tagWND']]],
'aphkStart': [32, ['array', 16, ['pointer64', ['tagHOOK']]]],
'fsHooks': [24, ['unsigned long']],
'cntMBox': [208, ['long']],
'spwndBkGnd': [176, ['pointer64', ['tagWND']]],
'spwnd': [16, ['pointer64', ['tagWND']]],
}],
'tagDISPLAYINFO': [0xa8, {
'hDev': [0, ['pointer64', ['void']]],
'SpatialListHead': [144, ['_KLIST_ENTRY']],
'BitCountMax': [130, ['unsigned short']],
'cyGray': [60, ['long']],
'hdcBits': [32, ['pointer64', ['HDC__']]],
'fDesktopIsRect': [132, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'long'}]],
'hbmGray': [48, ['pointer64', ['HBITMAP__']]],
'pmdev': [8, ['pointer64', ['void']]],
'cFullScreen': [160, ['short']],
'cxGray': [56, ['long']],
'dmLogPixels': [128, ['unsigned short']],
'hDevInfo': [16, ['pointer64', ['void']]],
'fAnyPalette': [132, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'long'}]],
'pspbFirst': [72, ['pointer64', ['tagSPB']]],
'pMonitorPrimary': [88, ['pointer64', ['tagMONITOR']]],
'Spare0': [162, ['short']],
'pMonitorFirst': [96, ['pointer64', ['tagMONITOR']]],
'hdcGray': [40, ['pointer64', ['HDC__']]],
'hrgnScreenReal': [120, ['pointer64', ['HRGN__']]],
'cMonitors': [80, ['unsigned long']],
'hdcScreen': [24, ['pointer64', ['HDC__']]],
'DockThresholdMax': [136, ['unsigned long']],
'rcScreenReal': [104, ['tagRECT']],
'pdceFirst': [64, ['pointer64', ['tagDCE']]],
}],
'__unnamed_1261': [0x20, {
'Buffer': [24, ['pointer64', ['void']]],
'ProviderId': [0, ['unsigned long long']],
'BufferSize': [16, ['unsigned long']],
'DataPath': [8, ['pointer64', ['void']]],
}],
'__unnamed_1263': [0x20, {
'Argument4': [24, ['pointer64', ['void']]],
'Argument2': [8, ['pointer64', ['void']]],
'Argument3': [16, ['pointer64', ['void']]],
'Argument1': [0, ['pointer64', ['void']]],
}],
'__unnamed_1265': [0x20, {
'DeviceIoControl': [0, ['__unnamed_121d']],
'QuerySecurity': [0, ['__unnamed_121f']],
'ReadWriteConfig': [0, ['__unnamed_123d']],
'Create': [0, ['__unnamed_11ff']],
'SetSecurity': [0, ['__unnamed_1221']],
'Write': [0, ['__unnamed_1209']],
'VerifyVolume': [0, ['__unnamed_1225']],
'WMI': [0, ['__unnamed_1261']],
'CreateMailslot': [0, ['__unnamed_1207']],
'FilterResourceRequirements': [0, ['__unnamed_123b']],
'SetFile': [0, ['__unnamed_1213']],
'MountVolume': [0, ['__unnamed_1225']],
'FileSystemControl': [0, ['__unnamed_1219']],
'UsageNotification': [0, ['__unnamed_124b']],
'Scsi': [0, ['__unnamed_1229']],
'WaitWake': [0, ['__unnamed_124f']],
'QueryFile': [0, ['__unnamed_1211']],
'QueryDeviceText': [0, ['__unnamed_1247']],
'CreatePipe': [0, ['__unnamed_1203']],
'Power': [0, ['__unnamed_125b']],
'QueryDeviceRelations': [0, ['__unnamed_122d']],
'Read': [0, ['__unnamed_1209']],
'StartDevice': [0, ['__unnamed_125f']],
'QueryDirectory': [0, ['__unnamed_120d']],
'PowerSequence': [0, ['__unnamed_1253']],
'QueryId': [0, ['__unnamed_1243']],
'LockControl': [0, ['__unnamed_121b']],
'NotifyDirectory': [0, ['__unnamed_120f']],
'QueryInterface': [0, ['__unnamed_1233']],
'Others': [0, ['__unnamed_1263']],
'QueryVolume': [0, ['__unnamed_1217']],
'SetLock': [0, ['__unnamed_123f']],
'DeviceCapabilities': [0, ['__unnamed_1237']],
}],
'_D3DKMDT_2DREGION': [0x8, {
'cy': [4, ['unsigned long']],
'cx': [0, ['unsigned long']],
}],
'tagMONITOR': [0x90, {
'hDev': [80, ['pointer64', ['void']]],
'head': [0, ['_HEAD']],
'hDevReal': [88, ['pointer64', ['void']]],
'rcWorkReal': [44, ['tagRECT']],
'dwMONFlags': [24, ['unsigned long']],
'Spare0': [72, ['short']],
'rcMonitorReal': [28, ['tagRECT']],
'pMonitorNext': [16, ['pointer64', ['tagMONITOR']]],
'Flink': [128, ['pointer64', ['tagMONITOR']]],
'Blink': [136, ['pointer64', ['tagMONITOR']]],
'hrgnMonitorReal': [64, ['pointer64', ['HRGN__']]],
'cWndStack': [74, ['short']],
'DockTargets': [96, ['array', 7, ['array', 4, ['unsigned char']]]],
}],
'__unnamed_123b': [0x8, {
'IoResourceRequirementList': [0, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
}],
'_D3DKMDT_VIDPN_PRESENT_PATH_COPYPROTECTION': [0x10c, {
'APSTriggerBits': [4, ['unsigned long']],
'CopyProtectionType': [0, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_VPPMT_UNINITIALIZED', 1: 'D3DKMDT_VPPMT_NOPROTECTION', 2: 'D3DKMDT_VPPMT_MACROVISION_APSTRIGGER', 3: 'D3DKMDT_VPPMT_MACROVISION_FULLSUPPORT', 255: 'D3DKMDT_VPPMT_NOTSPECIFIED'}}]],
'CopyProtectionSupport': [264, ['_D3DKMDT_VIDPN_PRESENT_PATH_COPYPROTECTION_SUPPORT']],
'OEMCopyProtection': [8, ['array', 256, ['unsigned char']]],
}],
'tagHID_TLC_INFO': [0x28, {
'cExcludeRequest': [32, ['unsigned long']],
'link': [0, ['_LIST_ENTRY']],
'cExcludeOrphaned': [36, ['unsigned long']],
'cUsagePageRequest': [28, ['unsigned long']],
'usUsagePage': [16, ['unsigned short']],
'cDevices': [20, ['unsigned long']],
'cDirectRequest': [24, ['unsigned long']],
'usUsage': [18, ['unsigned short']],
}],
'HWND__': [0x4, {
'unused': [0, ['long']],
}],
'_DMM_VIDPNPATHANDTARGETMODE_SERIALIZATION': [0x1b0, {
'TargetMode': [360, ['_D3DKMDT_VIDPN_TARGET_MODE']],
'PathInfo': [0, ['_D3DKMDT_VIDPN_PRESENT_PATH']],
}],
'tagQ': [0x158, {
'hwndDblClk': [112, ['pointer64', ['HWND__']]],
'timeDblClk': [108, ['unsigned long']],
'spwndFocus': [72, ['pointer64', ['tagWND']]],
'ExtraInfo': [328, ['long long']],
'cLockCount': [322, ['unsigned short']],
'iCursorLevel': [312, ['long']],
'ptiSysLock': [24, ['pointer64', ['tagTHREADINFO']]],
'caret': [232, ['tagCARET']],
'ptiMouse': [48, ['pointer64', ['tagTHREADINFO']]],
'spwndActivePrev': [88, ['pointer64', ['tagWND']]],
'ptMouseMove': [128, ['tagPOINT']],
'msgDblClk': [100, ['unsigned long']],
'msgJournal': [324, ['unsigned long']],
'ptiKeyboard': [56, ['pointer64', ['tagTHREADINFO']]],
'cThreads': [320, ['unsigned short']],
'QF_flags': [316, ['unsigned long']],
'mlInput': [0, ['tagMLIST']],
'spwndActive': [80, ['pointer64', ['tagWND']]],
'codeCapture': [96, ['unsigned long']],
'idSysLock': [32, ['unsigned long long']],
'spcurCurrent': [304, ['pointer64', ['tagCURSOR']]],
'ulEtwReserved1': [336, ['unsigned long']],
'ptDblClk': [120, ['tagPOINT']],
'xbtnDblClk': [104, ['unsigned short']],
'afKeyRecentDown': [136, ['array', 32, ['unsigned char']]],
'afKeyState': [168, ['array', 64, ['unsigned char']]],
'spwndCapture': [64, ['pointer64', ['tagWND']]],
'idSysPeek': [40, ['unsigned long long']],
}],
'tagUSERSTARTUPINFO': [0x1c, {
'wShowWindow': [24, ['unsigned short']],
'dwYSize': [16, ['unsigned long']],
'dwXSize': [12, ['unsigned long']],
'cbReserved2': [26, ['unsigned short']],
'cb': [0, ['unsigned long']],
'dwX': [4, ['unsigned long']],
'dwY': [8, ['unsigned long']],
'dwFlags': [20, ['unsigned long']],
}],
'_DMM_COMMITVIDPNREQUESTSET_SERIALIZATION': [0x8, {
'CommitVidPnRequestOffset': [4, ['array', 1, ['unsigned long']]],
'NumCommitVidPnRequests': [0, ['unsigned char']],
}],
'__unnamed_1805': [0xc, {
'Start': [0, ['_LARGE_INTEGER']],
'Length': [8, ['unsigned long']],
}],
'_DMM_MONITORDESCRIPTORSET_SERIALIZATION': [0x90, {
'NumDescriptors': [0, ['unsigned char']],
'DescriptorSerialization': [4, ['array', 1, ['_DMM_MONITORDESCRIPTOR_SERIALIZATION']]],
}],
'_DMM_MONITORSOURCEMODESET_SERIALIZATION': [0x70, {
'NumModes': [0, ['unsigned char']],
'ModeSerialization': [8, ['array', 1, ['_DMM_MONITOR_SOURCE_MODE_SERIALIZATION']]],
}],
'_VK_FUNCTION_PARAM': [0x8, {
'NLSFEProcIndex': [0, ['unsigned char']],
'NLSFEProcParam': [4, ['unsigned long']],
}],
'_D3DKMDT_COLOR_COEFF_DYNAMIC_RANGES': [0x10, {
'SecondChannel': [4, ['unsigned long']],
'FourthChannel': [12, ['unsigned long']],
'ThirdChannel': [8, ['unsigned long']],
'FirstChannel': [0, ['unsigned long']],
}],
'tagMLIST': [0x18, {
'cMsgs': [16, ['unsigned long']],
'pqmsgRead': [0, ['pointer64', ['tagQMSG']]],
'pqmsgWriteLast': [8, ['pointer64', ['tagQMSG']]],
}],
'__unnamed_122d': [0x4, {
'Type': [0, ['Enumeration', {'target': 'long', 'choices': {0: 'BusRelations', 1: 'EjectionRelations', 2: 'PowerRelations', 3: 'RemovalRelations', 4: 'TargetDeviceRelation', 5: 'SingleBusRelations', 6: 'TransportRelations'}}]],
}],
'tagMENUSTATE': [0x90, {
'fDragAndDrop': [8, ['BitField', {'end_bit': 11, 'start_bit': 10, 'native_type': 'unsigned long'}]],
'fInsideMenuLoop': [8, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'unsigned long'}]],
'cxAni': [116, ['long']],
'pGlobalPopupMenu': [0, ['pointer64', ['tagPOPUPMENU']]],
'uDraggingIndex': [88, ['unsigned long']],
'uDraggingHitArea': [80, ['unsigned long long']],
'fNotifyByPos': [8, ['BitField', {'end_bit': 18, 'start_bit': 17, 'native_type': 'unsigned long'}]],
'fButtonDown': [8, ['BitField', {'end_bit': 4, 'start_bit': 3, 'native_type': 'unsigned long'}]],
'ixAni': [108, ['long']],
'fInCallHandleMenuMessages': [8, ['BitField', {'end_bit': 10, 'start_bit': 9, 'native_type': 'unsigned long'}]],
'mnFocus': [20, ['long']],
'iyAni': [112, ['long']],
'dwLockCount': [40, ['unsigned long']],
'fAutoDismiss': [8, ['BitField', {'end_bit': 12, 'start_bit': 11, 'native_type': 'unsigned long'}]],
'fIsSysMenu': [8, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'unsigned long'}]],
'dwAniStartTime': [104, ['unsigned long']],
'pmnsPrev': [48, ['pointer64', ['tagMENUSTATE']]],
'fInEndMenu': [8, ['BitField', {'end_bit': 5, 'start_bit': 4, 'native_type': 'unsigned long'}]],
'hbmAni': [128, ['pointer64', ['HBITMAP__']]],
'fIgnoreButtonUp': [8, ['BitField', {'end_bit': 14, 'start_bit': 13, 'native_type': 'unsigned long'}]],
'ptButtonDown': [56, ['tagPOINT']],
'hdcWndAni': [96, ['pointer64', ['HDC__']]],
'fAboutToAutoDismiss': [8, ['BitField', {'end_bit': 13, 'start_bit': 12, 'native_type': 'unsigned long'}]],
'fMenuStarted': [8, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'uDraggingFlags': [92, ['unsigned long']],
'fUnderline': [8, ['BitField', {'end_bit': 6, 'start_bit': 5, 'native_type': 'unsigned long'}]],
'fInDoDragDrop': [8, ['BitField', {'end_bit': 16, 'start_bit': 15, 'native_type': 'unsigned long'}]],
'ptiMenuStateOwner': [32, ['pointer64', ['tagTHREADINFO']]],
'uButtonDownIndex': [72, ['unsigned long']],
'fModelessMenu': [8, ['BitField', {'end_bit': 9, 'start_bit': 8, 'native_type': 'unsigned long'}]],
'cyAni': [120, ['long']],
'uButtonDownHitArea': [64, ['unsigned long long']],
'fButtonAlwaysDown': [8, ['BitField', {'end_bit': 7, 'start_bit': 6, 'native_type': 'unsigned long'}]],
'iAniDropDir': [8, ['BitField', {'end_bit': 24, 'start_bit': 19, 'native_type': 'unsigned long'}]],
'ptMouseLast': [12, ['tagPOINT']],
'hdcAni': [136, ['pointer64', ['HDC__']]],
'vkButtonDown': [76, ['long']],
'fSetCapture': [8, ['BitField', {'end_bit': 19, 'start_bit': 18, 'native_type': 'unsigned long'}]],
'fDragging': [8, ['BitField', {'end_bit': 8, 'start_bit': 7, 'native_type': 'unsigned long'}]],
'fActiveNoForeground': [8, ['BitField', {'end_bit': 17, 'start_bit': 16, 'native_type': 'unsigned long'}]],
'fMouseOffMenu': [8, ['BitField', {'end_bit': 15, 'start_bit': 14, 'native_type': 'unsigned long'}]],
'cmdLast': [24, ['long']],
}],
'tagMSGPPINFO': [0x4, {
'dwIndexMsgPP': [0, ['unsigned long']],
}],
'VWPLELEMENT': [0x10, {
'DataOrTag': [0, ['unsigned long long']],
'pwnd': [8, ['pointer64', ['tagWND']]],
}],
'_WM_VALUES_STRINGS': [0x10, {
'pszName': [0, ['pointer64', ['unsigned char']]],
'fInternal': [8, ['unsigned char']],
'fDefined': [9, ['unsigned char']],
}],
'tagCLIP': [0x18, {
'fmt': [0, ['unsigned long']],
'fGlobalHandle': [16, ['long']],
'hData': [8, ['pointer64', ['void']]],
}],
'__unnamed_1229': [0x8, {
'Srb': [0, ['pointer64', ['_SCSI_REQUEST_BLOCK']]],
}],
'_HEAD': [0x10, {
'h': [0, ['pointer64', ['void']]],
'cLockObj': [8, ['unsigned long']],
}],
'__unnamed_1221': [0x10, {
'SecurityInformation': [0, ['unsigned long']],
'SecurityDescriptor': [8, ['pointer64', ['void']]],
}],
'__unnamed_11e6': [0x10, {
'AsynchronousParameters': [0, ['__unnamed_11e4']],
'AllocationSize': [0, ['_LARGE_INTEGER']],
}],
'tagQMSG': [0x68, {
'FromPen': [84, ['BitField', {'end_bit': 4, 'start_bit': 3, 'native_type': 'long'}]],
'pti': [88, ['pointer64', ['tagTHREADINFO']]],
'ExtraInfo': [64, ['long long']],
'Wow64Message': [84, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'long'}]],
'pqmsgPrev': [8, ['pointer64', ['tagQMSG']]],
'NoCoalesce': [84, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'long'}]],
'Padding': [80, ['BitField', {'end_bit': 32, 'start_bit': 30, 'native_type': 'unsigned long'}]],
'ptMouseReal': [72, ['tagPOINT']],
'pqmsgNext': [0, ['pointer64', ['tagQMSG']]],
'dwQEvent': [80, ['BitField', {'end_bit': 30, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'MsgPPInfo': [96, ['tagMSGPPINFO']],
'FromTouch': [84, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'long'}]],
'msg': [16, ['tagMSG']],
}],
'HWINSTA__': [0x4, {
'unused': [0, ['long']],
}],
'tagWin32PoolHead': [0x20, {
'pPrev': [8, ['pointer64', ['tagWin32PoolHead']]],
'pTrace': [24, ['pointer64', ['pointer64', ['void']]]],
'pNext': [16, ['pointer64', ['tagWin32PoolHead']]],
'size': [0, ['unsigned long long']],
}],
'tagTOUCHINPUT': [0x30, {
'hSource': [8, ['pointer64', ['void']]],
'dwExtraInfo': [32, ['unsigned long long']],
'cxContact': [40, ['unsigned long']],
'dwMask': [24, ['unsigned long']],
'y': [4, ['long']],
'x': [0, ['long']],
'dwID': [16, ['unsigned long']],
'cyContact': [44, ['unsigned long']],
'dwTime': [28, ['unsigned long']],
'dwFlags': [20, ['unsigned long']],
}],
'_CALLBACKWND': [0x18, {
'hwnd': [0, ['pointer64', ['HWND__']]],
'pActCtx': [16, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'pwnd': [8, ['pointer64', ['tagWND']]],
}],
'HMONITOR__': [0x4, {
'unused': [0, ['long']],
}],
'_D3DKMDT_GRAPHICS_RENDERING_FORMAT': [0x20, {
'VisibleRegionSize': [8, ['_D3DKMDT_2DREGION']],
'Stride': [16, ['unsigned long']],
'PixelFormat': [20, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DDDIFMT_UNKNOWN', 20: 'D3DDDIFMT_R8G8B8', 21: 'D3DDDIFMT_A8R8G8B8', 22: 'D3DDDIFMT_X8R8G8B8', 23: 'D3DDDIFMT_R5G6B5', 24: 'D3DDDIFMT_X1R5G5B5', 25: 'D3DDDIFMT_A1R5G5B5', 26: 'D3DDDIFMT_A4R4G4B4', 27: 'D3DDDIFMT_R3G3B2', 28: 'D3DDDIFMT_A8', 29: 'D3DDDIFMT_A8R3G3B2', 30: 'D3DDDIFMT_X4R4G4B4', 31: 'D3DDDIFMT_A2B10G10R10', 32: 'D3DDDIFMT_A8B8G8R8', 33: 'D3DDDIFMT_X8B8G8R8', 34: 'D3DDDIFMT_G16R16', 35: 'D3DDDIFMT_A2R10G10B10', 36: 'D3DDDIFMT_A16B16G16R16', 40: 'D3DDDIFMT_A8P8', 41: 'D3DDDIFMT_P8', 50: 'D3DDDIFMT_L8', 51: 'D3DDDIFMT_A8L8', 52: 'D3DDDIFMT_A4L4', 60: 'D3DDDIFMT_V8U8', 61: 'D3DDDIFMT_L6V5U5', 62: 'D3DDDIFMT_X8L8V8U8', 63: 'D3DDDIFMT_Q8W8V8U8', 64: 'D3DDDIFMT_V16U16', 65: 'D3DDDIFMT_W11V11U10', 67: 'D3DDDIFMT_A2W10V10U10', 877942852: 'D3DDDIFMT_DXT4', 70: 'D3DDDIFMT_D16_LOCKABLE', 71: 'D3DDDIFMT_D32', 72: 'D3DDDIFMT_S1D15', 73: 'D3DDDIFMT_D15S1', 74: 'D3DDDIFMT_S8D24', 75: 'D3DDDIFMT_D24S8', 76: 'D3DDDIFMT_X8D24', 77: 'D3DDDIFMT_D24X8', 78: 'D3DDDIFMT_X4S4D24', 79: 'D3DDDIFMT_D24X4S4', 80: 'D3DDDIFMT_D16', 81: 'D3DDDIFMT_L16', 82: 'D3DDDIFMT_D32F_LOCKABLE', 83: 'D3DDDIFMT_D24FS8', 84: 'D3DDDIFMT_D32_LOCKABLE', 85: 'D3DDDIFMT_S8_LOCKABLE', 100: 'D3DDDIFMT_VERTEXDATA', 101: 'D3DDDIFMT_INDEX16', 102: 'D3DDDIFMT_INDEX32', 110: 'D3DDDIFMT_Q16W16V16U16', 111: 'D3DDDIFMT_R16F', 112: 'D3DDDIFMT_G16R16F', 113: 'D3DDDIFMT_A16B16G16R16F', 114: 'D3DDDIFMT_R32F', 115: 'D3DDDIFMT_G32R32F', 116: 'D3DDDIFMT_A32B32G32R32F', 117: 'D3DDDIFMT_CxV8U8', 118: 'D3DDDIFMT_A1', 119: 'D3DDDIFMT_A2B10G10R10_XR_BIAS', 150: 'D3DDDIFMT_PICTUREPARAMSDATA', 151: 'D3DDDIFMT_MACROBLOCKDATA', 152: 'D3DDDIFMT_RESIDUALDIFFERENCEDATA', 153: 'D3DDDIFMT_DEBLOCKINGDATA', 154: 'D3DDDIFMT_INVERSEQUANTIZATIONDATA', 155: 'D3DDDIFMT_SLICECONTROLDATA', 156: 'D3DDDIFMT_BITSTREAMDATA', 157: 'D3DDDIFMT_MOTIONVECTORBUFFER', 158: 'D3DDDIFMT_FILMGRAINBUFFER', 159: 'D3DDDIFMT_DXVA_RESERVED9', 160: 'D3DDDIFMT_DXVA_RESERVED10', 161: 'D3DDDIFMT_DXVA_RESERVED11', 162: 'D3DDDIFMT_DXVA_RESERVED12', 163: 'D3DDDIFMT_DXVA_RESERVED13', 164: 'D3DDDIFMT_DXVA_RESERVED14', 165: 'D3DDDIFMT_DXVA_RESERVED15', 166: 'D3DDDIFMT_DXVA_RESERVED16', 167: 'D3DDDIFMT_DXVA_RESERVED17', 168: 'D3DDDIFMT_DXVA_RESERVED18', 169: 'D3DDDIFMT_DXVA_RESERVED19', 170: 'D3DDDIFMT_DXVA_RESERVED20', 171: 'D3DDDIFMT_DXVA_RESERVED21', 172: 'D3DDDIFMT_DXVA_RESERVED22', 173: 'D3DDDIFMT_DXVA_RESERVED23', 174: 'D3DDDIFMT_DXVA_RESERVED24', 175: 'D3DDDIFMT_DXVA_RESERVED25', 176: 'D3DDDIFMT_DXVA_RESERVED26', 177: 'D3DDDIFMT_DXVA_RESERVED27', 178: 'D3DDDIFMT_DXVA_RESERVED28', 179: 'D3DDDIFMT_DXVA_RESERVED29', 180: 'D3DDDIFMT_DXVA_RESERVED30', 181: 'D3DDDIFMT_DXVACOMPBUFFER_MAX', 844388420: 'D3DDDIFMT_DXT2', 199: 'D3DDDIFMT_BINARYBUFFER', 861165636: 'D3DDDIFMT_DXT3', 827611204: 'D3DDDIFMT_DXT1', 827606349: 'D3DDDIFMT_MULTI2_ARGB8', 1195525970: 'D3DDDIFMT_R8G8_B8G8', 1498831189: 'D3DDDIFMT_UYVY', 844715353: 'D3DDDIFMT_YUY2', 894720068: 'D3DDDIFMT_DXT5', 1111970375: 'D3DDDIFMT_G8R8_G8B8', 2147483647: 'D3DDDIFMT_FORCE_UINT'}}]],
'PixelValueAccessMode': [28, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_PVAM_UNINITIALIZED', 1: 'D3DKMDT_PVAM_DIRECT', 2: 'D3DKMDT_PVAM_PRESETPALETTE', 3: 'D3DKMDT_PVAM_MAXVALID'}}]],
'PrimSurfSize': [0, ['_D3DKMDT_2DREGION']],
'ColorBasis': [24, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_CB_UNINITIALIZED', 1: 'D3DKMDT_CB_INTENSITY', 2: 'D3DKMDT_CB_SRGB', 3: 'D3DKMDT_CB_SCRGB', 4: 'D3DKMDT_CB_YCBCR', 5: 'D3DKMDT_CB_MAXVALID'}}]],
}],
'_VK_TO_WCHAR_TABLE': [0x10, {
'pVkToWchars': [0, ['pointer64', ['_VK_TO_WCHARS1']]],
'cbSize': [9, ['unsigned char']],
'nModifications': [8, ['unsigned char']],
}],
'__unnamed_1153': [0x10, {
'Reserved': [8, ['BitField', {'end_bit': 61, 'start_bit': 2, 'native_type': 'unsigned long long'}]],
'HeaderType': [8, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long long'}]],
'Sequence': [0, ['BitField', {'end_bit': 25, 'start_bit': 16, 'native_type': 'unsigned long long'}]],
'Region': [8, ['BitField', {'end_bit': 64, 'start_bit': 61, 'native_type': 'unsigned long long'}]],
'Init': [8, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'unsigned long long'}]],
'Depth': [0, ['BitField', {'end_bit': 16, 'start_bit': 0, 'native_type': 'unsigned long long'}]],
'NextEntry': [0, ['BitField', {'end_bit': 64, 'start_bit': 25, 'native_type': 'unsigned long long'}]],
}],
'__unnamed_1158': [0x10, {
'Reserved': [8, ['BitField', {'end_bit': 4, 'start_bit': 2, 'native_type': 'unsigned long long'}]],
'HeaderType': [8, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long long'}]],
'Sequence': [0, ['BitField', {'end_bit': 64, 'start_bit': 16, 'native_type': 'unsigned long long'}]],
'Init': [8, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'unsigned long long'}]],
'Depth': [0, ['BitField', {'end_bit': 16, 'start_bit': 0, 'native_type': 'unsigned long long'}]],
'NextEntry': [8, ['BitField', {'end_bit': 64, 'start_bit': 4, 'native_type': 'unsigned long long'}]],
}],
'_TL': [0x18, {
'pfnFree': [16, ['pointer64', ['void']]],
'pobj': [8, ['pointer64', ['void']]],
'next': [0, ['pointer64', ['_TL']]],
}],
'tagTHREADINFO': [0x3a8, {
'pstrAppName': [416, ['pointer64', ['_UNICODE_STRING']]],
'ForceLegacyResizeNCMetr': [520, ['BitField', {'end_bit': 30, 'start_bit': 29, 'native_type': 'unsigned long long'}]],
'ptl': [336, ['pointer64', ['_TL']]],
'timeLast': [448, ['long']],
'DontJournalAttach': [516, ['BitField', {'end_bit': 29, 'start_bit': 28, 'native_type': 'unsigned long'}]],
'ppi': [344, ['pointer64', ['tagPROCESSINFO']]],
'SendMnuDblClk': [516, ['BitField', {'end_bit': 19, 'start_bit': 18, 'native_type': 'unsigned long'}]],
'DDENoSync': [520, ['BitField', {'end_bit': 19, 'start_bit': 18, 'native_type': 'unsigned long long'}]],
'EditNoMouseHide': [520, ['BitField', {'end_bit': 9, 'start_bit': 8, 'native_type': 'unsigned long long'}]],
'pDevHTInfo': [280, ['pointer64', ['void']]],
'OpenGLEMF': [520, ['BitField', {'end_bit': 27, 'start_bit': 26, 'native_type': 'unsigned long long'}]],
'dwCompatFlags': [516, ['unsigned long']],
'hTouchInputCurrent': [888, ['pointer64', ['HTOUCHINPUT__']]],
'psmsSent': [424, ['pointer64', ['tagSMS']]],
'cVisWindows': [728, ['unsigned long']],
'hPrevHidData': [880, ['pointer64', ['void']]],
'fsHooks': [552, ['unsigned long']],
'qwCompatFlags2': [520, ['unsigned long long']],
'NoPaddedBorder': [520, ['BitField', {'end_bit': 29, 'start_bit': 28, 'native_type': 'unsigned long long'}]],
'NoDrawPatRect': [520, ['BitField', {'end_bit': 4, 'start_bit': 3, 'native_type': 'unsigned long long'}]],
'ForceTTGrapchis': [516, ['BitField', {'end_bit': 16, 'start_bit': 15, 'native_type': 'unsigned long'}]],
'GetDeviceCaps': [516, ['BitField', {'end_bit': 21, 'start_bit': 20, 'native_type': 'unsigned long'}]],
'pgdiBrushAttr': [32, ['pointer64', ['void']]],
'pq': [352, ['pointer64', ['tagQ']]],
'ulWindowSystemRendering': [324, ['unsigned long']],
'dwExpWinVer': [512, ['unsigned long']],
'NoSoftCursOnMoveSize': [520, ['BitField', {'end_bit': 32, 'start_bit': 31, 'native_type': 'unsigned long long'}]],
'psmsReceiveList': [440, ['pointer64', ['tagSMS']]],
'sphkCurrent': [560, ['pointer64', ['tagHOOK']]],
'No50ExStyles': [520, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'unsigned long long'}]],
'IgnoreFaults': [516, ['BitField', {'end_bit': 26, 'start_bit': 25, 'native_type': 'unsigned long'}]],
'pClientInfo': [400, ['pointer64', ['tagCLIENTINFO']]],
'pdcoSrc': [312, ['pointer64', ['void']]],
'pEventQueueServer': [600, ['pointer64', ['_KEVENT']]],
'DealyHwndShakeChk': [516, ['BitField', {'end_bit': 12, 'start_bit': 11, 'native_type': 'unsigned long'}]],
'amdesk': [720, ['unsigned long']],
'fsChangeBitsRemoved': [704, ['unsigned short']],
'psmsCurrent': [432, ['pointer64', ['tagSMS']]],
'NoBatching': [520, ['BitField', {'end_bit': 10, 'start_bit': 9, 'native_type': 'unsigned long long'}]],
'StrictLLHook': [520, ['BitField', {'end_bit': 22, 'start_bit': 21, 'native_type': 'unsigned long long'}]],
'pdcoRender': [304, ['pointer64', ['void']]],
'NoShadow': [520, ['BitField', {'end_bit': 23, 'start_bit': 22, 'native_type': 'unsigned long long'}]],
'EnumHelv': [516, ['BitField', {'end_bit': 13, 'start_bit': 12, 'native_type': 'unsigned long'}]],
'fPack': [928, ['BitField', {'end_bit': 28, 'start_bit': 2, 'native_type': 'unsigned long'}]],
'CallTTDevice': [516, ['BitField', {'end_bit': 5, 'start_bit': 4, 'native_type': 'unsigned long'}]],
'fsReserveKeys': [708, ['unsigned long']],
'Winver31': [516, ['BitField', {'end_bit': 22, 'start_bit': 21, 'native_type': 'unsigned long'}]],
'DisableDBCSProp': [516, ['BitField', {'end_bit': 30, 'start_bit': 29, 'native_type': 'unsigned long'}]],
'Win30AvgWidth': [516, ['BitField', {'end_bit': 20, 'start_bit': 19, 'native_type': 'unsigned long'}]],
'ptlW32': [16, ['pointer64', ['_TL']]],
'AlwaysSendSyncPaint': [516, ['BitField', {'end_bit': 7, 'start_bit': 6, 'native_type': 'unsigned long'}]],
'IgnoreNoDiscard': [516, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'NoTimeCbProtect': [520, ['BitField', {'end_bit': 25, 'start_bit': 24, 'native_type': 'unsigned long long'}]],
'MsShellDlg': [520, ['BitField', {'end_bit': 5, 'start_bit': 4, 'native_type': 'unsigned long long'}]],
'hEventQueueClient': [592, ['pointer64', ['void']]],
'cPaintsReady': [480, ['long']],
'SubtractClips': [516, ['BitField', {'end_bit': 15, 'start_bit': 14, 'native_type': 'unsigned long'}]],
'PtiLink': [608, ['_LIST_ENTRY']],
'DpiAware': [520, ['BitField', {'end_bit': 26, 'start_bit': 25, 'native_type': 'unsigned long long'}]],
'spklActive': [360, ['pointer64', ['tagKL']]],
'bIncludeSprites': [321, ['unsigned char']],
'mlPost': [680, ['tagMLIST']],
'ptLastReal': [636, ['tagPOINT']],
'fThreadCleanupFinished': [928, ['BitField', {'end_bit': 29, 'start_bit': 28, 'native_type': 'unsigned long'}]],
'MultipleBands': [516, ['BitField', {'end_bit': 6, 'start_bit': 5, 'native_type': 'unsigned long'}]],
'Random31Ux': [516, ['BitField', {'end_bit': 28, 'start_bit': 27, 'native_type': 'unsigned long'}]],
'HackWinFlags': [516, ['BitField', {'end_bit': 11, 'start_bit': 10, 'native_type': 'unsigned long'}]],
'pProxyPort': [64, ['pointer64', ['void']]],
'KCOff': [520, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'unsigned long long'}]],
'wParamHkCurrent': [576, ['unsigned long long']],
'readyHead': [912, ['_LIST_ENTRY']],
'UsePrintingEscape': [516, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'unsigned long'}]],
'NoInitFlagsOnFocus': [520, ['BitField', {'end_bit': 18, 'start_bit': 17, 'native_type': 'unsigned long long'}]],
'ForceTextBand': [516, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'unsigned long'}]],
'pEThread': [0, ['pointer64', ['_ETHREAD']]],
'ptdb': [496, ['pointer64', ['tagTDB']]],
'SpareCompatFlags2': [520, ['BitField', {'end_bit': 64, 'start_bit': 33, 'native_type': 'unsigned long long'}]],
'cWindows': [724, ['unsigned long']],
'cEnterCount': [672, ['long']],
'fETWReserved': [928, ['BitField', {'end_bit': 32, 'start_bit': 29, 'native_type': 'unsigned long'}]],
'dwCompatFlags2': [520, ['unsigned long']],
'NoEMFSpooling': [516, ['BitField', {'end_bit': 27, 'start_bit': 26, 'native_type': 'unsigned long'}]],
'pMenuState': [488, ['pointer64', ['tagMENUSTATE']]],
'pRBRecursionCount': [96, ['unsigned long']],
'SmoothScrolling': [516, ['BitField', {'end_bit': 31, 'start_bit': 30, 'native_type': 'unsigned long'}]],
'iVisRgnUniqueness': [328, ['unsigned long']],
'RefCount': [8, ['unsigned long']],
'Win31DevModeSize': [516, ['BitField', {'end_bit': 24, 'start_bit': 23, 'native_type': 'unsigned long'}]],
'pwinsta': [496, ['pointer64', ['tagWINDOWSTATION']]],
'pSBTrack': [584, ['pointer64', ['tagSBTRACK']]],
'ActiveMenus': [520, ['BitField', {'end_bit': 8, 'start_bit': 7, 'native_type': 'unsigned long long'}]],
'spwndDefaultIme': [648, ['pointer64', ['tagWND']]],
'NoCustomPaperSize': [520, ['BitField', {'end_bit': 13, 'start_bit': 12, 'native_type': 'unsigned long long'}]],
'wchInjected': [706, ['wchar']],
'cTimersReady': [484, ['unsigned long']],
'EditSetTextMunge': [516, ['BitField', {'end_bit': 8, 'start_bit': 7, 'native_type': 'unsigned long'}]],
'pUMPDHeap': [48, ['pointer64', ['void']]],
'fgfSwitchInProgressSetter': [928, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'unsigned long'}]],
'iCursorLevel': [624, ['long']],
'NoScrollBarCtxMenu': [516, ['BitField', {'end_bit': 32, 'start_bit': 31, 'native_type': 'unsigned long'}]],
'ulClientDelta': [392, ['unsigned long long']],
'pdcoAA': [296, ['pointer64', ['void']]],
'cNestedStableVisRgn': [908, ['unsigned long']],
'TryExceptCallWndProc': [520, ['BitField', {'end_bit': 17, 'start_bit': 16, 'native_type': 'unsigned long long'}]],
'cti': [864, ['tagCLIENTTHREADINFO']],
'NcCalcSizeOnMove': [516, ['BitField', {'end_bit': 18, 'start_bit': 17, 'native_type': 'unsigned long'}]],
'DisableFontAssoc': [516, ['BitField', {'end_bit': 25, 'start_bit': 24, 'native_type': 'unsigned long'}]],
'pcti': [368, ['pointer64', ['tagCLIENTTHREADINFO']]],
'MsgPPInfo': [904, ['tagMSGPPINFO']],
'DDE': [520, ['BitField', {'end_bit': 14, 'start_bit': 13, 'native_type': 'unsigned long long'}]],
'ulThreadFlags2': [928, ['unsigned long']],
'tlSpriteState': [104, ['_TLSPRITESTATE']],
'NoCharDeadKey': [520, ['BitField', {'end_bit': 16, 'start_bit': 15, 'native_type': 'unsigned long long'}]],
'pqAttach': [528, ['pointer64', ['tagQ']]],
'TTIgnoreRasterDupe': [516, ['BitField', {'end_bit': 10, 'start_bit': 9, 'native_type': 'unsigned long'}]],
'aphkStart': [736, ['array', 16, ['pointer64', ['tagHOOK']]]],
'DefaultCharset': [520, ['BitField', {'end_bit': 15, 'start_bit': 14, 'native_type': 'unsigned long long'}]],
'idLast': [456, ['unsigned long long']],
'rpdesk': [376, ['pointer64', ['tagDESKTOP']]],
'NoWindowArrangement': [520, ['BitField', {'end_bit': 33, 'start_bit': 32, 'native_type': 'unsigned long long'}]],
'AnimationOff': [520, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long long'}]],
'No50ExStyleBits': [520, ['BitField', {'end_bit': 12, 'start_bit': 11, 'native_type': 'unsigned long long'}]],
'TransparentBltMirror': [520, ['BitField', {'end_bit': 28, 'start_bit': 27, 'native_type': 'unsigned long long'}]],
'DDENoAsyncReg': [520, ['BitField', {'end_bit': 21, 'start_bit': 20, 'native_type': 'unsigned long long'}]],
'bEnableEngUpdateDeviceSurface': [320, ['unsigned char']],
'pDeskInfo': [384, ['pointer64', ['tagDESKTOPINFO']]],
'hdesk': [472, ['pointer64', ['HDESK__']]],
'pNonRBRecursionCount': [100, ['unsigned long']],
'MoreExtraWndWords': [516, ['BitField', {'end_bit': 9, 'start_bit': 8, 'native_type': 'unsigned long'}]],
'hklPrev': [664, ['pointer64', ['HKL__']]],
'NoGhost': [520, ['BitField', {'end_bit': 20, 'start_bit': 19, 'native_type': 'unsigned long long'}]],
'IgnoreTopMost': [516, ['BitField', {'end_bit': 4, 'start_bit': 3, 'native_type': 'unsigned long'}]],
'pmsd': [544, ['pointer64', ['_MOVESIZEDATA']]],
'NoHRGN1': [516, ['BitField', {'end_bit': 17, 'start_bit': 16, 'native_type': 'unsigned long'}]],
'exitCode': [464, ['long']],
'NoDDETrackDying': [520, ['BitField', {'end_bit': 6, 'start_bit': 5, 'native_type': 'unsigned long long'}]],
'ptLast': [628, ['tagPOINT']],
'hGestureInfoCurrent': [896, ['pointer64', ['HGESTUREINFO__']]],
'GdiTmpTgoList': [80, ['_LIST_ENTRY']],
'pUMPDObjs': [40, ['pointer64', ['void']]],
'FontSubs': [520, ['BitField', {'end_bit': 11, 'start_bit': 10, 'native_type': 'unsigned long long'}]],
'GiveUpForegound': [520, ['BitField', {'end_bit': 7, 'start_bit': 6, 'native_type': 'unsigned long long'}]],
'spDefaultImc': [656, ['pointer64', ['tagIMC']]],
'pgdiDcattr': [24, ['pointer64', ['void']]],
'TIF_flags': [408, ['unsigned long']],
'apEvent': [712, ['pointer64', ['pointer64', ['_KEVENT']]]],
'HardwareMixer': [520, ['BitField', {'end_bit': 31, 'start_bit': 30, 'native_type': 'unsigned long long'}]],
'pUMPDObj': [56, ['pointer64', ['void']]],
'pSpriteState': [272, ['pointer64', ['void']]],
'EnumTTNotDevice': [516, ['BitField', {'end_bit': 14, 'start_bit': 13, 'native_type': 'unsigned long'}]],
'lParamHkCurrent': [568, ['long long']],
'ulDevHTInfoUniqueness': [288, ['unsigned long']],
'ptiSibling': [536, ['pointer64', ['tagTHREADINFO']]],
'psiiList': [504, ['pointer64', ['tagSVR_INSTANCE_INFO']]],
'ForceFusion': [520, ['BitField', {'end_bit': 24, 'start_bit': 23, 'native_type': 'unsigned long long'}]],
'fSpecialInitialization': [928, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'IncreaseStack': [516, ['BitField', {'end_bit': 23, 'start_bit': 22, 'native_type': 'unsigned long'}]],
'pClientID': [72, ['pointer64', ['void']]],
}],
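# _MOVESIZEDATA: per-thread state for an interactive window move/size loop; the
# f* entries below are bitfield views of the packed Flags dword at offset 164.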
'_MOVESIZEDATA': [0xf0, {
'fmsKbd': [164, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'unsigned long'}]],
'fMoveFromMax': [164, ['BitField', {'end_bit': 8, 'start_bit': 7, 'native_type': 'unsigned long'}]],
'fSnapMoving': [164, ['BitField', {'end_bit': 12, 'start_bit': 11, 'native_type': 'unsigned long'}]],
'ptRestore': [156, ['tagPOINT']],
'fUsePreviewRect': [164, ['BitField', {'end_bit': 27, 'start_bit': 26, 'native_type': 'unsigned long'}]],
'ptStartHitWindowRelative': [208, ['tagPOINT']],
'CurrentHitTarget': [192, ['Enumeration', {'target': 'long', 'choices': {0: 'ThresholdMarginTop', 1: 'ThresholdMarginLeft', 2: 'ThresholdMarginRight', 3: 'ThresholdMarginBottom', 4: 'ThresholdMarginMax'}}]],
'fHasSoftwareCursor': [164, ['BitField', {'end_bit': 28, 'start_bit': 27, 'native_type': 'unsigned long'}]],
'fCheckPtForcefullyRestored': [164, ['BitField', {'end_bit': 19, 'start_bit': 18, 'native_type': 'unsigned long'}]],
'fSnapMovingTemporaryAllowed': [164, ['BitField', {'end_bit': 24, 'start_bit': 23, 'native_type': 'unsigned long'}]],
'Unused': [164, ['BitField', {'end_bit': 32, 'start_bit': 28, 'native_type': 'unsigned long'}]],
'fOffScreen': [164, ['BitField', {'end_bit': 7, 'start_bit': 6, 'native_type': 'unsigned long'}]],
'fWindowWasSuperMaximized': [164, ['BitField', {'end_bit': 13, 'start_bit': 12, 'native_type': 'unsigned long'}]],
'StartCurrentHitTarget': [176, ['Enumeration', {'target': 'long', 'choices': {0: 'ThresholdMarginTop', 1: 'ThresholdMarginLeft', 2: 'ThresholdMarginRight', 3: 'ThresholdMarginBottom', 4: 'ThresholdMarginMax'}}]],
'fSnapSizing': [164, ['BitField', {'end_bit': 11, 'start_bit': 10, 'native_type': 'unsigned long'}]],
'fIsMoveSizeLoop': [164, ['BitField', {'end_bit': 21, 'start_bit': 20, 'native_type': 'unsigned long'}]],
'rcPreviewCursor': [56, ['tagRECT']],
'dyMouse': [140, ['long']],
'fVerticallyMaximizedRight': [164, ['BitField', {'end_bit': 10, 'start_bit': 9, 'native_type': 'unsigned long'}]],
'fTrackCancelled': [164, ['BitField', {'end_bit': 4, 'start_bit': 3, 'native_type': 'unsigned long'}]],
'impx': [148, ['long']],
'impy': [152, ['long']],
'fLockWindowUpdate': [164, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'unsigned long'}]],
'fStartVerticallyMaximizedLeft': [164, ['BitField', {'end_bit': 14, 'start_bit': 13, 'native_type': 'unsigned long'}]],
'ptMinTrack': [88, ['tagPOINT']],
'pMonitorCurrentHitTarget': [184, ['pointer64', ['tagMONITOR']]],
'rcWindow': [104, ['tagRECT']],
'pStartMonitorCurrentHitTarget': [168, ['pointer64', ['tagMONITOR']]],
'cmd': [144, ['long']],
'ptMaxTrack': [96, ['tagPOINT']],
'fForceSizing': [164, ['BitField', {'end_bit': 20, 'start_bit': 19, 'native_type': 'unsigned long'}]],
'fThresholdSelector': [164, ['BitField', {'end_bit': 18, 'start_bit': 15, 'native_type': 'unsigned long'}]],
'MoveRectStyle': [196, ['Enumeration', {'target': 'long', 'choices': {0: 'MoveRectKeepPositionAtCursor', 1: 'MoveRectMidTopAtCursor', 2: 'MoveRectKeepAspectRatioAtCursor', 3: 'MoveRectSidewiseKeepPositionAtCursor'}}]],
'fDragFullWindows': [164, ['BitField', {'end_bit': 6, 'start_bit': 5, 'native_type': 'unsigned long'}]],
'fForeground': [164, ['BitField', {'end_bit': 5, 'start_bit': 4, 'native_type': 'unsigned long'}]],
'ulCountDragOutOfLeftRightTarget': [228, ['unsigned long']],
'ptLastTrack': [216, ['tagPOINT']],
'frcNormalCheckPtValid': [164, ['BitField', {'end_bit': 25, 'start_bit': 24, 'native_type': 'unsigned long'}]],
'fIsHitPtOffScreen': [164, ['BitField', {'end_bit': 22, 'start_bit': 21, 'native_type': 'unsigned long'}]],
'fSnapSizingTemporaryAllowed': [164, ['BitField', {'end_bit': 23, 'start_bit': 22, 'native_type': 'unsigned long'}]],
'fInitSize': [164, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'dxMouse': [136, ['long']],
'fStartVerticallyMaximizedRight': [164, ['BitField', {'end_bit': 15, 'start_bit': 14, 'native_type': 'unsigned long'}]],
'ulCountDragOutOfTopTarget': [224, ['unsigned long']],
'fVerticallyMaximizedLeft': [164, ['BitField', {'end_bit': 9, 'start_bit': 8, 'native_type': 'unsigned long'}]],
'spwnd': [0, ['pointer64', ['tagWND']]],
'fHasPreviewRect': [164, ['BitField', {'end_bit': 26, 'start_bit': 25, 'native_type': 'unsigned long'}]],
'rcPreview': [40, ['tagRECT']],
'rcDragCursor': [24, ['tagRECT']],
'Flags': [164, ['unsigned long']],
'ptHitWindowRelative': [200, ['tagPOINT']],
'rcParent': [72, ['tagRECT']],
'ulCountSizeOutOfTopBottomTarget': [232, ['unsigned long']],
'rcNormalStartCheckPt': [120, ['tagRECT']],
'rcDrag': [8, ['tagRECT']],
}],
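# _LARGE_UNICODE_STRING: a Length field plus a dword at offset 4 that is split
# into a 31-bit MaximumLength and a bAnsi flag in the top bit.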
'_LARGE_UNICODE_STRING': [0x10, {
'Buffer': [8, ['pointer64', ['unsigned short']]],
'Length': [0, ['unsigned long']],
'MaximumLength': [4, ['BitField', {'end_bit': 31, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'bAnsi': [4, ['BitField', {'end_bit': 32, 'start_bit': 31, 'native_type': 'unsigned long'}]],
}],
'VSC_LPWSTR': [0x10, {
'vsc': [0, ['unsigned char']],
'pwsz': [8, ['pointer64', ['unsigned short']]],
}],
'_D3DKMDT_VIDPN_PRESENT_PATH_TRANSFORMATION': [0x10, {
'Scaling': [0, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_VPPS_UNINITIALIZED', 1: 'D3DKMDT_VPPS_IDENTITY', 2: 'D3DKMDT_VPPS_CENTERED', 3: 'D3DKMDT_VPPS_STRETCHED', 4: 'D3DKMDT_VPPS_ASPECTRATIOCENTEREDMAX', 5: 'D3DKMDT_VPPS_CUSTOM', 253: 'D3DKMDT_VPPS_RESERVED1', 254: 'D3DKMDT_VPPS_UNPINNED', 255: 'D3DKMDT_VPPS_NOTSPECIFIED'}}]],
'RotationSupport': [12, ['_D3DKMDT_VIDPN_PRESENT_PATH_ROTATION_SUPPORT']],
'Rotation': [8, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_VPPR_UNINITIALIZED', 1: 'D3DKMDT_VPPR_IDENTITY', 2: 'D3DKMDT_VPPR_ROTATE90', 3: 'D3DKMDT_VPPR_ROTATE180', 4: 'D3DKMDT_VPPR_ROTATE270', 254: 'D3DKMDT_VPPR_UNPINNED', 255: 'D3DKMDT_VPPR_NOTSPECIFIED'}}]],
'ScalingSupport': [4, ['_D3DKMDT_VIDPN_PRESENT_PATH_SCALING_SUPPORT']],
}],
'tagUAHMENUPOPUPMETRICS': [0x14, {
'rgcx': [0, ['array', 4, ['long']]],
'fUpdateMaxWidths': [16, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'long'}]],
}],
'__unnamed_115b': [0x10, {
'NextEntry': [8, ['BitField', {'end_bit': 64, 'start_bit': 4, 'native_type': 'unsigned long long'}]],
'Depth': [0, ['BitField', {'end_bit': 16, 'start_bit': 0, 'native_type': 'unsigned long long'}]],
'Reserved': [8, ['BitField', {'end_bit': 4, 'start_bit': 1, 'native_type': 'unsigned long long'}]],
'HeaderType': [8, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long long'}]],
'Sequence': [0, ['BitField', {'end_bit': 64, 'start_bit': 16, 'native_type': 'unsigned long long'}]],
}],
'_THROBJHEAD': [0x18, {
'h': [0, ['pointer64', ['void']]],
'pti': [16, ['pointer64', ['tagTHREADINFO']]],
'cLockObj': [8, ['unsigned long']],
}],
'_DMM_COFUNCPATHSMODALITY_SERIALIZATION': [0x8, {
'NumPathsFromSource': [0, ['unsigned char']],
'PathAndTargetModeSetOffset': [4, ['array', 1, ['unsigned long']]],
}],
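# tagSBTRACK: scrollbar drag-tracking state; the fHitOld/fTrackVert/fCtlSB/
# fTrackRecalc flags all share the dword at offset 0.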
'tagSBTRACK': [0x68, {
'spwndSBNotify': [24, ['pointer64', ['tagWND']]],
'hTimerSB': [64, ['unsigned long long']],
'cmdSB': [56, ['unsigned long']],
'xxxpfnSB': [48, ['pointer64', ['void']]],
'fTrackVert': [0, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'unsigned long'}]],
'posNew': [84, ['long']],
'posOld': [80, ['long']],
'fCtlSB': [0, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'unsigned long'}]],
'rcTrack': [32, ['tagRECT']],
'fTrackRecalc': [0, ['BitField', {'end_bit': 4, 'start_bit': 3, 'native_type': 'unsigned long'}]],
'spwndSB': [16, ['pointer64', ['tagWND']]],
'spwndTrack': [8, ['pointer64', ['tagWND']]],
'dpxThumb': [72, ['long']],
'pxOld': [76, ['long']],
'fHitOld': [0, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'pSBCalc': [96, ['pointer64', ['tagSBCALC']]],
'nBar': [88, ['long']],
}],
'_DMA_ADAPTER': [0x10, {
'Version': [0, ['unsigned short']],
'DmaOperations': [8, ['pointer64', ['_DMA_OPERATIONS']]],
'Size': [2, ['unsigned short']],
}],
'__unnamed_1217': [0x10, {
'FsInformationClass': [8, ['Enumeration', {'target': 'long', 'choices': {1: 'FileFsVolumeInformation', 2: 'FileFsLabelInformation', 3: 'FileFsSizeInformation', 4: 'FileFsDeviceInformation', 5: 'FileFsAttributeInformation', 6: 'FileFsControlInformation', 7: 'FileFsFullSizeInformation', 8: 'FileFsObjectIdInformation', 9: 'FileFsDriverPathInformation', 10: 'FileFsVolumeFlagsInformation', 11: 'FileFsMaximumInformation'}}]],
'Length': [0, ['unsigned long']],
}],
'tagDPISERVERINFO': [0x28, {
'hMsgFont': [16, ['pointer64', ['HFONT__']]],
'hCaptionFont': [8, ['pointer64', ['HFONT__']]],
'gclBorder': [0, ['long']],
'cxMsgFontChar': [24, ['long']],
'wMaxBtnSize': [32, ['unsigned long']],
'cyMsgFontChar': [28, ['long']],
}],
'HICON__': [0x4, {
'unused': [0, ['long']],
}],
'_DMM_VIDPNTARGETMODESET_SERIALIZATION': [0x50, {
'NumModes': [0, ['unsigned char']],
'ModeSerialization': [8, ['array', 1, ['_D3DKMDT_VIDPN_TARGET_MODE']]],
}],
'__unnamed_16c1': [0x8, {
'ActiveSize': [0, ['_D3DKMDT_2DREGION']],
'MaxPixelRate': [0, ['unsigned long long']],
}],
'__unnamed_127c': [0x48, {
'Wcb': [0, ['_WAIT_CONTEXT_BLOCK']],
'ListEntry': [0, ['_LIST_ENTRY']],
}],
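# _D3DMATRIX: 4x4 float matrix in row-major order; element _RC sits at byte
# offset ((R-1)*4 + (C-1))*4 (e.g. _11 at 0, _12 at 4, _21 at 16).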
'_D3DMATRIX': [0x40, {
'_33': [40, ['float']],
'_42': [52, ['float']],
'_43': [56, ['float']],
'_44': [60, ['float']],
'_34': [44, ['float']],
'_14': [12, ['float']],
'_13': [8, ['float']],
'_12': [4, ['float']],
'_11': [0, ['float']],
'_41': [48, ['float']],
'_31': [32, ['float']],
'_24': [28, ['float']],
'_32': [36, ['float']],
'_22': [20, ['float']],
'_23': [24, ['float']],
'_21': [16, ['float']],
}],
'__unnamed_18a1': [0x20, {
'Text': [0, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_TRF_UNINITIALIZED'}}]],
'Graphics': [0, ['_D3DKMDT_GRAPHICS_RENDERING_FORMAT']],
}],
'HGESTUREINFO__': [0x4, {
'unused': [0, ['long']],
}],
'_VK_TO_FUNCTION_TABLE': [0x84, {
'NLSFEProcType': [1, ['unsigned char']],
'NLSFEProcSwitch': [3, ['unsigned char']],
'Vk': [0, ['unsigned char']],
'NLSFEProcCurrent': [2, ['unsigned char']],
'NLSFEProcAlt': [68, ['array', 8, ['_VK_FUNCTION_PARAM']]],
'NLSFEProc': [4, ['array', 8, ['_VK_FUNCTION_PARAM']]],
}],
'__unnamed_16ca': [0x10, {
'Attrib': [0, ['Enumeration', {'target': 'long', 'choices': {0: 'WCA_UNDEFINED', 1: 'WCA_NCRENDERING_ENABLED', 2: 'WCA_NCRENDERING_POLICY', 3: 'WCA_TRANSITIONS_FORCEDISABLED', 4: 'WCA_ALLOW_NCPAINT', 5: 'WCA_CAPTION_BUTTON_BOUNDS', 6: 'WCA_NONCLIENT_RTL_LAYOUT', 7: 'WCA_FORCE_ICONIC_REPRESENTATION', 8: 'WCA_FLIP3D_POLICY', 9: 'WCA_EXTENDED_FRAME_BOUNDS', 10: 'WCA_HAS_ICONIC_BITMAP', 11: 'WCA_THEME_ATTRIBUTES', 12: 'WCA_NCRENDERING_EXILED', 13: 'WCA_NCADORNMENTINFO', 14: 'WCA_EXCLUDED_FROM_LIVEPREVIEW', 15: 'WCA_VIDEO_OVERLAY_ACTIVE', 16: 'WCA_FORCE_ACTIVEWINDOW_APPEARANCE', 17: 'WCA_DISALLOW_PEEK', 18: 'WCA_LAST'}}]],
'cbData': [8, ['unsigned long long']],
}],
'_DMM_VIDPNPATHANDTARGETMODESET_SERIALIZATION': [0x1b8, {
'PathInfo': [0, ['_D3DKMDT_VIDPN_PRESENT_PATH']],
'TargetModeSet': [360, ['_DMM_VIDPNTARGETMODESET_SERIALIZATION']],
}],
'HDESK__': [0x4, {
'unused': [0, ['long']],
}],
'VK_TO_BIT': [0x2, {
'Vk': [0, ['unsigned char']],
'ModBits': [1, ['unsigned char']],
}],
'tagIMEINFOEX': [0x160, {
'fSysWow64Only': [348, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'long'}]],
'wszImeFile': [188, ['array', 80, ['wchar']]],
'fLoadFlag': [76, ['long']],
'hkl': [0, ['pointer64', ['HKL__']]],
'dwImeWinVersion': [84, ['unsigned long']],
'dwProdVersion': [80, ['unsigned long']],
'wszImeDescription': [88, ['array', 50, ['wchar']]],
'fCUASLayer': [348, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'long'}]],
'ImeInfo': [8, ['tagIMEINFO']],
'wszUIClass': [36, ['array', 16, ['wchar']]],
'fInitOpen': [72, ['long']],
'fdwInitConvMode': [68, ['unsigned long']],
}],
'__unnamed_12e0': [0x2c, {
'InitialPrivilegeSet': [0, ['_INITIAL_PRIVILEGE_SET']],
'PrivilegeSet': [0, ['_PRIVILEGE_SET']],
}],
'_D3DKMDT_VIDPN_PRESENT_PATH_COPYPROTECTION_SUPPORT': [0x4, {
'MacroVisionFull': [0, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'unsigned long'}]],
'MacroVisionApsTrigger': [0, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'unsigned long'}]],
'NoProtection': [0, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'Reserved': [0, ['BitField', {'end_bit': 32, 'start_bit': 3, 'native_type': 'unsigned long'}]],
}],
'_SCATTER_GATHER_ELEMENT': [0x18, {
'Length': [8, ['unsigned long']],
'Reserved': [16, ['unsigned long long']],
'Address': [0, ['_LARGE_INTEGER']],
}],
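# tagWND: the USER window object. The b* entries are generated bitfield views of
# the packed state (offset 40), state2 (44), ExStyle (48), style (52) and
# ExStyle2 (288) dwords. Field spellings such as 'bHasMeun' and
# 'bRecievedSuspendMsg' appear verbatim in the vendor debug symbols this table
# was generated from; do not "correct" them, or lookups against the symbols break.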
'tagWND': [0x128, {
'bEraseBackground': [40, ['BitField', {'end_bit': 11, 'start_bit': 10, 'native_type': 'long'}]],
'spwndOwner': [104, ['pointer64', ['tagWND']]],
'bWS_EX_LAYERED': [48, ['BitField', {'end_bit': 20, 'start_bit': 19, 'native_type': 'long'}]],
'bWS_CLIPCHILDREN': [52, ['BitField', {'end_bit': 26, 'start_bit': 25, 'native_type': 'long'}]],
'bMaximizeButtonDown': [44, ['BitField', {'end_bit': 14, 'start_bit': 13, 'native_type': 'long'}]],
'cbwndExtra': [232, ['long']],
'bMakeVisibleWhenUnghosted': [48, ['BitField', {'end_bit': 12, 'start_bit': 11, 'native_type': 'long'}]],
'bUIStateActive': [48, ['BitField', {'end_bit': 27, 'start_bit': 26, 'native_type': 'long'}]],
'hMod16': [64, ['unsigned short']],
'bWS_TABSTOP': [52, ['BitField', {'end_bit': 17, 'start_bit': 16, 'native_type': 'long'}]],
'bUnused8': [52, ['BitField', {'end_bit': 18, 'start_bit': 16, 'native_type': 'long'}]],
'bWS_EX_NOPARENTNOTIFY': [48, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'long'}]],
'bForceFullNCPaintClipRgn': [44, ['BitField', {'end_bit': 24, 'start_bit': 23, 'native_type': 'long'}]],
'bDialogWindow': [40, ['BitField', {'end_bit': 17, 'start_bit': 16, 'native_type': 'long'}]],
'lpfnWndProc': [144, ['pointer64', ['void']]],
'bWS_EX_RTLREADING': [48, ['BitField', {'end_bit': 14, 'start_bit': 13, 'native_type': 'long'}]],
'bMinimizeButtonDown': [44, ['BitField', {'end_bit': 15, 'start_bit': 14, 'native_type': 'long'}]],
'bUnused2': [48, ['BitField', {'end_bit': 16, 'start_bit': 15, 'native_type': 'long'}]],
'bUnused3': [48, ['BitField', {'end_bit': 22, 'start_bit': 21, 'native_type': 'long'}]],
'bUnused4': [48, ['BitField', {'end_bit': 25, 'start_bit': 24, 'native_type': 'long'}]],
'bHasMeun': [40, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'long'}]],
'bUnused6': [52, ['BitField', {'end_bit': 17, 'start_bit': 16, 'native_type': 'long'}]],
'bUnused7': [52, ['BitField', {'end_bit': 18, 'start_bit': 16, 'native_type': 'long'}]],
'bWS_SIZEBOX': [52, ['BitField', {'end_bit': 19, 'start_bit': 18, 'native_type': 'long'}]],
'style': [52, ['unsigned long']],
'ppropList': [168, ['pointer64', ['tagPROPLIST']]],
'hrgnNewFrame': [208, ['pointer64', ['HRGN__']]],
'bHasOverlay': [288, ['BitField', {'end_bit': 10, 'start_bit': 9, 'native_type': 'long'}]],
'bUnused9': [52, ['BitField', {'end_bit': 19, 'start_bit': 16, 'native_type': 'long'}]],
'bClipboardListener': [288, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'long'}]],
'bScrollBarLineDownBtnDown': [44, ['BitField', {'end_bit': 20, 'start_bit': 19, 'native_type': 'long'}]],
'bReserved3': [52, ['BitField', {'end_bit': 16, 'start_bit': 0, 'native_type': 'long'}]],
'bRedirectedForPrint': [288, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'long'}]],
'bWS_EX_RIGHT': [48, ['BitField', {'end_bit': 13, 'start_bit': 12, 'native_type': 'long'}]],
'bStartPaint': [44, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'long'}]],
'bHasCreatestructName': [40, ['BitField', {'end_bit': 18, 'start_bit': 17, 'native_type': 'long'}]],
'bWS_EX_COMPOSITED': [48, ['BitField', {'end_bit': 26, 'start_bit': 25, 'native_type': 'long'}]],
'bFullScreen': [44, ['BitField', {'end_bit': 7, 'start_bit': 6, 'native_type': 'long'}]],
'spwndLastActive': [240, ['pointer64', ['tagWND']]],
'hrgnUpdate': [160, ['pointer64', ['HRGN__']]],
'head': [0, ['_THRDESKHEAD']],
'bConsoleWindow': [288, ['BitField', {'end_bit': 11, 'start_bit': 10, 'native_type': 'long'}]],
'bHiddenPopup': [40, ['BitField', {'end_bit': 15, 'start_bit': 14, 'native_type': 'long'}]],
'hrgnClip': [200, ['pointer64', ['HRGN__']]],
'bWS_EX_CONTROLPARENT': [48, ['BitField', {'end_bit': 17, 'start_bit': 16, 'native_type': 'long'}]],
'bWS_EX_TOPMOST': [48, ['BitField', {'end_bit': 4, 'start_bit': 3, 'native_type': 'long'}]],
'bSendEraseBackground': [40, ['BitField', {'end_bit': 10, 'start_bit': 9, 'native_type': 'long'}]],
'bScrollBarLineUpBtnDown': [44, ['BitField', {'end_bit': 17, 'start_bit': 16, 'native_type': 'long'}]],
'bWin50Compat': [44, ['BitField', {'end_bit': 11, 'start_bit': 10, 'native_type': 'long'}]],
'bRecievedQuerySuspendMsg': [40, ['BitField', {'end_bit': 25, 'start_bit': 24, 'native_type': 'long'}]],
'bMaximizeMonitorRegion': [44, ['BitField', {'end_bit': 12, 'start_bit': 11, 'native_type': 'long'}]],
'bLayeredLimbo': [288, ['BitField', {'end_bit': 6, 'start_bit': 5, 'native_type': 'long'}]],
'bRedrawIfHung': [40, ['BitField', {'end_bit': 28, 'start_bit': 27, 'native_type': 'long'}]],
'FullScreenMode': [44, ['BitField', {'end_bit': 27, 'start_bit': 24, 'native_type': 'long'}]],
'bLayeredInvalidate': [288, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'long'}]],
'bVerticallyMaximizedLeft': [288, ['BitField', {'end_bit': 8, 'start_bit': 7, 'native_type': 'long'}]],
'bWS_POPUP': [52, ['BitField', {'end_bit': 32, 'start_bit': 31, 'native_type': 'long'}]],
'bWS_EX_CONTEXTHELP': [48, ['BitField', {'end_bit': 11, 'start_bit': 10, 'native_type': 'long'}]],
'dwUserData': [256, ['unsigned long long']],
'bDisabled': [52, ['BitField', {'end_bit': 28, 'start_bit': 27, 'native_type': 'long'}]],
'bAnsiWindowProc': [40, ['BitField', {'end_bit': 20, 'start_bit': 19, 'native_type': 'long'}]],
'bWin40Compat': [44, ['BitField', {'end_bit': 10, 'start_bit': 9, 'native_type': 'long'}]],
'bWS_EX_NOINHERITLAYOUT': [48, ['BitField', {'end_bit': 21, 'start_bit': 20, 'native_type': 'long'}]],
'rcClient': [128, ['tagRECT']],
'bAnsiCreator': [40, ['BitField', {'end_bit': 30, 'start_bit': 29, 'native_type': 'long'}]],
'bAnyScrollButtonDown': [44, ['BitField', {'end_bit': 21, 'start_bit': 20, 'native_type': 'long'}]],
'bWS_EX_LAYOUTRTL': [48, ['BitField', {'end_bit': 23, 'start_bit': 22, 'native_type': 'long'}]],
'bUIStateKbdAccelHidden': [48, ['BitField', {'end_bit': 31, 'start_bit': 30, 'native_type': 'long'}]],
'bSendSizeMoveMsgs': [40, ['BitField', {'end_bit': 5, 'start_bit': 4, 'native_type': 'long'}]],
'spwndParent': [88, ['pointer64', ['tagWND']]],
'bLinked': [288, ['BitField', {'end_bit': 4, 'start_bit': 3, 'native_type': 'long'}]],
'bSendNCPaint': [40, ['BitField', {'end_bit': 12, 'start_bit': 11, 'native_type': 'long'}]],
'bToggleTopmost': [40, ['BitField', {'end_bit': 27, 'start_bit': 26, 'native_type': 'long'}]],
'bInternalPaint': [40, ['BitField', {'end_bit': 13, 'start_bit': 12, 'native_type': 'long'}]],
'bDestroyed': [40, ['BitField', {'end_bit': 32, 'start_bit': 31, 'native_type': 'long'}]],
'bHasClientEdge': [44, ['BitField', {'end_bit': 5, 'start_bit': 4, 'native_type': 'long'}]],
'bServerSideWindowProc': [40, ['BitField', {'end_bit': 19, 'start_bit': 18, 'native_type': 'long'}]],
'bCaptionTextTruncated': [44, ['BitField', {'end_bit': 28, 'start_bit': 27, 'native_type': 'long'}]],
'rcWindow': [112, ['tagRECT']],
'bEndPaintInvalidate': [44, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'long'}]],
'bHasPalette': [40, ['BitField', {'end_bit': 22, 'start_bit': 21, 'native_type': 'long'}]],
'bHasHorizontalScrollbar': [40, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'long'}]],
'bUIStateFocusRectHidden': [48, ['BitField', {'end_bit': 32, 'start_bit': 31, 'native_type': 'long'}]],
'bReserved1': [52, ['BitField', {'end_bit': 16, 'start_bit': 0, 'native_type': 'long'}]],
'bWS_EX_COMPOSITEDCompositing': [48, ['BitField', {'end_bit': 29, 'start_bit': 28, 'native_type': 'long'}]],
'bWS_EX_MDICHILD': [48, ['BitField', {'end_bit': 7, 'start_bit': 6, 'native_type': 'long'}]],
'bHasVerticalScrollbar': [40, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'long'}]],
'bReserved2': [52, ['BitField', {'end_bit': 16, 'start_bit': 0, 'native_type': 'long'}]],
'bWMCreateMsgProcessed': [44, ['BitField', {'end_bit': 32, 'start_bit': 31, 'native_type': 'long'}]],
'bMinimized': [52, ['BitField', {'end_bit': 30, 'start_bit': 29, 'native_type': 'long'}]],
'bWS_EX_NOACTIVATE': [48, ['BitField', {'end_bit': 28, 'start_bit': 27, 'native_type': 'long'}]],
'bWS_EX_APPWINDOW': [48, ['BitField', {'end_bit': 19, 'start_bit': 18, 'native_type': 'long'}]],
'pSBInfo': [176, ['pointer64', ['tagSBINFO']]],
'bSmallIconFromWMQueryDrag': [44, ['BitField', {'end_bit': 30, 'start_bit': 29, 'native_type': 'long'}]],
'bNoNCPaint': [40, ['BitField', {'end_bit': 9, 'start_bit': 8, 'native_type': 'long'}]],
'bCloseButtonDown': [44, ['BitField', {'end_bit': 13, 'start_bit': 12, 'native_type': 'long'}]],
'bUnused1': [48, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'long'}]],
'bHasSPB': [40, ['BitField', {'end_bit': 8, 'start_bit': 7, 'native_type': 'long'}]],
'bWS_MINIMIZEBOX': [52, ['BitField', {'end_bit': 18, 'start_bit': 17, 'native_type': 'long'}]],
'bMaximized': [52, ['BitField', {'end_bit': 25, 'start_bit': 24, 'native_type': 'long'}]],
'bScrollBarVerticalTracking': [44, ['BitField', {'end_bit': 22, 'start_bit': 21, 'native_type': 'long'}]],
'bWS_CHILD': [52, ['BitField', {'end_bit': 31, 'start_bit': 30, 'native_type': 'long'}]],
'bReserved5': [52, ['BitField', {'end_bit': 16, 'start_bit': 0, 'native_type': 'long'}]],
'bWS_EX_DLGMODALFRAME': [48, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'long'}]],
'bWS_EX_TRANSPARENT': [48, ['BitField', {'end_bit': 6, 'start_bit': 5, 'native_type': 'long'}]],
'spmenu': [192, ['pointer64', ['tagMENU']]],
'bWS_THICKFRAME': [52, ['BitField', {'end_bit': 19, 'start_bit': 18, 'native_type': 'long'}]],
'bPaintNotProcessed': [40, ['BitField', {'end_bit': 23, 'start_bit': 22, 'native_type': 'long'}]],
'bSyncPaintPending': [40, ['BitField', {'end_bit': 24, 'start_bit': 23, 'native_type': 'long'}]],
'pcls': [152, ['pointer64', ['tagCLS']]],
'bLayeredForDWM': [288, ['BitField', {'end_bit': 5, 'start_bit': 4, 'native_type': 'long'}]],
'bMsgBox': [40, ['BitField', {'end_bit': 6, 'start_bit': 5, 'native_type': 'long'}]],
'bShellHookRegistered': [44, ['BitField', {'end_bit': 31, 'start_bit': 30, 'native_type': 'long'}]],
'spwndChild': [96, ['pointer64', ['tagWND']]],
'bUnused5': [52, ['BitField', {'end_bit': 17, 'start_bit': 16, 'native_type': 'long'}]],
'bHelpButtonDown': [44, ['BitField', {'end_bit': 16, 'start_bit': 15, 'native_type': 'long'}]],
'bInDestroy': [44, ['BitField', {'end_bit': 8, 'start_bit': 7, 'native_type': 'long'}]],
'state': [40, ['unsigned long']],
'strName': [216, ['_LARGE_UNICODE_STRING']],
'spwndPrev': [80, ['pointer64', ['tagWND']]],
'bRedrawFrameIfHung': [40, ['BitField', {'end_bit': 29, 'start_bit': 28, 'native_type': 'long'}]],
'bWS_EX_LEFTSCROLLBAR': [48, ['BitField', {'end_bit': 15, 'start_bit': 14, 'native_type': 'long'}]],
'bWS_EX_TOOLWINDOW': [48, ['BitField', {'end_bit': 8, 'start_bit': 7, 'native_type': 'long'}]],
'bWS_VSCROLL': [52, ['BitField', {'end_bit': 22, 'start_bit': 21, 'native_type': 'long'}]],
'bMaximizesToMonitor': [40, ['BitField', {'end_bit': 31, 'start_bit': 30, 'native_type': 'long'}]],
'bNoMinmaxAnimatedRects': [44, ['BitField', {'end_bit': 29, 'start_bit': 28, 'native_type': 'long'}]],
'fnid': [66, ['unsigned short']],
'ExStyle': [48, ['unsigned long']],
'bRedirected': [48, ['BitField', {'end_bit': 30, 'start_bit': 29, 'native_type': 'long'}]],
'bActiveFrame': [40, ['BitField', {'end_bit': 7, 'start_bit': 6, 'native_type': 'long'}]],
'bReserved4': [52, ['BitField', {'end_bit': 16, 'start_bit': 0, 'native_type': 'long'}]],
'bWS_EX_WINDOWEDGE': [48, ['BitField', {'end_bit': 9, 'start_bit': 8, 'native_type': 'long'}]],
'bReserved6': [52, ['BitField', {'end_bit': 16, 'start_bit': 0, 'native_type': 'long'}]],
'bReserved7': [52, ['BitField', {'end_bit': 16, 'start_bit': 0, 'native_type': 'long'}]],
'bWS_CLIPSIBLINGS': [52, ['BitField', {'end_bit': 27, 'start_bit': 26, 'native_type': 'long'}]],
'bWS_EX_ACCEPTFILE': [48, ['BitField', {'end_bit': 5, 'start_bit': 4, 'native_type': 'long'}]],
'bWS_HSCROLL': [52, ['BitField', {'end_bit': 21, 'start_bit': 20, 'native_type': 'long'}]],
'bUpdateDirty': [40, ['BitField', {'end_bit': 14, 'start_bit': 13, 'native_type': 'long'}]],
'bBeingActivated': [40, ['BitField', {'end_bit': 21, 'start_bit': 20, 'native_type': 'long'}]],
'state2': [44, ['unsigned long']],
'spwndNext': [72, ['pointer64', ['tagWND']]],
'bScrollBarPageDownBtnDown': [44, ['BitField', {'end_bit': 19, 'start_bit': 18, 'native_type': 'long'}]],
'bWS_BORDER': [52, ['BitField', {'end_bit': 24, 'start_bit': 23, 'native_type': 'long'}]],
'bWMPaintSent': [44, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'long'}]],
'bScrollBarPageUpBtnDown': [44, ['BitField', {'end_bit': 18, 'start_bit': 17, 'native_type': 'long'}]],
'pTransform': [272, ['pointer64', ['_D3DMATRIX']]],
'bWS_MAXIMIZEBOX': [52, ['BitField', {'end_bit': 17, 'start_bit': 16, 'native_type': 'long'}]],
'bVisible': [52, ['BitField', {'end_bit': 29, 'start_bit': 28, 'native_type': 'long'}]],
'bVerticallyMaximizedRight': [288, ['BitField', {'end_bit': 9, 'start_bit': 8, 'native_type': 'long'}]],
'bWin31Compat': [44, ['BitField', {'end_bit': 9, 'start_bit': 8, 'native_type': 'long'}]],
'bWS_EX_STATICEDGE': [48, ['BitField', {'end_bit': 18, 'start_bit': 17, 'native_type': 'long'}]],
'bForceMenuDraw': [40, ['BitField', {'end_bit': 16, 'start_bit': 15, 'native_type': 'long'}]],
'bForceNCPaint': [44, ['BitField', {'end_bit': 23, 'start_bit': 22, 'native_type': 'long'}]],
'ExStyle2': [288, ['unsigned long']],
'bOldUI': [44, ['BitField', {'end_bit': 4, 'start_bit': 3, 'native_type': 'long'}]],
'bWS_DLGFRAME': [52, ['BitField', {'end_bit': 23, 'start_bit': 22, 'native_type': 'long'}]],
'bHIGHDPI_UNAWARE_Unused': [288, ['BitField', {'end_bit': 7, 'start_bit': 6, 'native_type': 'long'}]],
'bWS_SYSMENU': [52, ['BitField', {'end_bit': 20, 'start_bit': 19, 'native_type': 'long'}]],
'spwndClipboardListenerNext': [280, ['pointer64', ['tagWND']]],
'hModule': [56, ['pointer64', ['void']]],
'bWS_EX_NOPADDEDBORDER': [48, ['BitField', {'end_bit': 24, 'start_bit': 23, 'native_type': 'long'}]],
'pActCtx': [264, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'bBottomMost': [44, ['BitField', {'end_bit': 6, 'start_bit': 5, 'native_type': 'long'}]],
'spmenuSys': [184, ['pointer64', ['tagMENU']]],
'bRecievedSuspendMsg': [40, ['BitField', {'end_bit': 26, 'start_bit': 25, 'native_type': 'long'}]],
'bWS_EX_CLIENTEDGE': [48, ['BitField', {'end_bit': 10, 'start_bit': 9, 'native_type': 'long'}]],
'bHasCaption': [40, ['BitField', {'end_bit': 4, 'start_bit': 3, 'native_type': 'long'}]],
'hImc': [248, ['pointer64', ['HIMC__']]],
'bChildNoActivate': [288, ['BitField', {'end_bit': 12, 'start_bit': 11, 'native_type': 'long'}]],
'bWS_GROUP': [52, ['BitField', {'end_bit': 18, 'start_bit': 17, 'native_type': 'long'}]],
}],
'tagUAHMENUITEMMETRICS': [0x20, {
'rgsizeBar': [0, ['array', 2, ['tagSIZE']]],
'rgsizePopup': [0, ['array', 4, ['tagSIZE']]],
}],
'_DXGK_DIAG_CODE_POINT_PACKET': [0x40, {
'Header': [0, ['_DXGK_DIAG_HEADER']],
'Param3': [60, ['unsigned long']],
'Param1': [52, ['unsigned long']],
 'CodePointType': [48, ['Enumeration', {'target': 'long', 'choices': {0: 'DXGK_DIAG_CODE_POINT_TYPE_NONE', 1: 'DXGK_DIAG_CODE_POINT_TYPE_RECOMMEND_FUNC_VIDPN', 2: 'DXGK_DIAG_CODE_POINT_TYPE_OS_RECOMMENDED_VIDPN', 3: 'DXGK_DIAG_CODE_POINT_TYPE_SDC_LOG_FAILURE', 4: 'DXGK_DIAG_CODE_POINT_TYPE_SDC_INVALIDATE_ERROR', 5: 'DXGK_DIAG_CODE_POINT_TYPE_CDS_LOG_FAILURE', 7: 'DXGK_DIAG_CODE_POINT_TYPE_CDS_FAILURE_DB', 8: 'DXGK_DIAG_CODE_POINT_TYPE_RETRIEVE_BTL', 9: 'DXGK_DIAG_CODE_POINT_TYPE_RETRIEVE_DB', 10: 'DXGK_DIAG_CODE_POINT_TYPE_QDC_LOG_FAILURE', 11: 'DXGK_DIAG_CODE_POINT_TYPE_POWER_ON_GDI', 12: 'DXGK_DIAG_CODE_POINT_TYPE_POWER_OFF_GDI', 13: 'DXGK_DIAG_CODE_POINT_TYPE_POWER_ON_MONITOR', 14: 'DXGK_DIAG_CODE_POINT_TYPE_POWER_OFF_MONITOR', 15: 'DXGK_DIAG_CODE_POINT_TYPE_POWER_DIM_MONITOR', 16: 'DXGK_DIAG_CODE_POINT_TYPE_POWER_UNDIM_MONITOR', 17: 'DXGK_DIAG_CODE_POINT_TYPE_BML_BACKTRACK', 18: 'DXGK_DIAG_CODE_POINT_TYPE_BML_CLOSEST_TARGET_MODE', 19: 'DXGK_DIAG_CODE_POINT_TYPE_BML_NO_EXACT_SOURCE_MODE', 20: 'DXGK_DIAG_CODE_POINT_TYPE_BML_NO_EXACT_TARGET_MODE', 21: 'DXGK_DIAG_CODE_POINT_TYPE_BML_SOURCE_MODE_NOT_PINNED', 22: 'DXGK_DIAG_CODE_POINT_TYPE_BML_TARGET_MODE_NOT_PINNED', 23: 'DXGK_DIAG_CODE_POINT_TYPE_BML_RESTARTED', 24: 'DXGK_DIAG_CODE_POINT_TYPE_TDR', 25: 'DXGK_DIAG_CODE_POINT_TYPE_ACPI_EVENT_NOTIFICATION', 26: 'DXGK_DIAG_CODE_POINT_TYPE_CREATEMDEV_USE_DEFAULT_MODE', 27: 'DXGK_DIAG_CODE_POINT_TYPE_CONNECTED_SET_LOG_FAILURE', 28: 'DXGK_DIAG_CODE_POINT_TYPE_INVALIDATE_DXGK_MODE_CACHE', 29: 'DXGK_DIAG_CODE_POINT_TYPE_REBUILD_DXGK_MODE_CACHE', 30: 'DXGK_DIAG_CODE_POINT_TYPE_CREATEFUNVIDPN_RELAX_REFRESH_MATCH', 31: 'DXGK_DIAG_CODE_POINT_TYPE_CREATEFUNVIDPN_CCDBML_FAIL_VISTABML_SUCCESSED', 32: 'DXGK_DIAG_CODE_POINT_TYPE_BML_BEST_SOURCE_MODE', 33: 'DXGK_DIAG_CODE_POINT_TYPE_BML_BEST_TARGET_MODE', 34: 'DXGK_DIAG_CODE_POINT_TYPE_ADD_DEVICE', 35: 'DXGK_DIAG_CODE_POINT_TYPE_START_ADAPTER', 36: 'DXGK_DIAG_CODE_POINT_TYPE_STOP_ADAPTER', 37: 'DXGK_DIAG_CODE_POINT_TYPE_CHILD_POLLING', 38: 'DXGK_DIAG_CODE_POINT_TYPE_CHILD_POLLING_TARGET', 39: 'DXGK_DIAG_CODE_POINT_TYPE_INDICATE_CHILD_STATUS', 40: 'DXGK_DIAG_CODE_POINT_TYPE_HANDLE_IRP', 41: 'DXGK_DIAG_CODE_POINT_TYPE_CHANGE_UNSUPPORTED_MONITOR_MODE_FLAG', 42: 'DXGK_DIAG_CODE_POINT_TYPE_ACPI_NOTIFY_CALLBACK', 43: 'DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_EXCLUDE_EVICTALL_DISABLEGDI', 44: 'DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_EXCLUDE_EVICTALL_ENABLEGDI', 45: 'DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_EXCLUDE_MODESWITCH', 46: 'DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_SYNC_MONITOR_EVENT', 47: 'DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_PNP_NOTIFY_GDI', 48: 'DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_PNP_ENABLE_VGA', 49: 'DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_TDR_SWITCH_GDI', 50: 'DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_CDD_CREATE_DEVICE_FAILED', 51: 'DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_CDD_DEVICE_REMOVED', 52: 'DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_CDD_DRVASSERTMODE_TRUE_FAILED', 53: 'DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_CDD_RECREATE_DEVICE_FAILED', 54: 'DXGK_DIAG_CODE_POINT_TYPE_CDD_MAPSHADOWBUFFER_FAILED', 55: 'DXGK_DIAG_CODE_POINT_TYPE_COMMIT_VIDPN_LOG_FAILURE', 56: 'DXGK_DIAG_CODE_POINT_TYPE_DRIVER_RECOMMEND_LOG_FAILURE', 57: 'DXGK_DIAG_CODE_POINT_TYPE_SDC_ENFORCED_CLONE_PATH_INVALID_SOURCE_IDX', 58: 'DXGK_DIAG_CODE_POINT_TYPE_DRVPROBEANDCAPTURE_FAILED', 59: 'DXGK_DIAG_CODE_POINT_TYPE_DXGKCDDENABLE_OPTIMIZED_MODE_CHANGE', 60: 'DXGK_DIAG_CODE_POINT_TYPE_DXGKSETDISPLAYMODE_OPTIMIZED_MODE_CHANGE', 61: 'DXGK_DIAG_CODE_POINT_TYPE_MON_DEPART_GETRECENTTOP_FAIL', 62: 'DXGK_DIAG_CODE_POINT_TYPE_MON_ARRIVE_INC_ADD_FAIL', 63: 'DXGK_DIAG_CODE_POINT_TYPE_CCD_DATABASE_PERSIST', 64: 'DXGK_DIAG_CODE_POINT_TYPE_MAX', -1: 'DXGK_DIAG_CODE_POINT_TYPE_FORCE_UINT32'}}]],
'Param2': [56, ['unsigned long']],
}],
'tagW32JOB': [0x40, {
'restrictions': [24, ['unsigned long']],
'Job': [8, ['pointer64', ['_EJOB']]],
'ughCrt': [48, ['unsigned long']],
'pgh': [56, ['pointer64', ['unsigned long long']]],
'ppiTable': [40, ['pointer64', ['pointer64', ['tagPROCESSINFO']]]],
'ughMax': [52, ['unsigned long']],
'pAtomTable': [16, ['pointer64', ['void']]],
'uProcessCount': [28, ['unsigned long']],
'uMaxProcesses': [32, ['unsigned long']],
'pNext': [0, ['pointer64', ['tagW32JOB']]],
}],
'tagMBSTRING': [0x28, {
'szName': [0, ['array', 15, ['wchar']]],
'uID': [32, ['unsigned long']],
'uStr': [36, ['unsigned long']],
}],
'_D3DKMDT_VIDPN_TARGET_MODE': [0x48, {
'VideoSignalInfo': [8, ['_D3DKMDT_VIDEO_SIGNAL_INFO']],
'Id': [0, ['unsigned long']],
'Preference': [64, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_MP_UNINITIALIZED', 1: 'D3DKMDT_MP_PREFERRED', 2: 'D3DKMDT_MP_MAXVALID'}}]],
}],
'__unnamed_124f': [0x4, {
'PowerState': [0, ['Enumeration', {'target': 'long', 'choices': {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'}}]],
}],
'__unnamed_124b': [0x10, {
'Type': [8, ['Enumeration', {'target': 'long', 'choices': {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'}}]],
'Reserved': [1, ['array', 3, ['unsigned char']]],
'InPath': [0, ['unsigned char']],
}],
'tagDESKTOP': [0xe0, {
'spmenuVScroll': [80, ['pointer64', ['tagMENU']]],
'dwMouseHoverTime': [212, ['unsigned long']],
'rpwinstaParent': [32, ['pointer64', ['tagWINDOWSTATION']]],
'spmenuDialogSys': [64, ['pointer64', ['tagMENU']]],
'spwndForeground': [88, ['pointer64', ['tagWND']]],
'spmenuHScroll': [72, ['pointer64', ['tagMENU']]],
'spwndTooltip': [112, ['pointer64', ['tagWND']]],
'dwSessionId': [0, ['unsigned long']],
'pDeskInfo': [8, ['pointer64', ['tagDESKTOPINFO']]],
'spwndMessage': [104, ['pointer64', ['tagWND']]],
'cciConsole': [144, ['_CONSOLE_CARET_INFO']],
'PtiList': [168, ['_LIST_ENTRY']],
'spwndTray': [96, ['pointer64', ['tagWND']]],
'rpdeskNext': [24, ['pointer64', ['tagDESKTOP']]],
'dwDTFlags': [40, ['unsigned long']],
'pMagInputTransform': [216, ['pointer64', ['_MAGNIFICATION_INPUT_TRANSFORM']]],
'spwndTrack': [184, ['pointer64', ['tagWND']]],
'htEx': [192, ['long']],
'ulHeapSize': [136, ['unsigned long']],
'pheapDesktop': [128, ['pointer64', ['tagWIN32HEAP']]],
'hsectionDesktop': [120, ['pointer64', ['void']]],
'rcMouseHover': [196, ['tagRECT']],
'dwDesktopId': [48, ['unsigned long long']],
'spmenuSys': [56, ['pointer64', ['tagMENU']]],
'pDispInfo': [16, ['pointer64', ['tagDISPLAYINFO']]],
}],
'tagPOOLRECORD': [0x40, {
'ExtraData': [0, ['pointer64', ['void']]],
'trace': [16, ['array', 6, ['pointer64', ['void']]]],
'size': [8, ['unsigned long long']],
}],
'tagSPB': [0x40, {
'hbm': [16, ['pointer64', ['HBITMAP__']]],
'hrgn': [40, ['pointer64', ['HRGN__']]],
'ulSaveId': [56, ['unsigned long long']],
'flags': [48, ['unsigned long']],
'rc': [24, ['tagRECT']],
'pspbNext': [0, ['pointer64', ['tagSPB']]],
'spwnd': [8, ['pointer64', ['tagWND']]],
}],
'_DMM_COMMITVIDPNREQUEST_DIAGINFO': [0xc, {
'CleanupAfterFailedCommitVidPn': [4, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'unsigned char'}]],
'ModeChangeRequestId': [8, ['unsigned long']],
'ReclaimClonedTarget': [4, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned char'}]],
'ForceAllActiveVidPnModeListInvalidation': [4, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'unsigned char'}]],
}],
'HFONT__': [0x4, {
'unused': [0, ['long']],
}],
'tagTEXTMETRICW': [0x3c, {
'tmCharSet': [56, ['unsigned char']],
'tmDigitizedAspectY': [40, ['long']],
'tmStruckOut': [54, ['unsigned char']],
'tmItalic': [52, ['unsigned char']],
'tmDigitizedAspectX': [36, ['long']],
'tmWeight': [28, ['long']],
'tmFirstChar': [44, ['wchar']],
'tmOverhang': [32, ['long']],
'tmDescent': [8, ['long']],
'tmPitchAndFamily': [55, ['unsigned char']],
'tmDefaultChar': [48, ['wchar']],
'tmLastChar': [46, ['wchar']],
'tmBreakChar': [50, ['wchar']],
'tmMaxCharWidth': [24, ['long']],
'tmUnderlined': [53, ['unsigned char']],
'tmInternalLeading': [12, ['long']],
'tmAscent': [4, ['long']],
'tmHeight': [0, ['long']],
'tmAveCharWidth': [20, ['long']],
'tmExternalLeading': [16, ['long']],
}],
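# _KLIST_ENTRY: doubly-linked list node (Flink/Blink), layout-identical to _LIST_ENTRY.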
'_KLIST_ENTRY': [0x10, {
'Flink': [0, ['pointer64', ['_KLIST_ENTRY']]],
'Blink': [8, ['pointer64', ['_KLIST_ENTRY']]],
}],
'__unnamed_1247': [0x10, {
'DeviceTextType': [0, ['Enumeration', {'target': 'long', 'choices': {0: 'DeviceTextDescription', 1: 'DeviceTextLocationInformation'}}]],
'LocaleId': [8, ['unsigned long']],
}],
'tagPROP': [0x10, {
'fs': [10, ['unsigned short']],
'hData': [0, ['pointer64', ['void']]],
'atomKey': [8, ['unsigned short']],
}],
'__unnamed_1243': [0x4, {
'IdType': [0, ['Enumeration', {'target': 'long', 'choices': {0: 'BusQueryDeviceID', 1: 'BusQueryHardwareIDs', 2: 'BusQueryCompatibleIDs', 3: 'BusQueryInstanceID', 4: 'BusQueryDeviceSerialNumber', 5: 'BusQueryContainerID'}}]],
}],
'__unnamed_123d': [0x20, {
'Buffer': [8, ['pointer64', ['void']]],
'WhichSpace': [0, ['unsigned long']],
'Length': [24, ['unsigned long']],
'Offset': [16, ['unsigned long']],
}],
'tagCLIENTTHREADINFO': [0x10, {
'fsWakeMask': [10, ['unsigned short']],
'CTIF_flags': [0, ['unsigned long']],
'fsWakeBits': [6, ['unsigned short']],
'fsWakeBitsJournal': [8, ['unsigned short']],
'fsChangeBits': [4, ['unsigned short']],
'tickLastMsgChecked': [12, ['unsigned long']],
}],
'tagKbdNlsLayer': [0x20, {
'OEMIdentifier': [0, ['unsigned short']],
'NumOfVkToF': [4, ['unsigned long']],
'pusMouseVKey': [24, ['pointer64', ['unsigned short']]],
'NumOfMouseVKey': [16, ['long']],
'pVkToF': [8, ['pointer64', ['_VK_TO_FUNCTION_TABLE']]],
'LayoutInformation': [2, ['unsigned short']],
}],
'HBITMAP__': [0x4, {
'unused': [0, ['long']],
}],
'__unnamed_11ff': [0x20, {
'ShareAccess': [18, ['unsigned short']],
'EaLength': [24, ['unsigned long']],
'SecurityContext': [0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options': [8, ['unsigned long']],
'FileAttributes': [16, ['unsigned short']],
}],
'tagPROCESS_HID_TABLE': [0x68, {
'UsagePageLast': [96, ['unsigned short']],
'fExclusiveMouseSink': [100, ['BitField', {'end_bit': 4, 'start_bit': 3, 'native_type': 'long'}]],
'fRawKeyboardSink': [100, ['BitField', {'end_bit': 7, 'start_bit': 6, 'native_type': 'long'}]],
'fAppKeys': [100, ['BitField', {'end_bit': 11, 'start_bit': 10, 'native_type': 'long'}]],
'fCaptureMouse': [100, ['BitField', {'end_bit': 9, 'start_bit': 8, 'native_type': 'long'}]],
'fNoLegacyMouse': [100, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'long'}]],
'UsageLast': [98, ['unsigned short']],
'fRawKeyboard': [100, ['BitField', {'end_bit': 5, 'start_bit': 4, 'native_type': 'long'}]],
'fNoLegacyKeyboard': [100, ['BitField', {'end_bit': 6, 'start_bit': 5, 'native_type': 'long'}]],
'nSinks': [80, ['long']],
'fNoHotKeys': [100, ['BitField', {'end_bit': 10, 'start_bit': 9, 'native_type': 'long'}]],
'spwndTargetMouse': [64, ['pointer64', ['tagWND']]],
'spwndTargetKbd': [72, ['pointer64', ['tagWND']]],
'UsagePageList': [32, ['_LIST_ENTRY']],
'link': [0, ['_LIST_ENTRY']],
'fExclusiveKeyboardSink': [100, ['BitField', {'end_bit': 8, 'start_bit': 7, 'native_type': 'long'}]],
'pLastRequest': [88, ['pointer64', ['tagPROCESS_HID_REQUEST']]],
'ExclusionList': [48, ['_LIST_ENTRY']],
'fRawMouse': [100, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'long'}]],
'fRawMouseSink': [100, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'long'}]],
'InclusionList': [16, ['_LIST_ENTRY']],
}],
'__unnamed_1809': [0x10, {
'Affinity': [8, ['unsigned long long']],
'Vector': [4, ['unsigned long']],
'Group': [0, ['unsigned short']],
'MessageCount': [2, ['unsigned short']],
}],
'_KFLOATING_SAVE': [0x4, {
'Dummy': [0, ['unsigned long']],
}],
'tagRECT': [0x10, {
'top': [4, ['long']],
'right': [8, ['long']],
'bottom': [12, ['long']],
'left': [0, ['long']],
}],
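# Reading convention (a minimal sketch, not part of the profile itself): every
# entry above is 'Name': [byte_offset, [type_descriptor]]. With tagRECT as the
# example, a hypothetical reader built on this table would resolve fields like so
# (read_long is an assumed helper, not defined in this file):
#
#   def read_long(mem, base, off):
#       # 32-bit little-endian signed read at base + off
#       return int.from_bytes(mem[base + off:base + off + 4], 'little', signed=True)
#
#   left  = read_long(mem, rect_base, 0)   # 'left':  [0, ['long']]
#   right = read_long(mem, rect_base, 8)   # 'right': [8, ['long']]
#   width = right - left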
'__unnamed_1807': [0x10, {
'Affinity': [8, ['unsigned long long']],
'Vector': [4, ['unsigned long']],
'Group': [2, ['unsigned short']],
'Level': [0, ['unsigned short']],
}],
'HBRUSH__': [0x4, {
'unused': [0, ['long']],
}],
'_TLSPRITESTATE': [0xa8, {
'flOriginalSurfFlags': [4, ['unsigned long']],
'iSpriteType': [16, ['unsigned long']],
'pfnSaveScreenBits': [144, ['pointer64', ['void']]],
'bInsideDriverCall': [0, ['unsigned char']],
'pfnStrokePath': [48, ['pointer64', ['void']]],
'pfnTransparentBlt': [112, ['pointer64', ['void']]],
'pfnPaint': [64, ['pointer64', ['void']]],
'pfnFillPath': [56, ['pointer64', ['void']]],
'pfnStretchBltROP': [152, ['pointer64', ['void']]],
'iType': [24, ['unsigned long']],
'pfnPlgBlt': [128, ['pointer64', ['void']]],
'pfnCopyBits': [80, ['pointer64', ['void']]],
'pState': [32, ['pointer64', ['void']]],
'iOriginalType': [8, ['unsigned long']],
'pfnTextOut': [96, ['pointer64', ['void']]],
'pfnDrawStream': [160, ['pointer64', ['void']]],
'pfnStrokeAndFillPath': [40, ['pointer64', ['void']]],
'pfnLineTo': [104, ['pointer64', ['void']]],
'pfnStretchBlt': [88, ['pointer64', ['void']]],
'pfnGradientFill': [136, ['pointer64', ['void']]],
'pfnAlphaBlend': [120, ['pointer64', ['void']]],
'flags': [20, ['unsigned long']],
'flSpriteSurfFlags': [12, ['unsigned long']],
'pfnBitBlt': [72, ['pointer64', ['void']]],
}],
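# tagSMS: inter-thread SendMessage record linking the sender and receiver
# THREADINFOs with the target window, message, wParam/lParam and the eventual lRet.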
'tagSMS': [0x70, {
'wParam': [72, ['unsigned long long']],
'lParam': [80, ['long long']],
'lRet': [56, ['long long']],
'psmsReceiveNext': [8, ['pointer64', ['tagSMS']]],
'tSent': [64, ['unsigned long']],
'psmsNext': [0, ['pointer64', ['tagSMS']]],
'ptiCallBackSender': [48, ['pointer64', ['tagTHREADINFO']]],
'ptiReceiver': [24, ['pointer64', ['tagTHREADINFO']]],
'lpResultCallBack': [32, ['pointer64', ['void']]],
'message': [88, ['unsigned long']],
'dwData': [40, ['unsigned long long']],
'ptiSender': [16, ['pointer64', ['tagTHREADINFO']]],
'flags': [68, ['unsigned long']],
'pvCapture': [104, ['pointer64', ['void']]],
'spwnd': [96, ['pointer64', ['tagWND']]],
}],
'_D3DKMDT_FREQUENCY_RANGE': [0x20, {
'MinVSyncFreq': [0, ['_D3DDDI_RATIONAL']],
'MaxVSyncFreq': [8, ['_D3DDDI_RATIONAL']],
'MaxHSyncFreq': [24, ['_D3DDDI_RATIONAL']],
'MinHSyncFreq': [16, ['_D3DDDI_RATIONAL']],
}],
'__unnamed_11f8': [0x58, {
'Apc': [0, ['_KAPC']],
'CompletionKey': [0, ['pointer64', ['void']]],
'Overlay': [0, ['__unnamed_11f5']],
}],
'__unnamed_18bf': [0x4, {
'BaseMiddle': [0, ['unsigned char']],
'BaseHigh': [3, ['unsigned char']],
'Flags1': [1, ['unsigned char']],
'Flags2': [2, ['unsigned char']],
}],
'__unnamed_11f5': [0x50, {
'AuxiliaryBuffer': [40, ['pointer64', ['unsigned char']]],
'Thread': [32, ['pointer64', ['_ETHREAD']]],
'OriginalFileObject': [72, ['pointer64', ['_FILE_OBJECT']]],
'DeviceQueueEntry': [0, ['_KDEVICE_QUEUE_ENTRY']],
'PacketType': [64, ['unsigned long']],
'CurrentStackLocation': [64, ['pointer64', ['_IO_STACK_LOCATION']]],
'ListEntry': [48, ['_LIST_ENTRY']],
'DriverContext': [0, ['array', 4, ['pointer64', ['void']]]],
}],
'HRGN__': [0x4, {
'unused': [0, ['long']],
}],
'tagSIZE': [0x8, {
'cy': [4, ['long']],
'cx': [0, ['long']],
}],
'tagDESKTOPVIEW': [0x18, {
'ulClientDelta': [16, ['unsigned long long']],
'pdesk': [8, ['pointer64', ['tagDESKTOP']]],
'pdvNext': [0, ['pointer64', ['tagDESKTOPVIEW']]],
}],
'__unnamed_180b': [0x10, {
'Translated': [0, ['__unnamed_1807']],
'Raw': [0, ['__unnamed_1809']],
}],
'__unnamed_180d': [0xc, {
'Reserved1': [8, ['unsigned long']],
'Port': [4, ['unsigned long']],
'Channel': [0, ['unsigned long']],
}],
'MODIFIERS': [0x10, {
'wMaxModBits': [8, ['unsigned short']],
'pVkToBit': [0, ['pointer64', ['VK_TO_BIT']]],
'ModNumber': [10, ['array', 0, ['unsigned char']]],
}],
'__unnamed_120f': [0x10, {
'CompletionFilter': [8, ['unsigned long']],
'Length': [0, ['unsigned long']],
}],
'__unnamed_120d': [0x20, {
'Length': [0, ['unsigned long']],
'FileIndex': [24, ['unsigned long']],
'FileInformationClass': [16, ['Enumeration', {'target': 'long', 'choices': {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileAttributeCacheInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileMaximumInformation'}}]],
'FileName': [8, ['pointer64', ['_UNICODE_STRING']]],
}],
'_DMM_VIDPNPATHSFROMSOURCE_SERIALIZATION': [0x1e0, {
'PathAndTargetModeSerialization': [48, ['array', 1, ['_DMM_VIDPNPATHANDTARGETMODE_SERIALIZATION']]],
'NumPathsFromSource': [40, ['unsigned char']],
'SourceMode': [0, ['_D3DKMDT_VIDPN_SOURCE_MODE']],
}],
'_D3DDDI_GAMMA_RAMP_RGB256x3x16': [0x600, {
'Blue': [1024, ['array', 256, ['unsigned short']]],
'Green': [512, ['array', 256, ['unsigned short']]],
'Red': [0, ['array', 256, ['unsigned short']]],
}],
'_CALLPROCDATA': [0x40, {
'head': [0, ['_PROCDESKHEAD']],
'pfnClientPrevious': [48, ['unsigned long long']],
'wType': [56, ['unsigned short']],
'spcpdNext': [40, ['pointer64', ['_CALLPROCDATA']]],
}],
'_D3DDDI_RATIONAL': [0x8, {
'Denominator': [4, ['unsigned long']],
'Numerator': [0, ['unsigned long']],
}],
'_PFNCLIENT': [0xb8, {
'pfnDispatchDefWindowProc': [160, ['pointer64', ['void']]],
'pfnStaticWndProc': [112, ['pointer64', ['void']]],
'pfnDispatchHook': [152, ['pointer64', ['void']]],
'pfnDesktopWndProc': [24, ['pointer64', ['void']]],
'pfnImeWndProc': [120, ['pointer64', ['void']]],
'pfnScrollBarWndProc': [0, ['pointer64', ['void']]],
'pfnEditWndProc': [88, ['pointer64', ['void']]],
'pfnGhostWndProc': [128, ['pointer64', ['void']]],
'pfnMessageWindowProc': [40, ['pointer64', ['void']]],
'pfnSwitchWindowProc': [48, ['pointer64', ['void']]],
'pfnComboListBoxProc': [72, ['pointer64', ['void']]],
'pfnComboBoxWndProc': [64, ['pointer64', ['void']]],
'pfnMDIClientWndProc': [104, ['pointer64', ['void']]],
'pfnDialogWndProc': [80, ['pointer64', ['void']]],
'pfnHkINLPCWPSTRUCT': [136, ['pointer64', ['void']]],
'pfnTitleWndProc': [8, ['pointer64', ['void']]],
'pfnHkINLPCWPRETSTRUCT': [144, ['pointer64', ['void']]],
'pfnButtonWndProc': [56, ['pointer64', ['void']]],
'pfnMenuWndProc': [16, ['pointer64', ['void']]],
'pfnListBoxWndProc': [96, ['pointer64', ['void']]],
'pfnDispatchMessage': [168, ['pointer64', ['void']]],
'pfnDefWindowProc': [32, ['pointer64', ['void']]],
'pfnMDIActivateDlgProc': [176, ['pointer64', ['void']]],
}],
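# _THRDESKHEAD: common header for thread- and desktop-owned USER objects
# (handle, lock count, owning THREADINFO and DESKTOP); compare _THROBJHEAD and
# _PROCMARKHEAD elsewhere in this table.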
'_THRDESKHEAD': [0x28, {
'h': [0, ['pointer64', ['void']]],
'pSelf': [32, ['pointer64', ['unsigned char']]],
'rpdesk': [24, ['pointer64', ['tagDESKTOP']]],
'pti': [16, ['pointer64', ['tagTHREADINFO']]],
'cLockObj': [8, ['unsigned long']],
}],
'_D3DKMDT_MONITOR_SOURCE_MODE': [0x60, {
'Origin': [84, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_MCO_UNINITIALIZED', 1: 'D3DKMDT_MCO_DEFAULTMONITORPROFILE', 2: 'D3DKMDT_MCO_MONITORDESCRIPTOR', 3: 'D3DKMDT_MCO_MONITORDESCRIPTOR_REGISTRYOVERRIDE', 4: 'D3DKMDT_MCO_SPECIFICCAP_REGISTRYOVERRIDE', 5: 'D3DKMDT_MCO_MAXVALID'}}]],
'VideoSignalInfo': [8, ['_D3DKMDT_VIDEO_SIGNAL_INFO']],
'ColorCoeffDynamicRanges': [68, ['_D3DKMDT_COLOR_COEFF_DYNAMIC_RANGES']],
'Preference': [88, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_MP_UNINITIALIZED', 1: 'D3DKMDT_MP_PREFERRED', 2: 'D3DKMDT_MP_MAXVALID'}}]],
'Id': [0, ['unsigned long']],
'ColorBasis': [64, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_CB_UNINITIALIZED', 1: 'D3DKMDT_CB_INTENSITY', 2: 'D3DKMDT_CB_SRGB', 3: 'D3DKMDT_CB_SCRGB', 4: 'D3DKMDT_CB_YCBCR', 5: 'D3DKMDT_CB_MAXVALID'}}]],
}],
'VWPL': [0x10, {
'fTagged': [12, ['long']],
'cElem': [4, ['unsigned long']],
'cThreshhold': [8, ['unsigned long']],
'aElement': [16, ['array', 0, ['VWPLELEMENT']]],
'cPwnd': [0, ['unsigned long']],
}],
'tagCURSOR': [0x88, {
'rt': [58, ['unsigned short']],
'head': [0, ['_PROCMARKHEAD']],
'hbmUserAlpha': [112, ['pointer64', ['HBITMAP__']]],
'cx': [124, ['unsigned long']],
'xHotspot': [68, ['short']],
'hbmColor': [80, ['pointer64', ['HBITMAP__']]],
'pcurNext': [32, ['pointer64', ['tagCURSOR']]],
'CURSORF_flags': [64, ['unsigned long']],
'hbmMask': [72, ['pointer64', ['HBITMAP__']]],
'bpp': [120, ['unsigned long']],
'cy': [128, ['unsigned long']],
'strName': [40, ['_UNICODE_STRING']],
'rcBounds': [96, ['tagRECT']],
'atomModName': [56, ['unsigned short']],
'hbmAlpha': [88, ['pointer64', ['HBITMAP__']]],
'yHotspot': [70, ['short']],
}],
'__unnamed_1203': [0x20, {
'ShareAccess': [18, ['unsigned short']],
'Reserved': [16, ['unsigned short']],
'SecurityContext': [0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options': [8, ['unsigned long']],
'Parameters': [24, ['pointer64', ['_NAMED_PIPE_CREATE_PARAMETERS']]],
}],
'__unnamed_1207': [0x20, {
'ShareAccess': [18, ['unsigned short']],
'Reserved': [16, ['unsigned short']],
'SecurityContext': [0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options': [8, ['unsigned long']],
'Parameters': [24, ['pointer64', ['_MAILSLOT_CREATE_PARAMETERS']]],
}],
'HKL__': [0x4, {
'unused': [0, ['long']],
}],
'__unnamed_1209': [0x18, {
'Length': [0, ['unsigned long']],
'ByteOffset': [16, ['_LARGE_INTEGER']],
'Key': [8, ['unsigned long']],
}],
'tagDCE': [0x60, {
'hrgnClipPublic': [48, ['pointer64', ['HRGN__']]],
'pdceNext': [0, ['pointer64', ['tagDCE']]],
'hrgnSavedVis': [56, ['pointer64', ['HRGN__']]],
'pwndRedirect': [32, ['pointer64', ['tagWND']]],
'pMonitor': [88, ['pointer64', ['tagMONITOR']]],
'ppiOwner': [80, ['pointer64', ['tagPROCESSINFO']]],
'pwndOrg': [16, ['pointer64', ['tagWND']]],
'hrgnClip': [40, ['pointer64', ['HRGN__']]],
'hdc': [8, ['pointer64', ['HDC__']]],
'ptiOwner': [72, ['pointer64', ['tagTHREADINFO']]],
'DCX_flags': [64, ['unsigned long']],
'pwndClip': [24, ['pointer64', ['tagWND']]],
}],
'tagPROCESS_HID_REQUEST': [0x28, {
'link': [0, ['_LIST_ENTRY']],
'fExclusiveOrphaned': [20, ['BitField', {'end_bit': 4, 'start_bit': 3, 'native_type': 'long'}]],
'spwndTarget': [32, ['pointer64', ['tagWND']]],
'fSinkable': [20, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'long'}]],
'pTLCInfo': [24, ['pointer64', ['tagHID_TLC_INFO']]],
'fDevNotify': [20, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'long'}]],
'fExSinkable': [20, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'long'}]],
'usUsage': [18, ['unsigned short']],
'ptr': [24, ['pointer64', ['void']]],
'pPORequest': [24, ['pointer64', ['tagHID_PAGEONLY_REQUEST']]],
'usUsagePage': [16, ['unsigned short']],
}],
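# Note (added for clarity; not in the generated profile): 'BitField'
# descriptors pack several one-bit flags into the integer at the given byte
# offset; a field occupies bits [start_bit, end_bit) of that value. In the
# entry above, fSinkable is bit 0 and fDevNotify is bit 2 of the long at
# offset 20.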
'tagWOWTHREADINFO': [0x28, {
'idParentProcess': [24, ['unsigned long']],
'pwtiNext': [0, ['pointer64', ['tagWOWTHREADINFO']]],
'idTask': [8, ['unsigned long']],
'pIdleEvent': [32, ['pointer64', ['_KEVENT']]],
'idWaitObject': [16, ['unsigned long long']],
}],
'__unnamed_1962': [0x18, {
'Dma': [0, ['__unnamed_1956']],
'Generic': [0, ['__unnamed_1950']],
'Memory': [0, ['__unnamed_1950']],
'BusNumber': [0, ['__unnamed_1958']],
'Memory48': [0, ['__unnamed_195e']],
'Memory40': [0, ['__unnamed_195c']],
'DevicePrivate': [0, ['__unnamed_180f']],
'ConfigData': [0, ['__unnamed_195a']],
'Memory64': [0, ['__unnamed_1960']],
'Interrupt': [0, ['__unnamed_1954']],
'Port': [0, ['__unnamed_1950']],
}],
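# Note (added for clarity): entries like '__unnamed_1962' above, where every
# member sits at offset 0, model anonymous C unions; the members overlay the
# same storage, so only one interpretation is valid at a time.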
'__unnamed_1960': [0x18, {
'Length64': [0, ['unsigned long']],
'MaximumAddress': [16, ['_LARGE_INTEGER']],
'MinimumAddress': [8, ['_LARGE_INTEGER']],
'Alignment64': [4, ['unsigned long']],
}],
'tagSBDATA': [0x10, {
'posMax': [4, ['long']],
'posMin': [0, ['long']],
'page': [8, ['long']],
'pos': [12, ['long']],
}],
'__unnamed_1233': [0x20, {
'Interface': [16, ['pointer64', ['_INTERFACE']]],
'InterfaceSpecificData': [24, ['pointer64', ['void']]],
'Version': [10, ['unsigned short']],
'InterfaceType': [0, ['pointer64', ['_GUID']]],
'Size': [8, ['unsigned short']],
}],
'__unnamed_1237': [0x8, {
'Capabilities': [0, ['pointer64', ['_DEVICE_CAPABILITIES']]],
}],
'tagIMEINFO': [0x1c, {
'fdwProperty': [4, ['unsigned long']],
'fdwSelectCaps': [24, ['unsigned long']],
'fdwUICaps': [16, ['unsigned long']],
'dwPrivateDataSize': [0, ['unsigned long']],
'fdwSCSCaps': [20, ['unsigned long']],
'fdwSentenceCaps': [12, ['unsigned long']],
'fdwConversionCaps': [8, ['unsigned long']],
}],
'_D3DKMDT_VIDPN_SOURCE_MODE': [0x28, {
'Type': [4, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_RMT_UNINITIALIZED', 1: 'D3DKMDT_RMT_GRAPHICS', 2: 'D3DKMDT_RMT_TEXT'}}]],
'Id': [0, ['unsigned long']],
'Format': [8, ['__unnamed_18a1']],
}],
'_PROCMARKHEAD': [0x20, {
'h': [0, ['pointer64', ['void']]],
'ppi': [24, ['pointer64', ['tagPROCESSINFO']]],
'hTaskWow': [16, ['unsigned long']],
'cLockObj': [8, ['unsigned long']],
}],
'tagKBDFILE': [0x78, {
'head': [0, ['_HEAD']],
'awchDllName': [56, ['array', 32, ['wchar']]],
'pKbdTbl': [32, ['pointer64', ['tagKbdLayer']]],
'pkfNext': [16, ['pointer64', ['tagKBDFILE']]],
'pKbdNlsTbl': [48, ['pointer64', ['tagKbdNlsLayer']]],
'hBase': [24, ['pointer64', ['void']]],
'Size': [40, ['unsigned long']],
}],
'tagCLIENTINFO': [0xd8, {
'msgDbcsCB': [160, ['tagMSG']],
'dwCompatFlags': [20, ['unsigned long']],
'achDbcsCF': [154, ['array', 2, ['unsigned char']]],
'dwTIFlags': [28, ['unsigned long']],
'pClientThreadInfo': [96, ['pointer64', ['tagCLIENTTHREADINFO']]],
'CodePage': [152, ['unsigned short']],
'dwKeyCache': [112, ['unsigned long']],
'dwHookCurrent': [88, ['unsigned long']],
'afAsyncKeyStateRecentDown': [136, ['array', 8, ['unsigned char']]],
'dwCompatFlags2': [24, ['unsigned long']],
'fsHooks': [56, ['unsigned long']],
'ulClientDelta': [40, ['unsigned long long']],
'pDeskInfo': [32, ['pointer64', ['tagDESKTOPINFO']]],
'dwExpWinVer': [16, ['unsigned long']],
'dwHookData': [104, ['unsigned long long']],
'afAsyncKeyState': [128, ['array', 8, ['unsigned char']]],
'CallbackWnd': [64, ['_CALLBACKWND']],
'lpdwRegisteredClasses': [208, ['pointer64', ['unsigned long']]],
'cInDDEMLCallback': [92, ['long']],
'cSpins': [8, ['unsigned long long']],
'hKL': [144, ['pointer64', ['HKL__']]],
'dwAsyncKeyCache': [124, ['unsigned long']],
'afKeyState': [116, ['array', 8, ['unsigned char']]],
'CI_flags': [0, ['unsigned long long']],
'phkCurrent': [48, ['pointer64', ['tagHOOK']]],
}],
'tagCLS': [0xa0, {
'spcur': [120, ['pointer64', ['tagCURSOR']]],
'cbwndExtra': [100, ['long']],
'pclsClone': [72, ['pointer64', ['tagCLS']]],
'lpszClientAnsiMenuName': [40, ['pointer64', ['unsigned char']]],
'pclsBase': [64, ['pointer64', ['tagCLS']]],
'atomNVClassName': [10, ['unsigned short']],
'style': [84, ['unsigned long']],
'pclsNext': [0, ['pointer64', ['tagCLS']]],
'CSF_flags': [34, ['unsigned short']],
'lpfnWndProc': [88, ['pointer64', ['void']]],
'lpszAnsiClassName': [144, ['pointer64', ['unsigned char']]],
'spcpdFirst': [56, ['pointer64', ['_CALLPROCDATA']]],
'lpszClientUnicodeMenuName': [48, ['pointer64', ['unsigned short']]],
'cbclsExtra': [96, ['long']],
'lpszMenuName': [136, ['pointer64', ['unsigned short']]],
'spicnSm': [152, ['pointer64', ['tagCURSOR']]],
'hTaskWow': [32, ['unsigned short']],
'cWndReferenceCount': [80, ['long']],
'hbrBackground': [128, ['pointer64', ['HBRUSH__']]],
'spicn': [112, ['pointer64', ['tagCURSOR']]],
'fnid': [12, ['unsigned short']],
'pdce': [24, ['pointer64', ['tagDCE']]],
'hModule': [104, ['pointer64', ['void']]],
'rpdeskParent': [16, ['pointer64', ['tagDESKTOP']]],
'atomClassName': [8, ['unsigned short']],
}],
'_DMM_VIDPN_SERIALIZATION': [0xc, {
'PathsFromSourceSerializationOffsets': [8, ['array', 1, ['unsigned long']]],
'NumActiveSources': [4, ['unsigned char']],
'Size': [0, ['unsigned long']],
}],
'tagHID_PAGEONLY_REQUEST': [0x18, {
'usUsagePage': [16, ['unsigned short']],
'link': [0, ['_LIST_ENTRY']],
'cRefCount': [20, ['unsigned long']],
}],
'tagWINDOWSTATION': [0x98, {
'pClipBase': [88, ['pointer64', ['tagCLIP']]],
'dwSessionId': [0, ['unsigned long']],
'cNumClipFormats': [96, ['unsigned long']],
'luidUser': [136, ['_LUID']],
'pGlobalAtomTable': [120, ['pointer64', ['void']]],
'ptiClipLock': [48, ['pointer64', ['tagTHREADINFO']]],
'dwWSF_Flags': [32, ['unsigned long']],
'rpdeskList': [16, ['pointer64', ['tagDESKTOP']]],
'spklList': [40, ['pointer64', ['tagKL']]],
'spwndClipOpen': [64, ['pointer64', ['tagWND']]],
'luidEndSession': [128, ['_LUID']],
'pTerm': [24, ['pointer64', ['tagTERMINAL']]],
'rpwinstaNext': [8, ['pointer64', ['tagWINDOWSTATION']]],
'spwndClipboardListener': [112, ['pointer64', ['tagWND']]],
'spwndClipViewer': [72, ['pointer64', ['tagWND']]],
'iClipSequenceNumber': [104, ['unsigned long']],
'ptiDrawingClipboard': [56, ['pointer64', ['tagTHREADINFO']]],
'spwndClipOwner': [80, ['pointer64', ['tagWND']]],
'psidUser': [144, ['pointer64', ['void']]],
'iClipSerialNumber': [100, ['unsigned long']],
}],
'__unnamed_11e4': [0x10, {
'UserApcContext': [8, ['pointer64', ['void']]],
'UserApcRoutine': [0, ['pointer64', ['void']]],
'IssuingProcess': [0, ['pointer64', ['void']]],
}],
'tagPROFILEVALUEINFO': [0x10, {
'dwValue': [0, ['unsigned long']],
'uSection': [4, ['unsigned long']],
'pwszKeyName': [8, ['pointer64', ['wchar']]],
}],
'tagOEMBITMAPINFO': [0x10, {
'y': [4, ['long']],
'x': [0, ['long']],
'cy': [12, ['long']],
'cx': [8, ['long']],
}],
'_DMM_COMMITVIDPNREQUEST_SERIALIZATION': [0x1c, {
'RequestDiagInfo': [4, ['_DMM_COMMITVIDPNREQUEST_DIAGINFO']],
'AffectedVidPnSourceId': [0, ['unsigned long']],
'VidPnSerialization': [16, ['_DMM_VIDPN_SERIALIZATION']],
}],
'_WNDMSG': [0x10, {
'abMsgs': [8, ['pointer64', ['unsigned char']]],
'maxMsgs': [0, ['unsigned long']],
}],
'tagTDB': [0x28, {
'pti': [16, ['pointer64', ['tagTHREADINFO']]],
'TDB_Flags': [34, ['unsigned short']],
'hTaskWow': [32, ['unsigned short']],
'pwti': [24, ['pointer64', ['tagWOWTHREADINFO']]],
'nEvents': [8, ['long']],
'nPriority': [12, ['long']],
'ptdbNext': [0, ['pointer64', ['tagTDB']]],
}],
'_LIGATURE1': [0x6, {
'wch': [4, ['array', 1, ['wchar']]],
'VirtualKey': [0, ['unsigned char']],
'ModificationNumber': [2, ['unsigned short']],
}],
'_D3DKMDT_VIDPN_PRESENT_PATH': [0x168, {
'GammaRamp': [336, ['_D3DKMDT_GAMMA_RAMP']],
'VidPnSourceId': [0, ['unsigned long']],
'Content': [64, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_VPPC_UNINITIALIZED', 1: 'D3DKMDT_VPPC_GRAPHICS', 2: 'D3DKMDT_VPPC_VIDEO', 255: 'D3DKMDT_VPPC_NOTSPECIFIED'}}]],
'VisibleFromActiveBROffset': [36, ['_D3DKMDT_2DREGION']],
'VidPnTargetColorBasis': [44, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_CB_UNINITIALIZED', 1: 'D3DKMDT_CB_INTENSITY', 2: 'D3DKMDT_CB_SRGB', 3: 'D3DKMDT_CB_SCRGB', 4: 'D3DKMDT_CB_YCBCR', 5: 'D3DKMDT_CB_MAXVALID'}}]],
'ContentTransformation': [12, ['_D3DKMDT_VIDPN_PRESENT_PATH_TRANSFORMATION']],
'VidPnTargetId': [4, ['unsigned long']],
'VisibleFromActiveTLOffset': [28, ['_D3DKMDT_2DREGION']],
'CopyProtection': [68, ['_D3DKMDT_VIDPN_PRESENT_PATH_COPYPROTECTION']],
'VidPnTargetColorCoeffDynamicRanges': [48, ['_D3DKMDT_COLOR_COEFF_DYNAMIC_RANGES']],
'ImportanceOrdinal': [8, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_VPPI_UNINITIALIZED', 1: 'D3DKMDT_VPPI_PRIMARY', 2: 'D3DKMDT_VPPI_SECONDARY', 3: 'D3DKMDT_VPPI_TERTIARY', 4: 'D3DKMDT_VPPI_QUATERNARY', 5: 'D3DKMDT_VPPI_QUINARY', 6: 'D3DKMDT_VPPI_SENARY', 7: 'D3DKMDT_VPPI_SEPTENARY', 8: 'D3DKMDT_VPPI_OCTONARY', 9: 'D3DKMDT_VPPI_NONARY', 10: 'D3DKMDT_VPPI_DENARY', 32: 'D3DKMDT_VPPI_MAX', 255: 'D3DKMDT_VPPI_NOTSPECIFIED'}}]],
}],
'__unnamed_1253': [0x8, {
'PowerSequence': [0, ['pointer64', ['_POWER_SEQUENCE']]],
}],
'_PROCDESKHEAD': [0x28, {
'h': [0, ['pointer64', ['void']]],
'pSelf': [32, ['pointer64', ['unsigned char']]],
'rpdesk': [24, ['pointer64', ['tagDESKTOP']]],
'hTaskWow': [16, ['unsigned long']],
'cLockObj': [8, ['unsigned long']],
}],
'_D3DKMDT_VIDPN_PRESENT_PATH_ROTATION_SUPPORT': [0x4, {
'Rotate270': [0, ['BitField', {'end_bit': 4, 'start_bit': 3, 'native_type': 'unsigned long'}]],
'Rotate90': [0, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'unsigned long'}]],
'Identity': [0, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'Rotate180': [0, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'unsigned long'}]],
}],
'__unnamed_1958': [0x10, {
'MinBusNumber': [4, ['unsigned long']],
'Length': [0, ['unsigned long']],
'Reserved': [12, ['unsigned long']],
'MaxBusNumber': [8, ['unsigned long']],
}],
'_CONSOLE_CARET_INFO': [0x18, {
'hwnd': [0, ['pointer64', ['HWND__']]],
'rc': [8, ['tagRECT']],
}],
'tagPROCESSINFO': [0x300, {
'fHasMagContext': [736, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'hwinsta': [608, ['pointer64', ['HWINSTA__']]],
'ptiList': [256, ['pointer64', ['tagTHREADINFO']]],
'pHidTable': [744, ['pointer64', ['tagPROCESS_HID_TABLE']]],
'W32PF_Flags': [12, ['unsigned long']],
'UserHandleCount': [68, ['long']],
'dwhmodLibLoadedMask': [340, ['unsigned long']],
'GDIBrushAttrFreeList': [208, ['_LIST_ENTRY']],
'hdeskStartup': [328, ['pointer64', ['HDESK__']]],
'dwImeCompatFlags': [696, ['unsigned long']],
'dwRegisteredClasses': [752, ['unsigned long']],
'pBrushAttrList': [48, ['pointer64', ['void']]],
'usi': [708, ['tagUSERSTARTUPINFO']],
'InputIdleEvent': [16, ['pointer64', ['_KEVENT']]],
'W32Pid': [56, ['unsigned long']],
'bmHandleFlags': [648, ['_RTL_BITMAP']],
'UserHandleCountPeak': [72, ['unsigned long']],
'GDIEngUserMemAllocTable': [88, ['_RTL_AVL_TABLE']],
'cSysExpunge': [336, ['unsigned long']],
'pdvList': [632, ['pointer64', ['tagDESKTOPVIEW']]],
'pwpi': [296, ['pointer64', ['tagWOWPROCESSINFO']]],
'ppiNextRunning': [312, ['pointer64', ['tagPROCESSINFO']]],
'Process': [0, ['pointer64', ['_EPROCESS']]],
'pCursorCache': [664, ['pointer64', ['tagCURSOR']]],
'pClientBase': [672, ['pointer64', ['void']]],
'dwLpkEntryPoints': [680, ['unsigned long']],
'GDIDcAttrFreeList': [192, ['_LIST_ENTRY']],
'DxProcess': [248, ['pointer64', ['void']]],
'NextStart': [32, ['pointer64', ['_W32PROCESS']]],
'RefCount': [8, ['unsigned long']],
'dwLayout': [740, ['unsigned long']],
'pclsPublicList': [288, ['pointer64', ['tagCLS']]],
'Unused': [736, ['BitField', {'end_bit': 32, 'start_bit': 1, 'native_type': 'unsigned long'}]],
'GDIPushLock': [80, ['_EX_PUSH_LOCK']],
'hMonitor': [624, ['pointer64', ['HMONITOR__']]],
'ptiMainThread': [264, ['pointer64', ['tagTHREADINFO']]],
'pvwplWndGCList': [760, ['pointer64', ['VWPL']]],
'pW32Job': [688, ['pointer64', ['tagW32JOB']]],
'luidSession': [700, ['_LUID']],
'GDIHandleCount': [60, ['long']],
'cThreads': [320, ['unsigned long']],
'rpdeskStartup': [272, ['pointer64', ['tagDESKTOP']]],
'hSecureGdiSharedHandleTable': [240, ['pointer64', ['void']]],
'pclsPrivateList': [280, ['pointer64', ['tagCLS']]],
'GDIHandleCountPeak': [64, ['unsigned long']],
'StartCursorHideTime': [24, ['unsigned long']],
'ppiNext': [304, ['pointer64', ['tagPROCESSINFO']]],
'Flags': [736, ['unsigned long']],
'dwHotkey': [620, ['unsigned long']],
'amwinsta': [616, ['unsigned long']],
'rpwinsta': [600, ['pointer64', ['tagWINDOWSTATION']]],
'ahmodLibLoaded': [344, ['array', 32, ['pointer64', ['void']]]],
'iClipSerialNumber': [640, ['unsigned long']],
'GDIW32PIDLockedBitmaps': [224, ['_LIST_ENTRY']],
'pDCAttrList': [40, ['pointer64', ['void']]],
}],
'__unnamed_181b': [0x10, {
'Dma': [0, ['__unnamed_180d']],
'MessageInterrupt': [0, ['__unnamed_180b']],
'Generic': [0, ['__unnamed_1805']],
'Memory': [0, ['__unnamed_1805']],
'BusNumber': [0, ['__unnamed_1811']],
'DeviceSpecificData': [0, ['__unnamed_1813']],
'Memory48': [0, ['__unnamed_1817']],
'Memory40': [0, ['__unnamed_1815']],
'DevicePrivate': [0, ['__unnamed_180f']],
'Memory64': [0, ['__unnamed_1819']],
'Interrupt': [0, ['__unnamed_1807']],
'Port': [0, ['__unnamed_1805']],
}],
'__unnamed_195e': [0x18, {
'Length48': [0, ['unsigned long']],
'Alignment48': [4, ['unsigned long']],
'MinimumAddress': [8, ['_LARGE_INTEGER']],
'MaximumAddress': [16, ['_LARGE_INTEGER']],
}],
'__unnamed_195c': [0x18, {
'Length40': [0, ['unsigned long']],
'Alignment40': [4, ['unsigned long']],
'MinimumAddress': [8, ['_LARGE_INTEGER']],
'MaximumAddress': [16, ['_LARGE_INTEGER']],
}],
'__unnamed_195a': [0xc, {
'Priority': [0, ['unsigned long']],
'Reserved1': [4, ['unsigned long']],
'Reserved2': [8, ['unsigned long']],
}],
'__unnamed_125f': [0x10, {
'AllocatedResources': [0, ['pointer64', ['_CM_RESOURCE_LIST']]],
'AllocatedResourcesTranslated': [8, ['pointer64', ['_CM_RESOURCE_LIST']]],
}],
'__unnamed_125b': [0x20, {
'State': [16, ['_POWER_STATE']],
'Type': [8, ['Enumeration', {'target': 'long', 'choices': {0: 'SystemPowerState', 1: 'DevicePowerState'}}]],
'SystemContext': [0, ['unsigned long']],
'ShutdownType': [24, ['Enumeration', {'target': 'long', 'choices': {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'}}]],
'SystemPowerStateContext': [0, ['_SYSTEM_POWER_STATE_CONTEXT']],
}],
'tagKbdLayer': [0x68, {
'pVkToWcharTable': [8, ['pointer64', ['_VK_TO_WCHAR_TABLE']]],
'pusVSCtoVK': [48, ['pointer64', ['unsigned short']]],
'fLocaleFlags': [80, ['unsigned long']],
'pKeyNamesExt': [32, ['pointer64', ['VSC_LPWSTR']]],
'dwSubType': [100, ['unsigned long']],
'pDeadKey': [16, ['pointer64', ['DEADKEY']]],
'pCharModifiers': [0, ['pointer64', ['MODIFIERS']]],
'pKeyNamesDead': [40, ['pointer64', ['pointer64', ['unsigned short']]]],
'bMaxVSCtoVK': [56, ['unsigned char']],
'pKeyNames': [24, ['pointer64', ['VSC_LPWSTR']]],
'dwType': [96, ['unsigned long']],
'pLigature': [88, ['pointer64', ['_LIGATURE1']]],
'nLgMax': [84, ['unsigned char']],
'pVSCtoVK_E1': [72, ['pointer64', ['_VSC_VK']]],
'pVSCtoVK_E0': [64, ['pointer64', ['_VSC_VK']]],
'cbLgEntry': [85, ['unsigned char']],
}],
'HDC__': [0x4, {
'unused': [0, ['long']],
}],
'tagWin32AllocStats': [0x20, {
'dwMaxAlloc': [16, ['unsigned long']],
'pHead': [24, ['pointer64', ['tagWin32PoolHead']]],
'dwMaxMem': [0, ['unsigned long long']],
'dwCrtMem': [8, ['unsigned long long']],
'dwCrtAlloc': [20, ['unsigned long']],
}],
'__unnamed_18c5': [0x4, {
'DefaultBig': [0, ['BitField', {'end_bit': 23, 'start_bit': 22, 'native_type': 'unsigned long'}]],
'BaseMiddle': [0, ['BitField', {'end_bit': 8, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'Granularity': [0, ['BitField', {'end_bit': 24, 'start_bit': 23, 'native_type': 'unsigned long'}]],
'LimitHigh': [0, ['BitField', {'end_bit': 20, 'start_bit': 16, 'native_type': 'unsigned long'}]],
'BaseHigh': [0, ['BitField', {'end_bit': 32, 'start_bit': 24, 'native_type': 'unsigned long'}]],
'Dpl': [0, ['BitField', {'end_bit': 15, 'start_bit': 13, 'native_type': 'unsigned long'}]],
'Type': [0, ['BitField', {'end_bit': 13, 'start_bit': 8, 'native_type': 'unsigned long'}]],
'System': [0, ['BitField', {'end_bit': 21, 'start_bit': 20, 'native_type': 'unsigned long'}]],
'Present': [0, ['BitField', {'end_bit': 16, 'start_bit': 15, 'native_type': 'unsigned long'}]],
'LongMode': [0, ['BitField', {'end_bit': 22, 'start_bit': 21, 'native_type': 'unsigned long'}]],
}],
'__unnamed_1817': [0xc, {
'Length48': [8, ['unsigned long']],
'Start': [0, ['_LARGE_INTEGER']],
}],
'__unnamed_1815': [0xc, {
'Length40': [8, ['unsigned long']],
'Start': [0, ['_LARGE_INTEGER']],
}],
'__unnamed_1813': [0xc, {
'DataSize': [0, ['unsigned long']],
'Reserved1': [4, ['unsigned long']],
'Reserved2': [8, ['unsigned long']],
}],
'_D3DKMDT_VIDPN_PRESENT_PATH_SCALING_SUPPORT': [0x4, {
'Centered': [0, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'unsigned long'}]],
'AspectRatioCenteredMax': [0, ['BitField', {'end_bit': 4, 'start_bit': 3, 'native_type': 'unsigned long'}]],
'Stretched': [0, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'unsigned long'}]],
'Identity': [0, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'Custom': [0, ['BitField', {'end_bit': 5, 'start_bit': 4, 'native_type': 'unsigned long'}]],
}],
'__unnamed_1811': [0xc, {
'Start': [0, ['unsigned long']],
'Length': [4, ['unsigned long']],
'Reserved': [8, ['unsigned long']],
}],
'__unnamed_1956': [0x8, {
'MinimumChannel': [0, ['unsigned long']],
'MaximumChannel': [4, ['unsigned long']],
}],
'__unnamed_1954': [0x18, {
'AffinityPolicy': [8, ['unsigned short']],
'Group': [10, ['unsigned short']],
'PriorityPolicy': [12, ['Enumeration', {'target': 'long', 'choices': {0: 'IrqPriorityUndefined', 1: 'IrqPriorityLow', 2: 'IrqPriorityNormal', 3: 'IrqPriorityHigh'}}]],
'MinimumVector': [0, ['unsigned long']],
'MaximumVector': [4, ['unsigned long']],
'TargetedProcessors': [16, ['unsigned long long']],
}],
'tagMSG': [0x30, {
'wParam': [16, ['unsigned long long']],
'lParam': [24, ['long long']],
'pt': [36, ['tagPOINT']],
'hwnd': [0, ['pointer64', ['HWND__']]],
'time': [32, ['unsigned long']],
'message': [8, ['unsigned long']],
}],
'__unnamed_1819': [0xc, {
'Start': [0, ['_LARGE_INTEGER']],
'Length64': [8, ['unsigned long']],
}],
'_DMM_VIDPNSET_SERIALIZATION': [0x8, {
'VidPnOffset': [4, ['array', 1, ['unsigned long']]],
'NumVidPns': [0, ['unsigned char']],
}],
'tagWOWPROCESSINFO': [0x48, {
'ptdbHead': [16, ['pointer64', ['tagTDB']]],
'lpfnWowExitTask': [24, ['pointer64', ['void']]],
'CSOwningThread': [56, ['pointer64', ['tagTHREADINFO']]],
'ptiScheduled': [8, ['pointer64', ['tagTHREADINFO']]],
'nSendLock': [48, ['unsigned long']],
'nRecvLock': [52, ['unsigned long']],
'CSLockCount': [64, ['long']],
'hEventWowExecClient': [40, ['pointer64', ['void']]],
'pwpiNext': [0, ['pointer64', ['tagWOWPROCESSINFO']]],
'pEventWowExec': [32, ['pointer64', ['_KEVENT']]],
}],
'tagMENU': [0x98, {
'iItem': [44, ['long']],
'head': [0, ['_PROCDESKHEAD']],
'umpm': [132, ['tagUAHMENUPOPUPMETRICS']],
'cItems': [52, ['unsigned long']],
'pParentMenus': [88, ['pointer64', ['tagMENULIST']]],
'fFlags': [40, ['unsigned long']],
'cxMenu': [56, ['unsigned long']],
'dwContextHelpId': [96, ['unsigned long']],
'hbrBack': [112, ['pointer64', ['HBRUSH__']]],
'cxTextAlign': [64, ['unsigned long']],
'cAlloced': [48, ['unsigned long']],
'spwndNotify': [72, ['pointer64', ['tagWND']]],
'dwArrowsOn': [128, ['BitField', {'end_bit': 2, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'iMaxTop': [124, ['long']],
'dwMenuData': [104, ['unsigned long long']],
'cyMenu': [60, ['unsigned long']],
'rgItems': [80, ['pointer64', ['tagITEM']]],
'iTop': [120, ['long']],
'cyMax': [100, ['unsigned long']],
}],
'_D3DDDI_GAMMA_RAMP_DXGI_1': [0x3024, {
'GammaCurve': [24, ['array', 1025, ['D3DDDI_DXGI_RGB']]],
'Scale': [0, ['D3DDDI_DXGI_RGB']],
'Offset': [12, ['D3DDDI_DXGI_RGB']],
}],
'tagPOPUPMENU': [0x58, {
'fUseMonitorRect': [0, ['BitField', {'end_bit': 29, 'start_bit': 28, 'native_type': 'unsigned long'}]],
'fDroppedLeft': [0, ['BitField', {'end_bit': 5, 'start_bit': 4, 'native_type': 'unsigned long'}]],
'fHierarchyDropped': [0, ['BitField', {'end_bit': 6, 'start_bit': 5, 'native_type': 'unsigned long'}]],
'posDropped': [84, ['unsigned long']],
'spwndNextPopup': [24, ['pointer64', ['tagWND']]],
'fIsMenuBar': [0, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'spwndPrevPopup': [32, ['pointer64', ['tagWND']]],
'fHasMenuBar': [0, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'unsigned long'}]],
'spwndActivePopup': [56, ['pointer64', ['tagWND']]],
'fTrackMouseEvent': [0, ['BitField', {'end_bit': 21, 'start_bit': 20, 'native_type': 'unsigned long'}]],
'fNoNotify': [0, ['BitField', {'end_bit': 12, 'start_bit': 11, 'native_type': 'unsigned long'}]],
'posSelectedItem': [80, ['unsigned long']],
'fIsSysMenu': [0, ['BitField', {'end_bit': 3, 'start_bit': 2, 'native_type': 'unsigned long'}]],
'fFlushDelayedFree': [0, ['BitField', {'end_bit': 18, 'start_bit': 17, 'native_type': 'unsigned long'}]],
'ppmDelayedFree': [72, ['pointer64', ['tagPOPUPMENU']]],
'fFreed': [0, ['BitField', {'end_bit': 19, 'start_bit': 18, 'native_type': 'unsigned long'}]],
'fSynchronous': [0, ['BitField', {'end_bit': 9, 'start_bit': 8, 'native_type': 'unsigned long'}]],
'fDropNextPopup': [0, ['BitField', {'end_bit': 11, 'start_bit': 10, 'native_type': 'unsigned long'}]],
'fRightButton': [0, ['BitField', {'end_bit': 7, 'start_bit': 6, 'native_type': 'unsigned long'}]],
'spmenuAlternate': [48, ['pointer64', ['tagMENU']]],
'spmenu': [40, ['pointer64', ['tagMENU']]],
'spwndPopupMenu': [16, ['pointer64', ['tagWND']]],
'fDestroyed': [0, ['BitField', {'end_bit': 16, 'start_bit': 15, 'native_type': 'unsigned long'}]],
'iDropDir': [0, ['BitField', {'end_bit': 28, 'start_bit': 23, 'native_type': 'unsigned long'}]],
'ppopupmenuRoot': [64, ['pointer64', ['tagPOPUPMENU']]],
'fFirstClick': [0, ['BitField', {'end_bit': 10, 'start_bit': 9, 'native_type': 'unsigned long'}]],
'spwndNotify': [8, ['pointer64', ['tagWND']]],
'fRtoL': [0, ['BitField', {'end_bit': 23, 'start_bit': 22, 'native_type': 'unsigned long'}]],
'fIsTrackPopup': [0, ['BitField', {'end_bit': 4, 'start_bit': 3, 'native_type': 'unsigned long'}]],
'fSendUninit': [0, ['BitField', {'end_bit': 22, 'start_bit': 21, 'native_type': 'unsigned long'}]],
'fShowTimer': [0, ['BitField', {'end_bit': 14, 'start_bit': 13, 'native_type': 'unsigned long'}]],
'fInCancel': [0, ['BitField', {'end_bit': 20, 'start_bit': 19, 'native_type': 'unsigned long'}]],
'fToggle': [0, ['BitField', {'end_bit': 8, 'start_bit': 7, 'native_type': 'unsigned long'}]],
'fDelayedFree': [0, ['BitField', {'end_bit': 17, 'start_bit': 16, 'native_type': 'unsigned long'}]],
'fHideTimer': [0, ['BitField', {'end_bit': 15, 'start_bit': 14, 'native_type': 'unsigned long'}]],
'fAboutToHide': [0, ['BitField', {'end_bit': 13, 'start_bit': 12, 'native_type': 'unsigned long'}]],
}],
'_DMM_MONITORDESCRIPTOR_SERIALIZATION': [0x8c, {
'Origin': [8, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_MCO_UNINITIALIZED', 1: 'D3DKMDT_MCO_DEFAULTMONITORPROFILE', 2: 'D3DKMDT_MCO_MONITORDESCRIPTOR', 3: 'D3DKMDT_MCO_MONITORDESCRIPTOR_REGISTRYOVERRIDE', 4: 'D3DKMDT_MCO_SPECIFICCAP_REGISTRYOVERRIDE', 5: 'D3DKMDT_MCO_MAXVALID'}}]],
'Data': [12, ['array', 128, ['unsigned char']]],
'Type': [4, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_MDT_UNINITIALIZED', 1: 'D3DKMDT_MDT_VESA_EDID_V1_BASEBLOCK', 2: 'D3DKMDT_MDT_VESA_EDID_V1_BLOCKMAP', 255: 'D3DKMDT_MDT_OTHER'}}]],
'Id': [0, ['unsigned long']],
}],
'HTOUCHINPUT__': [0x4, {
'unused': [0, ['long']],
}],
'_VK_VALUES_STRINGS': [0x10, {
'fReserved': [8, ['unsigned char']],
'pszMultiNames': [0, ['pointer64', ['unsigned char']]],
}],
'_DMM_MONITOR_SOURCE_MODE_SERIALIZATION': [0x68, {
'Info': [0, ['_D3DKMDT_MONITOR_SOURCE_MODE']],
'TimingType': [96, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_MTT_UNINITIALIZED', 1: 'D3DKMDT_MTT_ESTABLISHED', 2: 'D3DKMDT_MTT_STANDARD', 3: 'D3DKMDT_MTT_EXTRASTANDARD', 4: 'D3DKMDT_MTT_DETAILED', 5: 'D3DKMDT_MTT_DEFAULTMONITORPROFILE', 6: 'D3DKMDT_MTT_MAXVALID'}}]],
}],
'tagSBCALC': [0x40, {
'posMax': [4, ['long']],
'pxThumbTop': [52, ['long']],
'pxThumbBottom': [48, ['long']],
'cpxThumb': [32, ['long']],
'pxMin': [60, ['long']],
'pxStart': [44, ['long']],
'pxDownArrow': [40, ['long']],
'pos': [12, ['long']],
'cpx': [56, ['long']],
'pxBottom': [20, ['long']],
'pxTop': [16, ['long']],
'pxLeft': [24, ['long']],
'pxRight': [28, ['long']],
'pxUpArrow': [36, ['long']],
'posMin': [0, ['long']],
'page': [8, ['long']],
}],
'HIMC__': [0x4, {
'unused': [0, ['long']],
}],
'tagSBINFO': [0x24, {
'WSBflags': [0, ['long']],
'Horz': [4, ['tagSBDATA']],
'Vert': [20, ['tagSBDATA']],
}],
'__unnamed_1211': [0x10, {
'Length': [0, ['unsigned long']],
'FileInformationClass': [8, ['Enumeration', {'target': 'long', 'choices': {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileAttributeCacheInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileMaximumInformation'}}]],
}],
'__unnamed_1213': [0x20, {
'FileInformationClass': [8, ['Enumeration', {'target': 'long', 'choices': {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileAttributeCacheInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileMaximumInformation'}}]],
'AdvanceOnly': [25, ['unsigned char']],
'ClusterCount': [24, ['unsigned long']],
'Length': [0, ['unsigned long']],
'DeleteHandle': [24, ['pointer64', ['void']]],
'ReplaceIfExists': [24, ['unsigned char']],
'FileObject': [16, ['pointer64', ['_FILE_OBJECT']]],
}],
'__unnamed_1219': [0x20, {
'Type3InputBuffer': [24, ['pointer64', ['void']]],
'OutputBufferLength': [0, ['unsigned long']],
'FsControlCode': [16, ['unsigned long']],
'InputBufferLength': [8, ['unsigned long']],
}],
'__unnamed_1950': [0x18, {
'Length': [0, ['unsigned long']],
'MaximumAddress': [16, ['_LARGE_INTEGER']],
'MinimumAddress': [8, ['_LARGE_INTEGER']],
'Alignment': [4, ['unsigned long']],
}],
'tagITEM': [0x90, {
'ulX': [84, ['unsigned long']],
'wID': [8, ['unsigned long']],
'dwItemData': [56, ['unsigned long long']],
'cyItem': [76, ['unsigned long']],
'hbmpChecked': [24, ['pointer64', ['void']]],
'xItem': [64, ['unsigned long']],
'spSubMenu': [16, ['pointer64', ['tagMENU']]],
'hbmpUnchecked': [32, ['pointer64', ['void']]],
'fState': [4, ['unsigned long']],
'dxTab': [80, ['unsigned long']],
'hbmp': [96, ['pointer64', ['HBITMAP__']]],
'yItem': [68, ['unsigned long']],
'fType': [0, ['unsigned long']],
'umim': [112, ['tagUAHMENUITEMMETRICS']],
'cch': [48, ['unsigned long']],
'ulWidth': [88, ['unsigned long']],
'cyBmp': [108, ['long']],
'cxBmp': [104, ['long']],
'lpstr': [40, ['pointer64', ['unsigned short']]],
'cxItem': [72, ['unsigned long']],
}],
'_VSC_VK': [0x4, {
'Vsc': [0, ['unsigned char']],
'Vk': [2, ['unsigned short']],
}],
'__unnamed_123f': [0x1, {
'Lock': [0, ['unsigned char']],
}],
'_DMM_MONITOR_SERIALIZATION': [0x28, {
'FrequencyRangeSetOffset': [28, ['unsigned long']],
'ModePruningAlgorithm': [16, ['Enumeration', {'target': 'long', 'choices': {0: 'DMM_MPA_UNINITIALIZED', 1: 'DMM_MPA_GDI', 2: 'DMM_MPA_VISTA', 3: 'DMM_MPA_MAXVALID'}}]],
'VideoPresentTargetId': [4, ['unsigned long']],
'IsSimulatedMonitor': [12, ['unsigned char']],
'SourceModeSetOffset': [24, ['unsigned long']],
'Orientation': [8, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_MO_UNINITIALIZED', 1: 'D3DKMDT_MO_0DEG', 2: 'D3DKMDT_MO_90DEG', 3: 'D3DKMDT_MO_180DEG', 4: 'D3DKMDT_MO_270DEG'}}]],
'DescriptorSetOffset': [32, ['unsigned long']],
'MonitorPowerState': [20, ['Enumeration', {'target': 'long', 'choices': {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'}}]],
'IsUsingDefaultProfile': [13, ['unsigned char']],
'MonitorType': [36, ['Enumeration', {'target': 'long', 'choices': {0: 'DMM_VMT_UNINITIALIZED', 1: 'DMM_VMT_PHYSICAL_MONITOR', 2: 'DMM_VMT_BOOT_PERSISTENT_MONITOR', 3: 'DMM_VMT_PERSISTENT_MONITOR', 4: 'DMM_VMT_TEMPORARY_MONITOR', 5: 'DMM_VMT_SIMULATED_MONITOR'}}]],
'Size': [0, ['unsigned long']],
}],
'_VK_TO_WCHARS1': [0x4, {
'Attributes': [1, ['unsigned char']],
'VirtualKey': [0, ['unsigned char']],
'wch': [2, ['array', 1, ['wchar']]],
}],
'__unnamed_121b': [0x18, {
'Length': [0, ['pointer64', ['_LARGE_INTEGER']]],
'ByteOffset': [16, ['_LARGE_INTEGER']],
'Key': [8, ['unsigned long']],
}],
'__unnamed_121d': [0x20, {
'Type3InputBuffer': [24, ['pointer64', ['void']]],
'OutputBufferLength': [0, ['unsigned long']],
'IoControlCode': [16, ['unsigned long']],
'InputBufferLength': [8, ['unsigned long']],
}],
'__unnamed_121f': [0x10, {
'Length': [8, ['unsigned long']],
'SecurityInformation': [0, ['unsigned long']],
}],
'_DMM_MONITORFREQUENCYRANGESET_SERIALIZATION': [0x38, {
'NumFrequencyRanges': [0, ['unsigned char']],
'FrequencyRangeSerialization': [8, ['array', 1, ['_D3DKMDT_MONITOR_FREQUENCY_RANGE']]],
}],
'_D3DKMDT_GAMMA_RAMP': [0x18, {
'Data': [16, ['__unnamed_182e']],
'DataSize': [8, ['unsigned long long']],
'Type': [0, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DDDI_GAMMARAMP_UNINITIALIZED', 1: 'D3DDDI_GAMMARAMP_DEFAULT', 2: 'D3DDDI_GAMMARAMP_RGB256x3x16', 3: 'D3DDDI_GAMMARAMP_DXGI_1'}}]],
}],
'_W32PROCESS': [0x100, {
'GDIPushLock': [80, ['_EX_PUSH_LOCK']],
'DxProcess': [248, ['pointer64', ['void']]],
'pBrushAttrList': [48, ['pointer64', ['void']]],
'Process': [0, ['pointer64', ['_EPROCESS']]],
'NextStart': [32, ['pointer64', ['_W32PROCESS']]],
'GDIW32PIDLockedBitmaps': [224, ['_LIST_ENTRY']],
'RefCount': [8, ['unsigned long']],
'StartCursorHideTime': [24, ['unsigned long']],
'GDIBrushAttrFreeList': [208, ['_LIST_ENTRY']],
'InputIdleEvent': [16, ['pointer64', ['_KEVENT']]],
'W32PF_Flags': [12, ['unsigned long']],
'GDIHandleCount': [60, ['long']],
'hSecureGdiSharedHandleTable': [240, ['pointer64', ['void']]],
'UserHandleCountPeak': [72, ['unsigned long']],
'W32Pid': [56, ['unsigned long']],
'UserHandleCount': [68, ['long']],
'pDCAttrList': [40, ['pointer64', ['void']]],
'GDIEngUserMemAllocTable': [88, ['_RTL_AVL_TABLE']],
'GDIHandleCountPeak': [64, ['unsigned long']],
'GDIDcAttrFreeList': [192, ['_LIST_ENTRY']],
}],
'tagSERVERINFO': [0x1220, {
'uiShellMsg': [912, ['unsigned long']],
'atomSysClass': [852, ['array', 25, ['unsigned short']]],
'dtScroll': [2800, ['unsigned long']],
'dwKeyCache': [2952, ['unsigned long']],
'atomIconSmProp': [1356, ['unsigned short']],
'argbSystemUnmatched': [2268, ['array', 31, ['unsigned long']]],
'atomContextHelpIdProp': [1360, ['unsigned short']],
'cySysFontChar': [2832, ['long']],
'mpFnid_serverCBWndProc': [328, ['array', 31, ['unsigned short']]],
'PUSIFlags': [4476, ['unsigned long']],
'dtLBSearch': [2804, ['unsigned long']],
'tmSysFont': [2836, ['tagTEXTMETRICW']],
'ahbrSystem': [2520, ['array', 31, ['pointer64', ['HBRUSH__']]]],
'dwDefaultHeapSize': [908, ['unsigned long']],
'dwSRVIFlags': [0, ['unsigned long']],
'BitsPixel': [4473, ['unsigned char']],
'wMaxLeftOverlapChars': [2820, ['long']],
'dwLastSystemRITEventTickCountUpdate': [4488, ['unsigned long']],
'dpiSystem': [2896, ['tagDPISERVERINFO']],
'hIcoWindows': [2944, ['pointer64', ['HICON__']]],
'dwAsyncKeyCache': [2956, ['unsigned long']],
'dwTagCount': [4632, ['unsigned long']],
'adwDBGTAGFlags': [4492, ['array', 35, ['unsigned long']]],
'aiSysMet': [1880, ['array', 97, ['long']]],
'acAnsiToOem': [1620, ['array', 256, ['unsigned char']]],
'aStoCidPfn': [272, ['array', 7, ['pointer64', ['void']]]],
'dwLastRITEventTickCount': [2792, ['unsigned long']],
'cbHandleTable': [848, ['unsigned long']],
'atomFrostedWindowProp': [1362, ['unsigned short']],
'ucWheelScrollLines': [2812, ['unsigned long']],
'ptCursorReal': [2784, ['tagPOINT']],
'ucWheelScrollChars': [2816, ['unsigned long']],
'acOemToAnsi': [1364, ['array', 256, ['unsigned char']]],
'hbrGray': [2768, ['pointer64', ['HBRUSH__']]],
'BitCount': [4468, ['unsigned short']],
'argbSystem': [2392, ['array', 31, ['unsigned long']]],
'dtCaretBlink': [2808, ['unsigned long']],
'dwInstalledEventHooks': [1876, ['unsigned long']],
'cxSysFontChar': [2828, ['long']],
'wMaxRightOverlapChars': [2824, ['long']],
'oembmi': [2964, ['array', 93, ['tagOEMBITMAPINFO']]],
'apfnClientWorker': [760, ['_PFNCLIENTWORKER']],
'dwDefaultHeapBase': [904, ['unsigned long']],
'apfnClientA': [392, ['_PFNCLIENT']],
'dmLogPixels': [4470, ['unsigned short']],
'nEvents': [2796, ['long']],
'atomIconProp': [1358, ['unsigned short']],
'Planes': [4472, ['unsigned char']],
'apfnClientW': [576, ['_PFNCLIENT']],
'MBStrings': [916, ['array', 11, ['tagMBSTRING']]],
'UILangID': [4484, ['unsigned short']],
'dwRIPFlags': [4636, ['unsigned long']],
'uCaretWidth': [4480, ['unsigned long']],
'cCaptures': [2960, ['unsigned long']],
'cHandleEntries': [8, ['unsigned long long']],
'ptCursor': [2776, ['tagPOINT']],
'hIconSmWindows': [2936, ['pointer64', ['HICON__']]],
'mpFnidPfn': [16, ['array', 32, ['pointer64', ['void']]]],
'rcScreenReal': [4452, ['tagRECT']],
}],
'_D3DKMDT_VIDEO_SIGNAL_INFO': [0x38, {
'VSyncFreq': [20, ['_D3DDDI_RATIONAL']],
'ActiveSize': [12, ['_D3DKMDT_2DREGION']],
'PixelRate': [40, ['unsigned long long']],
'TotalSize': [4, ['_D3DKMDT_2DREGION']],
'VideoStandard': [0, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_VSS_UNINITIALIZED', 1: 'D3DKMDT_VSS_VESA_DMT', 2: 'D3DKMDT_VSS_VESA_GTF', 3: 'D3DKMDT_VSS_VESA_CVT', 4: 'D3DKMDT_VSS_IBM', 5: 'D3DKMDT_VSS_APPLE', 6: 'D3DKMDT_VSS_NTSC_M', 7: 'D3DKMDT_VSS_NTSC_J', 8: 'D3DKMDT_VSS_NTSC_443', 9: 'D3DKMDT_VSS_PAL_B', 10: 'D3DKMDT_VSS_PAL_B1', 11: 'D3DKMDT_VSS_PAL_G', 12: 'D3DKMDT_VSS_PAL_H', 13: 'D3DKMDT_VSS_PAL_I', 14: 'D3DKMDT_VSS_PAL_D', 15: 'D3DKMDT_VSS_PAL_N', 16: 'D3DKMDT_VSS_PAL_NC', 17: 'D3DKMDT_VSS_SECAM_B', 18: 'D3DKMDT_VSS_SECAM_D', 19: 'D3DKMDT_VSS_SECAM_G', 20: 'D3DKMDT_VSS_SECAM_H', 21: 'D3DKMDT_VSS_SECAM_K', 22: 'D3DKMDT_VSS_SECAM_K1', 23: 'D3DKMDT_VSS_SECAM_L', 24: 'D3DKMDT_VSS_SECAM_L1', 25: 'D3DKMDT_VSS_EIA_861', 26: 'D3DKMDT_VSS_EIA_861A', 27: 'D3DKMDT_VSS_EIA_861B', 28: 'D3DKMDT_VSS_PAL_K', 29: 'D3DKMDT_VSS_PAL_K1', 30: 'D3DKMDT_VSS_PAL_L', 31: 'D3DKMDT_VSS_PAL_M', 255: 'D3DKMDT_VSS_OTHER'}}]],
'ScanLineOrdering': [48, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DDDI_VSSLO_UNINITIALIZED', 1: 'D3DDDI_VSSLO_PROGRESSIVE', 2: 'D3DDDI_VSSLO_INTERLACED_UPPERFIELDFIRST', 3: 'D3DDDI_VSSLO_INTERLACED_LOWERFIELDFIRST', 255: 'D3DDDI_VSSLO_OTHER'}}]],
'HSyncFreq': [28, ['_D3DDDI_RATIONAL']],
}],
'__unnamed_11df': [0x8, {
'IrpCount': [0, ['long']],
'SystemBuffer': [0, ['pointer64', ['void']]],
'MasterIrp': [0, ['pointer64', ['_IRP']]],
}],
'D3DDDI_DXGI_RGB': [0xc, {
'Blue': [8, ['float']],
'Green': [4, ['float']],
'Red': [0, ['float']],
}],
'_MAGNIFICATION_INPUT_TRANSFORM': [0x30, {
'rcScreen': [16, ['tagRECT']],
'magFactorX': [40, ['long']],
'magFactorY': [44, ['long']],
'ptiMagThreadInfo': [32, ['pointer64', ['tagTHREADINFO']]],
'rcSource': [0, ['tagRECT']],
}],
'_D3DKMDT_MONITOR_FREQUENCY_RANGE': [0x30, {
'Origin': [0, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_MCO_UNINITIALIZED', 1: 'D3DKMDT_MCO_DEFAULTMONITORPROFILE', 2: 'D3DKMDT_MCO_MONITORDESCRIPTOR', 3: 'D3DKMDT_MCO_MONITORDESCRIPTOR_REGISTRYOVERRIDE', 4: 'D3DKMDT_MCO_SPECIFICCAP_REGISTRYOVERRIDE', 5: 'D3DKMDT_MCO_MAXVALID'}}]],
'ConstraintType': [36, ['Enumeration', {'target': 'long', 'choices': {0: 'D3DKMDT_MFRC_UNINITIALIZED', 1: 'D3DKMDT_MFRC_ACTIVESIZE', 2: 'D3DKMDT_MFRC_MAXPIXELRATE'}}]],
'RangeLimits': [4, ['_D3DKMDT_FREQUENCY_RANGE']],
'Constraint': [40, ['__unnamed_16c1']],
}],
'_PFNCLIENTWORKER': [0x58, {
'pfnComboBoxWndProc': [8, ['pointer64', ['void']]],
'pfnMDIClientWndProc': [48, ['pointer64', ['void']]],
'pfnDialogWndProc': [24, ['pointer64', ['void']]],
'pfnStaticWndProc': [56, ['pointer64', ['void']]],
'pfnCtfHookProc': [80, ['pointer64', ['void']]],
'pfnButtonWndProc': [0, ['pointer64', ['void']]],
'pfnImeWndProc': [64, ['pointer64', ['void']]],
'pfnEditWndProc': [32, ['pointer64', ['void']]],
'pfnListBoxWndProc': [40, ['pointer64', ['void']]],
'pfnGhostWndProc': [72, ['pointer64', ['void']]],
'pfnComboListBoxProc': [16, ['pointer64', ['void']]],
}],
'_DMA_OPERATIONS': [0x80, {
'PutDmaAdapter': [8, ['pointer64', ['void']]],
'FreeMapRegisters': [56, ['pointer64', ['void']]],
'MapTransfer': [64, ['pointer64', ['void']]],
'FreeCommonBuffer': [24, ['pointer64', ['void']]],
'ReadDmaCounter': [80, ['pointer64', ['void']]],
'AllocateCommonBuffer': [16, ['pointer64', ['void']]],
'PutScatterGatherList': [96, ['pointer64', ['void']]],
'CalculateScatterGatherList': [104, ['pointer64', ['void']]],
'BuildMdlFromScatterGatherList': [120, ['pointer64', ['void']]],
'GetScatterGatherList': [88, ['pointer64', ['void']]],
'AllocateAdapterChannel': [32, ['pointer64', ['void']]],
'FreeAdapterChannel': [48, ['pointer64', ['void']]],
'GetDmaAlignment': [72, ['pointer64', ['void']]],
'FlushAdapterBuffers': [40, ['pointer64', ['void']]],
'BuildScatterGatherList': [112, ['pointer64', ['void']]],
'Size': [0, ['unsigned long']],
}],
'_DXGK_DIAG_HEADER': [0x30, {
'Index': [40, ['unsigned long']],
'ProcessName': [16, ['array', 16, ['unsigned char']]],
'LogTimestamp': [8, ['unsigned long long']],
'ThreadId': [32, ['unsigned long long']],
'Type': [0, ['Enumeration', {'target': 'long', 'choices': {0: 'DXGK_DIAG_TYPE_NONE', 1: 'DXGK_DIAG_TYPE_SDC', 2: 'DXGK_DIAG_TYPE_HPD', 3: 'DXGK_DIAG_TYPE_DC_ORIGIN', 4: 'DXGK_DIAG_TYPE_USER_CDS', 5: 'DXGK_DIAG_TYPE_DRV_CDS', 6: 'DXGK_DIAG_TYPE_CODE_POINT', 7: 'DXGK_DIAG_TYPE_QDC', 8: 'DXGK_DIAG_TYPE_MONITOR_MGR', 9: 'DXGK_DIAG_TYPE_CONNECTEDSET_NOT_FOUND', 10: 'DXGK_DIAG_TYPE_DISPDIAG_COLLECTED', 11: 'DXGK_DIAG_TYPE_BML_PACKET', 12: 'DXGK_DIAG_TYPE_BML_PACKET_EX', 13: 'DXGK_DIAG_TYPE_COMMIT_VIDPN_FAILED', 14: 'DXGK_DIAG_TYPE_MAX', -1: 'DXGK_DIAG_TYPE_FORCE_UINT32'}}]],
'WdLogIdx': [44, ['unsigned long']],
'Size': [4, ['unsigned long']],
}],
'__unnamed_1225': [0x10, {
'DeviceObject': [8, ['pointer64', ['_DEVICE_OBJECT']]],
'Vpb': [0, ['pointer64', ['_VPB']]],
}],
'_SM_VALUES_STRINGS': [0x18, {
'StorageType': [16, ['Enumeration', {'target': 'long', 'choices': {0: 'SmStorageActual', 1: 'SmStorageNonActual'}}]],
'pszName': [0, ['pointer64', ['unsigned char']]],
'ulValue': [8, ['unsigned long']],
'RangeType': [12, ['Enumeration', {'target': 'long', 'choices': {0: 'SmRangeSharedInfo', 1: 'SmRangeNonSharedInfo', 2: 'SmRangeBool'}}]],
}],
'tagTERMINAL': [0x40, {
'spwndDesktopOwner': [8, ['pointer64', ['tagWND']]],
'dwTERMF_Flags': [0, ['unsigned long']],
'dwNestedLevel': [32, ['unsigned long']],
'pqDesktop': [24, ['pointer64', ['tagQ']]],
'pEventInputReady': [56, ['pointer64', ['_KEVENT']]],
'rpdeskDestroy': [48, ['pointer64', ['tagDESKTOP']]],
'ptiDesktop': [16, ['pointer64', ['tagTHREADINFO']]],
'pEventTermInit': [40, ['pointer64', ['_KEVENT']]],
}],
'_SCATTER_GATHER_LIST': [0x10, {
'Elements': [16, ['array', 0, ['_SCATTER_GATHER_ELEMENT']]],
'Reserved': [8, ['unsigned long long']],
'NumberOfElements': [0, ['unsigned long']],
}],
'tagMENULIST': [0x10, {
'pMenu': [8, ['pointer64', ['tagMENU']]],
'pNext': [0, ['pointer64', ['tagMENULIST']]],
}],
'tagPOINT': [0x8, {
'y': [4, ['long']],
'x': [0, ['long']],
}],
'tagSHAREDINFO': [0x238, {
'psi': [0, ['pointer64', ['tagSERVERINFO']]],
'DefWindowSpecMsgs': [552, ['_WNDMSG']],
'awmControl': [40, ['array', 31, ['_WNDMSG']]],
'ulSharedDelta': [32, ['unsigned long long']],
'pDispInfo': [24, ['pointer64', ['tagDISPLAYINFO']]],
'aheList': [8, ['pointer64', ['_HANDLEENTRY']]],
'DefWindowMsgs': [536, ['_WNDMSG']],
'HeEntrySize': [16, ['unsigned long']],
}],
'tagIMC': [0x40, {
'dwClientImcData': [48, ['unsigned long long']],
'head': [0, ['_THRDESKHEAD']],
'hImeWnd': [56, ['pointer64', ['HWND__']]],
'pImcNext': [40, ['pointer64', ['tagIMC']]],
}],
'tagKL': [0x78, {
'uNumTbl': [88, ['unsigned long']],
'pklPrev': [24, ['pointer64', ['tagKL']]],
'head': [0, ['_HEAD']],
'pklNext': [16, ['pointer64', ['tagKL']]],
'spkfPrimary': [56, ['pointer64', ['tagKBDFILE']]],
'dwFontSigs': [64, ['unsigned long']],
'dwLastKbdType': [104, ['unsigned long']],
'CodePage': [72, ['unsigned short']],
'dwKL_Flags': [32, ['unsigned long']],
'iBaseCharset': [68, ['unsigned long']],
'dwKLID': [112, ['unsigned long']],
'spkf': [48, ['pointer64', ['tagKBDFILE']]],
'piiex': [80, ['pointer64', ['tagIMEINFOEX']]],
'hkl': [40, ['pointer64', ['HKL__']]],
'pspkfExtra': [96, ['pointer64', ['pointer64', ['tagKBDFILE']]]],
'wchDiacritic': [74, ['wchar']],
'dwLastKbdSubType': [108, ['unsigned long']],
}],
'__unnamed_182e': [0x8, {
'pRgb256x3x16': [0, ['pointer64', ['_D3DDDI_GAMMA_RAMP_RGB256x3x16']]],
'pRaw': [0, ['pointer64', ['void']]],
'pDxgi1': [0, ['pointer64', ['_D3DDDI_GAMMA_RAMP_DXGI_1']]],
}],
'tagCARET': [0x48, {
'iHideLevel': [12, ['long']],
'yOwnDc': [56, ['long']],
'y': [20, ['long']],
'cy': [24, ['long']],
'cx': [28, ['long']],
'hBitmap': [32, ['pointer64', ['HBITMAP__']]],
'cyOwnDc': [64, ['long']],
'fOn': [8, ['BitField', {'end_bit': 2, 'start_bit': 1, 'native_type': 'unsigned long'}]],
'hTimer': [40, ['unsigned long long']],
'xOwnDc': [52, ['long']],
'fVisible': [8, ['BitField', {'end_bit': 1, 'start_bit': 0, 'native_type': 'unsigned long'}]],
'cxOwnDc': [60, ['long']],
'tid': [48, ['unsigned long']],
'x': [16, ['long']],
'spwnd': [0, ['pointer64', ['tagWND']]],
}],
}
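
# --- Illustrative helper (added; not part of the generated profile) ---
# Each entry above follows the Volatility-style vtype layout:
#     'StructName': [total_size, {field: [byte_offset, type_descriptor]}]
# The sketch below shows how such a table is typically consumed. It takes the
# table as a parameter, so it assumes nothing about the name the dict literal
# above is bound to earlier in this file; the helper name is hypothetical.
def _field_offset(vtypes, struct_name, field_name):
    """Return (byte_offset, type_descriptor) for one field of a vtype struct."""
    _total_size, fields = vtypes[struct_name]
    byte_offset, type_descriptor = fields[field_name]
    return byte_offset, type_descriptor

# Example: _field_offset(table, 'tagPOINT', 'y') would return (4, ['long']).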
|
gpl-2.0
|
Sorsly/subtle
|
google-cloud-sdk/platform/gsutil/gslib/tests/test_notification.py
|
38
|
2770
|
# -*- coding: utf-8 -*-
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration tests for notification command."""
from __future__ import absolute_import
import re
import uuid
import boto
import gslib.tests.testcase as testcase
from gslib.tests.util import ObjectToURI as suri
from gslib.tests.util import unittest
def _LoadNotificationUrl():
return boto.config.get_value('GSUtil', 'test_notification_url')
NOTIFICATION_URL = _LoadNotificationUrl()
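# NOTIFICATION_URL is read from the boto config; e.g. (URL illustrative only):
#   [GSUtil]
#   test_notification_url = https://example.appspot.com/notification-handler
# When the value is unset, the channel tests below are skipped via skipUnless.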
class TestNotification(testcase.GsUtilIntegrationTestCase):
"""Integration tests for notification command."""
@unittest.skipUnless(NOTIFICATION_URL,
'Test requires notification URL configuration.')
def test_watch_bucket(self):
"""Tests creating a notification channel on a bucket."""
bucket_uri = self.CreateBucket()
self.RunGsUtil([
'notification', 'watchbucket', NOTIFICATION_URL, suri(bucket_uri)])
identifier = str(uuid.uuid4())
token = str(uuid.uuid4())
stderr = self.RunGsUtil([
'notification', 'watchbucket', '-i', identifier, '-t', token,
NOTIFICATION_URL, suri(bucket_uri)], return_stderr=True)
self.assertIn('token: %s' % token, stderr)
self.assertIn('identifier: %s' % identifier, stderr)
@unittest.skipUnless(NOTIFICATION_URL,
'Test requires notification URL configuration.')
def test_stop_channel(self):
"""Tests stopping a notification channel on a bucket."""
bucket_uri = self.CreateBucket()
stderr = self.RunGsUtil(
['notification', 'watchbucket', NOTIFICATION_URL, suri(bucket_uri)],
return_stderr=True)
channel_id = re.findall(r'channel identifier: (?P<id>.*)', stderr)
self.assertEqual(len(channel_id), 1)
resource_id = re.findall(r'resource identifier: (?P<id>.*)', stderr)
self.assertEqual(len(resource_id), 1)
channel_id = channel_id[0]
resource_id = resource_id[0]
self.RunGsUtil(['notification', 'stopchannel', channel_id, resource_id])
def test_invalid_subcommand(self):
stderr = self.RunGsUtil(['notification', 'foo', 'bar', 'baz'],
return_stderr=True, expected_status=1)
self.assertIn('Invalid subcommand', stderr)
|
mit
|
wilvk/ansible
|
test/units/module_utils/test_distribution_version.py
|
13
|
35740
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from itertools import product
import pytest
# the module we are actually testing (sort of)
from ansible.module_utils.facts.system.distribution import DistributionFactCollector
# to generate the testcase data, you can use the script gen_distribution_version_testcase.py in hacking/tests
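# Each TESTSETS entry follows one schema: "name" labels the case,
# "platform.dist" mimics the (distname, version, id) tuple returned by
# platform.dist(), "input" maps release-file paths to their contents, and
# "result" holds the distribution facts the collector is expected to report.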
TESTSETS = [
{
"platform.dist": [
"centos",
"7.2.1511",
"Core"
],
"input": {
"/etc/redhat-release": "CentOS Linux release 7.2.1511 (Core) \n",
"/etc/os-release": (
"NAME=\"CentOS Linux\"\nVERSION=\"7 (Core)\"\nID=\"centos\"\nID_LIKE=\"rhel fedora\"\nVERSION_ID=\"7\"\n"
"PRETTY_NAME=\"CentOS Linux 7 (Core)\"\nANSI_COLOR=\"0;31\"\nCPE_NAME=\"cpe:/o:centos:centos:7\"\n"
"HOME_URL=\"https://www.centos.org/\"\nBUG_REPORT_URL=\"https://bugs.centos.org/\"\n\nCENTOS_MANTISBT_PROJECT=\"CentOS-7\"\n"
"CENTOS_MANTISBT_PROJECT_VERSION=\"7\"\nREDHAT_SUPPORT_PRODUCT=\"centos\"\nREDHAT_SUPPORT_PRODUCT_VERSION=\"7\"\n\n"
),
"/etc/system-release": "CentOS Linux release 7.2.1511 (Core) \n"
},
"name": "CentOS 7.2.1511",
"result": {
"distribution_release": "Core",
"distribution": "CentOS",
"distribution_major_version": "7",
"os_family": "RedHat",
"distribution_version": "7.2.1511",
}
},
{
"name": "CentOS 6.7",
"platform.dist": [
"centos",
"6.7",
"Final"
],
"input": {
"/etc/redhat-release": "CentOS release 6.7 (Final)\n",
"/etc/lsb-release": (
"LSB_VERSION=base-4.0-amd64:base-4.0-noarch:core-4.0-amd64:core-4.0-noarch:graphics-4.0-amd64:graphics-4.0-noarch:"
"printing-4.0-amd64:printing-4.0-noarch\n"
),
"/etc/system-release": "CentOS release 6.7 (Final)\n"
},
"result": {
"distribution_release": "Final",
"distribution": "CentOS",
"distribution_major_version": "6",
"os_family": "RedHat",
"distribution_version": "6.7"
}
},
{
"name": "RedHat 7.2",
"platform.dist": [
"redhat",
"7.2",
"Maipo"
],
"input": {
"/etc/redhat-release": "Red Hat Enterprise Linux Server release 7.2 (Maipo)\n",
"/etc/os-release": (
"NAME=\"Red Hat Enterprise Linux Server\"\nVERSION=\"7.2 (Maipo)\"\nID=\"rhel\"\nID_LIKE=\"fedora\"\nVERSION_ID=\"7.2\"\n"
"PRETTY_NAME=\"Red Hat Enterprise Linux Server 7.2 (Maipo)\"\nANSI_COLOR=\"0;31\"\n"
"CPE_NAME=\"cpe:/o:redhat:enterprise_linux:7.2:GA:server\"\nHOME_URL=\"https://www.redhat.com/\"\n"
"BUG_REPORT_URL=\"https://bugzilla.redhat.com/\"\n\nREDHAT_BUGZILLA_PRODUCT=\"Red Hat Enterprise Linux 7\"\n"
"REDHAT_BUGZILLA_PRODUCT_VERSION=7.2\nREDHAT_SUPPORT_PRODUCT=\"Red Hat Enterprise Linux\"\n"
"REDHAT_SUPPORT_PRODUCT_VERSION=\"7.2\"\n"
),
"/etc/system-release": "Red Hat Enterprise Linux Server release 7.2 (Maipo)\n"
},
"result": {
"distribution_release": "Maipo",
"distribution": "RedHat",
"distribution_major_version": "7",
"os_family": "RedHat",
"distribution_version": "7.2"
}
},
{
"name": "RedHat 6.7",
"platform.dist": [
"redhat",
"6.7",
"Santiago"
],
"input": {
"/etc/redhat-release": "Red Hat Enterprise Linux Server release 6.7 (Santiago)\n",
"/etc/lsb-release": (
"LSB_VERSION=base-4.0-amd64:base-4.0-noarch:core-4.0-amd64:core-4.0-noarch:graphics-4.0-amd64:graphics-4.0-noarch:"
"printing-4.0-amd64:printing-4.0-noarch\n"
),
"/etc/system-release": "Red Hat Enterprise Linux Server release 6.7 (Santiago)\n"
},
"result": {
"distribution_release": "Santiago",
"distribution": "RedHat",
"distribution_major_version": "6",
"os_family": "RedHat",
"distribution_version": "6.7"
}
},
{
"name": "Virtuozzo 7.3",
"platform.dist": [
"redhat",
"7.3",
""
],
"input": {
"/etc/redhat-release": "Virtuozzo Linux release 7.3\n",
"/etc/os-release": (
"NAME=\"Virtuozzo\"\n"
"VERSION=\"7.0.3\"\n"
"ID=\"virtuozzo\"\n"
"ID_LIKE=\"rhel fedora\"\n"
"VERSION_ID=\"7\"\n"
"PRETTY_NAME=\"Virtuozzo release 7.0.3\"\n"
"ANSI_COLOR=\"0;31\"\n"
"CPE_NAME=\"cpe:/o:virtuozzoproject:vz:7\"\n"
"HOME_URL=\"http://www.virtuozzo.com\"\n"
"BUG_REPORT_URL=\"https://bugs.openvz.org/\"\n"
),
"/etc/system-release": "Virtuozzo release 7.0.3 (640)\n"
},
"result": {
"distribution_release": "NA",
"distribution": "Virtuozzo",
"distribution_major_version": "7",
"os_family": "RedHat",
"distribution_version": "7.3"
}
},
{
"name": "openSUSE Leap 42.1",
"input": {
"/etc/os-release": """
NAME="openSUSE Leap"
VERSION="42.1"
VERSION_ID="42.1"
PRETTY_NAME="openSUSE Leap 42.1 (x86_64)"
ID=opensuse
ANSI_COLOR="0;32"
CPE_NAME="cpe:/o:opensuse:opensuse:42.1"
BUG_REPORT_URL="https://bugs.opensuse.org"
HOME_URL="https://opensuse.org/"
ID_LIKE="suse"
""",
"/etc/SuSE-release": """
openSUSE 42.1 (x86_64)
VERSION = 42.1
CODENAME = Malachite
# /etc/SuSE-release is deprecated and will be removed in the future, use /etc/os-release instead
"""
},
"platform.dist": ['SuSE', '42.1', 'x86_64'],
"result": {
"distribution": "openSUSE Leap",
"distribution_major_version": "42",
"distribution_release": "1",
"os_family": "Suse",
"distribution_version": "42.1",
}
},
{
'name': 'openSUSE 13.2',
'input': {
'/etc/SuSE-release': """openSUSE 13.2 (x86_64)
VERSION = 13.2
CODENAME = Harlequin
# /etc/SuSE-release is deprecated and will be removed in the future, use /etc/os-release instead
""",
'/etc/os-release': """NAME=openSUSE
VERSION="13.2 (Harlequin)"
VERSION_ID="13.2"
PRETTY_NAME="openSUSE 13.2 (Harlequin) (x86_64)"
ID=opensuse
ANSI_COLOR="0;32"
CPE_NAME="cpe:/o:opensuse:opensuse:13.2"
BUG_REPORT_URL="https://bugs.opensuse.org"
HOME_URL="https://opensuse.org/"
ID_LIKE="suse"
"""
},
'platform.dist': ('SuSE', '13.2', 'x86_64'),
'result': {
'distribution': u'openSUSE',
'distribution_major_version': u'13',
'distribution_release': u'2',
'os_family': u'Suse',
'distribution_version': u'13.2'
}
},
{
"platform.dist": [
"",
"",
""
],
"input": {
"/etc/os-release": (
"NAME=\"openSUSE Tumbleweed\"\n# VERSION=\"20160917\"\nID=opensuse\nID_LIKE=\"suse\"\nVERSION_ID=\"20160917\"\n"
"PRETTY_NAME=\"openSUSE Tumbleweed\"\nANSI_COLOR=\"0;32\"\nCPE_NAME=\"cpe:/o:opensuse:tumbleweed:20160917\"\n"
"BUG_REPORT_URL=\"https://bugs.opensuse.org\"\nHOME_URL=\"https://www.opensuse.org/\"\n"
)
},
"name": "openSUSE Tumbleweed 20160917",
"result": {
"distribution_release": "",
"distribution": "openSUSE Tumbleweed",
"distribution_major_version": "NA",
"os_family": "Suse",
"distribution_version": "20160917"
}
},
{ # see https://github.com/ansible/ansible/issues/14837
"name": "SLES 11.3",
"input": {
"/etc/SuSE-release": """
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 3
"""
},
"platform.dist": ['SuSE', '11', 'x86_64'],
"result": {
"distribution": "SLES",
"distribution_major_version": "11",
"distribution_release": "3",
"os_family": "Suse",
"distribution_version": "11.3",
}
},
{ # see https://github.com/ansible/ansible/issues/14837
"name": "SLES 11.4",
"input": {
"/etc/SuSE-release": """
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 4
""",
"/etc/os-release": """
NAME="SLES"
VERSION="11.4"
VERSION_ID="11.4"
PRETTY_NAME="SUSE Linux Enterprise Server 11 SP4"
ID="sles"
ANSI_COLOR="0;32"
CPE_NAME="cpe:/o:suse:sles:11:4"
""",
},
"platform.dist": ['SuSE', '11', 'x86_64'],
"result":{
"distribution": "SLES",
"distribution_major_version": "11",
"distribution_release": "4",
"os_family": "Suse",
"distribution_version": "11.4",
}
},
{ # see https://github.com/ansible/ansible/issues/14837
"name": "SLES 12 SP0",
"input": {
"/etc/SuSE-release": """
SUSE Linux Enterprise Server 12 (x86_64)
VERSION = 12
PATCHLEVEL = 0
# This file is deprecated and will be removed in a future service pack or release.
# Please check /etc/os-release for details about this release.
""",
"/etc/os-release": """
NAME="SLES"
VERSION="12"
VERSION_ID="12"
PRETTY_NAME="SUSE Linux Enterprise Server 12"
ID="sles"
ANSI_COLOR="0;32"
CPE_NAME="cpe:/o:suse:sles:12"
""",
},
"platform.dist": ['SuSE', '12', 'x86_64'],
"result": {
"distribution": "SLES",
"distribution_major_version": "12",
"distribution_release": "0",
"os_family": "Suse",
"distribution_version": "12",
}
},
{ # see https://github.com/ansible/ansible/issues/14837
"name": "SLES 12 SP1",
"input": {
"/etc/SuSE-release": """
SUSE Linux Enterprise Server 12 (x86_64)
VERSION = 12
PATCHLEVEL = 0
# This file is deprecated and will be removed in a future service pack or release.
# Please check /etc/os-release for details about this release.
""",
"/etc/os-release": """
NAME="SLES"
VERSION="12-SP1"
VERSION_ID="12.1"
PRETTY_NAME="SUSE Linux Enterprise Server 12 SP1"
ID="sles"
ANSI_COLOR="0;32"
CPE_NAME="cpe:/o:suse:sles:12:sp1"
""",
},
"platform.dist": ['SuSE', '12', 'x86_64'],
"result":{
"distribution": "SLES",
"distribution_major_version": "12",
"distribution_release": "1",
"os_family": "Suse",
"distribution_version": "12.1",
}
},
{
"name": "Debian stretch/sid",
"input": {
"/etc/os-release": """
PRETTY_NAME="Debian GNU/Linux stretch/sid"
NAME="Debian GNU/Linux"
ID=debian
HOME_URL="https://www.debian.org/"
SUPPORT_URL="https://www.debian.org/support"
BUG_REPORT_URL="https://bugs.debian.org/"
""",
"/etc/debian_version": """
stretch/sid
""",
},
"platform.dist": ('debian', 'stretch/sid', ''),
"result": {
"distribution": "Debian",
"distribution_major_version": "stretch/sid",
"distribution_release": "NA",
"os_family": "Debian",
"distribution_version": "stretch/sid",
}
},
{
'name': "Debian 7.9",
'input': {
'/etc/os-release': """PRETTY_NAME="Debian GNU/Linux 7 (wheezy)"
NAME="Debian GNU/Linux"
VERSION_ID="7"
VERSION="7 (wheezy)"
ID=debian
ANSI_COLOR="1;31"
HOME_URL="http://www.debian.org/"
SUPPORT_URL="http://www.debian.org/support/"
BUG_REPORT_URL="http://bugs.debian.org/"
"""
},
'platform.dist': ('debian', '7.9', ''),
'result': {
'distribution': u'Debian',
'distribution_major_version': u'7',
'distribution_release': u'wheezy',
"os_family": "Debian",
'distribution_version': u'7.9'
}
},
{
"platform.dist": [
"Ubuntu",
"16.04",
"xenial"
],
"input": {
"/etc/os-release": (
"NAME=\"Ubuntu\"\nVERSION=\"16.04 LTS (Xenial Xerus)\"\nID=ubuntu\nID_LIKE=debian\nPRETTY_NAME=\"Ubuntu 16.04 LTS\"\n"
"VERSION_ID=\"16.04\"\nHOME_URL=\"http://www.ubuntu.com/\"\nSUPPORT_URL=\"http://help.ubuntu.com/\"\n"
"BUG_REPORT_URL=\"http://bugs.launchpad.net/ubuntu/\"\nUBUNTU_CODENAME=xenial\n"
),
"/etc/lsb-release": "DISTRIB_ID=Ubuntu\nDISTRIB_RELEASE=16.04\nDISTRIB_CODENAME=xenial\nDISTRIB_DESCRIPTION=\"Ubuntu 16.04 LTS\"\n"
},
"name": "Ubuntu 16.04",
"result": {
"distribution_release": "xenial",
"distribution": "Ubuntu",
"distribution_major_version": "16",
"os_family": "Debian",
"distribution_version": "16.04"
}
},
{
'name': "Ubuntu 10.04 guess",
'input':
{
'/etc/lsb-release': """DISTRIB_ID=Ubuntu
DISTRIB_RELEASE=10.04
DISTRIB_CODENAME=lucid
DISTRIB_DESCRIPTION="Ubuntu 10.04.4 LTS
"""
},
'platform.dist': ('Ubuntu', '10.04', 'lucid'),
'result':
{
'distribution': u'Ubuntu',
'distribution_major_version': u'10',
'distribution_release': u'lucid',
"os_family": "Debian",
'distribution_version': u'10.04'
}
},
{
'name': "Ubuntu 14.04",
'input': {
'/etc/lsb-release': """DISTRIB_ID=Ubuntu
DISTRIB_RELEASE=14.04
DISTRIB_CODENAME=trusty
DISTRIB_DESCRIPTION="Ubuntu 14.04.4 LTS"
""",
'/etc/os-release': """NAME="Ubuntu"
VERSION="14.04.4 LTS, Trusty Tahr"
ID=ubuntu
ID_LIKE=debian
PRETTY_NAME="Ubuntu 14.04.4 LTS"
VERSION_ID="14.04"
HOME_URL="http://www.ubuntu.com/"
SUPPORT_URL="http://help.ubuntu.com/"
BUG_REPORT_URL="http://bugs.launchpad.net/ubuntu/"
"""
},
'platform.dist': ('Ubuntu', '14.04', 'trusty'),
'result': {
'distribution': u'Ubuntu',
'distribution_major_version': u'14',
'distribution_release': u'trusty',
"os_family": "Debian",
'distribution_version': u'14.04'
}
},
{
'name': "Ubuntu 12.04",
'input': {'/etc/lsb-release': """DISTRIB_ID=Ubuntu
DISTRIB_RELEASE=12.04
DISTRIB_CODENAME=precise
DISTRIB_DESCRIPTION="Ubuntu 12.04.5 LTS"
""",
'/etc/os-release': """NAME="Ubuntu"
VERSION="12.04.5 LTS, Precise Pangolin"
ID=ubuntu
ID_LIKE=debian
PRETTY_NAME="Ubuntu precise (12.04.5 LTS)"
VERSION_ID="12.04"
"""},
'platform.dist': ('Ubuntu', '12.04', 'precise'),
'result': {'distribution': u'Ubuntu',
'distribution_major_version': u'12',
'distribution_release': u'precise',
"os_family": "Debian",
'distribution_version': u'12.04'}
},
{
"platform.dist": [
"neon",
"16.04",
"xenial"
],
"input": {
"/etc/os-release": ("NAME=\"KDE neon\"\nVERSION=\"5.8\"\nID=neon\nID_LIKE=\"ubuntu debian\"\nPRETTY_NAME=\"KDE neon User Edition 5.8\"\n"
"VERSION_ID=\"16.04\"\nHOME_URL=\"http://neon.kde.org/\"\nSUPPORT_URL=\"http://neon.kde.org/\"\n"
"BUG_REPORT_URL=\"http://bugs.kde.org/\"\nVERSION_CODENAME=xenial\nUBUNTU_CODENAME=xenial\n"),
"/etc/lsb-release": "DISTRIB_ID=neon\nDISTRIB_RELEASE=16.04\nDISTRIB_CODENAME=xenial\nDISTRIB_DESCRIPTION=\"KDE neon User Edition 5.8\"\n"
},
"name": "KDE neon 16.04",
"result": {
"distribution_release": "xenial",
"distribution": "KDE neon",
"distribution_major_version": "16",
"os_family": "Debian",
"distribution_version": "16.04"
}
},
{
'name': 'Core OS',
'input': {
'/etc/os-release': """
NAME=CoreOS
ID=coreos
VERSION=976.0.0
VERSION_ID=976.0.0
BUILD_ID=2016-03-03-2324
PRETTY_NAME="CoreOS 976.0.0 (Coeur Rouge)"
ANSI_COLOR="1;32"
HOME_URL="https://coreos.com/"
BUG_REPORT_URL="https://github.com/coreos/bugs/issues"
""",
'/etc/lsb-release': """DISTRIB_ID=CoreOS
DISTRIB_RELEASE=976.0.0
DISTRIB_CODENAME="Coeur Rouge"
DISTRIB_DESCRIPTION="CoreOS 976.0.0 (Coeur Rouge)"
""",
},
'platform.dist': ('', '', ''),
'platform.release': '',
'result': {
"distribution": "CoreOS",
"distribution_major_version": "NA",
"distribution_release": "NA",
"distribution_version": "976.0.0",
}
},
# Solaris and derivatives: https://gist.github.com/natefoo/7af6f3d47bb008669467
{
"name": "SmartOS Global Zone",
"uname_v": "joyent_20160330T234717Z",
"result": {
"distribution_release": "SmartOS 20160330T234717Z x86_64",
"distribution": "SmartOS",
"os_family": "Solaris",
"distribution_version": "joyent_20160330T234717Z"
},
"platform.dist": [
"",
"",
""
],
"input": {
"/etc/release": (" SmartOS 20160330T234717Z x86_64\n"
" Copyright 2010 Sun Microsystems, Inc. All Rights Reserved.\n"
" Copyright 2010-2012 Joyent, Inc. All Rights Reserved.\n"
" Use is subject to license terms.\n\n"
" Built with the following components:\n\n[\n"
" { \"repo\": \"smartos-live\", \"branch\": \"release-20160331\", \"rev\": \"a77c410f2afe6dc9853a915733caec3609cc50f1\", "
"\"commit_date\": \"1459340323\", \"url\": \"[email protected]:joyent/smartos-live.git\" }\n , "
"{ \"repo\": \"illumos-joyent\", \"branch\": \"release-20160331\", \"rev\": \"ab664c06caf06e9ce7586bff956e7709df1e702e\", "
"\"commit_date\": \"1459362533\", \"url\": \"/root/data/jenkins/workspace/smartos/MG/build/illumos-joyent\" }\n"
" , { \"repo\": \"illumos-extra\", \"branch\": \"release-20160331\", "
"\"rev\": \"cc723855bceace3df7860b607c9e3827d47e0ff4\", \"commit_date\": \"1458153188\", "
"\"url\": \"/root/data/jenkins/workspace/smartos/MG/build/illumos-extra\" }\n , "
"{ \"repo\": \"kvm\", \"branch\": \"release-20160331\", \"rev\": \"a8befd521c7e673749c64f118585814009fe4b73\", "
"\"commit_date\": \"1450081968\", \"url\": \"/root/data/jenkins/workspace/smartos/MG/build/illumos-kvm\" }\n , "
"{ \"repo\": \"kvm-cmd\", \"branch\": \"release-20160331\", \"rev\": \"c1a197c8e4582c68739ab08f7e3198b2392c9820\", "
"\"commit_date\": \"1454723558\", \"url\": \"/root/data/jenkins/workspace/smartos/MG/build/illumos-kvm-cmd\" }\n , "
"{ \"repo\": \"mdata-client\", \"branch\": \"release-20160331\", \"rev\": \"58158c44603a3316928975deccc5d10864832770\", "
"\"commit_date\": \"1429917227\", \"url\": \"/root/data/jenkins/workspace/smartos/MG/build/mdata-client\" }\n]\n")
},
"platform.system": "SunOS"
},
{
"name": "SmartOS Zone",
"uname_v": "joyent_20160330T234717Z",
"result": {
"distribution_release": "SmartOS x86_64",
"distribution": "SmartOS",
"os_family": "Solaris",
"distribution_version": "14.3.0"
},
"platform.dist": [
"",
"",
""
],
"input": {
"/etc/release": (" SmartOS x86_64\n Copyright 2010 Sun Microsystems, Inc. All Rights Reserved.\n"
" Copyright 2010-2013 Joyent, Inc. All Rights Reserved.\n Use is subject to license terms.\n"
" See joyent_20141002T182809Z for assembly date and time.\n"),
"/etc/product": "Name: Joyent Instance\nImage: base64 14.3.0\nDocumentation: http://wiki.joyent.com/jpc2/Base+Instance\n"
},
"platform.system": "SunOS"
},
{
"name": "OpenIndiana",
"uname_v": "oi_151a9",
"result": {
"distribution_release": "OpenIndiana Development oi_151.1.9 X86 (powered by illumos)",
"distribution": "OpenIndiana",
"os_family": "Solaris",
"distribution_version": "oi_151a9"
},
"platform.dist": [
"",
"",
""
],
"input": {
"/etc/release": (" OpenIndiana Development oi_151.1.9 X86 (powered by illumos)\n Copyright 2011 Oracle and/or its affiliates. "
"All rights reserved.\n Use is subject to license terms.\n "
"Assembled 17 January 2014\n")
},
"platform.system": "SunOS"
},
{
"name": "OmniOS",
"uname_v": "omnios-10b9c79",
"result": {
"distribution_release": "OmniOS v11 r151012",
"distribution": "OmniOS",
"os_family": "Solaris",
"distribution_version": "r151012"
},
"platform.dist": [
"",
"",
""
],
# "platform.release": 'OmniOS',
"input": {
"/etc/release": (
" OmniOS v11 r151012\n Copyright 2014 OmniTI Computer Consulting, Inc. All rights reserved.\n Use is subject to license terms.\n\n"
)
},
"platform.system": "SunOS"
},
{
"name": "Nexenta 3",
"uname_v": "NexentaOS_134f",
"result": {
"distribution_release": "Open Storage Appliance v3.1.6",
"distribution": "Nexenta",
"os_family": "Solaris",
"distribution_version": "3.1.6"
},
"platform.dist": [
"",
"",
""
],
"platform.release:": "",
"input": {
"/etc/release": (" Open Storage Appliance v3.1.6\n Copyright (c) 2014 Nexenta Systems, Inc. "
"All Rights Reserved.\n Copyright (c) 2011 Oracle. All Rights Reserved.\n "
"Use is subject to license terms.\n")
},
"platform.system": "SunOS"
},
{
"name": "Nexenta 4",
"uname_v": "NexentaOS_4:cd604cd066",
"result": {
"distribution_release": "Open Storage Appliance 4.0.3-FP2",
"distribution": "Nexenta",
"os_family": "Solaris",
"distribution_version": "4.0.3-FP2"
},
"platform.dist": [
"",
"",
""
],
"input": {
"/etc/release": (" Open Storage Appliance 4.0.3-FP2\n Copyright (c) 2014 Nexenta Systems, Inc. "
"All Rights Reserved.\n Copyright (c) 2010 Oracle. All Rights Reserved.\n "
"Use is subject to license terms.\n")
},
"platform.system": "SunOS"
},
{
"name": "Solaris 10",
"uname_v": "Generic_141445-09",
"result": {
"distribution_release": "Solaris 10 10/09 s10x_u8wos_08a X86",
"distribution": "Solaris",
"os_family": "Solaris",
"distribution_version": "10"
},
"platform.dist": [
"",
"",
""
],
"input": {
"/etc/release": (" Solaris 10 10/09 s10x_u8wos_08a X86\n Copyright 2009 Sun Microsystems, Inc. "
"All Rights Reserved.\n Use is subject to license terms.\n "
"Assembled 16 September 2009\n")
},
"platform.system": "SunOS"
},
{
"name": "Solaris 11",
"uname_v": "11.0",
"result": {
"distribution_release": "Oracle Solaris 11 11/11 X86",
"distribution": "Solaris",
"os_family": "Solaris",
"distribution_version": "11"
},
"platform.dist": [
"",
"",
""
],
"input": {
"/etc/release": (" Oracle Solaris 11 11/11 X86\n Copyright (c) 1983, 2011, Oracle and/or its affiliates. "
"All rights reserved.\n Assembled 18 October 2011\n")
},
"platform.system": "SunOS"
},
{
"name": "Solaris 11.3",
"platform.dist": [
"",
"",
""
],
"input": {
"/etc/release": (
" Oracle Solaris 11.3 X86\n Copyright (c) 1983, 2015, Oracle and/or its affiliates. "
"All rights reserved.\n Assembled 06 October 2015\n"
)
},
"platform.system": "SunOS",
"result": {
"distribution_release": "Oracle Solaris 11.3 X86",
"distribution": "Solaris",
"os_family": "Solaris",
"distribution_version": "11.3"
}
},
{
"name": "Solaris 10",
"platform.dist": [
"",
"",
""
],
"input": {
"/etc/release": (" Oracle Solaris 10 1/13 s10x_u11wos_24a X86\n Copyright (c) 1983, 2013, Oracle and/or its affiliates. "
"All rights reserved.\n Assembled 17 January 2013\n")
},
"platform.system": "SunOS",
"result": {
"distribution_release": "Oracle Solaris 10 1/13 s10x_u11wos_24a X86",
"distribution": "Solaris",
"os_family": "Solaris",
"distribution_version": "10"
}
},
{
"name": "Fedora 22",
"platform.dist": [
"fedora",
"22",
"Twenty Two"
],
"input": {
"/etc/redhat-release": "Fedora release 22 (Twenty Two)\n",
"/etc/os-release": (
"NAME=Fedora\nVERSION=\"22 (Twenty Two)\"\nID=fedora\nVERSION_ID=22\nPRETTY_NAME=\"Fedora 22 (Twenty Two)\"\n"
"ANSI_COLOR=\"0;34\"\nCPE_NAME=\"cpe:/o:fedoraproject:fedora:22\"\nHOME_URL=\"https://fedoraproject.org/\"\n"
"BUG_REPORT_URL=\"https://bugzilla.redhat.com/\"\nREDHAT_BUGZILLA_PRODUCT=\"Fedora\"\nREDHAT_BUGZILLA_PRODUCT_VERSION=22\n"
"REDHAT_SUPPORT_PRODUCT=\"Fedora\"\nREDHAT_SUPPORT_PRODUCT_VERSION=22\n"
"PRIVACY_POLICY_URL=https://fedoraproject.org/wiki/Legal:PrivacyPolicy\n"
),
"/etc/system-release": "Fedora release 22 (Twenty Two)\n"
},
"result": {
"distribution_release": "Twenty Two",
"distribution": "Fedora",
"distribution_major_version": "22",
"os_family": "RedHat",
"distribution_version": "22"
}
},
{
"platform.dist": [
"fedora",
"25",
"Rawhide"
],
"input": {
"/etc/redhat-release": "Fedora release 25 (Rawhide)\n",
"/etc/os-release": (
"NAME=Fedora\nVERSION=\"25 (Workstation Edition)\"\nID=fedora\nVERSION_ID=25\n"
"PRETTY_NAME=\"Fedora 25 (Workstation Edition)\"\nANSI_COLOR=\"0;34\"\nCPE_NAME=\"cpe:/o:fedoraproject:fedora:25\"\n"
"HOME_URL=\"https://fedoraproject.org/\"\nBUG_REPORT_URL=\"https://bugzilla.redhat.com/\"\n"
"REDHAT_BUGZILLA_PRODUCT=\"Fedora\"\nREDHAT_BUGZILLA_PRODUCT_VERSION=rawhide\nREDHAT_SUPPORT_PRODUCT=\"Fedora\"\n"
"REDHAT_SUPPORT_PRODUCT_VERSION=rawhide\nPRIVACY_POLICY_URL=https://fedoraproject.org/wiki/Legal:PrivacyPolicy\n"
"VARIANT=\"Workstation Edition\"\nVARIANT_ID=workstation\n"
),
"/etc/system-release": "Fedora release 25 (Rawhide)\n"
},
"name": "Fedora 25",
"result": {
"distribution_release": "Rawhide",
"distribution": "Fedora",
"distribution_major_version": "25",
"os_family": "RedHat",
"distribution_version": "25"
}
},
{
"platform.dist": [
"",
"",
""
],
"input": {
"/etc/sourcemage-release": ("Source Mage GNU/Linux x86_64-pc-linux-gnu\nInstalled from tarball using chroot image (Grimoire 0.61-rc) "
"on Thu May 17 17:31:37 UTC 2012\n")
},
"name": "SMGL NA",
"result": {
"distribution_release": "NA",
"distribution": "SMGL",
"distribution_major_version": "NA",
"os_family": "SMGL",
"distribution_version": "NA"
}
},
# ArchLinux with an empty /etc/arch-release and a /etc/os-release with "NAME=Arch Linux"
{
"platform.dist": [
"",
"",
""
],
"input": {
"/etc/os-release": "NAME=\"Arch Linux\"\nPRETTY_NAME=\"Arch Linux\"\nID=arch\nID_LIKE=archlinux\nANSI_COLOR=\"0;36\"\nHOME_URL=\"https://www.archlinux.org/\"\nSUPPORT_URL=\"https://bbs.archlinux.org/\"\nBUG_REPORT_URL=\"https://bugs.archlinux.org/\"\n\n", # noqa
"/etc/arch-release": "",
},
"name": "Arch Linux NA",
"result": {
"distribution_release": "NA",
"distribution": "Archlinux",
"distribution_major_version": "NA",
"os_family": "Archlinux",
"distribution_version": "NA"
}
},
# ClearLinux https://github.com/ansible/ansible/issues/31501#issuecomment-340861535
{
"platform.dist": [
"Clear Linux OS for Intel Architecture",
"18450",
"clear-linux-os"
],
"input": {
"/usr/lib/os-release": '''
NAME="Clear Linux OS for Intel Architecture"
VERSION=1
ID=clear-linux-os
VERSION_ID=18450
PRETTY_NAME="Clear Linux OS for Intel Architecture"
ANSI_COLOR="1;35"
HOME_URL="https://clearlinux.org"
SUPPORT_URL="https://clearlinux.org"
BUG_REPORT_URL="mailto:[email protected]"
PRIVACY_POLICY_URL="http://www.intel.com/privacy"
'''
},
"name": "Clear Linux OS for Intel Architecture 1",
"result": {
"distribution_release": "clear-linux-os",
"distribution": "ClearLinux",
"distribution_major_version": "18450",
"os_family": "ClearLinux",
"distribution_version": "18450"
}
},
# ArchLinux with no /etc/arch-release but with a /etc/os-release with NAME=Arch Linux
# The fact needs to map 'Arch Linux' to 'Archlinux' for compat with 2.3 and earlier facts
{
"platform.dist": [
"",
"",
""
],
"input": {
"/etc/os-release": "NAME=\"Arch Linux\"\nPRETTY_NAME=\"Arch Linux\"\nID=arch\nID_LIKE=archlinux\nANSI_COLOR=\"0;36\"\nHOME_URL=\"https://www.archlinux.org/\"\nSUPPORT_URL=\"https://bbs.archlinux.org/\"\nBUG_REPORT_URL=\"https://bugs.archlinux.org/\"\n\n", # noqa
},
"name": "Arch Linux no arch-release NA",
"result": {
"distribution_release": "NA",
"distribution": "Archlinux",
"distribution_major_version": "NA",
"os_family": "Archlinux",
"distribution_version": "NA"
}
}
]
@pytest.mark.parametrize("stdin, testcase", product([{}], TESTSETS), ids=lambda x: x['name'], indirect=['stdin'])
def test_distribution_version(am, mocker, testcase):
"""tests the distribution parsing code of the Facts class
testsets have
* a name (for output/debugging only)
* input files that are faked
* those should be complete and also include "irrelevant" files that might be mistaken as coming from other distributions
* all files that are not listed here are assumed to not exist at all
* the output of pythons platform.dist()
* results for the ansible variables distribution* and os_family
"""
# prepare some mock functions to get the testdata in
def mock_get_file_content(fname, default=None, strip=True):
"""give fake content if it exists, otherwise pretend the file is empty"""
data = default
if fname in testcase['input']:
# for debugging
print('faked %s for %s' % (fname, testcase['name']))
            data = testcase['input'][fname]
if strip and data is not None:
data = data.strip()
return data
def mock_get_uname_version(am):
return testcase.get('uname_v', None)
def mock_file_exists(fname, allow_empty=False):
if fname not in testcase['input']:
return False
if allow_empty:
return True
return bool(len(testcase['input'][fname]))
def mock_platform_system():
return testcase.get('platform.system', 'Linux')
def mock_platform_release():
return testcase.get('platform.release', '')
def mock_platform_version():
return testcase.get('platform.version', '')
mocker.patch('ansible.module_utils.facts.system.distribution.get_file_content', mock_get_file_content)
mocker.patch('ansible.module_utils.facts.system.distribution.get_uname_version', mock_get_uname_version)
mocker.patch('ansible.module_utils.facts.system.distribution._file_exists', mock_file_exists)
mocker.patch('platform.dist', lambda: testcase['platform.dist'])
mocker.patch('platform.system', mock_platform_system)
mocker.patch('platform.release', mock_platform_release)
mocker.patch('platform.version', mock_platform_version)
# run Facts()
distro_collector = DistributionFactCollector()
generated_facts = distro_collector.collect(am)
# compare with the expected output
# testcase['result'] has a list of variables and values it expects Facts() to set
for key, val in testcase['result'].items():
assert key in generated_facts
msg = 'Comparing value of %s on %s, should: %s, is: %s' %\
(key, testcase['name'], val, generated_facts[key])
assert generated_facts[key] == val, msg
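# Illustrative only (not exercised by the test above): the os-release payloads
# faked in TESTSETS use the freedesktop KEY=VALUE format; a minimal sketch of
# parsing one into the fields the expected results assert on. The helper name
# is hypothetical, not Ansible's actual parser.
def _example_parse_os_release(content):
    """Parse os-release style KEY=VALUE lines into a dict."""
    facts = {}
    for line in content.splitlines():
        line = line.strip()
        if not line or line.startswith('#') or '=' not in line:
            continue
        key, _, value = line.partition('=')
        facts[key] = value.strip().strip('"')
    return facts

assert _example_parse_os_release('NAME="Ubuntu"\nVERSION_ID="16.04"\n')['VERSION_ID'] == '16.04'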
|
gpl-3.0
|
Ziqi-Li/bknqgis
|
bokeh/examples/embed/embed_multiple.py
|
7
|
2314
|
import io
from jinja2 import Template
from bokeh.embed import components
from bokeh.models import Range1d
from bokeh.plotting import figure
from bokeh.resources import INLINE
from bokeh.util.browser import view
# create some data
x1 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12]
y1 = [0, 8, 2, 4, 6, 9, 5, 6, 25, 28, 4, 7]
x2 = [2, 5, 7, 15, 18, 19, 25, 28, 9, 10, 4]
y2 = [2, 4, 6, 9, 15, 18, 0, 8, 2, 25, 28]
x3 = [0, 1, 0, 8, 2, 4, 6, 9, 7, 8, 9]
y3 = [0, 8, 4, 6, 9, 15, 18, 19, 19, 25, 28]
# select the tools we want
TOOLS="pan,wheel_zoom,box_zoom,reset,save"
# the red and blue graphs will share this data range
xr1 = Range1d(start=0, end=30)
yr1 = Range1d(start=0, end=30)
# only the green will use this data range
xr2 = Range1d(start=0, end=30)
yr2 = Range1d(start=0, end=30)
# build our figures
p1 = figure(x_range=xr1, y_range=yr1, tools=TOOLS, plot_width=300, plot_height=300)
p1.scatter(x1, y1, size=12, color="red", alpha=0.5)
p2 = figure(x_range=xr1, y_range=yr1, tools=TOOLS, plot_width=300, plot_height=300)
p2.scatter(x2, y2, size=12, color="blue", alpha=0.5)
p3 = figure(x_range=xr2, y_range=yr2, tools=TOOLS, plot_width=300, plot_height=300)
p3.scatter(x3, y3, size=12, color="green", alpha=0.5)
# plots can be a single Bokeh model, a list/tuple, or even a dictionary
plots = {'Red': p1, 'Blue': p2, 'Green': p3}
script, div = components(plots)
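# components() is not limited to a dict: a single Bokeh model or a sequence of
# models also works, returning one div (or a tuple of divs) alongside the
# script. A brief illustration with the figures above (results unused below):
script_single, div_single = components(p1)
script_pair, (div_red, div_blue) = components((p1, p2))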
template = Template('''<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Bokeh Scatter Plots</title>
{{ js_resources }}
{{ css_resources }}
{{ script }}
<style>
.embed-wrapper {
width: 50%;
height: 400px;
margin: auto;
}
</style>
</head>
<body>
{% for key in div.keys() %}
<div class="embed-wrapper">
{{ div[key] }}
</div>
{% endfor %}
</body>
</html>
''')
js_resources = INLINE.render_js()
css_resources = INLINE.render_css()
filename = 'embed_multiple.html'
html = template.render(js_resources=js_resources,
css_resources=css_resources,
script=script,
div=div)
with io.open(filename, mode='w', encoding='utf-8') as f:
f.write(html)
view(filename)
|
gpl-2.0
|
ulif/pulp
|
server/test/unit/server/db/model/test_consumer.py
|
15
|
14706
|
# -*- coding: utf-8 -*-
"""
This module contains tests for the pulp.server.db.model.consumer module.
"""
import unittest
import mock
from ....base import PulpServerTests
from pulp.server.db.model import consumer
class TestRepoProfileApplicability(PulpServerTests):
"""
Test the RepoProfileApplicability Model.
"""
def setUp(self):
self.collection = consumer.RepoProfileApplicability.get_collection()
def tearDown(self):
self.collection.drop()
def test___init___no__id(self):
"""
Test the constructor without passing an _id.
"""
profile_hash = 'hash'
repo_id = 'repo_id'
profile = ['a', 'profile']
applicability_data = {'type_id': ['package a', 'package c']}
applicability = consumer.RepoProfileApplicability(
profile_hash=profile_hash, repo_id=repo_id, profile=profile,
applicability=applicability_data)
self.assertEqual(applicability.profile_hash, profile_hash)
self.assertEqual(applicability.repo_id, repo_id)
self.assertEqual(applicability.profile, profile)
self.assertEqual(applicability.applicability, applicability_data)
# Since we didn't set an _id, it should be None
self.assertEqual(applicability._id, None)
def test___init___with__id(self):
"""
Test the constructor with an _id.
"""
profile_hash = 'hash'
repo_id = 'repo_id'
profile = ['a', 'profile']
applicability_data = {'type_id': ['package a', 'package c']}
_id = 'an ID'
applicability = consumer.RepoProfileApplicability(
profile_hash=profile_hash, repo_id=repo_id, profile=profile,
applicability=applicability_data, _id=_id)
self.assertEqual(applicability.profile_hash, profile_hash)
self.assertEqual(applicability.repo_id, repo_id)
self.assertEqual(applicability.profile, profile)
self.assertEqual(applicability.applicability, applicability_data)
        # Since we passed an _id, it should be preserved on the object
self.assertEqual(applicability._id, _id)
def test_delete(self):
profile_hash = 'hash'
repo_id = 'repo_id'
profile = ['a', 'profile']
applicability_data = {'type_id': ['package a', 'package c']}
# Start by making a RepoProfileApplicability object
applicability = consumer.RepoProfileApplicability(
profile_hash=profile_hash, repo_id=repo_id, profile=profile,
applicability=applicability_data)
# At this point, there should be nothing in the database
self.assertEqual(self.collection.find().count(), 0)
# Saving the model should store it in the database
applicability.save()
# Now, we have our one object in the DB
self.assertEqual(self.collection.find().count(), 1)
        # Calling delete() on the object should remove it from the database
applicability.delete()
# Now there should be no objects
self.assertEqual(self.collection.find().count(), 0)
def test_save_existing(self):
"""
Test the save() method with an existing object.
"""
profile_hash = 'hash'
repo_id = 'repo_id'
profile = ['a', 'profile']
applicability_data = {'type_id': ['package a', 'package c']}
# Start by making a RepoProfileApplicability object
applicability = consumer.RepoProfileApplicability(
profile_hash=profile_hash, repo_id=repo_id, profile=profile,
applicability=applicability_data)
# At this point, there should be nothing in the database
self.assertEqual(self.collection.find().count(), 0)
# Saving the model should store it in the database
applicability.save()
# Now, let's alter the applicability data a bit
applicability_data['new_type_id'] = ['package d']
applicability.applicability = applicability_data
# Saving the object should write the changes to the DB
applicability.save()
# There should be one entry in the DB
self.assertEqual(self.collection.find().count(), 1)
document = self.collection.find_one()
self.assertEqual(document['profile_hash'], profile_hash)
self.assertEqual(document['repo_id'], repo_id)
self.assertEqual(document['profile'], profile)
self.assertEqual(document['applicability'], applicability_data)
# Our applicability object should still have the correct _id attribute
self.assertEqual(applicability._id, document['_id'])
def test_save_new(self):
"""
Test the save() method with a new object that is not in the DB yet.
"""
profile_hash = 'hash'
repo_id = 'repo_id'
profile = ['a', 'profile']
applicability_data = {'type_id': ['package a', 'package c']}
# Start by making a RepoProfileApplicability object
applicability = consumer.RepoProfileApplicability(
profile_hash=profile_hash, repo_id=repo_id, profile=profile,
applicability=applicability_data)
# At this point, there should be nothing in the database
self.assertEqual(self.collection.find().count(), 0)
# Saving the model should store it in the database
applicability.save()
# There should now be one entry in the DB
self.assertEqual(self.collection.find().count(), 1)
document = self.collection.find_one()
self.assertEqual(document['profile_hash'], profile_hash)
self.assertEqual(document['repo_id'], repo_id)
self.assertEqual(document['profile'], profile)
self.assertEqual(document['applicability'], applicability_data)
# Our applicability object should now have the correct _id attribute
self.assertEqual(applicability._id, document['_id'])
class TestUnitProfile(unittest.TestCase):
"""
Test the UnitProfile class.
"""
@mock.patch('pulp.server.db.model.consumer.Model.__init__', side_effect=consumer.Model.__init__,
autospec=True)
def test___init__(self, __init__):
"""
Test the constructor.
"""
profile = consumer.UnitProfile('consumer_id', 'content_type', 'profile')
self.assertEqual(profile.consumer_id, 'consumer_id')
self.assertEqual(profile.content_type, 'content_type')
self.assertEqual(profile.profile, 'profile')
self.assertEqual(profile.profile_hash, profile.calculate_hash(profile.profile))
# The superclass __init__ should have been called
__init__.assert_called_once_with(profile)
@mock.patch('pulp.server.db.model.consumer.Model.__init__', side_effect=consumer.Model.__init__,
autospec=True)
def test___init___with_hash(self, __init__):
"""
Test the constructor, passing the optional profile_hash
"""
profile = consumer.UnitProfile('consumer_id', 'content_type', 'profile', 'profile_hash')
self.assertEqual(profile.consumer_id, 'consumer_id')
self.assertEqual(profile.content_type, 'content_type')
self.assertEqual(profile.profile, 'profile')
self.assertEqual(profile.profile_hash, 'profile_hash')
# The superclass __init__ should have been called
__init__.assert_called_once_with(profile)
def test_calculate_hash_different_profiles(self):
"""
Test that two different profiles have different hashes.
"""
profile_1 = consumer.UnitProfile('consumer_1', 'rpm', [
{'vendor': 'Fedora Project', 'name': 'perl-Filter', 'epoch': 0, 'version': '1.49',
'release': '1.fc18', 'arch': 'x86_64'},
{'vendor': 'Fedora Project', 'name': 'python-slip-dbus', 'epoch': 0, 'version': '0.4.0',
'release': '1.fc18', 'arch': 'noarch'},
{'vendor': 'Fedora Project', 'name': 'kmod', 'epoch': 0, 'version': '12',
'release': '3.fc18', 'arch': 'x86_64'}])
profile_2 = consumer.UnitProfile('consumer_2', 'rpm', [
{'vendor': 'Fedora Project', 'name': 'libestr', 'epoch': 0, 'version': '0.1.5',
'release': '1.fc18', 'arch': 'x86_64'},
{'vendor': 'Fedora Project', 'name': 'openssh-clients', 'epoch': 0, 'version': '6.1p1',
'release': '8.fc18', 'arch': 'x86_64'},
{'vendor': 'Fedora Project', 'name': 'procps-ng', 'epoch': 0, 'version': '3.3.3',
'release': '4.20120807git.fc18', 'arch': 'x86_64'}])
self.assertNotEqual(consumer.UnitProfile.calculate_hash(profile_1.profile),
consumer.UnitProfile.calculate_hash(profile_2.profile))
def test_calculate_hash_identical_profiles(self):
"""
Test that the hashes of two identical profiles are equal.
"""
profile_1 = consumer.UnitProfile('consumer_1', 'rpm', [
{'vendor': 'Fedora Project', 'name': 'perl-Filter', 'epoch': 0, 'version': '1.49',
'release': '1.fc18', 'arch': 'x86_64'},
{'vendor': 'Fedora Project', 'name': 'python-slip-dbus', 'epoch': 0, 'version': '0.4.0',
'release': '1.fc18', 'arch': 'noarch'},
{'vendor': 'Fedora Project', 'name': 'kmod', 'epoch': 0, 'version': '12',
'release': '3.fc18', 'arch': 'x86_64'}])
profile_2 = consumer.UnitProfile('consumer_2', 'rpm', [
{'vendor': 'Fedora Project', 'name': 'perl-Filter', 'epoch': 0, 'version': '1.49',
'release': '1.fc18', 'arch': 'x86_64'},
{'vendor': 'Fedora Project', 'name': 'python-slip-dbus', 'epoch': 0, 'version': '0.4.0',
'release': '1.fc18', 'arch': 'noarch'},
{'vendor': 'Fedora Project', 'name': 'kmod', 'epoch': 0, 'version': '12',
'release': '3.fc18', 'arch': 'x86_64'}])
self.assertEqual(consumer.UnitProfile.calculate_hash(profile_1.profile),
consumer.UnitProfile.calculate_hash(profile_2.profile))
def test_calculate_hash_non_ascii_identical_profiles(self):
"""
Test that the hashes of two identical profiles that include non-ASCII characters are equal.
"""
profile_1 = consumer.UnitProfile('consumer_1', 'rpm', [
{'vendor': 'Fedora Project', 'name': u'¿Donde esta el baño?', 'epoch': 0,
'version': '1.49', 'release': '1.fc18', 'arch': 'x86_64'}])
profile_2 = consumer.UnitProfile('consumer_2', 'rpm', [
{'vendor': 'Fedora Project', 'name': u'¿Donde esta el baño?', 'epoch': 0,
'version': '1.49', 'release': '1.fc18', 'arch': 'x86_64'}])
self.assertEqual(consumer.UnitProfile.calculate_hash(profile_1.profile),
consumer.UnitProfile.calculate_hash(profile_2.profile))
def test_calculate_hash_non_ascii_non_identical_profiles(self):
"""
Test that the hashes of two non-identical profiles that include non-ASCII characters are
not equal.
"""
profile_1 = consumer.UnitProfile('consumer_1', 'rpm', [
{'vendor': 'Fedora Project', 'name': u'¿Donde esta el baño?', 'epoch': 0,
'version': '1.49', 'release': '1.fc18', 'arch': 'x86_64'}])
# profile_2 has the codepoints for the two Spanish characters above, so this test ensures
# that this is considered to be different
profile_2 = consumer.UnitProfile('consumer_2', 'rpm', [
{'vendor': 'Fedora Project', 'name': '\u00bfDonde esta el ba\u00f1o?', 'epoch': 0,
'version': '1.49', 'release': '1.fc18', 'arch': 'x86_64'}])
self.assertNotEqual(consumer.UnitProfile.calculate_hash(profile_1.profile),
consumer.UnitProfile.calculate_hash(profile_2.profile))
def test_calculate_hash_reordered_profiles(self):
"""
Test that the hashes of two equivalent, but differently ordered profile lists are not the
same.
"""
profile_1 = consumer.UnitProfile('consumer_1', 'rpm', [
{'vendor': 'Fedora Project', 'name': 'perl-Filter', 'epoch': 0, 'version': '1.49',
'release': '1.fc18', 'arch': 'x86_64'},
{'vendor': 'Fedora Project', 'name': 'python-slip-dbus', 'epoch': 0, 'version': '0.4.0',
'release': '1.fc18', 'arch': 'noarch'},
{'vendor': 'Fedora Project', 'name': 'kmod', 'epoch': 0, 'version': '12',
'release': '3.fc18', 'arch': 'x86_64'}])
profile_2 = consumer.UnitProfile('consumer_2', 'rpm', [
{'vendor': 'Fedora Project', 'name': 'python-slip-dbus', 'epoch': 0, 'version': '0.4.0',
'release': '1.fc18', 'arch': 'noarch'},
{'vendor': 'Fedora Project', 'name': 'perl-Filter', 'epoch': 0, 'version': '1.49',
'release': '1.fc18', 'arch': 'x86_64'},
{'vendor': 'Fedora Project', 'name': 'kmod', 'epoch': 0, 'version': '12',
'release': '3.fc18', 'arch': 'x86_64'}])
self.assertNotEqual(consumer.UnitProfile.calculate_hash(profile_1.profile),
consumer.UnitProfile.calculate_hash(profile_2.profile))
def test_calculate_hash_similar_profiles(self):
"""
Test hashing "similar" profiles to make sure they get different results.
"""
profile_1 = consumer.UnitProfile('consumer_1', 'rpm', [
{'vendor': 'Fedora Project', 'name': 'perl-Filter', 'epoch': 0, 'version': '1.49',
'release': '1.fc18', 'arch': 'x86_64'},
{'vendor': 'Fedora Project', 'name': 'python-slip-dbus', 'epoch': 0, 'version': '0.4.0',
'release': '1.fc18', 'arch': 'noarch'},
{'vendor': 'Fedora Project', 'name': 'kmod', 'epoch': 0, 'version': '12',
'release': '3.fc18', 'arch': 'x86_64'}])
# profile_2 is almost the same as profile_1, but it has a different release on the
# python-slip-dbus package
profile_2 = consumer.UnitProfile('consumer_2', 'rpm', [
{'vendor': 'Fedora Project', 'name': 'perl-Filter', 'epoch': 0, 'version': '1.49',
'release': '1.fc18', 'arch': 'x86_64'},
{'vendor': 'Fedora Project', 'name': 'python-slip-dbus', 'epoch': 0, 'version': '0.4.0',
'release': '2.fc18', 'arch': 'noarch'},
{'vendor': 'Fedora Project', 'name': 'kmod', 'epoch': 0, 'version': '12',
'release': '3.fc18', 'arch': 'x86_64'}])
self.assertNotEqual(consumer.UnitProfile.calculate_hash(profile_1.profile),
consumer.UnitProfile.calculate_hash(profile_2.profile))
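# Illustrative only: the order-sensitivity asserted above is what falls out of
# hashing the serialized profile as-is. A minimal standalone sketch of such a
# digest (the real UnitProfile.calculate_hash may serialize differently):
import hashlib
import json

def _example_profile_hash(profile):
    """Hash a profile list; list order matters, dict key order does not."""
    serialized = json.dumps(profile, sort_keys=True, ensure_ascii=False)
    return hashlib.sha256(serialized.encode('utf-8')).hexdigest()

assert _example_profile_hash([{'name': 'a'}, {'name': 'b'}]) != \
    _example_profile_hash([{'name': 'b'}, {'name': 'a'}])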
|
gpl-2.0
|
tectronics/gpo2py
|
models/menu.py
|
7
|
6122
|
# -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
#########################################################################
## Customize your APP title, subtitle and menus here
#########################################################################
response.title = ' '.join(word.capitalize() for word in request.application.split('_'))
response.subtitle = T('customize me!')
## read more at http://dev.w3.org/html5/markup/meta.name.html
response.meta.author = 'Your Name <[email protected]>'
response.meta.description = 'a cool new app'
response.meta.keywords = 'web2py, python, framework'
response.meta.generator = 'Web2py Web Framework'
response.meta.copyright = 'Copyright 2011'
## your http://google.com/analytics id
response.google_analytics_id = None
#########################################################################
## this is the main application menu add/remove items as required
#########################################################################
response.menu = [
(T('Home'), False, URL('default','index'), [])
]
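## menu entries are (title, active-flag, url, sub-menu) tuples; for example,
## another top-level item pointing at the existing index action can be
## appended like so (illustrative only - swap in your own controller action):
response.menu += [
    (T('Start'), False, URL('default', 'index'), [])
]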
#########################################################################
## provide shortcuts for development. remove in production
#########################################################################
def _():
# shortcuts
app = request.application
ctr = request.controller
# useful links to internal and external resources
response.menu+=[
(SPAN('web2py',_style='color:yellow'),False, None, [
(T('My Sites'),False,URL('admin','default','site')),
(T('This App'),False,URL('admin','default','design/%s' % app), [
(T('Controller'),False,
URL('admin','default','edit/%s/controllers/%s.py' % (app,ctr))),
(T('View'),False,
URL('admin','default','edit/%s/views/%s' % (app,response.view))),
(T('Layout'),False,
URL('admin','default','edit/%s/views/layout.html' % app)),
(T('Stylesheet'),False,
URL('admin','default','edit/%s/static/css/web2py.css' % app)),
(T('DB Model'),False,
URL('admin','default','edit/%s/models/db.py' % app)),
(T('Menu Model'),False,
URL('admin','default','edit/%s/models/menu.py' % app)),
(T('Database'),False, URL(app,'appadmin','index')),
(T('Errors'),False, URL('admin','default','errors/' + app)),
(T('About'),False, URL('admin','default','about/' + app)),
]),
('web2py.com',False,'http://www.web2py.com', [
(T('Download'),False,'http://www.web2py.com/examples/default/download'),
(T('Support'),False,'http://www.web2py.com/examples/default/support'),
(T('Demo'),False,'http://web2py.com/demo_admin'),
(T('Quick Examples'),False,'http://web2py.com/examples/default/examples'),
(T('FAQ'),False,'http://web2py.com/AlterEgo'),
(T('Videos'),False,'http://www.web2py.com/examples/default/videos/'),
(T('Free Applications'),False,'http://web2py.com/appliances'),
(T('Plugins'),False,'http://web2py.com/plugins'),
(T('Layouts'),False,'http://web2py.com/layouts'),
(T('Recipes'),False,'http://web2pyslices.com/'),
(T('Semantic'),False,'http://web2py.com/semantic'),
]),
(T('Documentation'),False,'http://www.web2py.com/book', [
(T('Preface'),False,'http://www.web2py.com/book/default/chapter/00'),
(T('Introduction'),False,'http://www.web2py.com/book/default/chapter/01'),
(T('Python'),False,'http://www.web2py.com/book/default/chapter/02'),
(T('Overview'),False,'http://www.web2py.com/book/default/chapter/03'),
(T('The Core'),False,'http://www.web2py.com/book/default/chapter/04'),
(T('The Views'),False,'http://www.web2py.com/book/default/chapter/05'),
(T('Database'),False,'http://www.web2py.com/book/default/chapter/06'),
(T('Forms and Validators'),False,'http://www.web2py.com/book/default/chapter/07'),
(T('Email and SMS'),False,'http://www.web2py.com/book/default/chapter/08'),
(T('Access Control'),False,'http://www.web2py.com/book/default/chapter/09'),
(T('Services'),False,'http://www.web2py.com/book/default/chapter/10'),
(T('Ajax Recipes'),False,'http://www.web2py.com/book/default/chapter/11'),
(T('Components and Plugins'),False,'http://www.web2py.com/book/default/chapter/12'),
(T('Deployment Recipes'),False,'http://www.web2py.com/book/default/chapter/13'),
(T('Other Recipes'),False,'http://www.web2py.com/book/default/chapter/14'),
(T('Buy this book'),False,'http://stores.lulu.com/web2py'),
]),
(T('Community'),False, None, [
(T('Groups'),False,'http://www.web2py.com/examples/default/usergroups'),
(T('Twitter'),False,'http://twitter.com/web2py'),
(T('Live Chat'),False,'http://webchat.freenode.net/?channels=web2py'),
]),
(T('Plugins'),False,None, [
('plugin_wiki',False,'http://web2py.com/examples/default/download'),
(T('Other Plugins'),False,'http://web2py.com/plugins'),
(T('Layout Plugins'),False,'http://web2py.com/layouts'),
])
]
)]
_()
|
agpl-3.0
|
naototty/pyflag
|
src/plugins/Themes/Standard.py
|
7
|
6877
|
""" These are the standard themes that come with Pyflag. """
import pyflag.conf
config=pyflag.conf.ConfObject()
import pyflag.FlagFramework as FlagFramework
import pyflag.Registry as Registry
import pyflag.Theme as Theme
class BlueTheme(Theme.BasicTheme):
""" This class encapsulates the theme elements. The results from this class really depend on the UI used - for example the HTMLUI will expect HTML to come back from here. """
hilight_bar = '''<table cellspacing=0 cellpadding=0 width="100%%" background="/images/topfill.jpg" border=0> <tbody>
<tr><td align=left>%s</td>
<td height=25 align=center>%s</td>
<td height=25>
<div align="right"><font face="Arial, Helvetica, sans-serif" size="2"><font face="Geneva, Arial, Helvetica, san-serif"><b><font face="Georgia, Times New Roman, Times, serif"><i><font face="Verdana, Arial, Helvetica, sans-serif">F</font></i></font></b><font face="Verdana, Arial, Helvetica, sans-serif"><i>orensics
and <b>L</b>og <b>A</b>nalysis <b>G</b>UI</i></font></font></font></div>
</td>
</tr>
</tbody>
</table>'''
def navbar(self,query=None,next=None,previous=None,pageno=None):
""" Returns the HTML for the navigation bar. """
        if query is None: query = FlagFramework.query_type(())
if not query.has_key('family'):
query['family']=''
if next:
#Make a link
q=query.clone()
q.FillQueryTarget(next)
next = '<a href="f?%s"><img height=25 src="/images/forward.png" border="0"></a>' % (str(q))
else:
next = '<img src="/images/arrow_right_grey.gif" height=25 border="0">'
if previous<0:
previous = '<img src="/images/arrow_left_grey.gif" height=25 border="0">'
else:
q=query.clone()
q.FillQueryTarget(previous)
previous = '<a href="f?%s"><img height=25 src="/images/back.png" border="0"></a>' % (str(q))
        bar = {'family': Theme.propegate(query, FlagFramework.query_type()),
               'back': previous,
               'case': query['case'],
               'pageno': pageno,
               'next': next,
               'reset': str(query) + '&reset=1',
               'stop': str(query) + '&stop=1'}
toolbar = '''<table><tr>
<td valign="bottom"><a href="%(family)s"><img height=25 src="/images/home_grey.png" border="0"></a></td><td valign="bottom">%(back)s</td><td>%(case)s - page %(pageno)s</td><td valign="bottom">%(next)s</td> <td valign="bottom">
<td valign="bottom"><a href="flag?%(reset)s"><img height=25 src="/images/reset_grey.png" border="0"></a></td></tr></table>''' % bar
return toolbar
    def render(self, query=FlagFramework.query_type(()), meta='', data='',
               next=None, previous=None, pageno=None, ui=None,
               title="FLAG - Forensic Log Analysis GUI. %s" % config.VERSION):
if not ui.toolbar_ui:
toolbar_str=' '
else:
toolbar_str=ui.toolbar_ui.__str__()
        toolbar = self.navbar(query=query, next=next, previous=previous, pageno=pageno)
return " ".join((self.header % title,self.banner,meta,'''</td><td width=10><img src="/images/logo.png"></td></tr></tbody></table> </tr></table>\n''',self.hilight_bar % (toolbar_str,toolbar), data ,self.hilight_bar % (toolbar_str,toolbar),self.footer))
def menu(self,flag,query, result):
""" Draws the menu for the current family.
@arg flag: A Flag object containing the appropriate dispatcher
@arg query: The user query
"""
family = query['family']
module_list = Registry.REPORTS.get_families()
Theme.order_families(module_list)
result=flag.ui()
result.result='''<table cellspacing=0 cellpadding=0 width="100%" border=0
hspace="0" vspace="0" height="300">
<tbody>
<tr>
<td width=5><img height=22 alt="table corner"
src="images/metbarleft.gif"
width=5></td>
<td width="918">
<table cellspacing=0 cellpadding=0 width="100%"
background="images/metbarfill.gif"
border=0 hspace="0" vspace="0">
<tbody> <tr>
<td height=22>
<div align="left"><font
face="Verdana, Arial, Helvetica, sans-serif"
size=2></font><font
face="Verdana, Arial, Helvetica, sans-serif"><b><font size="2">Main
Menu</font></b></font></div>
</td>
</tr> </tbody>
</table>
</td>
<td width=10><img height=22 alt="table corner"
src="images/metbarend.gif"
width=8></td>
</tr> <tr>
<td width=5
background="images/sidebarleft.gif"> </td>
<td valign=top width="918">
<p><font size="+1" face="Arial, Helvetica, sans-serif">'''
for k in module_list:
link = flag.ui()
link.link(k,family=k)
result.result+=''' %s<br />\n''' % (link,)
if family==k:
report_list = Registry.REPORTS.family[family]
for r in report_list:
if r.hidden: continue
link = flag.ui()
link.link(r.name,target=Theme.propegate(query,FlagFramework.query_type()),tooltip=r.__doc__,report=r.name)
result.result+=" <strong><big>·</big></strong> %s <br />\n" % link
result.result+="<br/>"
result.result+=''' </font></p>
</td>
<td width=10
background="images/sidebarrgt.gif"> </td>
</tr>
<tr>
<td width=5><img height=22 alt="table corner"
src="images/greenbarleft.gif"
width=5></td>
<td width="918">
<table cellspacing=0 cellpadding=0 width="100%"
background="images/greenbarfill.gif"
border=0>
<tbody>
<tr>
<td height=22> </td>
</tr>
</tbody>
</table>
</td>
<td width=10><img height=22 alt="table corner" src="images/greenbarrgt.gif" width=8></td>
</tr>
</tbody>
</table>
'''
return result
default=None
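# Illustrative only: how this theme's navigation bar HTML could be generated
# directly, assuming the surrounding pyflag runtime (pyflag normally drives
# this itself via render()). The case name below is made up.
def _example_navbar():
    q = FlagFramework.query_type(())
    q['case'] = 'demo_case'
    return BlueTheme().navbar(query=q, next=2, previous=0, pageno=1)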
|
gpl-2.0
|
matrix-org/synapse
|
synapse/rest/client/v1/login.py
|
1
|
18517
|
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import re
from typing import TYPE_CHECKING, Awaitable, Callable, Dict, List, Optional
from synapse.api.errors import Codes, LoginError, SynapseError
from synapse.api.ratelimiting import Ratelimiter
from synapse.api.urls import CLIENT_API_PREFIX
from synapse.appservice import ApplicationService
from synapse.handlers.sso import SsoIdentityProvider
from synapse.http import get_request_uri
from synapse.http.server import HttpServer, finish_request
from synapse.http.servlet import (
RestServlet,
parse_bytes_from_args,
parse_json_object_from_request,
parse_string,
)
from synapse.http.site import SynapseRequest
from synapse.rest.client.v2_alpha._base import client_patterns
from synapse.rest.well_known import WellKnownBuilder
from synapse.types import JsonDict, UserID
if TYPE_CHECKING:
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
class LoginRestServlet(RestServlet):
PATTERNS = client_patterns("/login$", v1=True)
CAS_TYPE = "m.login.cas"
SSO_TYPE = "m.login.sso"
TOKEN_TYPE = "m.login.token"
JWT_TYPE = "org.matrix.login.jwt"
JWT_TYPE_DEPRECATED = "m.login.jwt"
APPSERVICE_TYPE = "uk.half-shot.msc2778.login.application_service"
def __init__(self, hs: "HomeServer"):
super().__init__()
self.hs = hs
# JWT configuration variables.
self.jwt_enabled = hs.config.jwt_enabled
self.jwt_secret = hs.config.jwt_secret
self.jwt_algorithm = hs.config.jwt_algorithm
self.jwt_issuer = hs.config.jwt_issuer
self.jwt_audiences = hs.config.jwt_audiences
# SSO configuration.
self.saml2_enabled = hs.config.saml2_enabled
self.cas_enabled = hs.config.cas_enabled
self.oidc_enabled = hs.config.oidc_enabled
self._msc2858_enabled = hs.config.experimental.msc2858_enabled
self.auth = hs.get_auth()
self.auth_handler = self.hs.get_auth_handler()
self.registration_handler = hs.get_registration_handler()
self._sso_handler = hs.get_sso_handler()
self._well_known_builder = WellKnownBuilder(hs)
self._address_ratelimiter = Ratelimiter(
store=hs.get_datastore(),
clock=hs.get_clock(),
rate_hz=self.hs.config.rc_login_address.per_second,
burst_count=self.hs.config.rc_login_address.burst_count,
)
self._account_ratelimiter = Ratelimiter(
store=hs.get_datastore(),
clock=hs.get_clock(),
rate_hz=self.hs.config.rc_login_account.per_second,
burst_count=self.hs.config.rc_login_account.burst_count,
)
def on_GET(self, request: SynapseRequest):
flows = []
if self.jwt_enabled:
flows.append({"type": LoginRestServlet.JWT_TYPE})
flows.append({"type": LoginRestServlet.JWT_TYPE_DEPRECATED})
if self.cas_enabled:
# we advertise CAS for backwards compat, though MSC1721 renamed it
# to SSO.
flows.append({"type": LoginRestServlet.CAS_TYPE})
if self.cas_enabled or self.saml2_enabled or self.oidc_enabled:
sso_flow = {
"type": LoginRestServlet.SSO_TYPE,
"identity_providers": [
_get_auth_flow_dict_for_idp(
idp,
)
for idp in self._sso_handler.get_identity_providers().values()
],
} # type: JsonDict
if self._msc2858_enabled:
# backwards-compatibility support for clients which don't
# support the stable API yet
sso_flow["org.matrix.msc2858.identity_providers"] = [
_get_auth_flow_dict_for_idp(idp, use_unstable_brands=True)
for idp in self._sso_handler.get_identity_providers().values()
]
flows.append(sso_flow)
# While it's valid for us to advertise this login type generally,
# synapse currently only gives out these tokens as part of the
# SSO login flow.
# Generally we don't want to advertise login flows that clients
# don't know how to implement, since they (currently) will always
# fall back to the fallback API if they don't understand one of the
# login flow types returned.
flows.append({"type": LoginRestServlet.TOKEN_TYPE})
flows.extend(
({"type": t} for t in self.auth_handler.get_supported_login_types())
)
flows.append({"type": LoginRestServlet.APPSERVICE_TYPE})
return 200, {"flows": flows}
async def on_POST(self, request: SynapseRequest):
login_submission = parse_json_object_from_request(request)
try:
if login_submission["type"] == LoginRestServlet.APPSERVICE_TYPE:
appservice = self.auth.get_appservice_by_req(request)
if appservice.is_rate_limited():
await self._address_ratelimiter.ratelimit(
None, request.getClientIP()
)
result = await self._do_appservice_login(login_submission, appservice)
elif self.jwt_enabled and (
login_submission["type"] == LoginRestServlet.JWT_TYPE
or login_submission["type"] == LoginRestServlet.JWT_TYPE_DEPRECATED
):
await self._address_ratelimiter.ratelimit(None, request.getClientIP())
result = await self._do_jwt_login(login_submission)
elif login_submission["type"] == LoginRestServlet.TOKEN_TYPE:
await self._address_ratelimiter.ratelimit(None, request.getClientIP())
result = await self._do_token_login(login_submission)
else:
await self._address_ratelimiter.ratelimit(None, request.getClientIP())
result = await self._do_other_login(login_submission)
except KeyError:
raise SynapseError(400, "Missing JSON keys.")
well_known_data = self._well_known_builder.get_well_known()
if well_known_data:
result["well_known"] = well_known_data
return 200, result
async def _do_appservice_login(
self, login_submission: JsonDict, appservice: ApplicationService
):
identifier = login_submission.get("identifier")
logger.info("Got appservice login request with identifier: %r", identifier)
if not isinstance(identifier, dict):
raise SynapseError(
400, "Invalid identifier in login submission", Codes.INVALID_PARAM
)
# this login flow only supports identifiers of type "m.id.user".
if identifier.get("type") != "m.id.user":
raise SynapseError(
400, "Unknown login identifier type", Codes.INVALID_PARAM
)
user = identifier.get("user")
if not isinstance(user, str):
raise SynapseError(400, "Invalid user in identifier", Codes.INVALID_PARAM)
if user.startswith("@"):
qualified_user_id = user
else:
qualified_user_id = UserID(user, self.hs.hostname).to_string()
if not appservice.is_interested_in_user(qualified_user_id):
raise LoginError(403, "Invalid access_token", errcode=Codes.FORBIDDEN)
return await self._complete_login(
qualified_user_id, login_submission, ratelimit=appservice.is_rate_limited()
)
async def _do_other_login(self, login_submission: JsonDict) -> Dict[str, str]:
"""Handle non-token/saml/jwt logins
Args:
login_submission:
Returns:
HTTP response
"""
# Log the request we got, but only certain fields to minimise the chance of
# logging someone's password (even if they accidentally put it in the wrong
# field)
logger.info(
"Got login request with identifier: %r, medium: %r, address: %r, user: %r",
login_submission.get("identifier"),
login_submission.get("medium"),
login_submission.get("address"),
login_submission.get("user"),
)
canonical_user_id, callback = await self.auth_handler.validate_login(
login_submission, ratelimit=True
)
result = await self._complete_login(
canonical_user_id, login_submission, callback
)
return result
async def _complete_login(
self,
user_id: str,
login_submission: JsonDict,
callback: Optional[Callable[[Dict[str, str]], Awaitable[None]]] = None,
create_non_existent_users: bool = False,
ratelimit: bool = True,
auth_provider_id: Optional[str] = None,
) -> Dict[str, str]:
"""Called when we've successfully authed the user and now need to
actually login them in (e.g. create devices). This gets called on
all successful logins.
Applies the ratelimiting for successful login attempts against an
account.
Args:
user_id: ID of the user to register.
login_submission: Dictionary of login information.
callback: Callback function to run after login.
create_non_existent_users: Whether to create the user if they don't
exist. Defaults to False.
ratelimit: Whether to ratelimit the login request.
auth_provider_id: The SSO IdP the user used, if any (just used for the
prometheus metrics).
Returns:
result: Dictionary of account information after successful login.
"""
# Before we actually log them in we check if they've already logged in
# too often. This happens here rather than before as we don't
# necessarily know the user before now.
if ratelimit:
await self._account_ratelimiter.ratelimit(None, user_id.lower())
if create_non_existent_users:
canonical_uid = await self.auth_handler.check_user_exists(user_id)
if not canonical_uid:
canonical_uid = await self.registration_handler.register_user(
localpart=UserID.from_string(user_id).localpart
)
user_id = canonical_uid
device_id = login_submission.get("device_id")
initial_display_name = login_submission.get("initial_device_display_name")
device_id, access_token = await self.registration_handler.register_device(
user_id, device_id, initial_display_name, auth_provider_id=auth_provider_id
)
result = {
"user_id": user_id,
"access_token": access_token,
"home_server": self.hs.hostname,
"device_id": device_id,
}
if callback is not None:
await callback(result)
return result
async def _do_token_login(self, login_submission: JsonDict) -> Dict[str, str]:
"""
Handle the final stage of SSO login.
Args:
login_submission: The JSON request body.
Returns:
The body of the JSON response.
"""
token = login_submission["token"]
auth_handler = self.auth_handler
res = await auth_handler.validate_short_term_login_token(token)
return await self._complete_login(
res.user_id,
login_submission,
self.auth_handler._sso_login_callback,
auth_provider_id=res.auth_provider_id,
)
async def _do_jwt_login(self, login_submission: JsonDict) -> Dict[str, str]:
token = login_submission.get("token", None)
if token is None:
raise LoginError(
403, "Token field for JWT is missing", errcode=Codes.FORBIDDEN
)
import jwt
try:
payload = jwt.decode(
token,
self.jwt_secret,
algorithms=[self.jwt_algorithm],
issuer=self.jwt_issuer,
audience=self.jwt_audiences,
)
except jwt.PyJWTError as e:
# A JWT error occurred, return some info back to the client.
raise LoginError(
403,
"JWT validation failed: %s" % (str(e),),
errcode=Codes.FORBIDDEN,
)
user = payload.get("sub", None)
if user is None:
raise LoginError(403, "Invalid JWT", errcode=Codes.FORBIDDEN)
user_id = UserID(user, self.hs.hostname).to_string()
result = await self._complete_login(
user_id, login_submission, create_non_existent_users=True
)
return result
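def _example_mint_jwt(secret: str) -> str:
    """Illustrative only (not part of Synapse): mint a token that the JWT
    login flow above would accept, assuming a shared-secret HS256 setup.
    The "sub" claim carries the localpart that becomes the Matrix user ID."""
    import jwt

    return jwt.encode({"sub": "alice"}, secret, algorithm="HS256")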
def _get_auth_flow_dict_for_idp(
idp: SsoIdentityProvider, use_unstable_brands: bool = False
) -> JsonDict:
"""Return an entry for the login flow dict
Returns an entry suitable for inclusion in "identity_providers" in the
response to GET /_matrix/client/r0/login
Args:
idp: the identity provider to describe
use_unstable_brands: whether we should use brand identifiers suitable
for the unstable API
"""
e = {"id": idp.idp_id, "name": idp.idp_name} # type: JsonDict
if idp.idp_icon:
e["icon"] = idp.idp_icon
if idp.idp_brand:
e["brand"] = idp.idp_brand
# use the stable brand identifier if the unstable identifier isn't defined.
if use_unstable_brands and idp.unstable_idp_brand:
e["brand"] = idp.unstable_idp_brand
return e
class SsoRedirectServlet(RestServlet):
PATTERNS = list(client_patterns("/login/(cas|sso)/redirect$", v1=True)) + [
re.compile(
"^"
+ CLIENT_API_PREFIX
+ "/r0/login/sso/redirect/(?P<idp_id>[A-Za-z0-9_.~-]+)$"
)
]
def __init__(self, hs: "HomeServer"):
# make sure that the relevant handlers are instantiated, so that they
# register themselves with the main SSOHandler.
if hs.config.cas_enabled:
hs.get_cas_handler()
if hs.config.saml2_enabled:
hs.get_saml_handler()
if hs.config.oidc_enabled:
hs.get_oidc_handler()
self._sso_handler = hs.get_sso_handler()
self._msc2858_enabled = hs.config.experimental.msc2858_enabled
self._public_baseurl = hs.config.public_baseurl
def register(self, http_server: HttpServer) -> None:
super().register(http_server)
if self._msc2858_enabled:
# expose additional endpoint for MSC2858 support: backwards-compat support
# for clients which don't yet support the stable endpoints.
http_server.register_paths(
"GET",
client_patterns(
"/org.matrix.msc2858/login/sso/redirect/(?P<idp_id>[A-Za-z0-9_.~-]+)$",
releases=(),
unstable=True,
),
self.on_GET,
self.__class__.__name__,
)
async def on_GET(
self, request: SynapseRequest, idp_id: Optional[str] = None
) -> None:
if not self._public_baseurl:
raise SynapseError(400, "SSO requires a valid public_baseurl")
# if this isn't the expected hostname, redirect to the right one, so that we
# get our cookies back.
requested_uri = get_request_uri(request)
baseurl_bytes = self._public_baseurl.encode("utf-8")
if not requested_uri.startswith(baseurl_bytes):
# swap out the incorrect base URL for the right one.
#
# The idea here is to redirect from
# https://foo.bar/whatever/_matrix/...
# to
# https://public.baseurl/_matrix/...
#
i = requested_uri.index(b"/_matrix")
new_uri = baseurl_bytes[:-1] + requested_uri[i:]
logger.info(
"Requested URI %s is not canonical: redirecting to %s",
requested_uri.decode("utf-8", errors="replace"),
new_uri.decode("utf-8", errors="replace"),
)
request.redirect(new_uri)
finish_request(request)
return
args = request.args # type: Dict[bytes, List[bytes]] # type: ignore
client_redirect_url = parse_bytes_from_args(args, "redirectUrl", required=True)
sso_url = await self._sso_handler.handle_redirect_request(
request,
client_redirect_url,
idp_id,
)
logger.info("Redirecting to %s", sso_url)
request.redirect(sso_url)
finish_request(request)
class CasTicketServlet(RestServlet):
PATTERNS = client_patterns("/login/cas/ticket", v1=True)
def __init__(self, hs):
super().__init__()
self._cas_handler = hs.get_cas_handler()
async def on_GET(self, request: SynapseRequest) -> None:
client_redirect_url = parse_string(request, "redirectUrl")
ticket = parse_string(request, "ticket", required=True)
# Maybe get a session ID (if this ticket is from user interactive
# authentication).
session = parse_string(request, "session")
# Either client_redirect_url or session must be provided.
if not client_redirect_url and not session:
message = "Missing string query parameter redirectUrl or session"
raise SynapseError(400, message, errcode=Codes.MISSING_PARAM)
await self._cas_handler.handle_ticket(
request, ticket, client_redirect_url, session
)
def register_servlets(hs, http_server):
LoginRestServlet(hs).register(http_server)
SsoRedirectServlet(hs).register(http_server)
if hs.config.cas_enabled:
CasTicketServlet(hs).register(http_server)
|
apache-2.0
|
Orav/kbengine
|
kbe/src/lib/python/Doc/tools/sphinxext/pyspecific.py
|
1
|
13744
|
# -*- coding: utf-8 -*-
"""
pyspecific.py
~~~~~~~~~~~~~
Sphinx extension with Python doc-specific markup.
:copyright: 2008-2014 by Georg Brandl.
:license: Python license.
"""
ISSUE_URI = 'http://bugs.python.org/issue%s'
SOURCE_URI = 'https://hg.python.org/cpython/file/3.4/%s'
from docutils import nodes, utils
import sphinx
from sphinx.util.nodes import split_explicit_title
from sphinx.util.compat import Directive
from sphinx.writers.html import HTMLTranslator
from sphinx.writers.latex import LaTeXTranslator
from sphinx.locale import versionlabels
# monkey-patch reST parser to disable alphabetic and roman enumerated lists
from docutils.parsers.rst.states import Body
Body.enum.converters['loweralpha'] = \
Body.enum.converters['upperalpha'] = \
Body.enum.converters['lowerroman'] = \
Body.enum.converters['upperroman'] = lambda x: None
SPHINX11 = sphinx.__version__[:3] < '1.2'
if SPHINX11:
# monkey-patch HTML translator to give versionmodified paragraphs a class
def new_visit_versionmodified(self, node):
self.body.append(self.starttag(node, 'p', CLASS=node['type']))
text = versionlabels[node['type']] % node['version']
if len(node):
text += ':'
else:
text += '.'
self.body.append('<span class="versionmodified">%s</span> ' % text)
HTMLTranslator.visit_versionmodified = new_visit_versionmodified
# monkey-patch HTML and LaTeX translators to keep doctest blocks in the
# doctest docs themselves
orig_visit_literal_block = HTMLTranslator.visit_literal_block
def new_visit_literal_block(self, node):
meta = self.builder.env.metadata[self.builder.current_docname]
old_trim_doctest_flags = self.highlighter.trim_doctest_flags
if 'keepdoctest' in meta:
self.highlighter.trim_doctest_flags = False
try:
orig_visit_literal_block(self, node)
finally:
self.highlighter.trim_doctest_flags = old_trim_doctest_flags
HTMLTranslator.visit_literal_block = new_visit_literal_block
orig_depart_literal_block = LaTeXTranslator.depart_literal_block
def new_depart_literal_block(self, node):
meta = self.builder.env.metadata[self.curfilestack[-1]]
old_trim_doctest_flags = self.highlighter.trim_doctest_flags
if 'keepdoctest' in meta:
self.highlighter.trim_doctest_flags = False
try:
orig_depart_literal_block(self, node)
finally:
self.highlighter.trim_doctest_flags = old_trim_doctest_flags
LaTeXTranslator.depart_literal_block = new_depart_literal_block
# Support for marking up and linking to bugs.python.org issues
def issue_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
issue = utils.unescape(text)
text = 'issue ' + issue
refnode = nodes.reference(text, text, refuri=ISSUE_URI % issue)
return [refnode], []
# Support for linking to Python source files easily
def source_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
has_t, title, target = split_explicit_title(text)
title = utils.unescape(title)
target = utils.unescape(target)
refnode = nodes.reference(title, title, refuri=SOURCE_URI % target)
return [refnode], []
# Support for marking up implementation details
class ImplementationDetail(Directive):
has_content = True
required_arguments = 0
optional_arguments = 1
final_argument_whitespace = True
def run(self):
pnode = nodes.compound(classes=['impl-detail'])
content = self.content
add_text = nodes.strong('CPython implementation detail:',
'CPython implementation detail:')
if self.arguments:
n, m = self.state.inline_text(self.arguments[0], self.lineno)
pnode.append(nodes.paragraph('', '', *(n + m)))
self.state.nested_parse(content, self.content_offset, pnode)
if pnode.children and isinstance(pnode[0], nodes.paragraph):
pnode[0].insert(0, add_text)
pnode[0].insert(1, nodes.Text(' '))
else:
pnode.insert(0, nodes.paragraph('', '', add_text))
return [pnode]
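# Illustrative reST usage of the directive above (example is ours):
#
#   .. impl-detail::
#
#      CPython uses reference counting for memory management.
#
# The body is rendered inside an 'impl-detail' compound node, prefixed with a
# strong "CPython implementation detail:" label.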
# Support for documenting decorators
from sphinx import addnodes
from sphinx.domains.python import PyModulelevel, PyClassmember
class PyDecoratorMixin(object):
def handle_signature(self, sig, signode):
ret = super(PyDecoratorMixin, self).handle_signature(sig, signode)
signode.insert(0, addnodes.desc_addname('@', '@'))
return ret
def needs_arglist(self):
return False
class PyDecoratorFunction(PyDecoratorMixin, PyModulelevel):
def run(self):
# a decorator function is a function after all
self.name = 'py:function'
return PyModulelevel.run(self)
class PyDecoratorMethod(PyDecoratorMixin, PyClassmember):
def run(self):
self.name = 'py:method'
return PyClassmember.run(self)
# Support for documenting version of removal in deprecations
class DeprecatedRemoved(Directive):
has_content = True
required_arguments = 2
optional_arguments = 1
final_argument_whitespace = True
option_spec = {}
_label = 'Deprecated since version %s, will be removed in version %s'
def run(self):
node = addnodes.versionmodified()
node.document = self.state.document
node['type'] = 'deprecated-removed'
version = (self.arguments[0], self.arguments[1])
node['version'] = version
text = self._label % version
if len(self.arguments) == 3:
inodes, messages = self.state.inline_text(self.arguments[2],
self.lineno+1)
para = nodes.paragraph(self.arguments[2], '', *inodes)
node.append(para)
else:
messages = []
if self.content:
self.state.nested_parse(self.content, self.content_offset, node)
if isinstance(node[0], nodes.paragraph) and node[0].rawsource:
content = nodes.inline(node[0].rawsource, translatable=True)
content.source = node[0].source
content.line = node[0].line
content += node[0].children
node[0].replace_self(nodes.paragraph('', '', content))
if not SPHINX11:
node[0].insert(0, nodes.inline('', '%s: ' % text,
classes=['versionmodified']))
elif not SPHINX11:
para = nodes.paragraph('', '',
nodes.inline('', '%s.' % text, classes=['versionmodified']))
if len(node):
node.insert(0, para)
else:
node.append(para)
env = self.state.document.settings.env
env.note_versionchange('deprecated', version[0], node, self.lineno)
return [node] + messages
# for Sphinx < 1.2
versionlabels['deprecated-removed'] = DeprecatedRemoved._label
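# Illustrative reST usage of the directive above (version numbers hypothetical):
#
#   .. deprecated-removed:: 3.4 3.6
#      Use :func:`something_else` instead.
#
# which renders as "Deprecated since version 3.4, will be removed in version 3.6"
# followed by the directive's content.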
# Support for including Misc/NEWS
import re
import codecs
issue_re = re.compile('([Ii])ssue #([0-9]+)')
whatsnew_re = re.compile(r"(?im)^what's new in (.*?)\??$")
class MiscNews(Directive):
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = False
option_spec = {}
def run(self):
fname = self.arguments[0]
source = self.state_machine.input_lines.source(
self.lineno - self.state_machine.input_offset - 1)
source_dir = path.dirname(path.abspath(source))
fpath = path.join(source_dir, fname)
self.state.document.settings.record_dependencies.add(fpath)
try:
fp = codecs.open(fpath, encoding='utf-8')
try:
content = fp.read()
finally:
fp.close()
except Exception:
text = 'The NEWS file is not available.'
node = nodes.strong(text, text)
return [node]
content = issue_re.sub(r'`\1ssue #\2 <http://bugs.python.org/\2>`__',
content)
content = whatsnew_re.sub(r'\1', content)
# remove first 3 lines as they are the main heading
lines = ['.. default-role:: obj', ''] + content.splitlines()[3:]
self.state_machine.insert_input(lines, fname)
return []
# Support for building "topic help" for pydoc
pydoc_topic_labels = [
'assert', 'assignment', 'atom-identifiers', 'atom-literals',
'attribute-access', 'attribute-references', 'augassign', 'binary',
'bitwise', 'bltin-code-objects', 'bltin-ellipsis-object',
'bltin-null-object', 'bltin-type-objects', 'booleans',
'break', 'callable-types', 'calls', 'class', 'comparisons', 'compound',
'context-managers', 'continue', 'conversions', 'customization', 'debugger',
'del', 'dict', 'dynamic-features', 'else', 'exceptions', 'execmodel',
'exprlists', 'floating', 'for', 'formatstrings', 'function', 'global',
'id-classes', 'identifiers', 'if', 'imaginary', 'import', 'in', 'integers',
'lambda', 'lists', 'naming', 'nonlocal', 'numbers', 'numeric-types',
'objects', 'operator-summary', 'pass', 'power', 'raise', 'return',
'sequence-types', 'shifting', 'slicings', 'specialattrs', 'specialnames',
'string-methods', 'strings', 'subscriptions', 'truth', 'try', 'types',
'typesfunctions', 'typesmapping', 'typesmethods', 'typesmodules',
'typesseq', 'typesseq-mutable', 'unary', 'while', 'with', 'yield'
]
from os import path
from time import asctime
from pprint import pformat
from docutils.io import StringOutput
from docutils.utils import new_document
from sphinx.builders import Builder
from sphinx.writers.text import TextWriter
class PydocTopicsBuilder(Builder):
name = 'pydoc-topics'
def init(self):
self.topics = {}
def get_outdated_docs(self):
return 'all pydoc topics'
def get_target_uri(self, docname, typ=None):
return '' # no URIs
def write(self, *ignored):
writer = TextWriter(self)
for label in self.status_iterator(pydoc_topic_labels,
'building topics... ',
length=len(pydoc_topic_labels)):
if label not in self.env.domaindata['std']['labels']:
self.warn('label %r not in documentation' % label)
continue
docname, labelid, sectname = self.env.domaindata['std']['labels'][label]
doctree = self.env.get_and_resolve_doctree(docname, self)
document = new_document('<section node>')
document.append(doctree.ids[labelid])
destination = StringOutput(encoding='utf-8')
writer.write(document, destination)
self.topics[label] = writer.output.encode('utf-8')
def finish(self):
f = open(path.join(self.outdir, 'topics.py'), 'w')
try:
f.write('# -*- coding: utf-8 -*-\n')
f.write('# Autogenerated by Sphinx on %s\n' % asctime())
f.write('topics = ' + pformat(self.topics) + '\n')
finally:
f.close()
# Support for checking for suspicious markup
import suspicious
# Support for documenting Opcodes
import re
opcode_sig_re = re.compile(r'(\w+(?:\+\d)?)(?:\s*\((.*)\))?')
def parse_opcode_signature(env, sig, signode):
"""Transform an opcode signature into RST nodes."""
m = opcode_sig_re.match(sig)
if m is None:
raise ValueError
opname, arglist = m.groups()
signode += addnodes.desc_name(opname, opname)
if arglist is not None:
paramlist = addnodes.desc_parameterlist()
signode += paramlist
paramlist += addnodes.desc_parameter(arglist, arglist)
return opname.strip()
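# For example (illustrative), "BINARY_ADD" and "LOAD_FAST (var_num)" both match
# opcode_sig_re: the first yields no parameter list, while the second adds
# "var_num" as a desc_parameter. The optional "+\d" suffix accommodates names
# such as "SLICE+1".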
# Support for documenting pdb commands
pdbcmd_sig_re = re.compile(r'([a-z()!]+)\s*(.*)')
# later...
#pdbargs_tokens_re = re.compile(r'''[a-zA-Z]+ | # identifiers
# [.,:]+ | # punctuation
# [\[\]()] | # parens
# \s+ # whitespace
# ''', re.X)
def parse_pdb_command(env, sig, signode):
"""Transform a pdb command signature into RST nodes."""
m = pdbcmd_sig_re.match(sig)
if m is None:
raise ValueError
name, args = m.groups()
fullname = name.replace('(', '').replace(')', '')
signode += addnodes.desc_name(name, name)
if args:
signode += addnodes.desc_addname(' '+args, ' '+args)
return fullname
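# For example (illustrative), the signature "b(reak) [[filename:]lineno]" yields
# the display name "b(reak)", the argument string as a desc_addname, and the
# cross-reference target "break" (parentheses stripped).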
def setup(app):
app.add_role('issue', issue_role)
app.add_role('source', source_role)
app.add_directive('impl-detail', ImplementationDetail)
app.add_directive('deprecated-removed', DeprecatedRemoved)
app.add_builder(PydocTopicsBuilder)
app.add_builder(suspicious.CheckSuspiciousMarkupBuilder)
app.add_description_unit('opcode', 'opcode', '%s (opcode)',
parse_opcode_signature)
app.add_description_unit('pdbcommand', 'pdbcmd', '%s (pdb command)',
parse_pdb_command)
app.add_description_unit('2to3fixer', '2to3fixer', '%s (2to3 fixer)')
app.add_directive_to_domain('py', 'decorator', PyDecoratorFunction)
app.add_directive_to_domain('py', 'decoratormethod', PyDecoratorMethod)
app.add_directive('miscnews', MiscNews)
|
lgpl-3.0
|
ThePletch/ansible
|
lib/ansible/modules/cloud/vmware/vmware_vmkernel.py
|
48
|
7589
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Joseph Callen <jcallen () csc.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: vmware_vmkernel
short_description: Create a VMware VMkernel Interface
description:
- Create a VMware VMkernel Interface
version_added: 2.0
author: "Joseph Callen (@jcpowermac), Russell Teague (@mtnbikenc)"
notes:
- Tested on vSphere 5.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
vswitch_name:
description:
            - The name of the vswitch on which to add the VMK interface
required: True
portgroup_name:
description:
- The name of the portgroup for the VMK interface
required: True
ip_address:
description:
- The IP Address for the VMK interface
required: True
subnet_mask:
description:
- The Subnet Mask for the VMK interface
required: True
    vlan_id:
description:
- The VLAN ID for the VMK interface
required: True
mtu:
description:
- The MTU for the VMK interface
required: False
enable_vsan:
description:
- Enable the VMK interface for VSAN traffic
required: False
enable_vmotion:
description:
- Enable the VMK interface for vMotion traffic
required: False
enable_mgmt:
description:
- Enable the VMK interface for Management traffic
required: False
enable_ft:
description:
- Enable the VMK interface for Fault Tolerance traffic
required: False
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
# Example command from Ansible Playbook
- name: Add Management vmkernel port (vmk1)
local_action:
module: vmware_vmkernel
hostname: esxi_hostname
username: esxi_username
password: esxi_password
vswitch_name: vswitch_name
portgroup_name: portgroup_name
vlan_id: vlan_id
ip_address: ip_address
subnet_mask: subnet_mask
enable_mgmt: True
'''
try:
from pyVmomi import vim, vmodl
HAS_PYVMOMI = True
except ImportError:
HAS_PYVMOMI = False
def create_vmkernel_adapter(host_system, port_group_name,
vlan_id, vswitch_name,
ip_address, subnet_mask,
mtu, enable_vsan, enable_vmotion, enable_mgmt, enable_ft):
host_config_manager = host_system.configManager
host_network_system = host_config_manager.networkSystem
host_virtual_vic_manager = host_config_manager.virtualNicManager
config = vim.host.NetworkConfig()
config.portgroup = [vim.host.PortGroup.Config()]
config.portgroup[0].changeOperation = "add"
config.portgroup[0].spec = vim.host.PortGroup.Specification()
config.portgroup[0].spec.name = port_group_name
config.portgroup[0].spec.vlanId = vlan_id
config.portgroup[0].spec.vswitchName = vswitch_name
config.portgroup[0].spec.policy = vim.host.NetworkPolicy()
config.vnic = [vim.host.VirtualNic.Config()]
config.vnic[0].changeOperation = "add"
config.vnic[0].portgroup = port_group_name
config.vnic[0].spec = vim.host.VirtualNic.Specification()
config.vnic[0].spec.ip = vim.host.IpConfig()
config.vnic[0].spec.ip.dhcp = False
config.vnic[0].spec.ip.ipAddress = ip_address
config.vnic[0].spec.ip.subnetMask = subnet_mask
if mtu:
config.vnic[0].spec.mtu = mtu
host_network_config_result = host_network_system.UpdateNetworkConfig(config, "modify")
for vnic_device in host_network_config_result.vnicDevice:
if enable_vsan:
vsan_system = host_config_manager.vsanSystem
vsan_config = vim.vsan.host.ConfigInfo()
vsan_config.networkInfo = vim.vsan.host.ConfigInfo.NetworkInfo()
vsan_config.networkInfo.port = [vim.vsan.host.ConfigInfo.NetworkInfo.PortConfig()]
vsan_config.networkInfo.port[0].device = vnic_device
host_vsan_config_result = vsan_system.UpdateVsan_Task(vsan_config)
if enable_vmotion:
host_virtual_vic_manager.SelectVnicForNicType("vmotion", vnic_device)
if enable_mgmt:
host_virtual_vic_manager.SelectVnicForNicType("management", vnic_device)
if enable_ft:
host_virtual_vic_manager.SelectVnicForNicType("faultToleranceLogging", vnic_device)
return True
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(dict(portgroup_name=dict(required=True, type='str'),
ip_address=dict(required=True, type='str'),
subnet_mask=dict(required=True, type='str'),
mtu=dict(required=False, type='int'),
enable_vsan=dict(required=False, type='bool'),
enable_vmotion=dict(required=False, type='bool'),
enable_mgmt=dict(required=False, type='bool'),
enable_ft=dict(required=False, type='bool'),
vswitch_name=dict(required=True, type='str'),
vlan_id=dict(required=True, type='int')))
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
if not HAS_PYVMOMI:
module.fail_json(msg='pyvmomi is required for this module')
port_group_name = module.params['portgroup_name']
ip_address = module.params['ip_address']
subnet_mask = module.params['subnet_mask']
mtu = module.params['mtu']
enable_vsan = module.params['enable_vsan']
enable_vmotion = module.params['enable_vmotion']
enable_mgmt = module.params['enable_mgmt']
enable_ft = module.params['enable_ft']
vswitch_name = module.params['vswitch_name']
vlan_id = module.params['vlan_id']
try:
content = connect_to_api(module)
host = get_all_objs(content, [vim.HostSystem])
if not host:
module.fail_json(msg="Unable to locate Physical Host.")
host_system = host.keys()[0]
changed = create_vmkernel_adapter(host_system, port_group_name,
vlan_id, vswitch_name,
ip_address, subnet_mask,
mtu, enable_vsan, enable_vmotion, enable_mgmt, enable_ft)
module.exit_json(changed=changed)
except vmodl.RuntimeFault as runtime_fault:
module.fail_json(msg=runtime_fault.msg)
except vmodl.MethodFault as method_fault:
module.fail_json(msg=method_fault.msg)
except Exception as e:
module.fail_json(msg=str(e))
from ansible.module_utils.vmware import *
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
mayankcu/Django-social
|
venv/Lib/site-packages/django/contrib/gis/gdal/geometries.py
|
388
|
26357
|
"""
The OGRGeometry is a wrapper for using the OGR Geometry class
(see http://www.gdal.org/ogr/classOGRGeometry.html). OGRGeometry
may be instantiated when reading geometries from OGR Data Sources
(e.g. SHP files), or when given OGC WKT (a string).
While the 'full' API is not present yet, the API is "pythonic" unlike
the traditional and "next-generation" OGR Python bindings. One major
advantage OGR Geometries have over their GEOS counterparts is support
for spatial reference systems and their transformation.
Example:
>>> from django.contrib.gis.gdal import OGRGeometry, OGRGeomType, SpatialReference
  >>> wkt1, wkt2 = 'POINT(-90 30)', 'POLYGON((0 0, 5 0, 5 5, 0 5, 0 0))'
>>> pnt = OGRGeometry(wkt1)
>>> print pnt
POINT (-90 30)
>>> mpnt = OGRGeometry(OGRGeomType('MultiPoint'), SpatialReference('WGS84'))
>>> mpnt.add(wkt1)
>>> mpnt.add(wkt1)
>>> print mpnt
MULTIPOINT (-90 30,-90 30)
>>> print mpnt.srs.name
WGS 84
>>> print mpnt.srs.proj
+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs
>>> mpnt.transform_to(SpatialReference('NAD27'))
  >>> print mpnt.srs.proj
+proj=longlat +ellps=clrk66 +datum=NAD27 +no_defs
>>> print mpnt
MULTIPOINT (-89.999930378602485 29.999797886557641,-89.999930378602485 29.999797886557641)
The OGRGeomType class is to make it easy to specify an OGR geometry type:
>>> from django.contrib.gis.gdal import OGRGeomType
>>> gt1 = OGRGeomType(3) # Using an integer for the type
>>> gt2 = OGRGeomType('Polygon') # Using a string
>>> gt3 = OGRGeomType('POLYGON') # It's case-insensitive
>>> print gt1 == 3, gt1 == 'Polygon' # Equivalence works w/non-OGRGeomType objects
True
"""
# Python library requisites.
import sys
from binascii import a2b_hex
from ctypes import byref, string_at, c_char_p, c_double, c_ubyte, c_void_p
# Getting GDAL prerequisites
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.envelope import Envelope, OGREnvelope
from django.contrib.gis.gdal.error import OGRException, OGRIndexError, SRSException
from django.contrib.gis.gdal.geomtype import OGRGeomType
from django.contrib.gis.gdal.libgdal import GEOJSON, GDAL_VERSION
from django.contrib.gis.gdal.srs import SpatialReference, CoordTransform
# Getting the ctypes prototype functions that interface w/the GDAL C library.
from django.contrib.gis.gdal.prototypes import geom as capi, srs as srs_api
# For recognizing geometry input.
from django.contrib.gis.geometry.regex import hex_regex, wkt_regex, json_regex
# For more information, see the OGR C API source code:
# http://www.gdal.org/ogr/ogr__api_8h.html
#
# The OGR_G_* routines are relevant here.
#### OGRGeometry Class ####
class OGRGeometry(GDALBase):
"Generally encapsulates an OGR geometry."
def __init__(self, geom_input, srs=None):
"Initializes Geometry on either WKT or an OGR pointer as input."
str_instance = isinstance(geom_input, basestring)
        # If HEX, unpack input into a binary buffer.
if str_instance and hex_regex.match(geom_input):
geom_input = buffer(a2b_hex(geom_input.upper()))
str_instance = False
# Constructing the geometry,
if str_instance:
# Checking if unicode
if isinstance(geom_input, unicode):
# Encoding to ASCII, WKT or HEX doesn't need any more.
geom_input = geom_input.encode('ascii')
wkt_m = wkt_regex.match(geom_input)
json_m = json_regex.match(geom_input)
if wkt_m:
if wkt_m.group('srid'):
# If there's EWKT, set the SRS w/value of the SRID.
srs = int(wkt_m.group('srid'))
if wkt_m.group('type').upper() == 'LINEARRING':
# OGR_G_CreateFromWkt doesn't work with LINEARRING WKT.
# See http://trac.osgeo.org/gdal/ticket/1992.
g = capi.create_geom(OGRGeomType(wkt_m.group('type')).num)
capi.import_wkt(g, byref(c_char_p(wkt_m.group('wkt'))))
else:
g = capi.from_wkt(byref(c_char_p(wkt_m.group('wkt'))), None, byref(c_void_p()))
elif json_m:
if GEOJSON:
g = capi.from_json(geom_input)
else:
raise NotImplementedError('GeoJSON input only supported on GDAL 1.5+.')
else:
# Seeing if the input is a valid short-hand string
# (e.g., 'Point', 'POLYGON').
                ogr_t = OGRGeomType(geom_input)
                g = capi.create_geom(ogr_t.num)
elif isinstance(geom_input, buffer):
# WKB was passed in
g = capi.from_wkb(str(geom_input), None, byref(c_void_p()), len(geom_input))
elif isinstance(geom_input, OGRGeomType):
# OGRGeomType was passed in, an empty geometry will be created.
g = capi.create_geom(geom_input.num)
elif isinstance(geom_input, self.ptr_type):
# OGR pointer (c_void_p) was the input.
g = geom_input
else:
raise OGRException('Invalid input type for OGR Geometry construction: %s' % type(geom_input))
# Now checking the Geometry pointer before finishing initialization
# by setting the pointer for the object.
if not g:
raise OGRException('Cannot create OGR Geometry from input: %s' % str(geom_input))
self.ptr = g
# Assigning the SpatialReference object to the geometry, if valid.
if bool(srs): self.srs = srs
# Setting the class depending upon the OGR Geometry Type
self.__class__ = GEO_CLASSES[self.geom_type.num]
def __del__(self):
"Deletes this Geometry."
if self._ptr: capi.destroy_geom(self._ptr)
# Pickle routines
def __getstate__(self):
srs = self.srs
if srs:
srs = srs.wkt
else:
srs = None
return str(self.wkb), srs
def __setstate__(self, state):
wkb, srs = state
ptr = capi.from_wkb(wkb, None, byref(c_void_p()), len(wkb))
if not ptr: raise OGRException('Invalid OGRGeometry loaded from pickled state.')
self.ptr = ptr
self.srs = srs
@classmethod
def from_bbox(cls, bbox):
"Constructs a Polygon from a bounding box (4-tuple)."
x0, y0, x1, y1 = bbox
return OGRGeometry( 'POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' % (
x0, y0, x0, y1, x1, y1, x1, y0, x0, y0) )
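    # Illustrative usage (example is ours): OGRGeometry.from_bbox((0, 0, 1, 1))
    # builds the closed ring 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))'.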
### Geometry set-like operations ###
# g = g1 | g2
def __or__(self, other):
"Returns the union of the two geometries."
return self.union(other)
# g = g1 & g2
def __and__(self, other):
"Returns the intersection of this Geometry and the other."
return self.intersection(other)
# g = g1 - g2
def __sub__(self, other):
"Return the difference this Geometry and the other."
return self.difference(other)
# g = g1 ^ g2
def __xor__(self, other):
"Return the symmetric difference of this Geometry and the other."
return self.sym_difference(other)
def __eq__(self, other):
"Is this Geometry equal to the other?"
if isinstance(other, OGRGeometry):
return self.equals(other)
else:
return False
def __ne__(self, other):
"Tests for inequality."
return not (self == other)
def __str__(self):
"WKT is used for the string representation."
return self.wkt
#### Geometry Properties ####
@property
def dimension(self):
"Returns 0 for points, 1 for lines, and 2 for surfaces."
return capi.get_dims(self.ptr)
def _get_coord_dim(self):
"Returns the coordinate dimension of the Geometry."
if isinstance(self, GeometryCollection) and GDAL_VERSION < (1, 5, 2):
# On GDAL versions prior to 1.5.2, there exists a bug in which
# the coordinate dimension of geometry collections is always 2:
# http://trac.osgeo.org/gdal/ticket/2334
# Here we workaround by returning the coordinate dimension of the
# first geometry in the collection instead.
if len(self):
return capi.get_coord_dim(capi.get_geom_ref(self.ptr, 0))
return capi.get_coord_dim(self.ptr)
def _set_coord_dim(self, dim):
"Sets the coordinate dimension of this Geometry."
        if dim not in (2, 3):
raise ValueError('Geometry dimension must be either 2 or 3')
capi.set_coord_dim(self.ptr, dim)
coord_dim = property(_get_coord_dim, _set_coord_dim)
@property
def geom_count(self):
"The number of elements in this Geometry."
return capi.get_geom_count(self.ptr)
@property
def point_count(self):
"Returns the number of Points in this Geometry."
return capi.get_point_count(self.ptr)
@property
def num_points(self):
"Alias for `point_count` (same name method in GEOS API.)"
return self.point_count
@property
def num_coords(self):
"Alais for `point_count`."
return self.point_count
@property
def geom_type(self):
"Returns the Type for this Geometry."
return OGRGeomType(capi.get_geom_type(self.ptr))
@property
def geom_name(self):
"Returns the Name of this Geometry."
return capi.get_geom_name(self.ptr)
@property
def area(self):
"Returns the area for a LinearRing, Polygon, or MultiPolygon; 0 otherwise."
return capi.get_area(self.ptr)
@property
def envelope(self):
"Returns the envelope for this Geometry."
# TODO: Fix Envelope() for Point geometries.
return Envelope(capi.get_envelope(self.ptr, byref(OGREnvelope())))
@property
def extent(self):
"Returns the envelope as a 4-tuple, instead of as an Envelope object."
return self.envelope.tuple
#### SpatialReference-related Properties ####
# The SRS property
def _get_srs(self):
"Returns the Spatial Reference for this Geometry."
try:
srs_ptr = capi.get_geom_srs(self.ptr)
return SpatialReference(srs_api.clone_srs(srs_ptr))
except SRSException:
return None
def _set_srs(self, srs):
"Sets the SpatialReference for this geometry."
# Do not have to clone the `SpatialReference` object pointer because
        # when it is assigned to this `OGRGeometry` its internal OGR
# reference count is incremented, and will likewise be released
# (decremented) when this geometry's destructor is called.
if isinstance(srs, SpatialReference):
srs_ptr = srs.ptr
elif isinstance(srs, (int, long, basestring)):
sr = SpatialReference(srs)
srs_ptr = sr.ptr
else:
raise TypeError('Cannot assign spatial reference with object of type: %s' % type(srs))
capi.assign_srs(self.ptr, srs_ptr)
srs = property(_get_srs, _set_srs)
# The SRID property
def _get_srid(self):
srs = self.srs
if srs: return srs.srid
return None
def _set_srid(self, srid):
if isinstance(srid, (int, long)):
self.srs = srid
else:
raise TypeError('SRID must be set with an integer.')
srid = property(_get_srid, _set_srid)
#### Output Methods ####
@property
def geos(self):
"Returns a GEOSGeometry object from this OGRGeometry."
from django.contrib.gis.geos import GEOSGeometry
return GEOSGeometry(self.wkb, self.srid)
@property
def gml(self):
"Returns the GML representation of the Geometry."
return capi.to_gml(self.ptr)
@property
def hex(self):
"Returns the hexadecimal representation of the WKB (a string)."
return str(self.wkb).encode('hex').upper()
#return b2a_hex(self.wkb).upper()
@property
def json(self):
"""
Returns the GeoJSON representation of this Geometry (requires
GDAL 1.5+).
"""
if GEOJSON:
return capi.to_json(self.ptr)
else:
raise NotImplementedError('GeoJSON output only supported on GDAL 1.5+.')
geojson = json
@property
def kml(self):
"Returns the KML representation of the Geometry."
if GEOJSON:
return capi.to_kml(self.ptr, None)
else:
raise NotImplementedError('KML output only supported on GDAL 1.5+.')
@property
def wkb_size(self):
"Returns the size of the WKB buffer."
return capi.get_wkbsize(self.ptr)
@property
def wkb(self):
"Returns the WKB representation of the Geometry."
if sys.byteorder == 'little':
byteorder = 1 # wkbNDR (from ogr_core.h)
else:
byteorder = 0 # wkbXDR
sz = self.wkb_size
# Creating the unsigned character buffer, and passing it in by reference.
buf = (c_ubyte * sz)()
wkb = capi.to_wkb(self.ptr, byteorder, byref(buf))
# Returning a buffer of the string at the pointer.
return buffer(string_at(buf, sz))
@property
def wkt(self):
"Returns the WKT representation of the Geometry."
return capi.to_wkt(self.ptr, byref(c_char_p()))
@property
def ewkt(self):
"Returns the EWKT representation of the Geometry."
srs = self.srs
if srs and srs.srid:
return 'SRID=%s;%s' % (srs.srid, self.wkt)
else:
return self.wkt
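    # e.g. (illustrative), for a WGS84 point: 'SRID=4326;POINT (-90 30)'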
#### Geometry Methods ####
def clone(self):
"Clones this OGR Geometry."
return OGRGeometry(capi.clone_geom(self.ptr), self.srs)
def close_rings(self):
"""
If there are any rings within this geometry that have not been
closed, this routine will do so by adding the starting point at the
end.
"""
# Closing the open rings.
capi.geom_close_rings(self.ptr)
def transform(self, coord_trans, clone=False):
"""
Transforms this geometry to a different spatial reference system.
May take a CoordTransform object, a SpatialReference object, string
WKT or PROJ.4, and/or an integer SRID. By default nothing is returned
and the geometry is transformed in-place. However, if the `clone`
keyword is set, then a transformed clone of this geometry will be
returned.
"""
if clone:
klone = self.clone()
klone.transform(coord_trans)
return klone
# Have to get the coordinate dimension of the original geometry
# so it can be used to reset the transformed geometry's dimension
# afterwards. This is done because of GDAL bug (in versions prior
# to 1.7) that turns geometries 3D after transformation, see:
# http://trac.osgeo.org/gdal/changeset/17792
if GDAL_VERSION < (1, 7):
orig_dim = self.coord_dim
# Depending on the input type, use the appropriate OGR routine
# to perform the transformation.
if isinstance(coord_trans, CoordTransform):
capi.geom_transform(self.ptr, coord_trans.ptr)
elif isinstance(coord_trans, SpatialReference):
capi.geom_transform_to(self.ptr, coord_trans.ptr)
elif isinstance(coord_trans, (int, long, basestring)):
sr = SpatialReference(coord_trans)
capi.geom_transform_to(self.ptr, sr.ptr)
else:
raise TypeError('Transform only accepts CoordTransform, '
'SpatialReference, string, and integer objects.')
# Setting with original dimension, see comment above.
if GDAL_VERSION < (1, 7):
if isinstance(self, GeometryCollection):
# With geometry collections have to set dimension on
# each internal geometry reference, as the collection
# dimension isn't affected.
for i in xrange(len(self)):
internal_ptr = capi.get_geom_ref(self.ptr, i)
if orig_dim != capi.get_coord_dim(internal_ptr):
capi.set_coord_dim(internal_ptr, orig_dim)
else:
if self.coord_dim != orig_dim:
self.coord_dim = orig_dim
def transform_to(self, srs):
"For backwards-compatibility."
self.transform(srs)
#### Topology Methods ####
def _topology(self, func, other):
"""A generalized function for topology operations, takes a GDAL function and
the other geometry to perform the operation on."""
if not isinstance(other, OGRGeometry):
raise TypeError('Must use another OGRGeometry object for topology operations!')
# Returning the output of the given function with the other geometry's
# pointer.
return func(self.ptr, other.ptr)
def intersects(self, other):
"Returns True if this geometry intersects with the other."
return self._topology(capi.ogr_intersects, other)
def equals(self, other):
"Returns True if this geometry is equivalent to the other."
return self._topology(capi.ogr_equals, other)
def disjoint(self, other):
"Returns True if this geometry and the other are spatially disjoint."
return self._topology(capi.ogr_disjoint, other)
def touches(self, other):
"Returns True if this geometry touches the other."
return self._topology(capi.ogr_touches, other)
def crosses(self, other):
"Returns True if this geometry crosses the other."
return self._topology(capi.ogr_crosses, other)
def within(self, other):
"Returns True if this geometry is within the other."
return self._topology(capi.ogr_within, other)
def contains(self, other):
"Returns True if this geometry contains the other."
return self._topology(capi.ogr_contains, other)
def overlaps(self, other):
"Returns True if this geometry overlaps the other."
return self._topology(capi.ogr_overlaps, other)
#### Geometry-generation Methods ####
def _geomgen(self, gen_func, other=None):
"A helper routine for the OGR routines that generate geometries."
if isinstance(other, OGRGeometry):
return OGRGeometry(gen_func(self.ptr, other.ptr), self.srs)
else:
return OGRGeometry(gen_func(self.ptr), self.srs)
@property
def boundary(self):
"Returns the boundary of this geometry."
return self._geomgen(capi.get_boundary)
@property
def convex_hull(self):
"""
Returns the smallest convex Polygon that contains all the points in
this Geometry.
"""
return self._geomgen(capi.geom_convex_hull)
def difference(self, other):
"""
Returns a new geometry consisting of the region which is the difference
of this geometry and the other.
"""
return self._geomgen(capi.geom_diff, other)
def intersection(self, other):
"""
Returns a new geometry consisting of the region of intersection of this
geometry and the other.
"""
return self._geomgen(capi.geom_intersection, other)
def sym_difference(self, other):
"""
Returns a new geometry which is the symmetric difference of this
geometry and the other.
"""
return self._geomgen(capi.geom_sym_diff, other)
def union(self, other):
"""
Returns a new geometry consisting of the region which is the union of
this geometry and the other.
"""
return self._geomgen(capi.geom_union, other)
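    # Illustrative use of the set-like operators defined above (example is ours):
    #   a = OGRGeometry('POINT(0 0)') | OGRGeometry('POINT(1 1)')       # union
    #   b = OGRGeometry('POINT(0 0)').union(OGRGeometry('POINT(1 1)'))  # same result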
# The subclasses for OGR Geometry.
class Point(OGRGeometry):
@property
def x(self):
"Returns the X coordinate for this Point."
return capi.getx(self.ptr, 0)
@property
def y(self):
"Returns the Y coordinate for this Point."
return capi.gety(self.ptr, 0)
@property
def z(self):
"Returns the Z coordinate for this Point."
if self.coord_dim == 3:
return capi.getz(self.ptr, 0)
@property
def tuple(self):
"Returns the tuple of this point."
if self.coord_dim == 2:
return (self.x, self.y)
elif self.coord_dim == 3:
return (self.x, self.y, self.z)
coords = tuple
class LineString(OGRGeometry):
def __getitem__(self, index):
"Returns the Point at the given index."
if index >= 0 and index < self.point_count:
x, y, z = c_double(), c_double(), c_double()
capi.get_point(self.ptr, index, byref(x), byref(y), byref(z))
dim = self.coord_dim
if dim == 1:
return (x.value,)
elif dim == 2:
return (x.value, y.value)
elif dim == 3:
return (x.value, y.value, z.value)
else:
raise OGRIndexError('index out of range: %s' % str(index))
def __iter__(self):
"Iterates over each point in the LineString."
for i in xrange(self.point_count):
yield self[i]
def __len__(self):
"The length returns the number of points in the LineString."
return self.point_count
@property
def tuple(self):
"Returns the tuple representation of this LineString."
return tuple([self[i] for i in xrange(len(self))])
coords = tuple
def _listarr(self, func):
"""
Internal routine that returns a sequence (list) corresponding with
the given function.
"""
return [func(self.ptr, i) for i in xrange(len(self))]
@property
def x(self):
"Returns the X coordinates in a list."
return self._listarr(capi.getx)
@property
def y(self):
"Returns the Y coordinates in a list."
return self._listarr(capi.gety)
@property
def z(self):
"Returns the Z coordinates in a list."
if self.coord_dim == 3:
return self._listarr(capi.getz)
# LinearRings are used in Polygons.
class LinearRing(LineString): pass
class Polygon(OGRGeometry):
def __len__(self):
"The number of interior rings in this Polygon."
return self.geom_count
def __iter__(self):
"Iterates through each ring in the Polygon."
for i in xrange(self.geom_count):
yield self[i]
def __getitem__(self, index):
"Gets the ring at the specified index."
if index < 0 or index >= self.geom_count:
raise OGRIndexError('index out of range: %s' % index)
else:
return OGRGeometry(capi.clone_geom(capi.get_geom_ref(self.ptr, index)), self.srs)
# Polygon Properties
@property
def shell(self):
"Returns the shell of this Polygon."
return self[0] # First ring is the shell
exterior_ring = shell
@property
def tuple(self):
"Returns a tuple of LinearRing coordinate tuples."
return tuple([self[i].tuple for i in xrange(self.geom_count)])
coords = tuple
@property
def point_count(self):
"The number of Points in this Polygon."
# Summing up the number of points in each ring of the Polygon.
return sum([self[i].point_count for i in xrange(self.geom_count)])
@property
def centroid(self):
"Returns the centroid (a Point) of this Polygon."
# The centroid is a Point, create a geometry for this.
p = OGRGeometry(OGRGeomType('Point'))
capi.get_centroid(self.ptr, p.ptr)
return p
# Geometry Collection base class.
class GeometryCollection(OGRGeometry):
"The Geometry Collection class."
def __getitem__(self, index):
"Gets the Geometry at the specified index."
if index < 0 or index >= self.geom_count:
raise OGRIndexError('index out of range: %s' % index)
else:
return OGRGeometry(capi.clone_geom(capi.get_geom_ref(self.ptr, index)), self.srs)
def __iter__(self):
"Iterates over each Geometry."
for i in xrange(self.geom_count):
yield self[i]
def __len__(self):
"The number of geometries in this Geometry Collection."
return self.geom_count
def add(self, geom):
"Add the geometry to this Geometry Collection."
if isinstance(geom, OGRGeometry):
if isinstance(geom, self.__class__):
for g in geom: capi.add_geom(self.ptr, g.ptr)
else:
capi.add_geom(self.ptr, geom.ptr)
elif isinstance(geom, basestring):
tmp = OGRGeometry(geom)
capi.add_geom(self.ptr, tmp.ptr)
else:
raise OGRException('Must add an OGRGeometry.')
@property
def point_count(self):
"The number of Points in this Geometry Collection."
# Summing up the number of points in each geometry in this collection
return sum([self[i].point_count for i in xrange(self.geom_count)])
@property
def tuple(self):
"Returns a tuple representation of this Geometry Collection."
return tuple([self[i].tuple for i in xrange(self.geom_count)])
coords = tuple
# Multiple Geometry types.
class MultiPoint(GeometryCollection): pass
class MultiLineString(GeometryCollection): pass
class MultiPolygon(GeometryCollection): pass
# Class mapping dictionary (using the OGRwkbGeometryType as the key)
GEO_CLASSES = {1 : Point,
2 : LineString,
3 : Polygon,
4 : MultiPoint,
5 : MultiLineString,
6 : MultiPolygon,
7 : GeometryCollection,
101: LinearRing,
1 + OGRGeomType.wkb25bit : Point,
2 + OGRGeomType.wkb25bit : LineString,
3 + OGRGeomType.wkb25bit : Polygon,
4 + OGRGeomType.wkb25bit : MultiPoint,
5 + OGRGeomType.wkb25bit : MultiLineString,
6 + OGRGeomType.wkb25bit : MultiPolygon,
7 + OGRGeomType.wkb25bit : GeometryCollection,
}
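# Example (ours): GEO_CLASSES[OGRGeomType('Point').num] is Point. __init__ above
# consults this mapping to downcast each new geometry to its specific subclass.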
|
bsd-3-clause
|
JLMARIN/paparazziUAV
|
sw/tools/attitude_viz.py
|
52
|
11943
|
#! /usr/bin/python
# Tool for visualizing quaternion as rotated cube
from OpenGL.GLUT import *
from OpenGL.GLU import *
from OpenGL.GL import *
import sys
import math
from ivy.std_api import *
import logging
import getopt
import pygame
import time
import platform
import os
_NAME = 'attitude_viz'
class TelemetryQuat:
def __init__(self, message_name, index, name, integer):
self.message_name = message_name
self.index = index
self.name = name
self.qi = 1
self.qx = 0
self.qy = 0
self.qz = 0
        # optional scaling for fixed-point telemetry: 1/2**15 converts binary
        # fixed-point quaternion components to floats
if integer:
self.scale = 0.00003051757812
else:
self.scale = 1.0
class TelemetryValue:
def __init__(self, message_name, index, name, offset, scale, max):
self.message_name = message_name
self.index = index
self.name = name
self.offset = offset
self.scale = scale
self.max = max
self.value = 0
class Visualization:
def __init__(self, parent):
self.quats = []
self.graph_values = []
self.throttle = 0.0
self.mode = 0.0
self.airspeed = 0.0
self.display_list = None
self.display_dirty = True
self.rotate_theta = parent.rotate_theta
for message_name, index, name, bfp in VEHICLE_QUATS:
self.quats.append(TelemetryQuat(message_name, index, name, bfp))
for message_name, index, name, offset, scale, max in BAR_VALUES:
self.graph_values.append(TelemetryValue(message_name, index, name, offset, scale, max))
def onmsgproc(self, agent, *larg):
data = str(larg[0]).split(' ')
for telemetry_quat in self.quats:
if telemetry_quat.message_name == data[1]:
self.display_dirty = True
telemetry_quat.qi = float(data[telemetry_quat.index + 0])
telemetry_quat.qx = float(data[telemetry_quat.index + 1])
telemetry_quat.qy = float(data[telemetry_quat.index + 2])
telemetry_quat.qz = float(data[telemetry_quat.index + 3])
for graph_value in self.graph_values:
if graph_value.message_name == data[1]:
self.display_dirty = True
graph_value.value = (float(data[graph_value.index + 0]) + graph_value.offset) / graph_value.scale
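    # Illustrative Ivy payload (format inferred from the parsing above):
    #   "<sender> AHRS_REF_QUAT <ref_qi> <ref_qx> <ref_qy> <ref_qz> <qi> <qx> <qy> <qz>"
    # data[1] carries the message name; each configured index addresses the
    # first component of one quaternion or bar value.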
def DrawCircle(self, radius):
glBegin(GL_TRIANGLE_FAN)
glVertex3f(0, 0, 0)
for angle in range(0, 361, 12):
glVertex3f(math.sin(math.radians(angle)) * radius, math.cos(math.radians(angle)) * radius, 0)
glEnd()
# draw quad centered at origin, z = 0
def DrawQuad(self, width, height):
glBegin(GL_QUADS)
glVertex3f(width, height, 0)
glVertex3f(-width, height, 0)
glVertex3f(-width, -height, 0)
glVertex3f(width, -height, 0)
glEnd()
def DrawBox(self, width, height, depth):
glPushMatrix()
glTranslate(0, 0, depth)
self.DrawQuad(width, height)
glTranslate(0, 0, -2 * depth)
self.DrawQuad(width, height)
glPopMatrix()
glPushMatrix()
glRotate(90, 1, 0, 0)
glTranslate(0, 0, height)
self.DrawQuad(width, depth)
glTranslate(0, 0, -2 * height)
self.DrawQuad(width, depth)
glPopMatrix()
glPushMatrix()
glRotate(90, 0, 1, 0)
glTranslate(0, 0, width)
self.DrawQuad(depth, height)
glTranslate(0, 0, -2 * width)
self.DrawQuad(depth, height)
glPopMatrix()
def DrawVehicle(self, name):
wingspan = 2.7
separation = 0.7
chord = 0.35
thickness = 0.08
strutcount = 3
discradius = 0.45
discseparation = 0.01
#wings
glColor3f(0.1, 0.1, 0.9)
glPushMatrix()
glTranslate(0, 0, 0.05)
self.DrawBox(wingspan, chord, thickness)
glColor3f(0.0, 0.0, 0.0)
glTranslate(-wingspan, -0.2, thickness + 0.01)
glScale(0.004, 0.004, 0.004)
for c in name:
glutStrokeCharacter(GLUT_STROKE_ROMAN, ord(c))
glPopMatrix()
glPushMatrix()
glTranslate(0, 0, -0.05)
glColor3f(0.6, 0.6, 0.2)
self.DrawBox(wingspan, chord, thickness)
glColor3f(0.0, 0.0, 0.0)
glTranslate(wingspan, -0.2, -0.01 - thickness)
glScale(0.004, 0.004, 0.004)
glRotate(180, 0, 1, 0)
for c in name:
glutStrokeCharacter(GLUT_STROKE_ROMAN, ord(c))
glPopMatrix()
if self.display_list is None:
self.display_list = glGenLists(1)
glNewList(self.display_list, GL_COMPILE)
# struts
glColor3f(0.4, 0.4, 0.4)
glPushMatrix()
glTranslate(-wingspan / 2, 0, separation / 2)
glRotate(90, 0, 1, 0)
for x in range(0, strutcount - 1):
self.DrawBox(separation / 2, chord - .01, thickness)
glTranslate(0, 0, wingspan)
glTranslate(separation, 0, -5 * wingspan / 2)
for x in range(0, strutcount - 1):
self.DrawBox(separation / 2, chord - .01, thickness)
glTranslate(0, 0, 2 * wingspan)
glPopMatrix()
#rotors
glColor3f(0.9, 0.1, 0.1)
glPushMatrix()
glRotate(90, 1, 0, 0)
glTranslate(-wingspan / 2, separation, -(chord + .01))
for x in range(0, strutcount):
if (x != strutcount / 2):
self.DrawCircle(discradius)
glTranslate(2 * wingspan / (strutcount + 1), 0, 0)
glPopMatrix()
glPushMatrix()
glRotate(90, 1, 0, 0)
glTranslate(-wingspan, -separation, -(chord + .01))
for x in range(0, strutcount):
if (x != strutcount / 2):
self.DrawCircle(discradius)
glTranslate(2 * wingspan / (strutcount - 1), 0, 0)
glPopMatrix()
glEndList()
glCallList(self.display_list)
def DrawBar(self, name, value):
bar_height = 0.12
bar_length = 3
glPushMatrix()
glColor3f(0, 0, 0)
glTranslate(-bar_length, -0.09, 0.02)
glScale(0.0015, 0.0015, 0.0015)
for c in name:
glutStrokeCharacter(GLUT_STROKE_ROMAN, ord(c))
glPopMatrix()
glColor3f(0.92, 0.92, 0.92)
glPushMatrix()
glTranslate(0, 0, 0)
self.DrawQuad(bar_length, bar_height)
glPopMatrix()
glPushMatrix()
glTranslate(bar_length * value - bar_length, 0, 0.01)
glColor3f(0.6, 0.6, 0.6)
self.DrawQuad(bar_length * value, bar_height)
glPopMatrix()
def Draw(self):
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
glPushMatrix()
height = 5
glDisable(GL_LIGHTING)
glPushMatrix()
for graph_value in self.graph_values:
self.DrawBar(graph_value.name % (graph_value.value), graph_value.value / graph_value.max)
glTranslate(0, 0.35, 0)
glPopMatrix()
glEnable(GL_LIGHTING)
glTranslate(0, -height + (height / len(self.quats) + 1), 0)
for telemetry_quat in self.quats:
glPushMatrix()
try:
scaled_quat = [telemetry_quat.qi * telemetry_quat.scale, telemetry_quat.qx * telemetry_quat.scale,
telemetry_quat.qy * telemetry_quat.scale, telemetry_quat.qz * telemetry_quat.scale]
glRotate(360 * math.acos(scaled_quat[0]) / math.pi, scaled_quat[2], -scaled_quat[3], -scaled_quat[1])
glRotate(self.rotate_theta, 1, 0, 0)
self.DrawVehicle(telemetry_quat.name)
            except Exception:
                raise
finally:
glPopMatrix()
glTranslate(0, 2 * height / (len(self.quats)), 0)
glPopMatrix()
class Visualizer:
def __init__(self, rotate_theta):
self.rotate_theta = rotate_theta
self.visualization = Visualization(self)
# listen to Ivy
logging.getLogger('Ivy').setLevel(logging.WARN)
IvyInit(_NAME,
"",
0,
lambda x, y: y,
lambda x, z: z)
if os.getenv('IVY_BUS') is not None:
IvyStart(os.getenv('IVY_BUS'))
else:
if platform.system() == 'Darwin':
IvyStart("224.255.255.255:2010")
else:
IvyStart()
# list of all message names
messages = []
# append all message names
for vehicle_quat in VEHICLE_QUATS:
messages.append(vehicle_quat[0])
for bar_value in BAR_VALUES:
messages.append(bar_value[0])
# bind to set of messages (ie, only bind each message once)
for message_name in set(messages):
bind_string = "(^.*" + message_name + ".*$)"
IvyBindMsg(self.visualization.onmsgproc, bind_string)
def Draw(self):
if self.visualization.display_dirty:
self.visualization.Draw()
self.visualization.display_dirty = False
def OnClose(self):
IvyStop()
SCREEN_SIZE = (800, 800)
def resize(width, height):
glViewport(0, 0, width, height)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
    gluPerspective(60.0, float(width) / height, .1, 100.)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def init():
glutInit()
glEnable(GL_LINE_SMOOTH)
glEnable(GL_DEPTH_TEST)
glEnable(GL_LIGHTING)
glEnable(GL_LIGHT0)
glEnable(GL_BLEND)
glShadeModel(GL_SMOOTH)
glClearColor(1.0, 1.0, 1.0, 1.0)
glClearDepth(1.0)
glPointSize(3.0)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(7.0, 1.0, 95.0, 105.0)
glMatrixMode(GL_MODELVIEW)
glLight(GL_LIGHT0, GL_POSITION, [5, 30, -20])
glLight(GL_LIGHT0, GL_AMBIENT, [0.5, 0.5, 0.5])
glLight(GL_LIGHT0, GL_SPECULAR, [0.0, 0.0, 0.0])
glLight(GL_LIGHT0, GL_DIFFUSE, [0.8, 0.8, 0.8])
glEnable(GL_COLOR_MATERIAL)
glColorMaterial(GL_FRONT, GL_AMBIENT_AND_DIFFUSE)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
gluLookAt(0.0, 0.0, 100.0,
0.0, 0.0, 0.0,
0.0, 1.0, 0.0)
def run():
global VEHICLE_QUATS, BAR_VALUES
VEHICLE_QUATS = [["AHRS_REF_QUAT", 6, "Estimate", True], ["AHRS_REF_QUAT", 2, "Reference", True]]
BAR_VALUES = [["ROTORCRAFT_RADIO_CONTROL", 5, "Throttle (%%) %i", 0, 100, 100]]
window_title = "Attitude_Viz"
rotate_theta = -90
try:
        opts, args = getopt.getopt(sys.argv[1:], "t:r:", ["title=", "rotate_theta="])
for o, a in opts:
if o in ("-t", "--title"):
window_title = a
if o in ("-r", "--rotate_theta"):
rotate_theta = int(a)
except getopt.error as msg:
print(msg)
print("""usage:
-t, --title set window title
-r, --rotate_theta rotate the quaternion by n degrees over the pitch axis (default: -90)
""")
pygame.init()
screen = pygame.display.set_mode(SCREEN_SIZE, pygame.OPENGL | pygame.DOUBLEBUF)
#resize(*SCREEN_SIZE)
init()
visualizer = Visualizer(rotate_theta)
try:
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
visualizer.OnClose()
return
if event.type == pygame.KEYUP and event.key == pygame.K_ESCAPE:
visualizer.OnClose()
return
visualizer.Draw()
pygame.display.flip()
time.sleep(.02)
except KeyboardInterrupt:
visualizer.OnClose()
return
if __name__ == "__main__":
run()
|
gpl-2.0
|
amarian12/p2pool-adaptive-drk
|
nattraverso/pynupnp/upnp.py
|
283
|
18985
|
"""
This module is the heart of the UPnP support. Device discovery, IP discovery
and port mapping are implemented here.
@author: Raphael Slinckx
@author: Anthony Baxter
@copyright: Copyright 2005
@license: LGPL
@contact: U{[email protected]<mailto:[email protected]>}
@version: 0.1.0
"""
__revision__ = "$id"
import socket, random, urlparse, logging
from twisted.internet import reactor, defer
from twisted.web import client
from twisted.internet.protocol import DatagramProtocol
from twisted.internet.error import CannotListenError
from twisted.python import failure
from nattraverso.pynupnp.soap import SoapProxy
from nattraverso.pynupnp.upnpxml import UPnPXml
from nattraverso import ipdiscover, portmapper
class UPnPError(Exception):
"""
A generic UPnP error, with a descriptive message as content.
"""
pass
class UPnPMapper(portmapper.NATMapper):
"""
This is the UPnP port mapper implementing the
L{NATMapper<portmapper.NATMapper>} interface.
@see: L{NATMapper<portmapper.NATMapper>}
"""
def __init__(self, upnp):
"""
Creates the mapper, with the given L{UPnPDevice} instance.
@param upnp: L{UPnPDevice} instance
"""
self._mapped = {}
self._upnp = upnp
def map(self, port):
"""
See interface
"""
self._check_valid_port(port)
#Port is already mapped
if port in self._mapped:
return defer.succeed(self._mapped[port])
#Trigger a new mapping creation, first fetch local ip.
result = ipdiscover.get_local_ip()
self._mapped[port] = result
return result.addCallback(self._map_got_local_ip, port)
def info(self, port):
"""
See interface
"""
# If the mapping exists, everything's ok
if port in self._mapped:
return self._mapped[port]
else:
raise ValueError('Port %r is not currently mapped'%(port))
def unmap(self, port):
"""
See interface
"""
if port in self._mapped:
existing = self._mapped[port]
            #Pending mapping, queue an unmap, return the existing deferred
if type(existing) is not tuple:
existing.addCallback(lambda x: self.unmap(port))
return existing
#Remove our local mapping
del self._mapped[port]
#Ask the UPnP to remove the mapping
extaddr, extport = existing
return self._upnp.remove_port_mapping(extport, port.getHost().type)
else:
raise ValueError('Port %r is not currently mapped'%(port))
def get_port_mappings(self):
"""
See interface
"""
return self._upnp.get_port_mappings()
def _map_got_local_ip(self, ip_result, port):
"""
        We got the local ip address; retrieve the existing port mappings
in the device.
@param ip_result: result of L{ipdiscover.get_local_ip}
@param port: a L{twisted.internet.interfaces.IListeningPort} we
want to map
"""
local, ip = ip_result
return self._upnp.get_port_mappings().addCallback(
self._map_got_port_mappings, ip, port)
def _map_got_port_mappings(self, mappings, ip, port):
"""
We got all the existing mappings in the device, find an unused one
and assign it for the requested port.
@param ip: The local ip of this host "x.x.x.x"
@param port: a L{twisted.internet.interfaces.IListeningPort} we
want to map
@param mappings: result of L{UPnPDevice.get_port_mappings}
"""
#Get the requested mapping's info
ptype = port.getHost().type
intport = port.getHost().port
for extport in [random.randrange(1025, 65536) for val in range(20)]:
# Check if there is an existing mapping, if it does not exist, bingo
if not (ptype, extport) in mappings:
break
if (ptype, extport) in mappings:
existing = mappings[ptype, extport]
local_ip, local_port = existing
if local_ip == ip and local_port == intport:
# Existing binding for this host/port/proto - replace it
break
# Triggers the creation of the mapping on the device
result = self._upnp.add_port_mapping(ip, intport, extport, 'pynupnp', ptype)
# We also need the external IP, so we queue first an
# External IP Discovery, then we add the mapping.
return result.addCallback(
lambda x: self._upnp.get_external_ip()).addCallback(
self._port_mapping_added, extport, port)
def _port_mapping_added(self, extaddr, extport, port):
"""
The port mapping was added in the device, this means::
Internet NAT LAN
|
> IP:extaddr |> IP:local ip
> Port:extport |> Port:port
|
        @param extaddr: The external ip address
@param extport: The external port as number
@param port: The internal port as a
L{twisted.internet.interfaces.IListeningPort} object, that has been
mapped
"""
self._mapped[port] = (extaddr, extport)
return (extaddr, extport)
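# Illustrative usage sketch (ours; assumes `upnp` is a discovered UPnPDevice and
# `port` implements twisted's IListeningPort):
#   mapper = UPnPMapper(upnp)
#   d = mapper.map(port)  # Deferred fired with (external_ip, external_port)
#   d.addCallback(lambda mapping: mapping)  # mapping == (extaddr, extport)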
class UPnPDevice:
"""
    Represents a UPnP device, with the associated infos and remote methods.
"""
def __init__(self, soap_proxy, info):
"""
Build the device, with the given SOAP proxy, and the meta-infos.
@param soap_proxy: an initialized L{SoapProxy} to the device
        @param info: a dictionary of various infos concerning the
device extracted with L{UPnPXml}
"""
self._soap_proxy = soap_proxy
self._info = info
def get_external_ip(self):
"""
Triggers an external ip discovery on the upnp device. Returns
a deferred called with the external ip of this host.
@return: A deferred called with the ip address, as "x.x.x.x"
@rtype: L{twisted.internet.defer.Deferred}
"""
result = self._soap_proxy.call('GetExternalIPAddress')
result.addCallback(self._on_external_ip)
return result
def get_port_mappings(self):
"""
        Retrieve the existing port mappings
@see: L{portmapper.NATMapper.get_port_mappings}
        @return: A deferred called with the dictionary as defined
in the interface L{portmapper.NATMapper.get_port_mappings}
@rtype: L{twisted.internet.defer.Deferred}
"""
return self._get_port_mapping()
def add_port_mapping(self, local_ip, intport, extport, desc, proto, lease=0):
"""
Add a port mapping in the upnp device. Returns a deferred.
@param local_ip: the LAN ip of this host as "x.x.x.x"
@param intport: the internal port number
@param extport: the external port number
@param desc: the description of this mapping (string)
@param proto: "UDP" or "TCP"
        @param lease: The duration of the lease in seconds (0 requests an indefinite lease)
@return: A deferred called with None when the mapping is done
@rtype: L{twisted.internet.defer.Deferred}
"""
result = self._soap_proxy.call('AddPortMapping', NewRemoteHost="",
NewExternalPort=extport,
NewProtocol=proto,
NewInternalPort=intport,
NewInternalClient=local_ip,
NewEnabled=1,
NewPortMappingDescription=desc,
NewLeaseDuration=lease)
return result.addCallbacks(self._on_port_mapping_added,
self._on_no_port_mapping_added)
def remove_port_mapping(self, extport, proto):
"""
Remove an existing port mapping on the device. Returns a deferred
@param extport: the external port number associated to the mapping
to be removed
@param proto: either "UDP" or "TCP"
@return: A deferred called with None when the mapping is done
@rtype: L{twisted.internet.defer.Deferred}
"""
result = self._soap_proxy.call('DeletePortMapping', NewRemoteHost="",
NewExternalPort=extport,
NewProtocol=proto)
return result.addCallbacks(self._on_port_mapping_removed,
self._on_no_port_mapping_removed)
# Private --------
def _on_external_ip(self, res):
"""
Called when we received the external ip address from the device.
@param res: the SOAPpy structure of the result
@return: the external ip string, as "x.x.x.x"
"""
logging.debug("Got external ip struct: %r", res)
return res['NewExternalIPAddress']
def _get_port_mapping(self, mapping_id=0, mappings=None):
"""
Fetch the existing mappings starting at index
"mapping_id" from the device.
        To retrieve all the mappings, call this without parameters.
        @param mapping_id: The index of the mapping to start fetching from
        @param mappings: the dictionary of already fetched mappings
        @return: A deferred called with the existing mappings when all have been
            retrieved, see L{get_port_mappings}
@rtype: L{twisted.internet.defer.Deferred}
"""
        if mappings is None:
mappings = {}
result = self._soap_proxy.call('GetGenericPortMappingEntry',
NewPortMappingIndex=mapping_id)
return result.addCallbacks(
lambda x: self._on_port_mapping_received(x, mapping_id+1, mappings),
            lambda x: self._on_no_port_mapping_received(x, mappings))
def _on_port_mapping_received(self, response, mapping_id, mappings):
"""
        Called when we receive a single mapping from the device.
@param response: a SOAPpy structure, representing the device's answer
@param mapping_id: The index of the next mapping in the device
@param mappings: the already fetched mappings, see L{get_port_mappings}
        @return: A deferred called with the existing mappings when all have been
            retrieved, see L{get_port_mappings}
@rtype: L{twisted.internet.defer.Deferred}
"""
logging.debug("Got mapping struct: %r", response)
mappings[
response['NewProtocol'], response['NewExternalPort']
] = (response['NewInternalClient'], response['NewInternalPort'])
return self._get_port_mapping(mapping_id, mappings)
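    # Illustrative shape of the mappings dictionary built above (values made
    # up for the example): keys are (protocol, external port) tuples and
    # values are (internal client, internal port) tuples, e.g.
    #   {('TCP', 8080): ('192.168.1.10', 80)}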
def _on_no_port_mapping_received(self, failure, mappings):
"""
        Called when there are no more port mappings to retrieve, or an
        error occurred while retrieving them.
        A "SpecifiedArrayIndexInvalid" SOAP error is fine: it just means
        we have finished. Any other error makes us fail with an UPnPError.
        @param mappings: the already retrieved mappings
@param failure: the failure
@return: The existing mappings as defined in L{get_port_mappings}
@raise UPnPError: When we got any other error
than "SpecifiedArrayIndexInvalid"
"""
logging.debug("_on_no_port_mapping_received: %s", failure)
err = failure.value
message = err.args[0]["UPnPError"]["errorDescription"]
if "SpecifiedArrayIndexInvalid" == message:
return mappings
else:
return failure
def _on_port_mapping_added(self, response):
"""
The port mapping was successfully added, return None to the deferred.
"""
return None
def _on_no_port_mapping_added(self, failure):
"""
        Called when the port mapping could not be added. The failure is
        propagated as an UPnPError with the SOAPpy structure inside.
@raise UPnPError: When the port mapping could not be added
"""
return failure
def _on_port_mapping_removed(self, response):
"""
The port mapping was successfully removed, return None to the deferred.
"""
return None
def _on_no_port_mapping_removed(self, failure):
"""
        Called when the port mapping could not be removed. The failure is
        propagated as an UPnPError with the SOAPpy structure inside.
@raise UPnPError: When the port mapping could not be deleted
"""
return failure
# UPNP multicast address, port and request string
_UPNP_MCAST = '239.255.255.250'
_UPNP_PORT = 1900
_UPNP_SEARCH_REQUEST = """M-SEARCH * HTTP/1.1\r
Host:%s:%s\r
ST:urn:schemas-upnp-org:device:InternetGatewayDevice:1\r
Man:"ssdp:discover"\r
MX:3\r
\r
""" % (_UPNP_MCAST, _UPNP_PORT)
class UPnPProtocol(DatagramProtocol, object):
"""
The UPnP Device discovery udp multicast twisted protocol.
"""
def __init__(self, *args, **kwargs):
"""
Init the protocol, no parameters needed.
"""
super(UPnPProtocol, self).__init__(*args, **kwargs)
        # Device discovery deferred
self._discovery = None
self._discovery_timeout = None
self.mcast = None
self._done = False
# Public methods
def search_device(self):
"""
Triggers a UPnP device discovery.
The returned deferred will be called with the L{UPnPDevice} that has
been found in the LAN.
@return: A deferred called with the detected L{UPnPDevice} instance.
@rtype: L{twisted.internet.defer.Deferred}
"""
if self._discovery is not None:
raise ValueError('already used')
self._discovery = defer.Deferred()
self._discovery_timeout = reactor.callLater(6, self._on_discovery_timeout)
        attempt = 0
        while True:
            try:
                self.mcast = reactor.listenMulticast(1900 + attempt, self)
                break
            except CannotListenError:
                attempt = random.randint(0, 500)
        # Join the multicast group and start the upnp search; the request is
        # sent several times because UDP delivery is unreliable.
self.mcast.joinGroup('239.255.255.250', socket.INADDR_ANY)
self.transport.write(_UPNP_SEARCH_REQUEST, (_UPNP_MCAST, _UPNP_PORT))
self.transport.write(_UPNP_SEARCH_REQUEST, (_UPNP_MCAST, _UPNP_PORT))
self.transport.write(_UPNP_SEARCH_REQUEST, (_UPNP_MCAST, _UPNP_PORT))
return self._discovery
    # Private methods
    def datagramReceived(self, dgram, address):
        """
        This is private, handle the multicast answer from the upnp device.
        """
        if self._done:
            return
        logging.debug("Got UPNP multicast search answer:\n%s", dgram)
        # This is an HTTP response
response, message = dgram.split('\r\n', 1)
        # Parse the status line
version, status, textstatus = response.split(None, 2)
if not version.startswith('HTTP'):
return
if status != "200":
return
# Launch the info fetching
def parse_discovery_response(message):
"""Separate headers and body from the received http answer."""
hdict = {}
body = ''
remaining = message
while remaining:
line, remaining = remaining.split('\r\n', 1)
line = line.strip()
if not line:
body = remaining
break
key, val = line.split(':', 1)
key = key.lower()
hdict.setdefault(key, []).append(val.strip())
return hdict, body
headers, body = parse_discovery_response(message)
        if 'location' not in headers:
            self._on_discovery_failed(
                UPnPError(
                    "No location header in response to M-SEARCH!: %r" % headers))
return
loc = headers['location'][0]
result = client.getPage(url=loc)
result.addCallback(self._on_gateway_response, loc).addErrback(self._on_discovery_failed)
    def _on_gateway_response(self, body, loc):
        """
        Called with the UPnP device XML description fetched via HTTP.
        If the device has suitable services for ip discovery and port mappings,
        the callback returned in L{search_device} is called with
        the discovered L{UPnPDevice}.
        @raise UPnPError: When no suitable service has been
            found in the description, or another error occurs.
        @param body: The xml description of the device.
        @param loc: the url used to retrieve the xml description
        """
        if self._done:
            return
# Parse answer
upnpinfo = UPnPXml(body)
# Check if we have a base url, if not consider location as base url
urlbase = upnpinfo.urlbase
        if urlbase is None:
urlbase = loc
# Check the control url, if None, then the device cannot do what we want
controlurl = upnpinfo.controlurl
        if controlurl is None:
self._on_discovery_failed(UPnPError("upnp response showed no WANConnections"))
return
control_url2 = urlparse.urljoin(urlbase, controlurl)
soap_proxy = SoapProxy(control_url2, upnpinfo.wanservice)
self._on_discovery_succeeded(UPnPDevice(soap_proxy, upnpinfo.deviceinfos))
def _on_discovery_succeeded(self, res):
if self._done:
return
self._done = True
self.mcast.stopListening()
self._discovery_timeout.cancel()
self._discovery.callback(res)
def _on_discovery_failed(self, err):
if self._done:
return
self._done = True
self.mcast.stopListening()
self._discovery_timeout.cancel()
self._discovery.errback(err)
def _on_discovery_timeout(self):
if self._done:
return
self._done = True
self.mcast.stopListening()
self._discovery.errback(failure.Failure(defer.TimeoutError('in _on_discovery_timeout')))
def search_upnp_device():
"""
    Check the network for a UPnP device. Returns a deferred
with the L{UPnPDevice} instance as result, if found.
@return: A deferred called with the L{UPnPDevice} instance
@rtype: L{twisted.internet.defer.Deferred}
"""
return defer.maybeDeferred(UPnPProtocol().search_device)
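# Minimal usage sketch (assumes a running twisted reactor; the names below
# are made up for the example and not part of this module):
#
#   def on_device(device):
#       result = device.get_external_ip()
#       result.addCallback(lambda ip: logging.info("external ip: %s", ip))
#       return result
#
#   search_upnp_device().addCallbacks(on_device, logging.error)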
|
gpl-3.0
|
kmod/icbd
|
stdlib/python2.5/ctypes/test/test_funcptr.py
|
66
|
3962
|
import os, unittest
from ctypes import *
try:
WINFUNCTYPE
except NameError:
# fake to enable this test on Linux
WINFUNCTYPE = CFUNCTYPE
import _ctypes_test
lib = CDLL(_ctypes_test.__file__)
class CFuncPtrTestCase(unittest.TestCase):
def test_basic(self):
X = WINFUNCTYPE(c_int, c_int, c_int)
def func(*args):
return len(args)
x = X(func)
self.failUnlessEqual(x.restype, c_int)
self.failUnlessEqual(x.argtypes, (c_int, c_int))
self.failUnlessEqual(sizeof(x), sizeof(c_voidp))
self.failUnlessEqual(sizeof(X), sizeof(c_voidp))
def test_first(self):
StdCallback = WINFUNCTYPE(c_int, c_int, c_int)
CdeclCallback = CFUNCTYPE(c_int, c_int, c_int)
def func(a, b):
return a + b
s = StdCallback(func)
c = CdeclCallback(func)
self.failUnlessEqual(s(1, 2), 3)
self.failUnlessEqual(c(1, 2), 3)
# The following no longer raises a TypeError - it is now
# possible, as in C, to call cdecl functions with more parameters.
#self.assertRaises(TypeError, c, 1, 2, 3)
self.failUnlessEqual(c(1, 2, 3, 4, 5, 6), 3)
        if WINFUNCTYPE is not CFUNCTYPE and os.name != "ce":
self.assertRaises(TypeError, s, 1, 2, 3)
def test_structures(self):
WNDPROC = WINFUNCTYPE(c_long, c_int, c_int, c_int, c_int)
def wndproc(hwnd, msg, wParam, lParam):
return hwnd + msg + wParam + lParam
HINSTANCE = c_int
HICON = c_int
HCURSOR = c_int
LPCTSTR = c_char_p
class WNDCLASS(Structure):
_fields_ = [("style", c_uint),
("lpfnWndProc", WNDPROC),
("cbClsExtra", c_int),
("cbWndExtra", c_int),
("hInstance", HINSTANCE),
("hIcon", HICON),
("hCursor", HCURSOR),
("lpszMenuName", LPCTSTR),
("lpszClassName", LPCTSTR)]
wndclass = WNDCLASS()
wndclass.lpfnWndProc = WNDPROC(wndproc)
WNDPROC_2 = WINFUNCTYPE(c_long, c_int, c_int, c_int, c_int)
# This is no longer true, now that WINFUNCTYPE caches created types internally.
## # CFuncPtr subclasses are compared by identity, so this raises a TypeError:
## self.assertRaises(TypeError, setattr, wndclass,
## "lpfnWndProc", WNDPROC_2(wndproc))
# instead:
self.failUnless(WNDPROC is WNDPROC_2)
# 'wndclass.lpfnWndProc' leaks 94 references. Why?
self.failUnlessEqual(wndclass.lpfnWndProc(1, 2, 3, 4), 10)
f = wndclass.lpfnWndProc
del wndclass
del wndproc
self.failUnlessEqual(f(10, 11, 12, 13), 46)
def test_dllfunctions(self):
def NoNullHandle(value):
if not value:
raise WinError()
return value
strchr = lib.my_strchr
strchr.restype = c_char_p
strchr.argtypes = (c_char_p, c_char)
self.failUnlessEqual(strchr("abcdefghi", "b"), "bcdefghi")
self.failUnlessEqual(strchr("abcdefghi", "x"), None)
strtok = lib.my_strtok
strtok.restype = c_char_p
        # Neither of these works: strtok changes the buffer it is passed
## strtok.argtypes = (c_char_p, c_char_p)
## strtok.argtypes = (c_string, c_char_p)
def c_string(init):
size = len(init) + 1
return (c_char*size)(*init)
s = "a\nb\nc"
b = c_string(s)
## b = (c_char * (len(s)+1))()
## b.value = s
## b = c_string(s)
self.failUnlessEqual(strtok(b, "\n"), "a")
self.failUnlessEqual(strtok(None, "\n"), "b")
self.failUnlessEqual(strtok(None, "\n"), "c")
self.failUnlessEqual(strtok(None, "\n"), None)
if __name__ == '__main__':
unittest.main()
|
mit
|
simonwydooghe/ansible
|
lib/ansible/modules/network/f5/bigip_appsvcs_extension.py
|
19
|
16524
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_appsvcs_extension
short_description: Manage application service deployments
description:
- Manages application service deployments via the App Services Extension functionality
in BIG-IP.
version_added: 2.7
options:
content:
description:
- Declaration of tenants configured on the system.
- This parameter is most often used along with the C(file) or C(template) lookup plugins.
Refer to the examples section for correct usage.
- For anything advanced or with formatting consider using the C(template) lookup.
      - This can additionally be used for specifying application service configurations
        directly in YAML; however, that is not an encouraged practice and, if used at all,
        should only be used for the absolute smallest of configurations to prevent your
        Playbooks from becoming too large.
      - If your C(content) includes encrypted values (such as ciphertexts, passphrases, etc),
the returned C(changed) value will always be true.
      - If you are using the C(to_nice_json) filter, it will cause this module to fail because
        the purpose of that filter is to format the JSON to be human-readable and this process
        includes inserting extra characters that break JSON validators.
type: raw
required: True
tenants:
description:
- A list of tenants that you want to remove.
- This parameter is only relevant when C(state) is C(absent). It will be ignored when
C(state) is C(present).
- A value of C(all) will remove all tenants.
- Tenants can be specified as a list as well to remove only specific tenants.
type: raw
force:
description:
- Force updates a declaration.
- This parameter should be used in cases where your declaration includes items that
are encrypted or in cases (such as WAF Policies) where you want a large reload to take place.
type: bool
default: no
state:
description:
- When C(state) is C(present), ensures the configuration exists.
- When C(state) is C(absent), ensures that the configuration is removed.
type: str
choices:
- present
- absent
default: present
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Deploy an app service configuration
bigip_appsvcs_extension:
content: "{{ lookup('file', '/path/to/appsvcs.json') }}"
state: present
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Remove all app service configurations
bigip_appsvcs_extension:
tenants: all
state: absent
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Remove tenants T1 and T2 from app service configurations
bigip_appsvcs_extension:
tenants:
- T1
- T2
state: absent
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
action:
description:
- The action performed.
returned: changed
type: str
sample: deploy
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
from ansible.module_utils.six import string_types
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
import json
except ImportError:
import simplejson as json
class Parameters(AnsibleF5Parameters):
api_map = {
'class': 'class_name',
'patchBody': 'patch_body',
'resourceTimeout': 'resource_timeout',
'targetTimeout': 'target_timeout',
'targetTokens': 'target_tokens',
'targetPassphrase': 'target_passphrase',
'targetUsername': 'target_username',
'targetPort': 'target_port',
'targetHost': 'target_host',
'retrieveAge': 'retrieve_age',
'logLevel': 'log_level',
'historyLimit': 'history_limit',
'syncToGroup': 'sync_to_group',
'redeployUpdateMode': 'redeploy_update_mode',
'redeployAge': 'redeploy_age',
}
api_attributes = [
'class',
'action',
'persist',
'declaration',
'patchBody',
'resourceTimeout',
'targetTimeout',
'targetTokens',
'targetPassphrase',
'targetUsername',
'targetPort',
'targetHost',
'retrieveAge',
'trace',
'logLevel',
'historyLimit',
'syncToGroup',
'redeployUpdateMode',
'redeployAge',
]
returnables = [
'class_name',
'action',
'persist',
'declaration',
'patch_body',
'resource_timeout',
'target_timeout',
'target_tokens',
'target_passphrase',
'target_username',
'target_port',
'target_host',
'retrieve_age',
'trace',
'log_level',
'history_limit',
'sync_to_group',
'redeploy_update_mode',
'redeploy_age',
]
updatables = [
'class_name',
'action',
'persist',
'declaration',
'patch_body',
'resource_timeout',
'target_timeout',
'target_tokens',
'target_passphrase',
'target_username',
'target_port',
'target_host',
'retrieve_age',
'trace',
'log_level',
'history_limit',
'sync_to_group',
'redeploy_update_mode',
'redeploy_age',
]
class ApiParameters(Parameters):
pass
class ModuleParameters(Parameters):
@property
def content(self):
if self._values['content'] is None:
return None
if isinstance(self._values['content'], string_types):
return json.loads(self._values['content'] or 'null')
else:
return self._values['content']
@property
def class_name(self):
return self._values['content'].get('class', None)
@property
def action(self):
return self._values['content'].get('action', None)
@property
def declaration(self):
return self._values['content'].get('declaration', None)
@property
def persist(self):
if self._values['content']:
return self._values['content'].get('persist', None)
elif self.param_persist:
return self.param_persist
return None
@property
def param_persist(self):
if self._values['parameters'] is None:
return None
result = self._values['parameters'].get('persist', None)
return result
@property
def tenants(self):
if self._values['tenants'] in [None, 'all']:
return ''
if isinstance(self._values['tenants'], list):
return ','.join(self._values['tenants'])
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
pass
class ReportableChanges(Changes):
@property
def declaration(self):
return None
@property
def target_passphrase(self):
return None
@property
def class_name(self):
return None
@property
def persist(self):
return None
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
return result
def present(self):
if self.exists():
return False
return self.upsert()
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.tenant_exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def upsert(self):
self._set_changed_options()
if self.module.check_mode:
return True
if self.want.content == 'all':
raise F5ModuleError(
"'all' keyword cannot be used when 'state' is 'present'."
)
self.upsert_on_device()
return True
def _get_errors_from_response(self, messages):
results = []
if 'results' not in messages:
if 'message' in messages:
results.append(messages['message'])
if 'errors' in messages:
results += messages['errors']
else:
for message in messages['results']:
if 'message' in message and message['message'] == 'declaration failed':
results.append(message['response'])
if 'errors' in message:
results += message['errors']
return results
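    # Illustrative shape of the payloads handled above (not an exhaustive
    # schema of the App Services REST API):
    #   {'results': [{'message': 'declaration failed',
    #                 'response': '...', 'errors': ['...']}]}
    # or, when no 'results' key is present:
    #   {'message': '...', 'errors': ['...']}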
def upsert_on_device(self):
uri = 'https://{0}:{1}/mgmt/shared/appsvcs/declare/'.format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.post(uri, json=self.want.content)
if resp.status != 200:
result = resp.json()
errors = self._get_errors_from_response(result)
if errors:
message = "{0}".format('. '.join(errors))
raise F5ModuleError(message)
raise F5ModuleError(resp.content)
else:
result = resp.json()
errors = self._get_errors_from_response(result)
if errors:
message = "{0}".format('. '.join(errors))
raise F5ModuleError(message)
def ignore_changes(self, obj):
if isinstance(obj, dict):
if 'passphrase' in obj:
obj['passphrase']['ignoreChanges'] = True
if 'class' in obj and obj['class'] == 'WAF_Policy':
obj['ignoreChanges'] = True
return dict((k, self.ignore_changes(v)) for k, v in iteritems(obj))
else:
return obj
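    # For illustration (hypothetical declaration fragment), ignore_changes()
    # turns:
    #   {'policy': {'class': 'WAF_Policy', 'url': 'https://example.com/waf'}}
    # into:
    #   {'policy': {'class': 'WAF_Policy', 'url': 'https://example.com/waf',
    #               'ignoreChanges': True}}
    # and similarly flags any 'passphrase' entries it finds.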
def exists(self):
declaration = {}
if self.want.content is None:
raise F5ModuleError(
"Empty content cannot be specified when 'state' is 'present'."
)
try:
declaration.update(self.want.content)
except ValueError:
raise F5ModuleError(
"The provided 'content' could not be converted into valid json. If you "
"are using the 'to_nice_json' filter, please remove it."
)
declaration['action'] = 'dry-run'
# This deals with cases where you're comparing a passphrase.
#
# Passphrases will always cause an idempotent operation to register
# a change. Therefore, by specifying "force", you are instructing
# the module to **not** ignore the passphrase.
#
# This will cause the module to not append ignore clauses to the
# classes that support them.
if not self.want.force:
self.ignore_changes(declaration)
uri = "https://{0}:{1}/mgmt/shared/appsvcs/declare/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.post(uri, json=declaration)
try:
response = resp.json()
except ValueError:
return False
try:
if response['results'][0]['message'] == 'no change':
return True
except KeyError:
return False
def tenant_exists(self):
uri = "https://{0}:{1}/mgmt/shared/appsvcs/declare/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.tenants
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
        if resp.status == 404 or ('statusCode' in response and response['statusCode'] == 404):
return False
return True
def absent(self):
if self.tenant_exists():
return self.remove()
return False
def remove_from_device(self):
uri = 'https://{0}:{1}/mgmt/shared/appsvcs/declare/{2}'.format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.tenants
)
response = self.client.api.delete(uri)
if response.status != 200:
result = response.json()
errors = self._get_errors_from_response(result)
if errors:
message = "{0}".format('. '.join(errors))
raise F5ModuleError(message)
raise F5ModuleError(response.content)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
content=dict(type='raw'),
state=dict(
default='present',
choices=['present', 'absent']
),
tenants=dict(type='raw'),
force=dict(type='bool', default='no')
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
self.required_if = [
['state', 'present', ['content']]
]
self.mutually_exclusive = [
['content', 'tenants']
]
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
required_if=spec.required_if
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
|
gpl-3.0
|
licess/shadowsocks
|
shadowsocks/asyncdns.py
|
655
|
17416
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import socket
import struct
import re
import logging
from shadowsocks import common, lru_cache, eventloop, shell
CACHE_SWEEP_INTERVAL = 30
VALID_HOSTNAME = re.compile(br"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
common.patch_socket()
# rfc1035
# format
# +---------------------+
# | Header |
# +---------------------+
# | Question | the question for the name server
# +---------------------+
# | Answer | RRs answering the question
# +---------------------+
# | Authority | RRs pointing toward an authority
# +---------------------+
# | Additional | RRs holding additional information
# +---------------------+
#
# header
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ID |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# |QR| Opcode |AA|TC|RD|RA| Z | RCODE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | QDCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ANCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | NSCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ARCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
QTYPE_ANY = 255
QTYPE_A = 1
QTYPE_AAAA = 28
QTYPE_CNAME = 5
QTYPE_NS = 2
QCLASS_IN = 1
def build_address(address):
address = address.strip(b'.')
labels = address.split(b'.')
results = []
for label in labels:
l = len(label)
if l > 63:
return None
results.append(common.chr(l))
results.append(label)
results.append(b'\0')
return b''.join(results)
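# For example (illustrative): build_address(b'example.com') returns
# b'\x07example\x03com\x00' -- each label is prefixed with its length and
# the whole name is terminated by a zero byte.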
def build_request(address, qtype):
request_id = os.urandom(2)
header = struct.pack('!BBHHHH', 1, 0, 1, 0, 0, 0)
addr = build_address(address)
qtype_qclass = struct.pack('!HH', qtype, QCLASS_IN)
return request_id + header + addr + qtype_qclass
def parse_ip(addrtype, data, length, offset):
if addrtype == QTYPE_A:
return socket.inet_ntop(socket.AF_INET, data[offset:offset + length])
elif addrtype == QTYPE_AAAA:
return socket.inet_ntop(socket.AF_INET6, data[offset:offset + length])
elif addrtype in [QTYPE_CNAME, QTYPE_NS]:
return parse_name(data, offset)[1]
else:
return data[offset:offset + length]
def parse_name(data, offset):
p = offset
labels = []
l = common.ord(data[p])
while l > 0:
if (l & (128 + 64)) == (128 + 64):
# pointer
pointer = struct.unpack('!H', data[p:p + 2])[0]
pointer &= 0x3FFF
r = parse_name(data, pointer)
labels.append(r[1])
p += 2
# pointer is the end
return p - offset, b'.'.join(labels)
else:
labels.append(data[p + 1:p + 1 + l])
p += 1 + l
l = common.ord(data[p])
return p - offset + 1, b'.'.join(labels)
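# Illustrative example: if data contains b'\x03www\x06google\x03com\x00'
# starting at offset 12, parse_name(data, 12) returns
# (16, b'www.google.com') -- 16 bytes consumed, labels joined with dots.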
# rfc1035
# record
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | |
# / /
# / NAME /
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TYPE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | CLASS |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TTL |
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | RDLENGTH |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--|
# / RDATA /
# / /
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
def parse_record(data, offset, question=False):
nlen, name = parse_name(data, offset)
if not question:
record_type, record_class, record_ttl, record_rdlength = struct.unpack(
'!HHiH', data[offset + nlen:offset + nlen + 10]
)
ip = parse_ip(record_type, data, record_rdlength, offset + nlen + 10)
return nlen + 10 + record_rdlength, \
(name, ip, record_type, record_class, record_ttl)
else:
record_type, record_class = struct.unpack(
'!HH', data[offset + nlen:offset + nlen + 4]
)
return nlen + 4, (name, None, record_type, record_class, None, None)
def parse_header(data):
if len(data) >= 12:
header = struct.unpack('!HBBHHHH', data[:12])
res_id = header[0]
res_qr = header[1] & 128
res_tc = header[1] & 2
res_ra = header[2] & 128
res_rcode = header[2] & 15
# assert res_tc == 0
# assert res_rcode in [0, 3]
res_qdcount = header[3]
res_ancount = header[4]
res_nscount = header[5]
res_arcount = header[6]
return (res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount,
res_ancount, res_nscount, res_arcount)
return None
def parse_response(data):
try:
if len(data) >= 12:
header = parse_header(data)
if not header:
return None
res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount, \
res_ancount, res_nscount, res_arcount = header
qds = []
ans = []
offset = 12
for i in range(0, res_qdcount):
l, r = parse_record(data, offset, True)
offset += l
if r:
qds.append(r)
for i in range(0, res_ancount):
l, r = parse_record(data, offset)
offset += l
if r:
ans.append(r)
for i in range(0, res_nscount):
l, r = parse_record(data, offset)
offset += l
for i in range(0, res_arcount):
l, r = parse_record(data, offset)
offset += l
response = DNSResponse()
if qds:
response.hostname = qds[0][0]
for an in qds:
response.questions.append((an[1], an[2], an[3]))
for an in ans:
response.answers.append((an[1], an[2], an[3]))
return response
except Exception as e:
shell.print_exception(e)
return None
def is_valid_hostname(hostname):
if len(hostname) > 255:
return False
if hostname[-1] == b'.':
hostname = hostname[:-1]
return all(VALID_HOSTNAME.match(x) for x in hostname.split(b'.'))
class DNSResponse(object):
def __init__(self):
self.hostname = None
self.questions = [] # each: (addr, type, class)
self.answers = [] # each: (addr, type, class)
def __str__(self):
return '%s: %s' % (self.hostname, str(self.answers))
STATUS_IPV4 = 0
STATUS_IPV6 = 1
class DNSResolver(object):
def __init__(self, server_list=None):
self._loop = None
self._hosts = {}
self._hostname_status = {}
self._hostname_to_cb = {}
self._cb_to_hostname = {}
self._cache = lru_cache.LRUCache(timeout=300)
self._sock = None
if server_list is None:
self._servers = None
self._parse_resolv()
else:
self._servers = server_list
self._parse_hosts()
# TODO monitor hosts change and reload hosts
# TODO parse /etc/gai.conf and follow its rules
def _parse_resolv(self):
self._servers = []
try:
with open('/etc/resolv.conf', 'rb') as f:
content = f.readlines()
for line in content:
line = line.strip()
if line:
if line.startswith(b'nameserver'):
parts = line.split()
if len(parts) >= 2:
server = parts[1]
if common.is_ip(server) == socket.AF_INET:
if type(server) != str:
server = server.decode('utf8')
self._servers.append(server)
except IOError:
pass
if not self._servers:
self._servers = ['8.8.4.4', '8.8.8.8']
def _parse_hosts(self):
etc_path = '/etc/hosts'
if 'WINDIR' in os.environ:
etc_path = os.environ['WINDIR'] + '/system32/drivers/etc/hosts'
try:
with open(etc_path, 'rb') as f:
for line in f.readlines():
line = line.strip()
parts = line.split()
if len(parts) >= 2:
ip = parts[0]
if common.is_ip(ip):
for i in range(1, len(parts)):
hostname = parts[i]
if hostname:
self._hosts[hostname] = ip
except IOError:
self._hosts['localhost'] = '127.0.0.1'
def add_to_loop(self, loop):
if self._loop:
            raise Exception('already added to loop')
self._loop = loop
# TODO when dns server is IPv6
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
self._sock.setblocking(False)
loop.add(self._sock, eventloop.POLL_IN, self)
loop.add_periodic(self.handle_periodic)
def _call_callback(self, hostname, ip, error=None):
callbacks = self._hostname_to_cb.get(hostname, [])
for callback in callbacks:
if callback in self._cb_to_hostname:
del self._cb_to_hostname[callback]
if ip or error:
callback((hostname, ip), error)
else:
callback((hostname, None),
Exception('unknown hostname %s' % hostname))
if hostname in self._hostname_to_cb:
del self._hostname_to_cb[hostname]
if hostname in self._hostname_status:
del self._hostname_status[hostname]
def _handle_data(self, data):
response = parse_response(data)
if response and response.hostname:
hostname = response.hostname
ip = None
for answer in response.answers:
if answer[1] in (QTYPE_A, QTYPE_AAAA) and \
answer[2] == QCLASS_IN:
ip = answer[0]
break
if not ip and self._hostname_status.get(hostname, STATUS_IPV6) \
== STATUS_IPV4:
self._hostname_status[hostname] = STATUS_IPV6
self._send_req(hostname, QTYPE_AAAA)
else:
if ip:
self._cache[hostname] = ip
self._call_callback(hostname, ip)
elif self._hostname_status.get(hostname, None) == STATUS_IPV6:
for question in response.questions:
if question[1] == QTYPE_AAAA:
self._call_callback(hostname, None)
break
def handle_event(self, sock, fd, event):
if sock != self._sock:
return
if event & eventloop.POLL_ERR:
logging.error('dns socket err')
self._loop.remove(self._sock)
self._sock.close()
# TODO when dns server is IPv6
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
self._sock.setblocking(False)
self._loop.add(self._sock, eventloop.POLL_IN, self)
else:
data, addr = sock.recvfrom(1024)
if addr[0] not in self._servers:
                logging.warn('received a packet from an address other than our dns servers')
return
self._handle_data(data)
def handle_periodic(self):
self._cache.sweep()
def remove_callback(self, callback):
hostname = self._cb_to_hostname.get(callback)
if hostname:
del self._cb_to_hostname[callback]
arr = self._hostname_to_cb.get(hostname, None)
if arr:
arr.remove(callback)
if not arr:
del self._hostname_to_cb[hostname]
if hostname in self._hostname_status:
del self._hostname_status[hostname]
def _send_req(self, hostname, qtype):
req = build_request(hostname, qtype)
for server in self._servers:
logging.debug('resolving %s with type %d using server %s',
hostname, qtype, server)
self._sock.sendto(req, (server, 53))
def resolve(self, hostname, callback):
if type(hostname) != bytes:
hostname = hostname.encode('utf8')
if not hostname:
callback(None, Exception('empty hostname'))
elif common.is_ip(hostname):
callback((hostname, hostname), None)
elif hostname in self._hosts:
logging.debug('hit hosts: %s', hostname)
ip = self._hosts[hostname]
callback((hostname, ip), None)
elif hostname in self._cache:
logging.debug('hit cache: %s', hostname)
ip = self._cache[hostname]
callback((hostname, ip), None)
else:
if not is_valid_hostname(hostname):
callback(None, Exception('invalid hostname: %s' % hostname))
return
arr = self._hostname_to_cb.get(hostname, None)
if not arr:
self._hostname_status[hostname] = STATUS_IPV4
self._send_req(hostname, QTYPE_A)
self._hostname_to_cb[hostname] = [callback]
self._cb_to_hostname[callback] = hostname
else:
arr.append(callback)
# TODO send again only if waited too long
self._send_req(hostname, QTYPE_A)
def close(self):
if self._sock:
if self._loop:
self._loop.remove_periodic(self.handle_periodic)
self._loop.remove(self._sock)
self._sock.close()
self._sock = None
def test():
dns_resolver = DNSResolver()
loop = eventloop.EventLoop()
dns_resolver.add_to_loop(loop)
global counter
counter = 0
def make_callback():
global counter
def callback(result, error):
global counter
# TODO: what can we assert?
print(result, error)
counter += 1
if counter == 9:
dns_resolver.close()
loop.stop()
a_callback = callback
return a_callback
assert(make_callback() != make_callback())
dns_resolver.resolve(b'google.com', make_callback())
dns_resolver.resolve('google.com', make_callback())
dns_resolver.resolve('example.com', make_callback())
dns_resolver.resolve('ipv6.google.com', make_callback())
dns_resolver.resolve('www.facebook.com', make_callback())
dns_resolver.resolve('ns2.google.com', make_callback())
dns_resolver.resolve('invalid.@!#$%^&[email protected]', make_callback())
dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'long.hostname', make_callback())
dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'long.hostname', make_callback())
loop.run()
if __name__ == '__main__':
test()
|
apache-2.0
|
pabelanger/did
|
did/base.py
|
2
|
13985
|
# coding: utf-8
""" Config, Date, User and Exceptions """
from __future__ import unicode_literals, absolute_import
import os
import re
import sys
import codecs
import datetime
import optparse
import StringIO
import xmlrpclib
import ConfigParser
from dateutil.relativedelta import MO as MONDAY
from ConfigParser import NoOptionError, NoSectionError
from dateutil.relativedelta import relativedelta as delta
from did import utils
from did.utils import log
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Constants
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Config file location
CONFIG = os.path.expanduser("~/.did")
# Default maximum width
MAX_WIDTH = 79
# Today's date
TODAY = datetime.date.today()
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Exceptions
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class ConfigError(Exception):
""" General problem with configuration file """
pass
class OptionError(Exception):
""" General problem with configuration file """
pass
class ReportError(Exception):
""" General problem with report generation """
pass
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Config
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Config(object):
""" User config file """
parser = None
def __init__(self, config=None, path=None):
"""
Read the config file
Parse config from given string (config) or file (path).
If no config or path given, default to "~/.did/config" which
        can be overridden by the DID_CONFIG environment variable.
"""
# Read the config only once (unless explicitly provided)
if self.parser is not None and config is None and path is None:
return
Config.parser = ConfigParser.SafeConfigParser()
# If config provided as string, parse it directly
if config is not None:
log.info("Inspecting config file from string")
log.debug(utils.pretty(config))
self.parser.readfp(StringIO.StringIO(config))
return
# Check the environment for config file override
# (unless path is explicitly provided)
if path is None:
path = Config.path()
# Parse the config from file
try:
log.info("Inspecting config file '{0}'".format(path))
self.parser.readfp(codecs.open(path, "r", "utf8"))
except IOError as error:
log.debug(error)
raise ConfigError(
"Unable to read the config file {0}".format(path))
@property
def email(self):
""" User email(s) """
try:
return self.parser.get("general", "email")
except (NoOptionError, NoSectionError) as error:
log.debug(error)
return []
@property
def width(self):
""" Maximum width of the report """
try:
return int(self.parser.get("general", "width"))
except (NoOptionError, NoSectionError):
return MAX_WIDTH
def sections(self, kind=None):
""" Return all sections (optionally of given kind only) """
result = []
for section in self.parser.sections():
# Selected kind only if provided
if kind is not None:
try:
section_type = self.parser.get(section, "type")
if section_type != kind:
continue
except NoOptionError:
# Implicit header/footer type for backward compatibility
if (section == kind == "header" or
section == kind == "footer"):
pass
else:
continue
result.append(section)
return result
def section(self, section, skip=None):
""" Return section items, skip selected (type/order by default) """
if skip is None:
skip = ['type', 'order']
return [(key, val) for key, val in self.parser.items(section)
if key not in skip]
def item(self, section, it):
""" Return content of given item in selected section """
for key, value in self.section(section, skip=['type']):
if key == it:
return value
raise ConfigError(
"Item '{0}' not found in section '{1}'".format(it, section))
@staticmethod
def path():
""" Detect config file path """
# Detect config directory
try:
directory = os.environ["DID_CONFIG"]
except KeyError:
directory = CONFIG
# Detect config file (even before options are parsed)
filename = "config"
arguments = " ".join(sys.argv)
if "--config" in arguments:
matched = re.search("--config[ =](\S+)", arguments)
if matched:
filename = matched.groups()[0]
return directory.rstrip("/") + "/" + filename
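    # Illustrative example (paths made up): with DID_CONFIG=~/work/did in
    # the environment and "--config beaker" on the command line, path()
    # returns "~/work/did/beaker".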
@staticmethod
def example():
""" Return config example """
return "[general]\nemail = Name Surname <[email protected]>\n"
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Date
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Date(object):
""" Date parsing for common word formats """
def __init__(self, date=None):
""" Parse the date string """
if isinstance(date, datetime.date):
self.date = date
elif date is None or date.lower() == "today":
self.date = TODAY
elif date.lower() == "yesterday":
self.date = TODAY - delta(days=1)
else:
try:
self.date = datetime.date(*[int(i) for i in date.split("-")])
except StandardError as error:
log.debug(error)
raise OptionError(
"Invalid date format: '{0}', use YYYY-MM-DD.".format(date))
self.datetime = datetime.datetime(
self.date.year, self.date.month, self.date.day, 0, 0, 0)
def __str__(self):
""" Ascii version of the string representation """
return utils.ascii(unicode(self))
def __unicode__(self):
""" String format for printing """
return unicode(self.date)
@staticmethod
def this_week():
""" Return start and end date of the current week. """
since = TODAY + delta(weekday=MONDAY(-1))
until = since + delta(weeks=1)
return Date(since), Date(until)
@staticmethod
def last_week():
""" Return start and end date of the last week. """
since = TODAY + delta(weekday=MONDAY(-2))
until = since + delta(weeks=1)
return Date(since), Date(until)
@staticmethod
def this_month():
""" Return start and end date of this month. """
since = TODAY + delta(day=1)
until = since + delta(months=1)
return Date(since), Date(until)
@staticmethod
def last_month():
""" Return start and end date of this month. """
since = TODAY + delta(day=1, months=-1)
until = since + delta(months=1)
return Date(since), Date(until)
@staticmethod
def this_quarter():
""" Return start and end date of this quarter. """
since = TODAY + delta(day=1)
while since.month % 3 != 0:
since -= delta(months=1)
until = since + delta(months=3)
return Date(since), Date(until)
@staticmethod
def last_quarter():
""" Return start and end date of this quarter. """
since, until = Date.this_quarter()
since = since.date - delta(months=3)
until = until.date - delta(months=3)
return Date(since), Date(until)
@staticmethod
def this_year():
""" Return start and end date of this fiscal year """
since = TODAY
while since.month != 3 or since.day != 1:
since -= delta(days=1)
until = since + delta(years=1)
return Date(since), Date(until)
@staticmethod
def last_year():
""" Return start and end date of the last fiscal year """
since, until = Date.this_year()
since = since.date - delta(years=1)
until = until.date - delta(years=1)
return Date(since), Date(until)
@staticmethod
def period(argument):
""" Detect desired time period for the argument """
since, until, period = None, None, None
if "today" in argument:
since = Date("today")
until = Date("today")
until.date += delta(days=1)
period = "today"
elif "year" in argument:
if "last" in argument:
since, until = Date.last_year()
period = "the last fiscal year"
else:
since, until = Date.this_year()
period = "this fiscal year"
elif "quarter" in argument:
if "last" in argument:
since, until = Date.last_quarter()
period = "the last quarter"
else:
since, until = Date.this_quarter()
period = "this quarter"
elif "month" in argument:
if "last" in argument:
since, until = Date.last_month()
period = "the last month"
else:
since, until = Date.this_month()
period = "this month"
else:
if "last" in argument:
since, until = Date.last_week()
period = "the last week"
else:
since, until = Date.this_week()
period = "this week"
return since, until, period
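    # Illustrative usage (exact dates depend on TODAY):
    #   since, until, period = Date.period("last week")
    #   # period == "the last week"; until is exactly one week after since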
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# User
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class User(object):
"""
User information
The User object holds name, login and email which are used for
performing queries by individual plugins. This information is
parsed from given email address. Both short & full email format
are supported::
[email protected]
Name Surname <[email protected]>
In addition, it's possible to provide email and login aliases for
individual stats. This is useful if you use different email/login
for different services. The syntax consists of ``stats: login`` or
``stats: email`` pairs appended at the end of the email address::
[email protected]; bz: [email protected]; gh: githublogin
Use config section name to identify stats where given alias should
be used. The exactly same syntax can be used both in the config file
and on the command line. Finally it's also possible to include the
alias directly in the respective config section::
[github]
type = github
url = https://api.github.com/
login = psss
"""
def __init__(self, email, stats=None):
""" Detect name, login and email """
# Make sure we received the email string, save the original for cloning
if not email:
raise ConfigError("Email required for user initialization.")
self._original = email
# Separate aliases if provided
try:
email, aliases = re.split(r"\s*;\s*", self._original, 1)
except ValueError:
email = self._original
aliases = None
# Extract everything from the email string provided
parts = utils.EMAIL_REGEXP.search(email)
if parts is None:
raise ConfigError("Invalid email address '{0}'".format(email))
self.name = parts.groups()[0] or "Unknown"
self.email = parts.groups()[1]
self.login = self.email.split('@')[0]
# Check for possible aliases
self.alias(aliases, stats)
def __unicode__(self):
""" Use name & email for string representation. """
return u"{0} <{1}>".format(self.name, self.email)
def clone(self, stats):
""" Create a user copy with alias enabled for given stats. """
return User(self._original, stats)
def alias(self, aliases, stats):
""" Apply the login/email alias if configured. """
login = email = None
if stats is None:
return
# Use alias directly from the config section
try:
config = dict(Config().section(stats))
try:
email = config["email"]
except KeyError:
pass
try:
login = config["login"]
except KeyError:
pass
except NoSectionError:
pass
# Check for aliases specified in the email string
if aliases is not None:
try:
aliases = dict([
re.split(r"\s*:\s*", definition, 1)
for definition in re.split(r"\s*;\s*", aliases.strip())])
except ValueError:
raise ConfigError(
"Invalid alias definition: '{0}'".format(aliases))
if stats in aliases:
if "@" in aliases[stats]:
email = aliases[stats]
else:
login = aliases[stats]
# Update login/email if alias detected
if email is not None:
self.email = email
log.info("Using email alias '{0}' for '{1}'".format(email, stats))
if login is None:
login = email.split("@")[0]
if login is not None:
self.login = login
log.info("Using login alias '{0}' for '{1}'".format(login, stats))
|
gpl-2.0
|
levkar/odoo
|
addons/base_iban/models/res_partner_bank.py
|
30
|
6689
|
# -*- coding: utf-8 -*-
import re
from odoo import api, models, _
from odoo.exceptions import UserError, ValidationError
def normalize_iban(iban):
    return re.sub(r'[\W_]', '', iban or '')
def pretty_iban(iban):
""" return iban in groups of four characters separated by a single space """
return ' '.join([iban[i:i + 4] for i in range(0, len(iban), 4)])
def get_bban_from_iban(iban):
""" Returns the basic bank account number corresponding to an IBAN.
    Note: the BBAN is not the same as the domestic bank account number!
    The relation between IBAN, BBAN and domestic account numbers is explained here: http://www.ecbs.org/iban.htm
"""
return normalize_iban(iban)[4:]
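# Illustrative example using the well-known test IBAN 'BE68 5390 0754 7034':
# normalize_iban() gives 'BE68539007547034' and get_bban_from_iban()
# returns '539007547034'.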
def validate_iban(iban):
iban = normalize_iban(iban)
if not iban:
raise ValidationError(_("No IBAN !"))
country_code = iban[:2].lower()
if country_code not in _map_iban_template:
raise ValidationError(_("The IBAN is invalid, it should begin with the country code"))
iban_template = _map_iban_template[country_code]
if len(iban) != len(iban_template.replace(' ', '')):
raise ValidationError(_("The IBAN does not seem to be correct. You should have entered something like this %s\n"
"Where B = National bank code, S = Branch code, C = Account No, k = Check digit") % iban_template)
check_chars = iban[4:] + iban[:4]
digits = int(''.join(str(int(char, 36)) for char in check_chars)) # BASE 36: 0..9,A..Z -> 0..35
if digits % 97 != 1:
raise ValidationError(_("This IBAN does not pass the validation check, please verify it."))
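# Worked mod-97 check (illustrative) for 'BE68539007547034':
#   check_chars = '539007547034BE68'
#   digits = 539007547034111468  (base 36: 'B' -> 11, 'E' -> 14)
#   539007547034111468 % 97 == 1, so this IBAN passes the check.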
class ResPartnerBank(models.Model):
_inherit = "res.partner.bank"
@api.one
@api.depends('acc_number')
def _compute_acc_type(self):
try:
validate_iban(self.acc_number)
self.acc_type = 'iban'
except ValidationError:
super(ResPartnerBank, self)._compute_acc_type()
def get_bban(self):
if self.acc_type != 'iban':
raise UserError(_("Cannot compute the BBAN because the account number is not an IBAN."))
return get_bban_from_iban(self.acc_number)
@api.model
def create(self, vals):
if (vals.get('acc_type') == 'iban') and vals.get('acc_number'):
vals['acc_number'] = pretty_iban(normalize_iban(vals['acc_number']))
return super(ResPartnerBank, self).create(vals)
@api.multi
def write(self, vals):
if (vals.get('acc_type') == 'iban') and vals.get('acc_number'):
vals['acc_number'] = pretty_iban(normalize_iban(vals['acc_number']))
return super(ResPartnerBank, self).write(vals)
@api.one
@api.constrains('acc_number')
def _check_iban(self):
if self.acc_type == 'iban':
validate_iban(self.acc_number)
# Map ISO 3166-1 -> IBAN template, as described here :
# http://en.wikipedia.org/wiki/International_Bank_Account_Number#IBAN_formats_by_country
_map_iban_template = {
'ad': 'ADkk BBBB SSSS CCCC CCCC CCCC', # Andorra
'ae': 'AEkk BBBC CCCC CCCC CCCC CCC', # United Arab Emirates
'al': 'ALkk BBBS SSSK CCCC CCCC CCCC CCCC', # Albania
'at': 'ATkk BBBB BCCC CCCC CCCC', # Austria
'az': 'AZkk BBBB CCCC CCCC CCCC CCCC CCCC', # Azerbaijan
'ba': 'BAkk BBBS SSCC CCCC CCKK', # Bosnia and Herzegovina
'be': 'BEkk BBBC CCCC CCXX', # Belgium
'bg': 'BGkk BBBB SSSS DDCC CCCC CC', # Bulgaria
'bh': 'BHkk BBBB CCCC CCCC CCCC CC', # Bahrain
'br': 'BRkk BBBB BBBB SSSS SCCC CCCC CCCT N', # Brazil
'ch': 'CHkk BBBB BCCC CCCC CCCC C', # Switzerland
'cr': 'CRkk BBBC CCCC CCCC CCCC C', # Costa Rica
'cy': 'CYkk BBBS SSSS CCCC CCCC CCCC CCCC', # Cyprus
'cz': 'CZkk BBBB SSSS SSCC CCCC CCCC', # Czech Republic
'de': 'DEkk BBBB BBBB CCCC CCCC CC', # Germany
'dk': 'DKkk BBBB CCCC CCCC CC', # Denmark
'do': 'DOkk BBBB CCCC CCCC CCCC CCCC CCCC', # Dominican Republic
'ee': 'EEkk BBSS CCCC CCCC CCCK', # Estonia
'es': 'ESkk BBBB SSSS KKCC CCCC CCCC', # Spain
'fi': 'FIkk BBBB BBCC CCCC CK', # Finland
'fo': 'FOkk CCCC CCCC CCCC CC', # Faroe Islands
'fr': 'FRkk BBBB BGGG GGCC CCCC CCCC CKK', # France
'gb': 'GBkk BBBB SSSS SSCC CCCC CC', # United Kingdom
'ge': 'GEkk BBCC CCCC CCCC CCCC CC', # Georgia
'gi': 'GIkk BBBB CCCC CCCC CCCC CCC', # Gibraltar
'gl': 'GLkk BBBB CCCC CCCC CC', # Greenland
'gr': 'GRkk BBBS SSSC CCCC CCCC CCCC CCC', # Greece
'gt': 'GTkk BBBB MMTT CCCC CCCC CCCC CCCC', # Guatemala
'hr': 'HRkk BBBB BBBC CCCC CCCC C', # Croatia
'hu': 'HUkk BBBS SSSC CCCC CCCC CCCC CCCC', # Hungary
'ie': 'IEkk BBBB SSSS SSCC CCCC CC', # Ireland
'il': 'ILkk BBBS SSCC CCCC CCCC CCC', # Israel
'is': 'ISkk BBBB SSCC CCCC XXXX XXXX XX', # Iceland
'it': 'ITkk KBBB BBSS SSSC CCCC CCCC CCC', # Italy
'jo': 'JOkk BBBB NNNN CCCC CCCC CCCC CCCC CC', # Jordan
'kw': 'KWkk BBBB CCCC CCCC CCCC CCCC CCCC CC', # Kuwait
'kz': 'KZkk BBBC CCCC CCCC CCCC', # Kazakhstan
'lb': 'LBkk BBBB CCCC CCCC CCCC CCCC CCCC', # Lebanon
'li': 'LIkk BBBB BCCC CCCC CCCC C', # Liechtenstein
'lt': 'LTkk BBBB BCCC CCCC CCCC', # Lithuania
'lu': 'LUkk BBBC CCCC CCCC CCCC', # Luxembourg
'lv': 'LVkk BBBB CCCC CCCC CCCC C', # Latvia
'mc': 'MCkk BBBB BGGG GGCC CCCC CCCC CKK', # Monaco
'md': 'MDkk BBCC CCCC CCCC CCCC CCCC', # Moldova
'me': 'MEkk BBBC CCCC CCCC CCCC KK', # Montenegro
'mk': 'MKkk BBBC CCCC CCCC CKK', # Macedonia
'mr': 'MRkk BBBB BSSS SSCC CCCC CCCC CKK', # Mauritania
'mt': 'MTkk BBBB SSSS SCCC CCCC CCCC CCCC CCC', # Malta
'mu': 'MUkk BBBB BBSS CCCC CCCC CCCC CCCC CC', # Mauritius
'nl': 'NLkk BBBB CCCC CCCC CC', # Netherlands
'no': 'NOkk BBBB CCCC CCK', # Norway
'pk': 'PKkk BBBB CCCC CCCC CCCC CCCC', # Pakistan
'pl': 'PLkk BBBS SSSK CCCC CCCC CCCC CCCC', # Poland
'ps': 'PSkk BBBB XXXX XXXX XCCC CCCC CCCC C', # Palestinian
'pt': 'PTkk BBBB SSSS CCCC CCCC CCCK K', # Portugal
'qa': 'QAkk BBBB CCCC CCCC CCCC CCCC CCCC C', # Qatar
'ro': 'ROkk BBBB CCCC CCCC CCCC CCCC', # Romania
'rs': 'RSkk BBBC CCCC CCCC CCCC KK', # Serbia
'sa': 'SAkk BBCC CCCC CCCC CCCC CCCC', # Saudi Arabia
'se': 'SEkk BBBB CCCC CCCC CCCC CCCC', # Sweden
'si': 'SIkk BBSS SCCC CCCC CKK', # Slovenia
'sk': 'SKkk BBBB SSSS SSCC CCCC CCCC', # Slovakia
'sm': 'SMkk KBBB BBSS SSSC CCCC CCCC CCC', # San Marino
'tn': 'TNkk BBSS SCCC CCCC CCCC CCCC', # Tunisia
'tr': 'TRkk BBBB BRCC CCCC CCCC CCCC CC', # Turkey
'vg': 'VGkk BBBB CCCC CCCC CCCC CCCC', # Virgin Islands
'xk': 'XKkk BBBB CCCC CCCC CCCC', # Kosovo
}
|
agpl-3.0
|
h4ck3rm1k3/pip
|
pip/_vendor/requests/packages/chardet/gb2312prober.py
|
2994
|
1681
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import GB2312DistributionAnalysis
from .mbcssm import GB2312SMModel
class GB2312Prober(MultiByteCharSetProber):
def __init__(self):
MultiByteCharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(GB2312SMModel)
self._mDistributionAnalyzer = GB2312DistributionAnalysis()
self.reset()
def get_charset_name(self):
return "GB2312"
|
mit
|
rosedu/synergy
|
tools/gtest-1.6.0/test/gtest_env_var_test.py
|
2408
|
3487
|
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly parses environment variables."""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import gtest_test_utils
IS_WINDOWS = os.name == 'nt'
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_env_var_test_')
environ = os.environ.copy()
def AssertEq(expected, actual):
if expected != actual:
    print('Expected: %s' % (expected,))
    print('  Actual: %s' % (actual,))
raise AssertionError
def SetEnvVar(env_var, value):
"""Sets the env variable to 'value'; unsets it when 'value' is None."""
if value is not None:
environ[env_var] = value
elif env_var in environ:
del environ[env_var]
def GetFlag(flag):
"""Runs gtest_env_var_test_ and returns its output."""
args = [COMMAND]
if flag is not None:
args += [flag]
return gtest_test_utils.Subprocess(args, env=environ).output
def TestFlag(flag, test_val, default_val):
"""Verifies that the given flag is affected by the corresponding env var."""
env_var = 'GTEST_' + flag.upper()
SetEnvVar(env_var, test_val)
AssertEq(test_val, GetFlag(flag))
SetEnvVar(env_var, None)
AssertEq(default_val, GetFlag(flag))
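# Worked example (illustrative): TestFlag('color', 'yes', 'auto') sets
# GTEST_COLOR=yes, expects the test binary to report color=yes, then unsets
# the variable and expects the built-in default, color=auto.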
class GTestEnvVarTest(gtest_test_utils.TestCase):
def testEnvVarAffectsFlag(self):
"""Tests that environment variable should affect the corresponding flag."""
TestFlag('break_on_failure', '1', '0')
TestFlag('color', 'yes', 'auto')
TestFlag('filter', 'FooTest.Bar', '*')
TestFlag('output', 'xml:tmp/foo.xml', '')
TestFlag('print_time', '0', '1')
TestFlag('repeat', '999', '1')
TestFlag('throw_on_failure', '1', '0')
TestFlag('death_test_style', 'threadsafe', 'fast')
TestFlag('catch_exceptions', '0', '1')
if IS_LINUX:
TestFlag('death_test_use_fork', '1', '0')
TestFlag('stack_trace_depth', '0', '100')
if __name__ == '__main__':
gtest_test_utils.Main()
|
gpl-2.0
|
mfwarren/createbucket
|
createbucket/create_s3_bucket.py
|
1
|
3009
|
#!/usr/bin/env python
from __future__ import print_function
import os
import re
import boto
from boto.s3.connection import S3Connection
from boto.iam.connection import IAMConnection
import inquirer
key_policy_json = """{
"Statement": [
{
"Action": "iam:*AccessKey*",
"Effect": "Allow",
"Resource": "%s"
}
]
}"""
user_bucket_policy_json = """{
"Statement": [
{
"Effect": "Allow",
"Action": %s,
"Resource": [
"arn:aws:s3:::%s",
"arn:aws:s3:::%s/*"
]
},
{
"Effect": "Deny",
"Action": ["s3:*"],
"NotResource": [
"arn:aws:s3:::%s",
"arn:aws:s3:::%s/*"
]
}]}"""
public_read_bucket_policy = """{
"Statement":[{
"Sid":"AllowPublicRead",
"Effect":"Allow",
"Principal": {
"AWS": "*"
},
"Action":["s3:GetObject"],
"Resource":["arn:aws:s3:::%s/*"
]
}]}"""
questions = [
inquirer.Text('bucket_name',
message='Bucket Name to create',
validate=lambda _, x: re.match('[a-z0-9]+', x),
),
inquirer.Text('username',
message='Username to create',
validate=lambda _, value: re.match('[a-z0-9]+', value),
),
inquirer.List('acl',
message='Choose ACL Policy for this user',
choices=['read-only', 'all']
),
inquirer.List('public',
message='Should this bucket be readable by public',
choices=['Yes', 'No']
)
]
def s3_bucket_maker(answers):
access_key = os.environ['ACCESS_KEY_ID']
secret_key = os.environ['SECRET_ACCESS_KEY']
s3conn = S3Connection(access_key, secret_key)
iamconn = IAMConnection(access_key, secret_key)
bucket = s3conn.create_bucket(answers['bucket_name'])
print("BUCKET: %s created" % answers['bucket_name'])
user = None
try:
user = iamconn.get_user(answers['username'])
    except boto.exception.BotoServerError as e:
if e.status == 404:
print('User not found... creating one')
user = iamconn.create_user(answers['username'])
keys = iamconn.create_access_key(answers['username'])
print(keys)
else:
raise e
print(user)
policy = key_policy_json % (user.arn)
iamconn.put_user_policy(answers['username'], 'UserKeyPolicy', policy)
actions = "[\"s3:*\"]"
if (answers['acl'] == 'read-only'):
actions = "[\"s3:ListBucket\",\"s3:GetObject\",\"s3:GetObjectVersion\"]"
policy = user_bucket_policy_json % (actions, answers['bucket_name'], answers['bucket_name'], answers['bucket_name'], answers['bucket_name'])
iamconn.put_user_policy(answers['username'], 'UserS3Policy', policy)
if (answers['public'] == 'Yes'):
bucket.set_policy(public_read_bucket_policy % answers['bucket_name'])
def main():
inq = inquirer.prompt(questions)
s3_bucket_maker(inq)
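# Entry point added for illustration: the original defines main() but never
# invokes it; assuming the script is meant to be run directly (per the
# shebang above), a standard guard would be:
if __name__ == '__main__':
    main()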
|
mit
|
slohse/ansible
|
lib/ansible/modules/network/nxos/nxos_vrrp.py
|
68
|
12591
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_vrrp
extends_documentation_fragment: nxos
version_added: "2.1"
short_description: Manages VRRP configuration on NX-OS switches.
description:
- Manages VRRP configuration on NX-OS switches.
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- VRRP feature needs to be enabled first on the system.
- SVIs must exist before using this module.
- Interface must be a L3 port before using this module.
- C(state=absent) removes the VRRP group if it exists on the device.
- VRRP cannot be configured on loopback interfaces.
options:
group:
description:
- VRRP group number.
required: true
interface:
description:
- Full name of interface that is being managed for VRRP.
required: true
interval:
description:
      - Time interval between advertisements, or the 'default' keyword.
required: false
default: 1
version_added: 2.6
priority:
description:
- VRRP priority or 'default' keyword
default: 100
preempt:
description:
- Enable/Disable preempt.
type: bool
default: 'yes'
vip:
description:
- VRRP virtual IP address or 'default' keyword
authentication:
description:
- Clear text authentication string or 'default' keyword
admin_state:
description:
- Used to enable or disable the VRRP process.
choices: ['shutdown', 'no shutdown', 'default']
default: shutdown
state:
description:
- Specify desired state of the resource.
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- name: Ensure vrrp group 100 and vip 10.1.100.1 is on vlan10
nxos_vrrp:
interface: vlan10
group: 100
vip: 10.1.100.1
- name: Ensure removal of the vrrp group config
# vip is required to ensure the user knows what they are removing
nxos_vrrp:
interface: vlan10
group: 100
vip: 10.1.100.1
state: absent
- name: Re-config with more params
nxos_vrrp:
interface: vlan10
group: 100
vip: 10.1.100.1
preempt: false
priority: 130
authentication: AUTHKEY
'''
RETURN = '''
commands:
description: commands sent to the device
returned: always
type: list
sample: ["interface vlan10", "vrrp 150", "address 10.1.15.1",
"authentication text testing", "no shutdown"]
'''
from ansible.module_utils.network.nxos.nxos import load_config, run_commands
from ansible.module_utils.network.nxos.nxos import get_capabilities, nxos_argument_spec
from ansible.module_utils.network.nxos.nxos import get_interface_type
from ansible.module_utils.basic import AnsibleModule
PARAM_TO_DEFAULT_KEYMAP = {
'priority': '100',
'interval': '1',
'vip': '0.0.0.0',
'admin_state': 'shutdown',
}
def execute_show_command(command, module):
if 'show run' not in command:
output = 'json'
else:
output = 'text'
commands = [{
'command': command,
'output': output,
}]
return run_commands(module, commands)[0]
def apply_key_map(key_map, table):
new_dict = {}
for key, value in table.items():
new_key = key_map.get(key)
if new_key:
if value:
new_dict[new_key] = str(value)
else:
new_dict[new_key] = value
return new_dict
def is_default(interface, module):
command = 'show run interface {0}'.format(interface)
try:
body = execute_show_command(command, module)
if 'invalid' in body.lower():
return 'DNE'
else:
raw_list = body.split('\n')
if raw_list[-1].startswith('interface'):
return True
else:
return False
except (KeyError):
return 'DNE'
def get_interface_mode(interface, intf_type, module):
command = 'show interface {0}'.format(interface)
interface = {}
mode = 'unknown'
body = execute_show_command(command, module)
interface_table = body['TABLE_interface']['ROW_interface']
name = interface_table.get('interface')
if intf_type in ['ethernet', 'portchannel']:
mode = str(interface_table.get('eth_mode', 'layer3'))
if mode == 'access' or mode == 'trunk':
mode = 'layer2'
elif intf_type == 'svi':
mode = 'layer3'
return mode, name
def get_vrr_status(group, module, interface):
command = 'show run all | section interface.{0}$'.format(interface)
body = execute_show_command(command, module)
vrf_index = None
admin_state = 'shutdown'
if body:
splitted_body = body.splitlines()
for index in range(0, len(splitted_body) - 1):
if splitted_body[index].strip() == 'vrrp {0}'.format(group):
vrf_index = index
vrf_section = splitted_body[vrf_index::]
for line in vrf_section:
if line.strip() == 'no shutdown':
admin_state = 'no shutdown'
break
return admin_state
def get_existing_vrrp(interface, group, module, name):
command = 'show vrrp detail interface {0}'.format(interface)
body = execute_show_command(command, module)
vrrp = {}
vrrp_key = {
'sh_group_id': 'group',
'sh_vip_addr': 'vip',
'sh_priority': 'priority',
'sh_group_preempt': 'preempt',
'sh_auth_text': 'authentication',
'sh_adv_interval': 'interval'
}
try:
vrrp_table = body['TABLE_vrrp_group']
except (AttributeError, IndexError, TypeError):
return {}
if isinstance(vrrp_table, dict):
vrrp_table = [vrrp_table]
for each_vrrp in vrrp_table:
vrrp_row = each_vrrp['ROW_vrrp_group']
parsed_vrrp = apply_key_map(vrrp_key, vrrp_row)
if parsed_vrrp['preempt'] == 'Disable':
parsed_vrrp['preempt'] = False
elif parsed_vrrp['preempt'] == 'Enable':
parsed_vrrp['preempt'] = True
if parsed_vrrp['group'] == group:
parsed_vrrp['admin_state'] = get_vrr_status(group, module, name)
return parsed_vrrp
return vrrp
def get_commands_config_vrrp(delta, existing, group):
commands = []
CMDS = {
'priority': 'priority {0}',
'preempt': 'preempt',
'vip': 'address {0}',
'interval': 'advertisement-interval {0}',
'auth': 'authentication text {0}',
'admin_state': '{0}',
}
for arg in ['vip', 'priority', 'interval', 'admin_state']:
val = delta.get(arg)
if val == 'default':
val = PARAM_TO_DEFAULT_KEYMAP.get(arg)
if val != existing.get(arg):
commands.append((CMDS.get(arg)).format(val))
elif val:
commands.append((CMDS.get(arg)).format(val))
preempt = delta.get('preempt')
auth = delta.get('authentication')
if preempt:
commands.append(CMDS.get('preempt'))
elif preempt is False:
commands.append('no ' + CMDS.get('preempt'))
if auth:
if auth != 'default':
commands.append((CMDS.get('auth')).format(auth))
elif existing.get('authentication'):
commands.append('no authentication')
if commands:
commands.insert(0, 'vrrp {0}'.format(group))
return commands
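# Worked example (hypothetical values): a delta of
#   {'vip': '10.1.100.1', 'priority': '130'}
# applied against an empty existing config for group 100 yields
#   ['vrrp 100', 'address 10.1.100.1', 'priority 130']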
def flatten_list(command_lists):
flat_command_list = []
for command in command_lists:
if isinstance(command, list):
flat_command_list.extend(command)
else:
flat_command_list.append(command)
return flat_command_list
def validate_params(param, module):
value = module.params[param]
if param == 'group':
try:
if (int(value) < 1 or int(value) > 255):
raise ValueError
except ValueError:
module.fail_json(msg="Warning! 'group' must be an integer between"
" 1 and 255", group=value)
elif param == 'priority':
try:
if (int(value) < 1 or int(value) > 254):
raise ValueError
except ValueError:
module.fail_json(msg="Warning! 'priority' must be an integer "
"between 1 and 254", priority=value)
def main():
argument_spec = dict(
group=dict(required=True, type='str'),
interface=dict(required=True),
interval=dict(required=False, type='str'),
priority=dict(required=False, type='str'),
preempt=dict(required=False, type='bool'),
vip=dict(required=False, type='str'),
admin_state=dict(required=False, type='str',
choices=['shutdown', 'no shutdown', 'default'],
default='shutdown'),
authentication=dict(required=False, type='str'),
state=dict(choices=['absent', 'present'], required=False, default='present')
)
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
warnings = list()
results = {'changed': False, 'commands': [], 'warnings': warnings}
state = module.params['state']
interface = module.params['interface'].lower()
group = module.params['group']
priority = module.params['priority']
interval = module.params['interval']
preempt = module.params['preempt']
vip = module.params['vip']
authentication = module.params['authentication']
admin_state = module.params['admin_state']
device_info = get_capabilities(module)
network_api = device_info.get('network_api', 'nxapi')
if state == 'present' and not vip:
module.fail_json(msg='the "vip" param is required when state=present')
intf_type = get_interface_type(interface)
if (intf_type != 'ethernet' and network_api == 'cliconf'):
if is_default(interface, module) == 'DNE':
module.fail_json(msg='That interface does not exist yet. Create '
'it first.', interface=interface)
if intf_type == 'loopback':
module.fail_json(msg="Loopback interfaces don't support VRRP.",
interface=interface)
mode, name = get_interface_mode(interface, intf_type, module)
if mode == 'layer2':
module.fail_json(msg='That interface is a layer2 port.\nMake it '
'a layer 3 port first.', interface=interface)
args = dict(group=group, priority=priority, preempt=preempt,
vip=vip, authentication=authentication, interval=interval,
admin_state=admin_state)
proposed = dict((k, v) for k, v in args.items() if v is not None)
existing = get_existing_vrrp(interface, group, module, name)
changed = False
end_state = existing
commands = []
if state == 'present':
delta = dict(
set(proposed.items()).difference(existing.items()))
if delta:
command = get_commands_config_vrrp(delta, existing, group)
if command:
commands.append(command)
elif state == 'absent':
if existing:
commands.append(['no vrrp {0}'.format(group)])
if commands:
commands.insert(0, ['interface {0}'.format(interface)])
commands = flatten_list(commands)
results['commands'] = commands
results['changed'] = True
if not module.check_mode:
load_config(module, commands)
if 'configure' in commands:
commands.pop(0)
module.exit_json(**results)
if __name__ == '__main__':
main()
|
gpl-3.0
|
ominux/skia
|
tools/skpdiff/skpdiff_server.py
|
161
|
24230
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import BaseHTTPServer
import json
import os
import os.path
import re
import subprocess
import sys
import tempfile
import urllib2
# Grab the script path because that is where all the static assets are
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Find the tools directory for python imports
TOOLS_DIR = os.path.dirname(SCRIPT_DIR)
# Find the root of the skia trunk for finding skpdiff binary
SKIA_ROOT_DIR = os.path.dirname(TOOLS_DIR)
# Find the default location of gm expectations
DEFAULT_GM_EXPECTATIONS_DIR = os.path.join(SKIA_ROOT_DIR, 'expectations', 'gm')
# Imports from within Skia
if TOOLS_DIR not in sys.path:
sys.path.append(TOOLS_DIR)
GM_DIR = os.path.join(SKIA_ROOT_DIR, 'gm')
if GM_DIR not in sys.path:
sys.path.append(GM_DIR)
import gm_json
import jsondiff
# A simple dictionary of file name extensions to MIME types. The empty string
# entry is used as the default when no extension was given or if the extension
# has no entry in this dictionary.
MIME_TYPE_MAP = {'': 'application/octet-stream',
'html': 'text/html',
'css': 'text/css',
'png': 'image/png',
'js': 'application/javascript',
'json': 'application/json'
}
IMAGE_FILENAME_RE = re.compile(gm_json.IMAGE_FILENAME_PATTERN)
SKPDIFF_INVOKE_FORMAT = '{} --jsonp=false -o {} -f {} {}'
def get_skpdiff_path(user_path=None):
"""Find the skpdiff binary.
@param user_path If none, searches in Release and Debug out directories of
the skia root. If set, checks that the path is a real file and
returns it.
"""
skpdiff_path = None
possible_paths = []
# Use the user given path, or try out some good default paths.
if user_path:
possible_paths.append(user_path)
else:
possible_paths.append(os.path.join(SKIA_ROOT_DIR, 'out',
'Release', 'skpdiff'))
possible_paths.append(os.path.join(SKIA_ROOT_DIR, 'out',
'Release', 'skpdiff.exe'))
possible_paths.append(os.path.join(SKIA_ROOT_DIR, 'out',
'Debug', 'skpdiff'))
possible_paths.append(os.path.join(SKIA_ROOT_DIR, 'out',
'Debug', 'skpdiff.exe'))
# Use the first path that actually points to the binary
for possible_path in possible_paths:
if os.path.isfile(possible_path):
skpdiff_path = possible_path
break
# If skpdiff was not found, print out diagnostic info for the user.
if skpdiff_path is None:
print('Could not find skpdiff binary. Either build it into the ' +
'default directory, or specify the path on the command line.')
print('skpdiff paths tried:')
for possible_path in possible_paths:
print(' ', possible_path)
return skpdiff_path
def download_file(url, output_path):
"""Download the file at url and place it in output_path"""
reader = urllib2.urlopen(url)
with open(output_path, 'wb') as writer:
writer.write(reader.read())
def download_gm_image(image_name, image_path, hash_val):
"""Download the gm result into the given path.
@param image_name The GM file name, for example imageblur_gpu.png.
@param image_path Path to place the image.
@param hash_val The hash value of the image.
"""
if hash_val is None:
return
    # Separate the test name from an image name
image_match = IMAGE_FILENAME_RE.match(image_name)
test_name = image_match.group(1)
# Calculate the URL of the requested image
image_url = gm_json.CreateGmActualUrl(
test_name, gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5, hash_val)
# Download the image as requested
download_file(image_url, image_path)
def get_image_set_from_skpdiff(skpdiff_records):
"""Get the set of all images references in the given records.
@param skpdiff_records An array of records, which are dictionary objects.
"""
expected_set = frozenset([r['baselinePath'] for r in skpdiff_records])
actual_set = frozenset([r['testPath'] for r in skpdiff_records])
return expected_set | actual_set
def set_expected_hash_in_json(expected_results_json, image_name, hash_value):
"""Set the expected hash for the object extracted from
    expected-results.json. Note that this only works with bitmap-64bitMD5 hash
types.
@param expected_results_json The Python dictionary with the results to
modify.
@param image_name The name of the image to set the hash of.
@param hash_value The hash to set for the image.
"""
expected_results = expected_results_json[gm_json.JSONKEY_EXPECTEDRESULTS]
if image_name in expected_results:
expected_results[image_name][gm_json.JSONKEY_EXPECTEDRESULTS_ALLOWEDDIGESTS][0][1] = hash_value
else:
expected_results[image_name] = {
gm_json.JSONKEY_EXPECTEDRESULTS_ALLOWEDDIGESTS:
[
[
gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5,
hash_value
]
]
}
def get_head_version(path):
"""Get the version of the file at the given path stored inside the HEAD of
the git repository. It is returned as a string.
    @param path The path of the file whose HEAD version is returned. It is assumed the
path is inside a git repo rooted at SKIA_ROOT_DIR.
"""
# git-show will not work with absolute paths. This ensures we give it a path
# relative to the skia root. This path also has to use forward slashes, even
# on windows.
git_path = os.path.relpath(path, SKIA_ROOT_DIR).replace('\\', '/')
git_show_proc = subprocess.Popen(['git', 'show', 'HEAD:' + git_path],
stdout=subprocess.PIPE)
# When invoked outside a shell, git will output the last committed version
# of the file directly to stdout.
git_version_content, _ = git_show_proc.communicate()
return git_version_content
class GMInstance:
"""Information about a GM test result on a specific device:
- device_name = the name of the device that rendered it
- image_name = the GM test name and config
- expected_hash = the current expected hash value
- actual_hash = the actual hash value
- is_rebaselined = True if actual_hash is what is currently in the expected
results file, False otherwise.
"""
def __init__(self,
device_name, image_name,
expected_hash, actual_hash,
is_rebaselined):
self.device_name = device_name
self.image_name = image_name
self.expected_hash = expected_hash
self.actual_hash = actual_hash
self.is_rebaselined = is_rebaselined
class ExpectationsManager:
def __init__(self, expectations_dir, expected_name, updated_name,
skpdiff_path):
"""
@param expectations_dir The directory to traverse for results files.
This should resemble expectations/gm in the Skia trunk.
@param expected_name The name of the expected result files. These
are in the format of expected-results.json.
@param updated_name The name of the updated expected result files.
Normally this matches --expectations-filename-output for the
rebaseline.py tool.
@param skpdiff_path The path used to execute the skpdiff command.
"""
self._expectations_dir = expectations_dir
self._expected_name = expected_name
self._updated_name = updated_name
self._skpdiff_path = skpdiff_path
self._generate_gm_comparison()
def _generate_gm_comparison(self):
"""Generate all the data needed to compare GMs:
- determine which GMs changed
- download the changed images
- compare them with skpdiff
"""
# Get the expectations and compare them with actual hashes
self._get_expectations()
# Create a temporary file tree that makes sense for skpdiff to operate
# on. We take the realpath of the new temp directory because some OSs
# (*cough* osx) put the temp directory behind a symlink that gets
# resolved later down the pipeline and breaks the image map.
image_output_dir = os.path.realpath(tempfile.mkdtemp('skpdiff'))
expected_image_dir = os.path.join(image_output_dir, 'expected')
actual_image_dir = os.path.join(image_output_dir, 'actual')
os.mkdir(expected_image_dir)
os.mkdir(actual_image_dir)
# Download expected and actual images that differed into the temporary
# file tree.
self._download_expectation_images(expected_image_dir, actual_image_dir)
# Invoke skpdiff with our downloaded images and place its results in the
# temporary directory.
self._skpdiff_output_path = os.path.join(image_output_dir,
'skpdiff_output.json')
skpdiff_cmd = SKPDIFF_INVOKE_FORMAT.format(self._skpdiff_path,
self._skpdiff_output_path,
expected_image_dir,
actual_image_dir)
os.system(skpdiff_cmd)
self._load_skpdiff_output()
def _get_expectations(self):
"""Fills self._expectations with GMInstance objects for each test whose
expectation is different between the following two files:
- the local filesystem's updated results file
- git's head version of the expected results file
"""
differ = jsondiff.GMDiffer()
self._expectations = []
for root, dirs, files in os.walk(self._expectations_dir):
for expectation_file in files:
# There are many files in the expectations directory. We only
# care about expected results.
if expectation_file != self._expected_name:
continue
# Get the name of the results file, and be sure there is an
# updated result to compare against. If there is not, there is
# no point in diffing this device.
expected_file_path = os.path.join(root, self._expected_name)
updated_file_path = os.path.join(root, self._updated_name)
if not os.path.isfile(updated_file_path):
continue
# Always get the expected results from git because we may have
# changed them in a previous instance of the server.
expected_contents = get_head_version(expected_file_path)
updated_contents = None
with open(updated_file_path, 'rb') as updated_file:
updated_contents = updated_file.read()
# Read the expected results on disk to determine what we've
# already rebaselined.
                committed_contents = None
                with open(expected_file_path, 'rb') as expected_file:
                    committed_contents = expected_file.read()
# Find all expectations that did not match.
expected_diff = differ.GenerateDiffDictFromStrings(
expected_contents,
updated_contents)
# Generate a set of images that have already been rebaselined
# onto disk.
rebaselined_diff = differ.GenerateDiffDictFromStrings(
expected_contents,
                    committed_contents)
rebaselined_set = set(rebaselined_diff.keys())
# The name of the device corresponds to the name of the folder
# we are in.
device_name = os.path.basename(root)
# Store old and new versions of the expectation for each GM
for image_name, hashes in expected_diff.iteritems():
self._expectations.append(
GMInstance(device_name, image_name,
hashes['old'], hashes['new'],
image_name in rebaselined_set))
def _load_skpdiff_output(self):
"""Loads the results of skpdiff and annotates them with whether they
        have already been rebaselined or not. The resulting data is stored in
self.skpdiff_records."""
self.skpdiff_records = None
with open(self._skpdiff_output_path, 'rb') as skpdiff_output_file:
self.skpdiff_records = json.load(skpdiff_output_file)['records']
for record in self.skpdiff_records:
record['isRebaselined'] = self.image_map[record['baselinePath']][1].is_rebaselined
def _download_expectation_images(self, expected_image_dir, actual_image_dir):
"""Download the expected and actual images for the _expectations array.
@param expected_image_dir The directory to download expected images
into.
@param actual_image_dir The directory to download actual images into.
"""
image_map = {}
# Look through expectations and download their images.
for expectation in self._expectations:
# Build appropriate paths to download the images into.
expected_image_path = os.path.join(expected_image_dir,
expectation.device_name + '-' +
expectation.image_name)
actual_image_path = os.path.join(actual_image_dir,
expectation.device_name + '-' +
expectation.image_name)
print('Downloading %s for device %s' % (
expectation.image_name, expectation.device_name))
# Download images
download_gm_image(expectation.image_name,
expected_image_path,
expectation.expected_hash)
download_gm_image(expectation.image_name,
actual_image_path,
expectation.actual_hash)
# Annotate the expectations with where the images were downloaded
# to.
expectation.expected_image_path = expected_image_path
expectation.actual_image_path = actual_image_path
# Map the image paths back to the expectations.
image_map[expected_image_path] = (False, expectation)
image_map[actual_image_path] = (True, expectation)
self.image_map = image_map
def _set_expected_hash(self, device_name, image_name, hash_value):
"""Set the expected hash for the image of the given device. This always
writes directly to the expected results file of the given device
@param device_name The name of the device to write the hash to.
@param image_name The name of the image whose hash to set.
@param hash_value The value of the hash to set.
"""
# Retrieve the expected results file as it is in the working tree
json_path = os.path.join(self._expectations_dir, device_name,
self._expected_name)
expectations = gm_json.LoadFromFile(json_path)
# Set the specified hash.
set_expected_hash_in_json(expectations, image_name, hash_value)
# Write it out to disk using gm_json to keep the formatting consistent.
gm_json.WriteToFile(expectations, json_path)
def commit_rebaselines(self, rebaselines):
"""Sets the expected results file to use the hashes of the images in
        the rebaselines list. If an expected result image is not in rebaselines
at all, the old hash will be used.
@param rebaselines A list of image paths to use the hash of.
"""
# Reset all expectations to their old hashes because some of them may
# have been set to the new hash by a previous call to this function.
for expectation in self._expectations:
expectation.is_rebaselined = False
self._set_expected_hash(expectation.device_name,
expectation.image_name,
expectation.expected_hash)
# Take all the images to rebaseline
for image_path in rebaselines:
# Get the metadata about the image at the path.
is_actual, expectation = self.image_map[image_path]
expectation.is_rebaselined = is_actual
expectation_hash = expectation.actual_hash if is_actual else\
expectation.expected_hash
# Write out that image's hash directly to the expected results file.
self._set_expected_hash(expectation.device_name,
expectation.image_name,
expectation_hash)
self._load_skpdiff_output()
class SkPDiffHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def send_file(self, file_path):
# Grab the extension if there is one
extension = os.path.splitext(file_path)[1]
if len(extension) >= 1:
extension = extension[1:]
# Determine the MIME type of the file from its extension
mime_type = MIME_TYPE_MAP.get(extension, MIME_TYPE_MAP[''])
# Open the file and send it over HTTP
if os.path.isfile(file_path):
with open(file_path, 'rb') as sending_file:
self.send_response(200)
self.send_header('Content-type', mime_type)
self.end_headers()
self.wfile.write(sending_file.read())
else:
self.send_error(404)
def serve_if_in_dir(self, dir_path, file_path):
# Determine if the file exists relative to the given dir_path AND exists
# under the dir_path. This is to prevent accidentally serving files
# outside the directory intended using symlinks, or '../'.
real_path = os.path.normpath(os.path.join(dir_path, file_path))
if os.path.commonprefix([real_path, dir_path]) == dir_path:
if os.path.isfile(real_path):
self.send_file(real_path)
return True
return False
def do_GET(self):
# Simple rewrite rule of the root path to 'viewer.html'
if self.path == '' or self.path == '/':
self.path = '/viewer.html'
# The [1:] chops off the leading '/'
file_path = self.path[1:]
        # Handle skpdiff_output.json manually because it was processed by the
# server when it was started and does not exist as a file.
if file_path == 'skpdiff_output.json':
self.send_response(200)
self.send_header('Content-type', MIME_TYPE_MAP['json'])
self.end_headers()
# Add JSONP padding to the JSON because the web page expects it. It
# expects it because it was designed to run with or without a web
# server. Without a web server, the only way to load JSON is with
# JSONP.
skpdiff_records = self.server.expectations_manager.skpdiff_records
self.wfile.write('var SkPDiffRecords = ')
json.dump({'records': skpdiff_records}, self.wfile)
self.wfile.write(';')
return
# Attempt to send static asset files first.
if self.serve_if_in_dir(SCRIPT_DIR, file_path):
return
# WARNING: Serving any file the user wants is incredibly insecure. Its
# redeeming quality is that we only serve gm files on a white list.
if self.path in self.server.image_set:
self.send_file(self.path)
return
# If no file to send was found, just give the standard 404
self.send_error(404)
def do_POST(self):
if self.path == '/commit_rebaselines':
content_length = int(self.headers['Content-length'])
request_data = json.loads(self.rfile.read(content_length))
rebaselines = request_data['rebaselines']
self.server.expectations_manager.commit_rebaselines(rebaselines)
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
self.wfile.write('{"success":true}')
return
        # If we have no handler for this path, give 'em the 404
self.send_error(404)
def run_server(expectations_manager, port=8080):
# It's important to parse the results file so that we can make a set of
# images that the web page might request.
skpdiff_records = expectations_manager.skpdiff_records
image_set = get_image_set_from_skpdiff(skpdiff_records)
# Do not bind to interfaces other than localhost because the server will
# attempt to serve files relative to the root directory as a last resort
# before 404ing. This means all of your files can be accessed from this
# server, so DO NOT let this server listen to anything but localhost.
server_address = ('127.0.0.1', port)
http_server = BaseHTTPServer.HTTPServer(server_address, SkPDiffHandler)
http_server.image_set = image_set
http_server.expectations_manager = expectations_manager
print('Navigate thine browser to: http://{}:{}/'.format(*server_address))
http_server.serve_forever()
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--port', '-p', metavar='PORT',
type=int,
default=8080,
help='port to bind the server to; ' +
'defaults to %(default)s',
)
parser.add_argument('--expectations-dir', metavar='EXPECTATIONS_DIR',
default=DEFAULT_GM_EXPECTATIONS_DIR,
help='path to the gm expectations; ' +
'defaults to %(default)s'
)
parser.add_argument('--expected',
metavar='EXPECTATIONS_FILE_NAME',
default='expected-results.json',
help='the file name of the expectations JSON; ' +
'defaults to %(default)s'
)
parser.add_argument('--updated',
metavar='UPDATED_FILE_NAME',
default='updated-results.json',
help='the file name of the updated expectations JSON;' +
' defaults to %(default)s'
)
parser.add_argument('--skpdiff-path', metavar='SKPDIFF_PATH',
default=None,
help='the path to the skpdiff binary to use; ' +
'defaults to out/Release/skpdiff or out/Default/skpdiff'
)
args = vars(parser.parse_args()) # Convert args into a python dict
# Make sure we have access to an skpdiff binary
skpdiff_path = get_skpdiff_path(args['skpdiff_path'])
if skpdiff_path is None:
sys.exit(1)
# Print out the paths of things for easier debugging
print('script dir :', SCRIPT_DIR)
print('tools dir :', TOOLS_DIR)
print('root dir :', SKIA_ROOT_DIR)
print('expectations dir :', args['expectations_dir'])
print('skpdiff path :', skpdiff_path)
expectations_manager = ExpectationsManager(args['expectations_dir'],
args['expected'],
args['updated'],
skpdiff_path)
run_server(expectations_manager, port=args['port'])
if __name__ == '__main__':
main()
|
apache-2.0
|
garverp/gnuradio
|
gr-blocks/examples/ctrlport/usrp_sink_controller.py
|
24
|
1510
|
#!/usr/bin/env python
import sys
import pmt
from gnuradio.ctrlport.GNURadioControlPortClient import GNURadioControlPortClient
from optparse import OptionParser
parser = OptionParser(usage="%prog: [options]")
parser.add_option("-H", "--host", type="string", default="localhost",
help="Hostname to connect to (default=%default)")
parser.add_option("-p", "--port", type="int", default=9090,
help="Port of Controlport instance on host (default=%default)")
parser.add_option("-a", "--alias", type="string", default="gr uhd usrp sink0",
help="The UHD block's alias to control (default=%default)")
options, args = parser.parse_args()
if(len(args) < 2):
    sys.stderr.write('Not enough arguments: usrp_sink_controller.py [options] <command> <value>\n')
sys.stderr.write('See the "UHD Interface" section of the manual for available commands.\n\n')
sys.exit(1)
port = 'command'
alias = options.alias
hostname = options.host
portnum = options.port
cmd = args[0]
val = args[1]
if(cmd == "tune" or cmd == "time"):
sys.stderr.write("This application currently does not support the 'tune' or 'time' UHD "
"message commands.\n\n")
sys.exit(1)
elif(cmd == "antenna"):
val = pmt.intern(val)
else:
val = pmt.from_double(float(val))
argv = [None, hostname, portnum]
radiosys = GNURadioControlPortClient(argv=argv, rpcmethod='thrift')
radio = radiosys.client
radio.postMessage(alias, port, pmt.cons(pmt.intern(cmd), val))
|
gpl-3.0
|
jaantollander/CrowdDynamics
|
crowddynamics/core/geom2D.py
|
1
|
1734
|
import numba
@numba.jit(['f8(f8[:, :])'], nopython=True, nogil=True, cache=True)
def polygon_area(vertices):
r"""Shoelace formula for computing area of polygon
.. math::
A = \sum_{i=1}^{n} x_i \left(y_{i+1} - y_{i-1}\right), \quad i\mod n
References:
- https://en.wikipedia.org/wiki/Shoelace_formula
- https://stackoverflow.com/questions/451426/how-do-i-calculate-the-area-of-a-2d-polygon/717367#717367
- https://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates
Args:
vertices (numpy.ndarray): Vertices of the polygon
Returns:
float: Area of the polygon
"""
n = len(vertices)
area = 0.0
if n < 3:
return area
x, y = vertices[:, 0], vertices[:, 1]
for i in range(1, n-1):
area += x[i] * (y[i + 1] - y[i - 1])
# i=n-1
area += x[n-1] * (y[0] - y[n - 2])
# i=n
area += x[0] * (y[1] - y[n - 1])
return 0.5 * abs(area)
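# Worked example (illustrative, not part of the original module): the unit
# square has area 1.
#   >>> import numpy as np
#   >>> polygon_area(np.array([[0., 0.], [1., 0.], [1., 1.], [0., 1.]]))
#   1.0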
@numba.jit(['boolean(f8[:], f8[:], f8[:], f8[:])'],
nopython=True, nogil=True, cache=True)
def line_intersect(x0, x1, y0, y1):
"""Test if two lines intersect. Assumes that lines have finite length.
Args:
x0 (numpy.ndarray): Start point of first line
x1 (numpy.ndarray): End point of first line
y0 (numpy.ndarray): Start point of second line
y1 (numpy.ndarray): End point of second line
Returns:
bool:
"""
# FIXME: if u or v is zero vector
u = x1 - x0
v = y1 - y0
b = y0 - x0
d = u[0] * v[1] - u[1] * v[0]
if d == 0:
return False
t0 = b[0] * v[1] - b[1] * v[0]
t1 = b[0] * u[1] - b[1] * u[0]
return 0 <= t0 / d <= 1 and 0 <= t1 / d <= 1
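# Worked example (illustrative): the diagonals of the unit square cross at
# their midpoints, so the segments intersect.
#   >>> import numpy as np
#   >>> line_intersect(np.array([0., 0.]), np.array([1., 1.]),
#   ...                np.array([0., 1.]), np.array([1., 0.]))
#   True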
|
gpl-3.0
|
liorvh/golismero
|
tools/sqlmap/plugins/generic/fingerprint.py
|
8
|
1726
|
#!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.common import Backend
from lib.core.common import readInput
from lib.core.data import logger
from lib.core.enums import OS
from lib.core.exception import SqlmapUndefinedMethod
class Fingerprint:
"""
This class defines generic fingerprint functionalities for plugins.
"""
def __init__(self, dbms):
Backend.forceDbms(dbms)
def getFingerprint(self):
errMsg = "'getFingerprint' method must be defined "
errMsg += "into the specific DBMS plugin"
raise SqlmapUndefinedMethod(errMsg)
def checkDbms(self):
errMsg = "'checkDbms' method must be defined "
errMsg += "into the specific DBMS plugin"
raise SqlmapUndefinedMethod(errMsg)
def checkDbmsOs(self, detailed=False):
errMsg = "'checkDbmsOs' method must be defined "
errMsg += "into the specific DBMS plugin"
raise SqlmapUndefinedMethod(errMsg)
def forceDbmsEnum(self):
pass
def userChooseDbmsOs(self):
warnMsg = "for some reason sqlmap was unable to fingerprint "
warnMsg += "the back-end DBMS operating system"
logger.warn(warnMsg)
msg = "do you want to provide the OS? [(W)indows/(l)inux]"
while True:
os = readInput(msg, default="W")
if os[0].lower() == "w":
Backend.setOs(OS.WINDOWS)
break
elif os[0].lower() == "l":
Backend.setOs(OS.LINUX)
break
else:
warnMsg = "invalid value"
logger.warn(warnMsg)
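# A minimal concrete plugin sketch (hypothetical, not part of this file): a
# DBMS-specific plugin subclasses Fingerprint and overrides the abstract
# methods, e.g.:
#
#   from lib.core.enums import DBMS
#
#   class MySQLFingerprint(Fingerprint):
#       def __init__(self):
#           Fingerprint.__init__(self, DBMS.MYSQL)
#       def getFingerprint(self):
#           return Backend.getDbms()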
|
gpl-2.0
|
nwjs/chromium.src
|
third_party/pycoverage/coverage/execfile.py
|
209
|
5865
|
"""Execute files of Python code."""
import imp, marshal, os, sys
from coverage.backward import exec_code_object, open_source
from coverage.misc import ExceptionDuringRun, NoCode, NoSource
try:
# In Py 2.x, the builtins were in __builtin__
BUILTINS = sys.modules['__builtin__']
except KeyError:
# In Py 3.x, they're in builtins
BUILTINS = sys.modules['builtins']
def rsplit1(s, sep):
"""The same as s.rsplit(sep, 1), but works in 2.3"""
parts = s.split(sep)
return sep.join(parts[:-1]), parts[-1]
def run_python_module(modulename, args):
"""Run a python module, as though with ``python -m name args...``.
`modulename` is the name of the module, possibly a dot-separated name.
`args` is the argument array to present as sys.argv, including the first
element naming the module being executed.
"""
openfile = None
glo, loc = globals(), locals()
try:
try:
# Search for the module - inside its parent package, if any - using
# standard import mechanics.
if '.' in modulename:
packagename, name = rsplit1(modulename, '.')
package = __import__(packagename, glo, loc, ['__path__'])
searchpath = package.__path__
else:
packagename, name = None, modulename
searchpath = None # "top-level search" in imp.find_module()
openfile, pathname, _ = imp.find_module(name, searchpath)
# Complain if this is a magic non-file module.
if openfile is None and pathname is None:
raise NoSource(
"module does not live in a file: %r" % modulename
)
# If `modulename` is actually a package, not a mere module, then we
# pretend to be Python 2.7 and try running its __main__.py script.
if openfile is None:
packagename = modulename
name = '__main__'
package = __import__(packagename, glo, loc, ['__path__'])
searchpath = package.__path__
openfile, pathname, _ = imp.find_module(name, searchpath)
except ImportError:
_, err, _ = sys.exc_info()
raise NoSource(str(err))
finally:
if openfile:
openfile.close()
# Finally, hand the file off to run_python_file for execution.
pathname = os.path.abspath(pathname)
args[0] = pathname
run_python_file(pathname, args, package=packagename)
def run_python_file(filename, args, package=None):
"""Run a python file as if it were the main program on the command line.
`filename` is the path to the file to execute, it need not be a .py file.
`args` is the argument array to present as sys.argv, including the first
element naming the file being executed. `package` is the name of the
enclosing package, if any.
"""
# Create a module to serve as __main__
old_main_mod = sys.modules['__main__']
main_mod = imp.new_module('__main__')
sys.modules['__main__'] = main_mod
main_mod.__file__ = filename
if package:
main_mod.__package__ = package
main_mod.__builtins__ = BUILTINS
# Set sys.argv properly.
old_argv = sys.argv
sys.argv = args
try:
# Make a code object somehow.
if filename.endswith(".pyc") or filename.endswith(".pyo"):
code = make_code_from_pyc(filename)
else:
code = make_code_from_py(filename)
# Execute the code object.
try:
exec_code_object(code, main_mod.__dict__)
except SystemExit:
# The user called sys.exit(). Just pass it along to the upper
# layers, where it will be handled.
raise
except:
# Something went wrong while executing the user code.
# Get the exc_info, and pack them into an exception that we can
# throw up to the outer loop. We peel two layers off the traceback
# so that the coverage.py code doesn't appear in the final printed
# traceback.
typ, err, tb = sys.exc_info()
raise ExceptionDuringRun(typ, err, tb.tb_next.tb_next)
finally:
# Restore the old __main__
sys.modules['__main__'] = old_main_mod
# Restore the old argv and path
sys.argv = old_argv
def make_code_from_py(filename):
"""Get source from `filename` and make a code object of it."""
# Open the source file.
try:
source_file = open_source(filename)
except IOError:
raise NoSource("No file to run: %r" % filename)
try:
source = source_file.read()
finally:
source_file.close()
# We have the source. `compile` still needs the last line to be clean,
# so make sure it is, then compile a code object from it.
if not source or source[-1] != '\n':
source += '\n'
code = compile(source, filename, "exec")
return code
def make_code_from_pyc(filename):
"""Get a code object from a .pyc file."""
try:
fpyc = open(filename, "rb")
except IOError:
raise NoCode("No file to run: %r" % filename)
try:
# First four bytes are a version-specific magic number. It has to
# match or we won't run the file.
magic = fpyc.read(4)
if magic != imp.get_magic():
raise NoCode("Bad magic number in .pyc file")
# Skip the junk in the header that we don't need.
fpyc.read(4) # Skip the moddate.
if sys.version_info >= (3, 3):
# 3.3 added another long to the header (size), skip it.
fpyc.read(4)
# The rest of the file is the code object we want.
code = marshal.load(fpyc)
finally:
fpyc.close()
return code
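# Usage sketch (hypothetical): run_python_file('script.py', ['script.py',
# '--flag']) mimics `python script.py --flag`, installing a fresh __main__
# module and swapping sys.argv for the duration of the run.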
|
bsd-3-clause
|
agiliq/merchant
|
billing/models/pin_models.py
|
3
|
3331
|
from django.db import models
from django.conf import settings
User = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
class PinCard(models.Model):
token = models.CharField(max_length=32, db_index=True, editable=False)
display_number = models.CharField(max_length=20, editable=False)
expiry_month = models.PositiveSmallIntegerField()
expiry_year = models.PositiveSmallIntegerField()
scheme = models.CharField(max_length=20, editable=False)
first_name = models.CharField(max_length=255)
last_name = models.CharField(max_length=255)
address_line1 = models.CharField(max_length=255)
address_line2 = models.CharField(max_length=255, blank=True)
address_city = models.CharField(max_length=255)
address_postcode = models.CharField(max_length=20)
address_state = models.CharField(max_length=255)
address_country = models.CharField(max_length=255)
created_at = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(User, related_name='pin_cards', blank=True, null=True)
def __unicode__(self):
return 'Card %s' % self.display_number
class Meta:
app_label = __name__.split(".")[0]
class PinCustomer(models.Model):
token = models.CharField(unique=True, max_length=32)
card = models.ForeignKey("billing.PinCard", related_name='customers')
email = models.EmailField()
created_at = models.DateTimeField()
user = models.OneToOneField(User, related_name='pin_customer', blank=True, null=True)
def __unicode__(self):
return 'Customer %s' % self.email
class Meta:
app_label = __name__.split(".")[0]
class PinCharge(models.Model):
token = models.CharField(unique=True, max_length=32, editable=False)
card = models.ForeignKey("billing.PinCard", related_name='charges', editable=False)
customer = models.ForeignKey("billing.PinCustomer", related_name='customers', null=True, blank=True, editable=False)
success = models.BooleanField(default=False)
amount = models.DecimalField(max_digits=16, decimal_places=2)
currency = models.CharField(max_length=3)
description = models.CharField(max_length=255)
email = models.EmailField()
ip_address = models.IPAddressField()
created_at = models.DateTimeField()
status_message = models.CharField(max_length=255)
error_message = models.CharField(max_length=255, null=True, blank=True)
user = models.ForeignKey(User, related_name='pin_charges', blank=True, null=True)
def __unicode__(self):
return 'Charge %s' % self.email
class Meta:
app_label = __name__.split(".")[0]
class PinRefund(models.Model):
token = models.CharField(unique=True, max_length=32)
charge = models.ForeignKey("billing.PinCharge", related_name='refunds')
success = models.BooleanField(default=False)
amount = models.DecimalField(max_digits=16, decimal_places=2)
currency = models.CharField(max_length=3)
created_at = models.DateTimeField()
status_message = models.CharField(max_length=255)
error_message = models.CharField(max_length=255, null=True, blank=True)
user = models.ForeignKey(User, related_name='pin_refunds', blank=True, null=True)
def __unicode__(self):
return 'Refund %s' % self.charge.email
class Meta:
app_label = __name__.split(".")[0]
|
bsd-3-clause
|
mdutkin/m2core
|
example/run_rest_tests.py
|
1
|
8041
|
import unittest
from tornado.options import define, options
from m2core.utils.tests import RESTTest
from m2core.utils.data_helper import DataHelper
from m2core import M2Core
from m2core.bases import http_statuses
from tornado.escape import json_decode
from example.models import User
# init empty object to pass it through all of your tests
class SomeData:
pass
test_data = SomeData()
test_data.test_access_token = None # access token for REST requests
test_data.users_count = 3 # amount of test users to create
test_data.test_users = list() # test users list
# unit testing config
define('test_server', 'm2core.loc')
define('test_protocol', 'http')
define('test_port', 8888)
define('test_login', 'unittest')
define('test_password', 'unittest')
define('test_username', 'name')
define('test_surname', 'surname')
define('test_email', '[email protected]')
define('test_phone', '+7(111)111-11-11')
options.config_name = 'config_local.py'
m2core = M2Core()
domain_name = '%s://%s:%s' % (
options.test_protocol,
options.test_server,
options.test_port,
)
class TestMyREST(unittest.TestCase, RESTTest):
def test_00_base_model(self):
# test auto modification of updated field from CreatedMixin
modified_user = User.load_or_create(email='[email protected]', gender=0)
updated_date = modified_user.get('updated')
modified_user.save()
        self.assertNotEqual(updated_date, modified_user.get('updated'), msg='`updated` field didn\'t update on save()')
modified_user.delete()
self.report_completed('test_00_base_model')
def test_01_user_create(self):
user = User.load_by_params(email=options.test_email)
# drop test user from DB if it exists
if user:
user.delete()
for i in range(1, test_data.users_count + 1):
# create users, first one will be admin
email = '%[email protected]' % DataHelper.random_char(6)
result = self.fetch_data({
'method': 'POST',
'url': '%s/users' % domain_name,
'codes': [
http_statuses['CREATED']['code'],
],
'data': {
'name': options.test_username if i == 1 else options.test_username + ' %s' % i,
'password': options.test_password,
'email': options.test_email if i == 1 else email,
'gender': 0
}
})
if i != 1:
result1 = self.fetch_data({
'method': 'POST',
'url': '%s/users/login' % domain_name,
'codes': [
http_statuses['OK']['code'],
],
'data': {
'email': email,
'password': options.test_password,
}
})
data = json_decode(result1)['data']
test_data.test_users.append(data)
at = data['access_token']
print('at: %s %s' % (data['user_info']['id'], at))
user = User.load_by_params(email=options.test_email)
user.add_role(options.admin_role_name)
self.report_completed('test_01_user_create')
def test_02_user_login(self):
# login user. after login he gets access token, which gives him some permissions
result = self.fetch_data({
'method': 'POST',
'url': '%s/users/login' % domain_name,
'codes': [
http_statuses['OK']['code'],
],
'data': {
'email': options.test_email,
'password': options.test_password,
}
})
test_data.test_access_token = json_decode(result)['data']['access_token']
self.assertGreater(len(test_data.test_access_token), 0, msg='Received empty access token')
print('access token is: %s' % test_data.test_access_token)
self.report_completed('test_02_user_login')
def test_03_evil_routes(self):
# test our special handler
result = self.fetch_data({
'method': 'GET',
'url': '%s/evil_routes.js?access_token=%s' % (domain_name, test_data.test_access_token),
'codes': [
http_statuses['OK']['code'],
],
'data': None
})
routes = json_decode(result)['data']
        self.assertGreater(len(routes.keys()), 0, msg='Received empty routes list')
self.report_completed('test_03_evil_routes')
def test_04_admin_get_and_modify_user(self):
for user in test_data.test_users:
# get user's info by id (for admins only)
self.fetch_data({
'method': 'GET',
'url': '%s/admin/users/%s?access_token=%s' % (domain_name, user['user_info']['id'], test_data.test_access_token),
'codes': [
http_statuses['OK']['code'],
],
'data': None
})
self.fetch_data({
'method': 'PUT',
'url': '%s/admin/users/%s?access_token=%s' % (domain_name, user['user_info']['id'], test_data.test_access_token),
'codes': [
http_statuses['CREATED']['code'],
],
'data': {
'name': 'Modified %s' % user['user_info']['id'],
'password': 'new_cool_pass',
'gender': 1
}
})
self.report_completed('test_04_admin_get_and_modify_user')
def test_05_admin_delete_user(self):
# delete user by id (for admin only)
user = test_data.test_users.pop()
self.fetch_data({
'method': 'DELETE',
'url': '%s/admin/users/%s?access_token=%s' % (domain_name, user['user_info']['id'], test_data.test_access_token),
'codes': [
http_statuses['OK']['code'],
],
'data': None
})
self.report_completed('test_05_admin_delete_user')
def test_06_schema(self):
# test our special handler
result = self.fetch_data({
'method': 'GET',
'url': '%s/schema.js?access_token=%s' % (domain_name, test_data.test_access_token),
'codes': [
http_statuses['OK']['code'],
],
'data': None
})
schema = json_decode(result)['data']
self.assertEqual(list(schema.keys()), ['user'], msg='Received wrong json')
self.assertEqual(list(schema['user'].keys()),
['id', 'email', 'password', 'name', 'gender', 'created', 'updated'],
msg='Received wrong json')
self.report_completed('test_06_schema')
def test_06_restrict_access(self):
# get first random user with default roles
user = test_data.test_users[0]
# on this endpoint you can only get with admin's rights
result = self.fetch_data({
'method': 'GET',
'url': '%s/admin/schema.js?access_token=%s' % (domain_name, user['access_token']),
'codes': [
http_statuses['WRONG_CREDENTIALS']['code'],
],
'data': None
})
# now with admin token
result = self.fetch_data({
'method': 'GET',
'url': '%s/admin/schema.js?access_token=%s' % (domain_name, test_data.test_access_token),
'codes': [
http_statuses['OK']['code'],
],
'data': None
})
schema = json_decode(result)['data']
self.assertEqual(list(schema.keys()), ['user'], msg='Received wrong json')
self.assertEqual(list(schema['user'].keys()),
['id', 'email', 'password', 'name', 'gender', 'created', 'updated'],
msg='Received wrong json')
        self.report_completed('test_06_restrict_access')
|
mit
|
mercycorps/tola-help
|
project/settings/production.py
|
47
|
2181
|
"""Production settings and globals."""
from os import environ
from base import *
# Normally you should not import ANYTHING from Django directly
# into your settings, but ImproperlyConfigured is an exception.
from django.core.exceptions import ImproperlyConfigured
def get_env_setting(setting):
""" Get the environment setting or return exception """
try:
return environ[setting]
except KeyError:
error_msg = "Set the %s env variable" % setting
raise ImproperlyConfigured(error_msg)
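# Example (illustrative): with SECRET_KEY exported in the environment,
# get_env_setting('SECRET_KEY') returns its value; otherwise it raises
# ImproperlyConfigured("Set the SECRET_KEY env variable").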
########## HOST CONFIGURATION
# See: https://docs.djangoproject.com/en/1.5/releases/1.5/#allowed-hosts-required-in-production
ALLOWED_HOSTS = []
########## END HOST CONFIGURATION
########## EMAIL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host
EMAIL_HOST = environ.get('EMAIL_HOST', 'smtp.gmail.com')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host-password
EMAIL_HOST_PASSWORD = environ.get('EMAIL_HOST_PASSWORD', '')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host-user
EMAIL_HOST_USER = environ.get('EMAIL_HOST_USER', '[email protected]')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-port
EMAIL_PORT = environ.get('EMAIL_PORT', 587)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-subject-prefix
EMAIL_SUBJECT_PREFIX = '[%s] ' % SITE_NAME
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-use-tls
EMAIL_USE_TLS = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#server-email
SERVER_EMAIL = EMAIL_HOST_USER
########## END EMAIL CONFIGURATION
########## DATABASE CONFIGURATION
DATABASES = {}
########## END DATABASE CONFIGURATION
########## CACHE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {}
########## END CACHE CONFIGURATION
########## SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = get_env_setting('SECRET_KEY')
########## END SECRET CONFIGURATION
|
gpl-2.0
|
binking/News_website
|
news_website/models/user.py
|
1
|
1441
|
import datetime as dt
from flask_login import UserMixin
from news_website.extensions import db, bcrypt
from news_website.database import (
Column,
Model,
ReferenceCol,
relationship,
SurrogatePK,
)
class User(UserMixin, SurrogatePK, Model):
__tablename__ = 'users'
username = Column(db.String(80), unique=True, nullable=False)
email = Column(db.String(80), unique=True, nullable=False)
#: The hashed password
password = Column(db.String(128), nullable=True)
created_at = Column(db.DateTime, nullable=False, default=dt.datetime.utcnow)
first_name = Column(db.String(30), nullable=True)
last_name = Column(db.String(30), nullable=True)
active = Column(db.Boolean(), default=False)
is_admin = Column(db.Boolean(), default=False)
def __init__(self, username, email, password=None, **kwargs):
db.Model.__init__(self, username=username, email=email, **kwargs)
if password:
self.set_password(password)
else:
self.password = None
def set_password(self, password):
self.password = bcrypt.generate_password_hash(password)
def check_password(self, value):
return bcrypt.check_password_hash(self.password, value)
@property
def full_name(self):
return "{0} {1}".format(self.first_name, self.last_name)
def __repr__(self):
return '<User({username!r})>'.format(username=self.username)
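# Usage sketch (hypothetical, assuming the bcrypt extension has been
# initialised against a Flask app):
#   user = User(username='alice', email='[email protected]', password='s3cret')
#   user.check_password('s3cret')  # -> True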
|
bsd-3-clause
|
albertomurillo/ansible
|
lib/ansible/modules/network/f5/bigip_profile_analytics.py
|
38
|
24086
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_profile_analytics
short_description: Manage HTTP analytics profiles on a BIG-IP
description:
- Manage HTTP analytics profiles on a BIG-IP.
version_added: 2.8
options:
name:
description:
- Specifies the name of the profile.
type: str
required: True
parent:
description:
- Specifies the profile from which this profile inherits settings.
- When creating a new profile, if this parameter is not specified, the default
is the system-supplied C(analytics) profile.
type: str
description:
description:
- Description of the profile.
type: str
collect_geo:
description:
- Enables or disables the collection of the names of the countries
from where the traffic was sent.
type: bool
collect_ip:
description:
- Enables or disables the collection of client IPs statistics.
type: bool
collect_max_tps_and_throughput:
description:
- Enables or disables the collection of maximum TPS and throughput
for all collected entities.
type: bool
collect_page_load_time:
description:
- Enables or disables the collection of the page load time
statistics.
type: bool
collect_url:
description:
- Enables or disables the collection of requested URL statistics.
type: bool
collect_user_agent:
description:
- Enables or disables the collection of user agents.
type: bool
collect_user_sessions:
description:
      - Enables or disables the collection of unique user sessions.
type: bool
collected_stats_external_logging:
description:
- Enables or disables the external logging of the collected
statistics.
type: bool
collected_stats_internal_logging:
description:
- Enables or disables the internal logging of the collected
statistics.
type: bool
external_logging_publisher:
description:
- Specifies the external logging publisher used to send statistical
data to one or more destinations.
type: str
notification_by_syslog:
description:
      - Enables or disables logging of the analytics alerts into
        syslog.
type: bool
notification_by_email:
description:
- Enables or disables sending the analytics alerts by email.
type: bool
notification_email_addresses:
description:
- Specifies which email addresses receive alerts by email when
C(notification_by_email) is enabled.
type: list
partition:
description:
- Device partition to manage resources on.
type: str
default: Common
state:
description:
- When C(present), ensures that the profile exists.
- When C(absent), ensures the profile is removed.
type: str
choices:
- present
- absent
default: present
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create a profile
bigip_profile_analytics:
name: profile1
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
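
- name: Create a profile that collects URL and client IP statistics
  # A hypothetical second task assembled from the options documented above;
  # the values shown are illustrative only.
  bigip_profile_analytics:
    name: profile2
    collect_url: yes
    collect_ip: yes
    provider:
      password: secret
      server: lb.mydomain.com
      user: admin
  delegate_to: localhost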
'''
RETURN = r'''
description:
  description: The new description of the resource.
  returned: changed
  type: str
  sample: My profile
collect_geo:
  description: The new collect_geo value of the resource.
  returned: changed
  type: bool
  sample: yes
parent:
  description: The parent profile the resource inherits settings from.
  returned: changed
  type: str
  sample: analytics
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import flatten_boolean
from library.module_utils.network.f5.compare import cmp_simple_list
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import flatten_boolean
from ansible.module_utils.network.f5.compare import cmp_simple_list
class Parameters(AnsibleF5Parameters):
api_map = {
'defaultsFrom': 'parent',
'collectGeo': 'collect_geo',
'collectIp': 'collect_ip',
'collectMaxTpsAndThroughput': 'collect_max_tps_and_throughput',
'collectPageLoadTime': 'collect_page_load_time',
'collectUrl': 'collect_url',
'collectUserAgent': 'collect_user_agent',
'collectUserSessions': 'collect_user_sessions',
'collectedStatsExternalLogging': 'collected_stats_external_logging',
'collectedStatsInternalLogging': 'collected_stats_internal_logging',
'externalLoggingPublisher': 'external_logging_publisher',
'notificationBySyslog': 'notification_by_syslog',
'notificationByEmail': 'notification_by_email',
'notificationEmailAddresses': 'notification_email_addresses'
}
api_attributes = [
'description',
'defaultsFrom',
'collectGeo',
'collectIp',
'collectMaxTpsAndThroughput',
'collectPageLoadTime',
'collectUrl',
'collectUserAgent',
'collectUserSessions',
'collectedStatsExternalLogging',
'collectedStatsInternalLogging',
'externalLoggingPublisher',
'notificationBySyslog',
'notificationByEmail',
'notificationEmailAddresses',
]
returnables = [
'collect_geo',
'collect_ip',
'collect_max_tps_and_throughput',
'collect_page_load_time',
'collect_url',
'collect_user_agent',
'collect_user_sessions',
'collected_stats_external_logging',
'collected_stats_internal_logging',
'description',
'external_logging_publisher',
'notification_by_syslog',
'notification_by_email',
'notification_email_addresses',
'parent',
]
updatables = [
'collect_geo',
'collect_ip',
'collect_max_tps_and_throughput',
'collect_page_load_time',
'collect_url',
'collect_user_agent',
'collect_user_sessions',
'collected_stats_external_logging',
'collected_stats_internal_logging',
'description',
'external_logging_publisher',
'notification_by_syslog',
'notification_by_email',
'notification_email_addresses',
'parent',
]
@property
def external_logging_publisher(self):
if self._values['external_logging_publisher'] is None:
return None
if self._values['external_logging_publisher'] in ['none', '']:
return ''
result = fq_name(self.partition, self._values['external_logging_publisher'])
return result
@property
def collect_geo(self):
return flatten_boolean(self._values['collect_geo'])
@property
def collect_ip(self):
return flatten_boolean(self._values['collect_ip'])
@property
def collect_max_tps_and_throughput(self):
return flatten_boolean(self._values['collect_max_tps_and_throughput'])
@property
def collect_page_load_time(self):
return flatten_boolean(self._values['collect_page_load_time'])
@property
def collect_url(self):
return flatten_boolean(self._values['collect_url'])
@property
def collect_user_agent(self):
return flatten_boolean(self._values['collect_user_agent'])
@property
def collect_user_sessions(self):
return flatten_boolean(self._values['collect_user_sessions'])
@property
def collected_stats_external_logging(self):
return flatten_boolean(self._values['collected_stats_external_logging'])
@property
def collected_stats_internal_logging(self):
return flatten_boolean(self._values['collected_stats_internal_logging'])
@property
def notification_by_syslog(self):
return flatten_boolean(self._values['notification_by_syslog'])
@property
def notification_by_email(self):
return flatten_boolean(self._values['notification_by_email'])
class ApiParameters(Parameters):
@property
def description(self):
if self._values['description'] in [None, 'none']:
return None
return self._values['description']
class ModuleParameters(Parameters):
@property
def description(self):
if self._values['description'] is None:
return None
elif self._values['description'] in ['none', '']:
return ''
return self._values['description']
@property
def parent(self):
if self._values['parent'] is None:
return None
result = fq_name(self.partition, self._values['parent'])
return result
@property
def notification_email_addresses(self):
if self._values['notification_email_addresses'] is None:
return None
elif len(self._values['notification_email_addresses']) == 1 and self._values['notification_email_addresses'][0] in ['', 'none']:
return []
return self._values['notification_email_addresses']
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
@property
def collect_geo(self):
if self._values['collect_geo'] is None:
return None
elif self._values['collect_geo'] == 'yes':
return 'enabled'
return 'disabled'
@property
def collect_ip(self):
if self._values['collect_ip'] is None:
return None
elif self._values['collect_ip'] == 'yes':
return 'enabled'
return 'disabled'
@property
def collect_max_tps_and_throughput(self):
if self._values['collect_max_tps_and_throughput'] is None:
return None
elif self._values['collect_max_tps_and_throughput'] == 'yes':
return 'enabled'
return 'disabled'
@property
def collect_page_load_time(self):
if self._values['collect_page_load_time'] is None:
return None
elif self._values['collect_page_load_time'] == 'yes':
return 'enabled'
return 'disabled'
@property
def collect_url(self):
if self._values['collect_url'] is None:
return None
elif self._values['collect_url'] == 'yes':
return 'enabled'
return 'disabled'
@property
def collect_user_agent(self):
if self._values['collect_user_agent'] is None:
return None
elif self._values['collect_user_agent'] == 'yes':
return 'enabled'
return 'disabled'
@property
def collect_user_sessions(self):
if self._values['collect_user_sessions'] is None:
return None
elif self._values['collect_user_sessions'] == 'yes':
return 'enabled'
return 'disabled'
@property
def collected_stats_external_logging(self):
if self._values['collected_stats_external_logging'] is None:
return None
elif self._values['collected_stats_external_logging'] == 'yes':
return 'enabled'
return 'disabled'
@property
def collected_stats_internal_logging(self):
if self._values['collected_stats_internal_logging'] is None:
return None
elif self._values['collected_stats_internal_logging'] == 'yes':
return 'enabled'
return 'disabled'
@property
def notification_by_syslog(self):
if self._values['notification_by_syslog'] is None:
return None
elif self._values['notification_by_syslog'] == 'yes':
return 'enabled'
return 'disabled'
@property
def notification_by_email(self):
if self._values['notification_by_email'] is None:
return None
elif self._values['notification_by_email'] == 'yes':
return 'enabled'
return 'disabled'
class ReportableChanges(Changes):
@property
def collect_geo(self):
return flatten_boolean(self._values['collect_geo'])
@property
def collect_ip(self):
return flatten_boolean(self._values['collect_ip'])
@property
def collect_max_tps_and_throughput(self):
return flatten_boolean(self._values['collect_max_tps_and_throughput'])
@property
def collect_page_load_time(self):
return flatten_boolean(self._values['collect_page_load_time'])
@property
def collect_url(self):
return flatten_boolean(self._values['collect_url'])
@property
def collect_user_agent(self):
return flatten_boolean(self._values['collect_user_agent'])
@property
def collect_user_sessions(self):
return flatten_boolean(self._values['collect_user_sessions'])
@property
def collected_stats_external_logging(self):
return flatten_boolean(self._values['collected_stats_external_logging'])
@property
def collected_stats_internal_logging(self):
return flatten_boolean(self._values['collected_stats_internal_logging'])
@property
def notification_by_syslog(self):
return flatten_boolean(self._values['notification_by_syslog'])
@property
def notification_by_email(self):
return flatten_boolean(self._values['notification_by_email'])
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def parent(self):
if self.want.parent is None:
return None
if self.want.parent != self.have.parent:
raise F5ModuleError(
"The parent profile cannot be changed"
)
@property
def description(self):
if self.want.description is None:
return None
if self.have.description is None and self.want.description == '':
return None
if self.want.description != self.have.description:
return self.want.description
@property
def notification_email_addresses(self):
return cmp_simple_list(self.want.notification_email_addresses, self.have.notification_email_addresses)
@property
def external_logging_publisher(self):
if self.want.external_logging_publisher is None:
return None
if self.have.external_logging_publisher is None and self.want.external_logging_publisher == '':
return None
if self.want.external_logging_publisher != self.have.external_logging_publisher:
return self.want.external_logging_publisher
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def absent(self):
if self.exists():
return self.remove()
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/analytics/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
        if resp.status == 404 or ('code' in response and response['code'] == 404):
return False
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/analytics/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 409, 404]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return True
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/analytics/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 404]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/analytics/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
response = self.client.api.delete(uri)
if response.status == 200:
return True
raise F5ModuleError(response.content)
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/analytics/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
parent=dict(),
description=dict(),
collect_geo=dict(type='bool'),
collect_ip=dict(type='bool'),
collect_max_tps_and_throughput=dict(type='bool'),
collect_page_load_time=dict(type='bool'),
collect_url=dict(type='bool'),
collect_user_agent=dict(type='bool'),
collect_user_sessions=dict(type='bool'),
collected_stats_external_logging=dict(type='bool'),
collected_stats_internal_logging=dict(type='bool'),
external_logging_publisher=dict(),
notification_by_syslog=dict(type='bool'),
notification_by_email=dict(type='bool'),
notification_email_addresses=dict(type='list'),
state=dict(
default='present',
choices=['present', 'absent']
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
|
gpl-3.0
|
ufownl/shadowsocks
|
shadowsocks/asyncdns.py
|
22
|
17640
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import socket
import struct
import re
import logging
from shadowsocks import common, lru_cache, eventloop, shell
CACHE_SWEEP_INTERVAL = 30
VALID_HOSTNAME = re.compile(br"(?!-)[A-Z\d\-_]{1,63}(?<!-)$", re.IGNORECASE)
common.patch_socket()
# rfc1035
# format
# +---------------------+
# | Header |
# +---------------------+
# | Question | the question for the name server
# +---------------------+
# | Answer | RRs answering the question
# +---------------------+
# | Authority | RRs pointing toward an authority
# +---------------------+
# | Additional | RRs holding additional information
# +---------------------+
#
# header
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ID |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# |QR| Opcode |AA|TC|RD|RA| Z | RCODE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | QDCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ANCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | NSCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ARCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
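# For example, a typical response header with flags 0x8180 (QR=1, RD=1,
# RA=1, RCODE=0), one question and one answer unpacks (see parse_header
# below) as:
#   struct.unpack('!HBBHHHH', data[:12]) -> (res_id, 0x81, 0x80, 1, 1, 0, 0)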
QTYPE_ANY = 255
QTYPE_A = 1
QTYPE_AAAA = 28
QTYPE_CNAME = 5
QTYPE_NS = 2
QCLASS_IN = 1
def build_address(address):
address = address.strip(b'.')
labels = address.split(b'.')
results = []
for label in labels:
l = len(label)
if l > 63:
return None
results.append(common.chr(l))
results.append(label)
results.append(b'\0')
return b''.join(results)
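# Worked example (a sketch): build_address(b'www.example.com') returns
# b'\x03www\x07example\x03com\x00' -- each label is prefixed with its
# length and the encoded name is terminated by a zero byte.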
def build_request(address, qtype):
request_id = os.urandom(2)
header = struct.pack('!BBHHHH', 1, 0, 1, 0, 0, 0)
addr = build_address(address)
qtype_qclass = struct.pack('!HH', qtype, QCLASS_IN)
return request_id + header + addr + qtype_qclass
def parse_ip(addrtype, data, length, offset):
if addrtype == QTYPE_A:
return socket.inet_ntop(socket.AF_INET, data[offset:offset + length])
elif addrtype == QTYPE_AAAA:
return socket.inet_ntop(socket.AF_INET6, data[offset:offset + length])
elif addrtype in [QTYPE_CNAME, QTYPE_NS]:
return parse_name(data, offset)[1]
else:
return data[offset:offset + length]
def parse_name(data, offset):
p = offset
labels = []
l = common.ord(data[p])
while l > 0:
if (l & (128 + 64)) == (128 + 64):
# pointer
pointer = struct.unpack('!H', data[p:p + 2])[0]
pointer &= 0x3FFF
r = parse_name(data, pointer)
labels.append(r[1])
p += 2
# pointer is the end
return p - offset, b'.'.join(labels)
else:
labels.append(data[p + 1:p + 1 + l])
p += 1 + l
l = common.ord(data[p])
return p - offset + 1, b'.'.join(labels)
# rfc1035
# record
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | |
# / /
# / NAME /
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TYPE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | CLASS |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TTL |
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | RDLENGTH |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--|
# / RDATA /
# / /
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
def parse_record(data, offset, question=False):
nlen, name = parse_name(data, offset)
if not question:
record_type, record_class, record_ttl, record_rdlength = struct.unpack(
'!HHiH', data[offset + nlen:offset + nlen + 10]
)
ip = parse_ip(record_type, data, record_rdlength, offset + nlen + 10)
return nlen + 10 + record_rdlength, \
(name, ip, record_type, record_class, record_ttl)
else:
record_type, record_class = struct.unpack(
'!HH', data[offset + nlen:offset + nlen + 4]
)
return nlen + 4, (name, None, record_type, record_class, None, None)
def parse_header(data):
if len(data) >= 12:
header = struct.unpack('!HBBHHHH', data[:12])
res_id = header[0]
res_qr = header[1] & 128
res_tc = header[1] & 2
res_ra = header[2] & 128
res_rcode = header[2] & 15
# assert res_tc == 0
# assert res_rcode in [0, 3]
res_qdcount = header[3]
res_ancount = header[4]
res_nscount = header[5]
res_arcount = header[6]
return (res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount,
res_ancount, res_nscount, res_arcount)
return None
def parse_response(data):
try:
if len(data) >= 12:
header = parse_header(data)
if not header:
return None
res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount, \
res_ancount, res_nscount, res_arcount = header
qds = []
ans = []
offset = 12
for i in range(0, res_qdcount):
l, r = parse_record(data, offset, True)
offset += l
if r:
qds.append(r)
for i in range(0, res_ancount):
l, r = parse_record(data, offset)
offset += l
if r:
ans.append(r)
for i in range(0, res_nscount):
l, r = parse_record(data, offset)
offset += l
for i in range(0, res_arcount):
l, r = parse_record(data, offset)
offset += l
response = DNSResponse()
if qds:
response.hostname = qds[0][0]
            for question in qds:
                response.questions.append((question[1], question[2], question[3]))
for an in ans:
response.answers.append((an[1], an[2], an[3]))
return response
except Exception as e:
shell.print_exception(e)
return None
def is_valid_hostname(hostname):
if len(hostname) > 255:
return False
    # indexing bytes yields an int on Python 3, so ``hostname[-1] == b'.'``
    # would never match there; endswith() works for both str and bytes
    if hostname.endswith(b'.'):
        hostname = hostname[:-1]
return all(VALID_HOSTNAME.match(x) for x in hostname.split(b'.'))
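# e.g. is_valid_hostname(b'www.example.com') -> True, while
# is_valid_hostname(b'-bad-.example.com') -> False (labels may not start
# or end with a hyphen).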
class DNSResponse(object):
def __init__(self):
self.hostname = None
self.questions = [] # each: (addr, type, class)
self.answers = [] # each: (addr, type, class)
def __str__(self):
return '%s: %s' % (self.hostname, str(self.answers))
STATUS_FIRST = 0
STATUS_SECOND = 1
class DNSResolver(object):
def __init__(self, server_list=None, prefer_ipv6=False):
self._loop = None
self._hosts = {}
self._hostname_status = {}
self._hostname_to_cb = {}
self._cb_to_hostname = {}
self._cache = lru_cache.LRUCache(timeout=300)
self._sock = None
if server_list is None:
self._servers = None
self._parse_resolv()
else:
self._servers = server_list
if prefer_ipv6:
self._QTYPES = [QTYPE_AAAA, QTYPE_A]
else:
self._QTYPES = [QTYPE_A, QTYPE_AAAA]
self._parse_hosts()
# TODO monitor hosts change and reload hosts
# TODO parse /etc/gai.conf and follow its rules
def _parse_resolv(self):
self._servers = []
try:
with open('/etc/resolv.conf', 'rb') as f:
content = f.readlines()
for line in content:
line = line.strip()
if not (line and line.startswith(b'nameserver')):
continue
parts = line.split()
if len(parts) < 2:
continue
server = parts[1]
if common.is_ip(server) == socket.AF_INET:
if type(server) != str:
server = server.decode('utf8')
self._servers.append(server)
except IOError:
pass
if not self._servers:
self._servers = ['8.8.4.4', '8.8.8.8']
def _parse_hosts(self):
etc_path = '/etc/hosts'
if 'WINDIR' in os.environ:
etc_path = os.environ['WINDIR'] + '/system32/drivers/etc/hosts'
try:
with open(etc_path, 'rb') as f:
for line in f.readlines():
line = line.strip()
parts = line.split()
if len(parts) < 2:
continue
ip = parts[0]
if not common.is_ip(ip):
continue
for i in range(1, len(parts)):
hostname = parts[i]
if hostname:
self._hosts[hostname] = ip
except IOError:
self._hosts['localhost'] = '127.0.0.1'
def add_to_loop(self, loop):
if self._loop:
            raise Exception('already added to loop')
self._loop = loop
# TODO when dns server is IPv6
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
self._sock.setblocking(False)
loop.add(self._sock, eventloop.POLL_IN, self)
loop.add_periodic(self.handle_periodic)
def _call_callback(self, hostname, ip, error=None):
callbacks = self._hostname_to_cb.get(hostname, [])
for callback in callbacks:
if callback in self._cb_to_hostname:
del self._cb_to_hostname[callback]
if ip or error:
callback((hostname, ip), error)
else:
callback((hostname, None),
Exception('unknown hostname %s' % hostname))
if hostname in self._hostname_to_cb:
del self._hostname_to_cb[hostname]
if hostname in self._hostname_status:
del self._hostname_status[hostname]
def _handle_data(self, data):
response = parse_response(data)
if response and response.hostname:
hostname = response.hostname
ip = None
for answer in response.answers:
if answer[1] in (QTYPE_A, QTYPE_AAAA) and \
answer[2] == QCLASS_IN:
ip = answer[0]
break
if not ip and self._hostname_status.get(hostname, STATUS_SECOND) \
== STATUS_FIRST:
self._hostname_status[hostname] = STATUS_SECOND
self._send_req(hostname, self._QTYPES[1])
else:
if ip:
self._cache[hostname] = ip
self._call_callback(hostname, ip)
elif self._hostname_status.get(hostname, None) \
== STATUS_SECOND:
for question in response.questions:
if question[1] == self._QTYPES[1]:
self._call_callback(hostname, None)
break
def handle_event(self, sock, fd, event):
if sock != self._sock:
return
if event & eventloop.POLL_ERR:
logging.error('dns socket err')
self._loop.remove(self._sock)
self._sock.close()
# TODO when dns server is IPv6
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.SOL_UDP)
self._sock.setblocking(False)
self._loop.add(self._sock, eventloop.POLL_IN, self)
else:
data, addr = sock.recvfrom(1024)
if addr[0] not in self._servers:
                logging.warning('received a packet from an address other '
                                'than our DNS servers')
return
self._handle_data(data)
def handle_periodic(self):
self._cache.sweep()
def remove_callback(self, callback):
hostname = self._cb_to_hostname.get(callback)
if hostname:
del self._cb_to_hostname[callback]
arr = self._hostname_to_cb.get(hostname, None)
if arr:
arr.remove(callback)
if not arr:
del self._hostname_to_cb[hostname]
if hostname in self._hostname_status:
del self._hostname_status[hostname]
def _send_req(self, hostname, qtype):
req = build_request(hostname, qtype)
for server in self._servers:
logging.debug('resolving %s with type %d using server %s',
hostname, qtype, server)
self._sock.sendto(req, (server, 53))
def resolve(self, hostname, callback):
if type(hostname) != bytes:
hostname = hostname.encode('utf8')
if not hostname:
callback(None, Exception('empty hostname'))
elif common.is_ip(hostname):
callback((hostname, hostname), None)
elif hostname in self._hosts:
logging.debug('hit hosts: %s', hostname)
ip = self._hosts[hostname]
callback((hostname, ip), None)
elif hostname in self._cache:
logging.debug('hit cache: %s', hostname)
ip = self._cache[hostname]
callback((hostname, ip), None)
else:
if not is_valid_hostname(hostname):
callback(None, Exception('invalid hostname: %s' % hostname))
return
arr = self._hostname_to_cb.get(hostname, None)
if not arr:
self._hostname_status[hostname] = STATUS_FIRST
self._send_req(hostname, self._QTYPES[0])
self._hostname_to_cb[hostname] = [callback]
self._cb_to_hostname[callback] = hostname
else:
arr.append(callback)
# TODO send again only if waited too long
self._send_req(hostname, self._QTYPES[0])
def close(self):
if self._sock:
if self._loop:
self._loop.remove_periodic(self.handle_periodic)
self._loop.remove(self._sock)
self._sock.close()
self._sock = None
def test():
dns_resolver = DNSResolver()
loop = eventloop.EventLoop()
dns_resolver.add_to_loop(loop)
global counter
counter = 0
def make_callback():
global counter
def callback(result, error):
global counter
# TODO: what can we assert?
print(result, error)
counter += 1
if counter == 9:
dns_resolver.close()
loop.stop()
a_callback = callback
return a_callback
assert(make_callback() != make_callback())
dns_resolver.resolve(b'google.com', make_callback())
dns_resolver.resolve('google.com', make_callback())
dns_resolver.resolve('example.com', make_callback())
dns_resolver.resolve('ipv6.google.com', make_callback())
dns_resolver.resolve('www.facebook.com', make_callback())
dns_resolver.resolve('ns2.google.com', make_callback())
dns_resolver.resolve('invalid.@!#$%^&[email protected]', make_callback())
dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'long.hostname', make_callback())
dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
'long.hostname', make_callback())
loop.run()
if __name__ == '__main__':
test()
|
apache-2.0
|
unho/translate
|
translate/storage/cpo.py
|
1
|
35665
|
# -*- coding: utf-8 -*-
#
# Copyright 2002-2007 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Classes that hold units of .po files (pounit) or entire files (pofile).
Gettext-style .po (or .pot) files are used in translations for KDE, GNOME and
many other projects.
This uses libgettextpo from the gettext package. Versions before 0.17 will
cause subtle bugs at best and may not work at all. Developers might want to
have a look at gettext-tools/libgettextpo/gettext-po.h from the gettext
package for the public API of the library.
"""
import ctypes.util
import logging
import os
import re
import sys
import tempfile
from ctypes import (CFUNCTYPE, POINTER, Structure, c_char_p, c_int, c_long,
c_size_t, c_uint, cdll)
from translate.lang import data
from translate.misc.multistring import multistring
from translate.storage import base, pocommon, pypo
logger = logging.getLogger(__name__)
lsep = " "
"""Separator for #: entries"""
STRING = c_char_p
# Structures
class po_message(Structure):
pass
class po_file(Structure):
pass
class po_filepos(Structure):
pass
class po_iterator(Structure):
pass
po_message_t = POINTER(po_message)
"""A po_message_t represents a message in a PO file."""
po_file_t = POINTER(po_file)
"""A po_file_t represents a PO file."""
po_filepos_t = POINTER(po_filepos)
"""A po_filepos_t represents the position in a PO file."""
po_iterator_t = POINTER(po_iterator)
"""A po_iterator_t represents an iterator through a PO file."""
# Function prototypes
xerror_prototype = CFUNCTYPE(None, c_int, po_message_t, STRING, c_uint,
c_uint, c_int, STRING)
xerror2_prototype = CFUNCTYPE(None, c_int, po_message_t, STRING,
c_uint, c_uint, c_int, STRING,
po_message_t, STRING, c_uint, c_uint,
c_int, STRING)
# Structures (error handler)
class po_xerror_handler(Structure):
_fields_ = [('xerror', xerror_prototype),
('xerror2', xerror2_prototype)]
class po_error_handler(Structure):
_fields_ = [
('error', CFUNCTYPE(None, c_int, c_int, STRING)),
('error_at_line', CFUNCTYPE(None, c_int, c_int, STRING, c_uint, STRING)),
('multiline_warning', CFUNCTYPE(None, STRING, STRING)),
('multiline_error', CFUNCTYPE(None, STRING, STRING)),
]
# Callback functions for po_xerror_handler
def xerror_cb(severity, message, filename, lineno, column, multiline_p,
message_text):
logger.error("xerror_cb %s %s %s %s %s %s %s" % (
severity, message, filename, lineno, column, multiline_p, message_text
))
if severity >= 1:
raise ValueError(message_text)
def xerror2_cb(severity, message1, filename1, lineno1, column1, multiline_p1,
message_text1, message2, filename2, lineno2, column2,
multiline_p2, message_text2):
logger.error("xerror2_cb %s %s %s %s %s %s %s %s %s %s %s %s" % (
severity, message1,
filename1, lineno1, column1, multiline_p1, message_text1,
filename2, lineno2, column2, multiline_p2, message_text2
))
if severity >= 1:
raise ValueError(message_text1)
# Setup return and parameter types
# See also http://git.savannah.gnu.org/cgit/gettext.git/tree/gettext-tools/libgettextpo/gettext-po.in.h
def setup_call_types(gpo):
# File access
gpo.po_file_create.restype = po_file_t
gpo.po_file_read_v3.argtypes = [STRING, POINTER(po_xerror_handler)]
gpo.po_file_read_v3.restype = po_file_t
gpo.po_file_write_v2.argtypes = [po_file_t, STRING, POINTER(po_xerror_handler)]
gpo.po_file_write_v2.restype = po_file_t
gpo.po_file_free.argtypes = [po_file_t]
# Header
gpo.po_file_domain_header.argtypes = [po_file_t, STRING]
gpo.po_file_domain_header.restype = STRING
gpo.po_header_field.argtypes = [STRING, STRING]
gpo.po_header_field.restype = STRING
gpo.po_header_set_field.argtypes = [STRING, STRING, STRING]
gpo.po_header_set_field.restype = STRING
# Locations (filepos)
gpo.po_filepos_file.argtypes = [po_filepos_t]
gpo.po_filepos_file.restype = STRING
gpo.po_filepos_start_line.argtypes = [po_filepos_t]
gpo.po_filepos_start_line.restype = c_int # not strictly true casting
gpo.po_message_filepos.argtypes = [po_message_t, c_int]
gpo.po_message_filepos.restype = po_filepos_t
gpo.po_message_add_filepos.argtypes = [po_message_t, STRING, c_size_t]
gpo.po_message_remove_filepos.argtypes = [po_message_t, c_size_t]
# Iterators
gpo.po_message_iterator.argtypes = [po_file_t, STRING]
gpo.po_message_iterator.restype = po_iterator_t
gpo.po_message_iterator_free.argtypes = [po_iterator_t]
gpo.po_next_message.argtypes = [po_iterator_t]
gpo.po_next_message.restype = po_message_t
gpo.po_message_insert.argtypes = [po_iterator_t, po_message_t]
# Message (get methods)
gpo.po_message_create.restype = po_message_t
gpo.po_message_msgctxt.argtypes = [po_message_t]
gpo.po_message_msgctxt.restype = STRING
gpo.po_message_comments.argtypes = [po_message_t]
gpo.po_message_comments.restype = STRING
gpo.po_message_extracted_comments.argtypes = [po_message_t]
gpo.po_message_extracted_comments.restype = STRING
gpo.po_message_prev_msgctxt.argtypes = [po_message_t]
gpo.po_message_prev_msgctxt.restype = STRING
gpo.po_message_prev_msgid.argtypes = [po_message_t]
gpo.po_message_prev_msgid.restype = STRING
gpo.po_message_prev_msgid_plural.argtypes = [po_message_t]
gpo.po_message_prev_msgid_plural.restype = STRING
gpo.po_message_is_obsolete.argtypes = [po_message_t]
gpo.po_message_is_obsolete.restype = c_int
gpo.po_message_is_fuzzy.argtypes = [po_message_t]
gpo.po_message_is_fuzzy.restype = c_int
gpo.po_message_is_format.argtypes = [po_message_t, STRING]
gpo.po_message_is_format.restype = c_int
gpo.po_message_msgctxt.restype = STRING
gpo.po_message_msgid.argtypes = [po_message_t]
gpo.po_message_msgid.restype = STRING
gpo.po_message_msgid_plural.argtypes = [po_message_t]
gpo.po_message_msgid_plural.restype = STRING
gpo.po_message_msgstr.argtypes = [po_message_t]
gpo.po_message_msgstr.restype = STRING
gpo.po_message_msgstr_plural.argtypes = [po_message_t, c_int]
gpo.po_message_msgstr_plural.restype = STRING
# Message (set methods)
gpo.po_message_set_comments.argtypes = [po_message_t, STRING]
gpo.po_message_set_extracted_comments.argtypes = [po_message_t, STRING]
gpo.po_message_set_prev_msgctxt.argtypes = [po_message_t, STRING]
gpo.po_message_set_prev_msgid.argtypes = [po_message_t, STRING]
gpo.po_message_set_prev_msgid_plural.argtypes = [po_message_t, STRING]
gpo.po_message_set_obsolete.argtypes = [po_message_t, c_int]
gpo.po_message_set_fuzzy.argtypes = [po_message_t, c_int]
gpo.po_message_set_format.argtypes = [po_message_t, STRING, c_int]
gpo.po_message_set_msgctxt.argtypes = [po_message_t, STRING]
gpo.po_message_set_msgid.argtypes = [po_message_t, STRING]
gpo.po_message_set_msgstr.argtypes = [po_message_t, STRING]
gpo.po_message_set_msgstr_plural.argtypes = [po_message_t, c_int, STRING]
gpo.po_message_set_range.argtypes = [po_message_t, c_int, c_int]
# Load libgettextpo
gpo = None
# 'gettextpo' is recognised on Unix, while only 'libgettextpo' is recognised
# on Windows. Therefore we test both.
names = ['gettextpo', 'libgettextpo']
for name in names:
lib_location = ctypes.util.find_library(name)
if lib_location:
gpo = cdll.LoadLibrary(lib_location)
if gpo:
break
else:
# Don't raise exception in Sphinx autodoc [where xml is Mock()ed]. There is
# nothing special about use of xml here - any of the Mock classes set up
# in docs/conf.py would work as well, but xml is likely always to be there.
gpo = None
if 'xml' not in sys.modules or sys.modules['xml'].__path__ != '/dev/null':
# Now we are getting desperate, so let's guess a unix type DLL that
# might be in LD_LIBRARY_PATH or loaded with LD_PRELOAD
try:
gpo = cdll.LoadLibrary('libgettextpo.so')
except OSError:
raise ImportError("gettext PO library not found")
if gpo:
setup_call_types(gpo)
# Setup the po_xerror_handler
xerror_handler = po_xerror_handler()
xerror_handler.xerror = xerror_prototype(xerror_cb)
xerror_handler.xerror2 = xerror2_prototype(xerror2_cb)
def escapeforpo(text):
return pypo.escapeforpo(text)
def quoteforpo(text):
return pypo.quoteforpo(text)
def unquotefrompo(postr):
return pypo.unquotefrompo(postr)
def get_libgettextpo_version():
"""Returns the libgettextpo version
:rtype: three-value tuple
:return: libgettextpo version in the following format::
(major version, minor version, subminor version)
"""
libversion = c_long.in_dll(gpo, 'libgettextpo_version')
major = libversion.value >> 16
minor = (libversion.value >> 8) & 0xff
subminor = libversion.value - (major << 16) - (minor << 8)
return major, minor, subminor
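# e.g. libgettextpo 0.19.8 yields (0, 19, 8); callers below compare the
# result as a tuple, as in ``get_libgettextpo_version() < (0, 17, 0)``.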
def gpo_encode(value):
return value.encode('utf-8') if isinstance(value, str) else value
def gpo_decode(value):
if isinstance(value, str):
return value
elif isinstance(value, bytes):
return value.decode('utf-8')
return value
class pounit(pocommon.pounit):
#: fixed encoding that is always used for cPO structure (self._gpo_message)
CPO_ENC = 'utf-8'
def __init__(self, source=None, encoding='utf-8', gpo_message=None):
self._rich_source = None
self._rich_target = None
encoding = encoding or 'utf-8'
if not gpo_message:
self._gpo_message = gpo.po_message_create()
if source or source == "":
self.source = source
self.target = ""
elif gpo_message:
if encoding.lower() != self.CPO_ENC:
features = ['msgctxt', 'msgid', 'msgid_plural']
features += ['prev_' + x for x in features]
features += ['comments', 'extracted_comments',
'msgstr']
for feature in features:
text = getattr(gpo, 'po_message_' + feature)(gpo_message)
if text:
getattr(gpo, 'po_message_set_' + feature)(
gpo_message,
text.decode(encoding).encode(self.CPO_ENC))
# Also iterate through plural forms
nplural = 0
text = True
while text:
text = gpo.po_message_msgstr_plural(gpo_message, nplural)
if text:
gpo.po_message_set_msgstr_plural(
gpo_message,
text.decode(encoding).encode(self.CPO_ENC),
nplural)
nplural += 1
self._gpo_message = gpo_message
self.infer_state()
def infer_state(self):
#FIXME: do obsolete
if gpo.po_message_is_obsolete(self._gpo_message):
if gpo.po_message_is_fuzzy(self._gpo_message):
self.set_state_n(self.STATE[self.S_FUZZY_OBSOLETE][0])
else:
self.set_state_n(self.STATE[self.S_OBSOLETE][0])
elif gpo.po_message_is_fuzzy(self._gpo_message):
self.set_state_n(self.STATE[self.S_FUZZY][0])
elif self.target:
self.set_state_n(self.STATE[self.S_TRANSLATED][0])
else:
self.set_state_n(self.STATE[self.S_UNTRANSLATED][0])
def setmsgid_plural(self, msgid_plural):
if isinstance(msgid_plural, list):
msgid_plural = "".join(msgid_plural)
gpo.po_message_set_msgid_plural(self._gpo_message, gpo_encode(msgid_plural))
msgid_plural = property(None, setmsgid_plural)
@property
def source(self):
def remove_msgid_comments(text):
if not text:
return text
if text.startswith("_:"):
remainder = re.search(r"_: .*\n(.*)", text)
if remainder:
return remainder.group(1)
else:
return u""
else:
return text
singular = remove_msgid_comments(gpo_decode(gpo.po_message_msgid(self._gpo_message)) or "")
if singular:
if self.hasplural():
multi = multistring(singular)
pluralform = gpo_decode(gpo.po_message_msgid_plural(self._gpo_message)) or ""
multi.strings.append(pluralform)
return multi
else:
return singular
else:
return u""
@source.setter
def source(self, source):
if isinstance(source, multistring):
source = source.strings
if isinstance(source, list):
gpo.po_message_set_msgid(self._gpo_message, gpo_encode(source[0]))
if len(source) > 1:
gpo.po_message_set_msgid_plural(self._gpo_message, gpo_encode(source[1]))
else:
gpo.po_message_set_msgid(self._gpo_message, gpo_encode(source))
gpo.po_message_set_msgid_plural(self._gpo_message, None)
@property
def target(self):
if self.hasplural():
plurals = []
nplural = 0
plural = gpo.po_message_msgstr_plural(self._gpo_message, nplural)
while plural:
plurals.append(plural.decode(self.CPO_ENC))
nplural += 1
plural = gpo.po_message_msgstr_plural(self._gpo_message, nplural)
if plurals:
multi = multistring(plurals)
else:
multi = multistring(u"")
else:
multi = gpo_decode(gpo.po_message_msgstr(self._gpo_message)) or ""
return multi
@target.setter
def target(self, target):
# for plural strings: convert 'target' into a list
if self.hasplural():
if isinstance(target, multistring):
target = target.strings
elif isinstance(target, str):
target = [target]
# for non-plurals: check number of items in 'target'
elif isinstance(target, (dict, list)):
if len(target) == 1:
target = target[0]
else:
raise ValueError("po msgid element has no plural but msgstr has %d elements (%s)" % (len(target), target))
# empty the previous list of messages
# TODO: the "pypo" implementation does not remove the previous items of
# the target, if self.target == target (essentially: comparing only
# the first item of a plural string with the single new string)
# Maybe this behaviour should be unified.
if isinstance(target, (dict, list)):
i = 0
message = gpo.po_message_msgstr_plural(self._gpo_message, i)
while message is not None:
gpo.po_message_set_msgstr_plural(self._gpo_message, i, None)
i += 1
message = gpo.po_message_msgstr_plural(self._gpo_message, i)
# add the items of a list
if isinstance(target, list):
for i, targetstring in enumerate(target):
gpo.po_message_set_msgstr_plural(self._gpo_message, i, gpo_encode(targetstring))
# add the values of a dict
elif isinstance(target, dict):
for i, targetstring in enumerate(target.values()):
gpo.po_message_set_msgstr_plural(self._gpo_message, i, gpo_encode(targetstring))
# add a single string
else:
if target is None:
gpo.po_message_set_msgstr(self._gpo_message, gpo_encode(""))
else:
gpo.po_message_set_msgstr(self._gpo_message, gpo_encode(target))
def getid(self):
"""The unique identifier for this unit according to the conventions in
.mo files.
"""
id = gpo_decode(gpo.po_message_msgid(self._gpo_message)) or ""
# Gettext does not consider the plural to determine duplicates, only
# the msgid. For generation of .mo files, we might want to use this
# code to generate the entry for the hash table, but for now, it is
# commented out for conformance to gettext.
# plural = gpo.po_message_msgid_plural(self._gpo_message)
# if not plural is None:
# id = '%s\0%s' % (id, plural)
context = gpo.po_message_msgctxt(self._gpo_message)
if context:
id = u"%s\04%s" % (gpo_decode(context), id)
return id
def getnotes(self, origin=None):
if origin is None:
comments = (gpo.po_message_comments(self._gpo_message) +
gpo.po_message_extracted_comments(self._gpo_message))
elif origin == "translator":
comments = gpo.po_message_comments(self._gpo_message)
elif origin in ["programmer", "developer", "source code"]:
comments = gpo.po_message_extracted_comments(self._gpo_message)
else:
raise ValueError("Comment type not valid")
        if comments and get_libgettextpo_version() < (0, 17, 0):
            # old libgettextpo keeps the leading space that addnote() adds
            # for these versions; strip it again so notes round-trip cleanly
            # (comments is bytes here, so use bytes operations)
            comments = b"\n".join([line[1:] if line.startswith(b" ") else line
                                   for line in comments.split(b"\n")])
# Let's drop the last newline
return gpo_decode(comments[:-1])
def addnote(self, text, origin=None, position="append"):
# ignore empty strings and strings without non-space characters
if not (text and text.strip()):
return
text = data.forceunicode(text)
oldnotes = self.getnotes(origin)
newnotes = None
if oldnotes:
if position == "append":
newnotes = oldnotes + "\n" + text
elif position == "merge":
if oldnotes != text:
oldnoteslist = oldnotes.split("\n")
for newline in text.split("\n"):
newline = newline.rstrip("\r")
# avoid duplicate comment lines (this might cause some problems)
if newline not in oldnotes or len(newline) < 5:
oldnoteslist.append(newline)
newnotes = "\n".join(oldnoteslist)
else:
newnotes = text + '\n' + oldnotes
else:
newnotes = "\n".join([line.rstrip("\r") for line in text.split("\n")])
if newnotes:
newlines = []
needs_space = get_libgettextpo_version() < (0, 17, 0)
for line in newnotes.split("\n"):
if line and needs_space:
newlines.append(" " + line)
else:
newlines.append(line)
newnotes = gpo_encode("\n".join(newlines))
if origin in ["programmer", "developer", "source code"]:
gpo.po_message_set_extracted_comments(self._gpo_message, newnotes)
else:
gpo.po_message_set_comments(self._gpo_message, newnotes)
def removenotes(self, origin=None):
gpo.po_message_set_comments(self._gpo_message, b"")
def copy(self):
newpo = self.__class__()
newpo._gpo_message = self._gpo_message
return newpo
def merge(self, otherpo, overwrite=False, comments=True, authoritative=False):
"""Merges the otherpo (with the same msgid) into this one.
Overwrite non-blank self.msgstr only if overwrite is True
merge comments only if comments is True
"""
if not isinstance(otherpo, pounit):
super().merge(otherpo, overwrite, comments)
return
if comments:
self.addnote(otherpo.getnotes("translator"), origin="translator", position="merge")
# FIXME mergelists(self.typecomments, otherpo.typecomments)
if not authoritative:
            # We don't bring across otherpo.automaticcomments as we consider
            # ourselves to be the authority. Same applies to otherpo.msgidcomments
self.addnote(otherpo.getnotes("developer"), origin="developer", position="merge")
self.msgidcomment = otherpo._extract_msgidcomments() or None
self.addlocations(otherpo.getlocations())
if not self.istranslated() or overwrite:
# Remove kde-style comments from the translation (if any).
if self._extract_msgidcomments(otherpo.target):
otherpo.target = otherpo.target.replace('_: ' + otherpo._extract_msgidcomments() + '\n', '')
self.target = otherpo.target
if self.source != otherpo.source or self.getcontext() != otherpo.getcontext():
self.markfuzzy()
else:
self.markfuzzy(otherpo.isfuzzy())
elif not otherpo.istranslated():
if self.source != otherpo.source:
self.markfuzzy()
else:
if self.target != otherpo.target:
self.markfuzzy()
def isheader(self):
#return self.source == u"" and self.target != u""
# we really want to make sure that there is no msgidcomment or msgctxt
return self.getid() == "" and len(self.target) > 0
def isblank(self):
return len(self.source) == len(self.target) == len(self.getcontext()) == 0
def hastypecomment(self, typecomment):
return gpo.po_message_is_format(self._gpo_message, gpo_encode(typecomment))
def settypecomment(self, typecomment, present=True):
gpo.po_message_set_format(self._gpo_message, gpo_encode(typecomment), present)
def hasmarkedcomment(self, commentmarker):
commentmarker = "(%s)" % commentmarker
for comment in self.getnotes("translator").split("\n"):
if comment.startswith(commentmarker):
return True
return False
def isfuzzy(self):
return gpo.po_message_is_fuzzy(self._gpo_message)
def _domarkfuzzy(self, present=True):
gpo.po_message_set_fuzzy(self._gpo_message, present)
def makeobsolete(self):
# FIXME: libgettexpo currently does not reset other data, we probably want to do that
# but a better solution would be for libgettextpo to output correct data on serialisation
gpo.po_message_set_obsolete(self._gpo_message, True)
self.infer_state()
def resurrect(self):
gpo.po_message_set_obsolete(self._gpo_message, False)
self.infer_state()
def hasplural(self):
return gpo.po_message_msgid_plural(self._gpo_message) is not None
def _extract_msgidcomments(self, text=None):
"""Extract KDE style msgid comments from the unit.
:rtype: String
:return: Returns the extracted msgidcomments found in this unit's msgid.
"""
if not text:
text = gpo_decode(gpo.po_message_msgid(self._gpo_message)) or ""
if text:
return pocommon.extract_msgid_comment(text)
return u""
def setmsgidcomment(self, msgidcomment):
if msgidcomment:
self.source = u"_: %s\n%s" % (msgidcomment, self.source)
msgidcomment = property(_extract_msgidcomments, setmsgidcomment)
def __str__(self):
pf = pofile(noheader=True)
pf.addunit(self)
return bytes(pf).decode(self.CPO_ENC)
def getlocations(self):
locations = []
i = 0
location = gpo.po_message_filepos(self._gpo_message, i)
while location:
locname = gpo_decode(gpo.po_filepos_file(location))
locline = gpo.po_filepos_start_line(location)
if locline == -1:
locstring = locname
else:
locstring = u":".join([locname, str(locline)])
locations.append(pocommon.unquote_plus(locstring))
i += 1
location = gpo.po_message_filepos(self._gpo_message, i)
return locations
def addlocation(self, location):
if location.find(" ") != -1:
location = pocommon.quote_plus(location)
parts = location.split(":")
if len(parts) == 2 and parts[1].isdigit():
file = parts[0]
line = int(parts[1] or "0")
else:
file = location
line = -1
gpo.po_message_add_filepos(self._gpo_message, gpo_encode(file), line)
def getcontext(self):
msgctxt = gpo.po_message_msgctxt(self._gpo_message)
if msgctxt:
return gpo_decode(msgctxt)
else:
msgidcomment = self._extract_msgidcomments()
return msgidcomment
def setcontext(self, context):
context = data.forceunicode(context)
gpo.po_message_set_msgctxt(self._gpo_message, gpo_encode(context))
@classmethod
def buildfromunit(cls, unit, encoding=None):
"""Build a native unit from a foreign unit, preserving as much
information as possible.
"""
if type(unit) == cls and hasattr(unit, "copy") and callable(unit.copy):
return unit.copy()
elif isinstance(unit, pocommon.pounit):
newunit = cls(unit.source, encoding)
newunit.target = unit.target
#context
newunit.msgidcomment = unit._extract_msgidcomments()
context = unit.getcontext()
if not newunit.msgidcomment and context:
newunit.setcontext(context)
locations = unit.getlocations()
if locations:
newunit.addlocations(locations)
notes = unit.getnotes("developer")
if notes:
newunit.addnote(notes, "developer")
notes = unit.getnotes("translator")
if notes:
newunit.addnote(notes, "translator")
if unit.isobsolete():
newunit.makeobsolete()
newunit.markfuzzy(unit.isfuzzy())
for tc in ['python-format', 'c-format', 'php-format']:
if unit.hastypecomment(tc):
newunit.settypecomment(tc)
# We assume/guess/hope that there will only be one
break
return newunit
else:
return base.TranslationUnit.buildfromunit(unit)
class pofile(pocommon.pofile):
UnitClass = pounit
def __init__(self, inputfile=None, noheader=False, **kwargs):
self._gpo_memory_file = None
self._gpo_message_iterator = None
self.sourcelanguage = None
self.targetlanguage = None
if inputfile is None:
self.units = []
self._encoding = kwargs.get('encoding')
self._gpo_memory_file = gpo.po_file_create()
self._gpo_message_iterator = gpo.po_message_iterator(self._gpo_memory_file, None)
if not noheader:
self.init_headers()
else:
super().__init__(inputfile=inputfile, **kwargs)
def addunit(self, unit, new=True):
if new:
gpo.po_message_insert(self._gpo_message_iterator, unit._gpo_message)
super().addunit(unit)
def _insert_header(self, header):
header._store = self
self.units.insert(0, header)
gpo.po_message_iterator_free(self._gpo_message_iterator)
self._gpo_message_iterator = gpo.po_message_iterator(self._gpo_memory_file, None)
gpo.po_message_insert(self._gpo_message_iterator, header._gpo_message)
while gpo.po_next_message(self._gpo_message_iterator):
pass
def removeduplicates(self, duplicatestyle="merge"):
"""make sure each msgid is unique ; merge comments etc from duplicates into original"""
# TODO: can we handle consecutive calls to removeduplicates()? What
# about files already containing msgctxt? - test
id_dict = {}
uniqueunits = []
# TODO: this is using a list as the pos aren't hashable, but this is slow.
# probably not used frequently enough to worry about it, though.
markedpos = []
def addcomment(thepo):
thepo.msgidcomment = " ".join(thepo.getlocations())
markedpos.append(thepo)
for thepo in self.units:
id = thepo.getid()
if thepo.isheader() and not thepo.getlocations():
# header msgids shouldn't be merged...
uniqueunits.append(thepo)
elif id in id_dict:
if duplicatestyle == "merge":
if id:
id_dict[id].merge(thepo)
else:
addcomment(thepo)
uniqueunits.append(thepo)
elif duplicatestyle == "msgctxt":
origpo = id_dict[id]
if origpo not in markedpos:
origpo.setcontext(" ".join(origpo.getlocations()))
markedpos.append(thepo)
thepo.setcontext(" ".join(thepo.getlocations()))
thepo_msgctxt = gpo.po_message_msgctxt(thepo._gpo_message)
idpo_msgctxt = gpo.po_message_msgctxt(id_dict[id]._gpo_message)
if not thepo_msgctxt == idpo_msgctxt:
uniqueunits.append(thepo)
else:
logger.warning(
"Duplicate unit found with msgctx of '%s' and source '%s'",
thepo_msgctxt,
thepo.source)
else:
if not id:
if duplicatestyle == "merge":
addcomment(thepo)
else:
thepo.setcontext(" ".join(thepo.getlocations()))
id_dict[id] = thepo
uniqueunits.append(thepo)
new_gpo_memory_file = gpo.po_file_create()
new_gpo_message_iterator = gpo.po_message_iterator(new_gpo_memory_file, None)
for unit in uniqueunits:
gpo.po_message_insert(new_gpo_message_iterator, unit._gpo_message)
gpo.po_message_iterator_free(self._gpo_message_iterator)
self._gpo_message_iterator = new_gpo_message_iterator
self._gpo_memory_file = new_gpo_memory_file
self.units = uniqueunits
def serialize(self, out):
def obsolete_workaround():
            # Remove all items that are not output by msgmerge when a unit is
            # obsolete. This is a workaround for a bug in libgettextpo.
# FIXME Do version test in case they fix this bug
for unit in self.units:
if unit.isobsolete():
gpo.po_message_set_extracted_comments(unit._gpo_message, b"")
location = gpo.po_message_filepos(unit._gpo_message, 0)
while location:
gpo.po_message_remove_filepos(unit._gpo_message, 0)
location = gpo.po_message_filepos(unit._gpo_message, 0)
def writefile(filename):
self._gpo_memory_file = gpo.po_file_write_v2(self._gpo_memory_file, gpo_encode(filename), xerror_handler)
with open(filename, 'rb') as tfile:
content = tfile.read()
return content
outputstring = ""
if self._gpo_memory_file:
obsolete_workaround()
f, fname = tempfile.mkstemp(prefix='translate', suffix='.po')
os.close(f)
outputstring = writefile(fname)
if self.encoding != pounit.CPO_ENC:
try:
outputstring = outputstring.decode(pounit.CPO_ENC).encode(self.encoding)
except UnicodeEncodeError:
self.encoding = pounit.CPO_ENC
self.updateheader(content_type="text/plain; charset=UTF-8",
content_transfer_encoding="8bit")
outputstring = writefile(fname)
os.remove(fname)
out.write(outputstring)
def isempty(self):
"""Returns True if the object doesn't contain any translation units."""
if len(self.units) == 0:
return True
# Skip the first unit if it is a header.
if self.units[0].isheader():
units = self.units[1:]
else:
units = self.units
for unit in units:
if not unit.isblank() and not unit.isobsolete():
return False
return True
def parse(self, input):
if hasattr(input, 'name'):
self.filename = input.name
elif not getattr(self, 'filename', ''):
self.filename = ''
if hasattr(input, "read"):
posrc = input.read()
input.close()
input = posrc
needtmpfile = not os.path.isfile(input)
if needtmpfile:
# This is not a file - we write the string to a temporary file
fd, fname = tempfile.mkstemp(prefix='translate', suffix='.po')
os.write(fd, input)
input = fname
os.close(fd)
self._gpo_memory_file = gpo.po_file_read_v3(gpo_encode(input), xerror_handler)
if self._gpo_memory_file is None:
logger.error("Error:")
if needtmpfile:
os.remove(input)
self.units = []
# Handle xerrors here
self._header = gpo.po_file_domain_header(self._gpo_memory_file, None)
if self._header:
charset = gpo_decode(gpo.po_header_field(self._header, gpo_encode("Content-Type")))
            if charset:
                match = re.search(r"charset=([^\s]+)", charset)
                if match:
                    self.encoding = match.group(1)
self._gpo_message_iterator = gpo.po_message_iterator(self._gpo_memory_file, None)
newmessage = gpo.po_next_message(self._gpo_message_iterator)
while newmessage:
newunit = pounit(gpo_message=newmessage, encoding=self.encoding)
self.addunit(newunit, new=False)
newmessage = gpo.po_next_message(self._gpo_message_iterator)
self._free_iterator()
def __del__(self):
        # Freeing is currently disabled because it still triggers
        # segmentation faults; note that this definitely leaks memory.
return
self._free_iterator()
if self._gpo_memory_file is not None:
gpo.po_file_free(self._gpo_memory_file)
self._gpo_memory_file = None
def _free_iterator(self):
        # Freeing is currently disabled because it still triggers
        # segmentation faults; note that this definitely leaks memory.
return
if self._gpo_message_iterator is not None:
gpo.po_message_iterator_free(self._gpo_message_iterator)
self._gpo_message_iterator = None
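# --- Editor's note: hedged usage sketch, not part of the original module. ---
# Assuming this file is translate.storage.cpo and the enclosing class is its
# `pofile` storage class, a typical parse / dedupe / write round-trip looks
# roughly like this:
#
#     from translate.storage import cpo
#
#     store = cpo.pofile()
#     with open("messages.po", "rb") as fh:
#         store.parse(fh.read())              # builds units via libgettextpo
#     store.removeduplicates(duplicatestyle="msgctxt")
#     with open("deduped.po", "wb") as out:
#         store.serialize(out)                # round-trips through a temp file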
|
gpl-2.0
|
msalloum/cs181
|
HomeWork/HW3/TwitterStream/tweetstream.py
|
1
|
2124
|
"""
tweet_read.py
Serve tweets to a socket for spark-streaming.
Adapted from:
http://www.awesomestats.in/spark-twitter-stream/
"""
import tweepy
from tweepy import OAuthHandler
from tweepy import Stream
from tweepy.streaming import StreamListener
import socket
import json
import logging
logger = logging.getLogger("tweetread")
logging.basicConfig(level=logging.INFO)
# NOTE: hard-coded API credentials (kept as in the original); in practice these
# should be loaded from environment variables or a config file.
CONSUMER_KEY = 'tOpKHcNzNpu88PSZxAzCI87Ne'
CONSUMER_SECRET = 'EhSEN89oydJi058EkQP3iMjsVlYw6yLYZ2Uq2UAVSWS43wXju9'
ACCESS_TOKEN = '37501551-hUS1bgjvyBq9H1pplnXkQb1rBIqNfNzPsBHFtx8dw'
ACCESS_SECRET = '0MI19XnM6FXT8D8LWM70KbHYZDp5GefyZpYwD6hUUvtSD'
class TweetsListener(StreamListener):
    def __init__(self, sock):
        super(TweetsListener, self).__init__()  # let StreamListener set up its API handle
        self._sock = sock
        self._count = 0
def on_data(self, data):
try:
msg = json.loads(data)
if 'text' not in msg: return True
text = msg['text'].encode('utf-8')
self._count += 1
            self._sock.send(text + b"\n")  # newline-delimit for line-based consumers
if self._count % 10 == 0:
logger.info("Forwarded %d messages", self._count)
return True
        except Exception as e:  # don't swallow KeyboardInterrupt/SystemExit
logger.exception("Error on_data: %s", e)
return True
def on_error(self, status):
logger.error("API Error status: %d", status)
return True
def sendData(sock):
auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_SECRET)
twitter_stream = Stream(auth, TweetsListener(sock))
twitter_stream.sample()
#twitter_stream.filter(track=['bill nye'])
def main():
    s = socket.socket()  # Create a TCP socket object
    host = "127.0.0.1"   # Serve on the loopback interface
    port = 5555          # Port the Spark client will connect to
s.bind((host, port)) # Bind to the port
logger.info("Listening on port: %d", port)
s.listen(5) # Now wait for client connection.
c, addr = s.accept() # Establish connection with client.
logger.info("Received request from: %s", addr)
sendData(c)
if __name__ == "__main__":
main()
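# --- Editor's note: hedged consumer sketch, not part of the original script. ---
# A minimal PySpark client for the socket this script serves on; the app name
# and the 10-second batch interval are illustrative, assuming a local Spark
# installation:
#
#     from pyspark import SparkContext
#     from pyspark.streaming import StreamingContext
#
#     sc = SparkContext("local[2]", "TweetCount")
#     ssc = StreamingContext(sc, 10)                   # 10-second batches
#     tweets = ssc.socketTextStream("127.0.0.1", 5555)
#     tweets.count().pprint()                          # tweets per batch
#     ssc.start()
#     ssc.awaitTermination()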
|
mit
|
vdjagilev/desefu
|
modules/file/Hash.py
|
1
|
1721
|
from modules import AbstractModule
from kernel.output import Output
import hashlib
class Hash(AbstractModule):
def is_collect_data(self) -> bool:
return True
def check(self):
return True
def description(self) -> str:
return "A module which collects data about file hashes"
def check_arguments(self):
allowed_values = [
'md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'
]
if len(self.args) == 0:
self.args = ['md5', 'sha1', 'sha256']
Output.do("Will calculate only default list of hashes \"%s\"" % self.args)
for hash_name in self.args:
if hash_name not in allowed_values:
Output.err("Unknown hash \"%s\"" % hash_name)
return False
return True
def do_collect_data(self):
for f in self.files:
self.data[f] = []
for h in self.args:
self.data[f].append((h, self.get_file_hash(f, h)))
    def get_file_hash(self, file_path, hash_name):
        # hash_name has already been validated by check_arguments(), so
        # hashlib.new() safely replaces a long if/elif chain over the algorithms.
        hash_func = hashlib.new(hash_name)
with open(file_path, 'rb') as f:
for data_chunk in iter(lambda: f.read(4096), b""):
hash_func.update(data_chunk)
return hash_func.hexdigest()
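# --- Editor's note: hedged usage sketch, not part of the original module. ---
# Outside the desefu kernel, the module's data layout can be reproduced
# directly with hashlib (the function name below is illustrative):
#
#     import hashlib
#
#     def digests(path, algos=("md5", "sha1", "sha256")):
#         # Mirrors Hash.do_collect_data(): a list of (name, hexdigest) pairs.
#         result = []
#         for algo in algos:
#             h = hashlib.new(algo)
#             with open(path, "rb") as fh:
#                 for chunk in iter(lambda: fh.read(4096), b""):
#                     h.update(chunk)
#             result.append((algo, h.hexdigest()))
#         return result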
|
mit
|
orangeduck/PyAutoC
|
Python27/Lib/ctypes/test/test_structures.py
|
17
|
15068
|
import unittest
from ctypes import *
from struct import calcsize
class SubclassesTest(unittest.TestCase):
def test_subclass(self):
class X(Structure):
_fields_ = [("a", c_int)]
class Y(X):
_fields_ = [("b", c_int)]
class Z(X):
pass
self.assertEqual(sizeof(X), sizeof(c_int))
self.assertEqual(sizeof(Y), sizeof(c_int)*2)
self.assertEqual(sizeof(Z), sizeof(c_int))
self.assertEqual(X._fields_, [("a", c_int)])
self.assertEqual(Y._fields_, [("b", c_int)])
self.assertEqual(Z._fields_, [("a", c_int)])
def test_subclass_delayed(self):
class X(Structure):
pass
self.assertEqual(sizeof(X), 0)
X._fields_ = [("a", c_int)]
class Y(X):
pass
self.assertEqual(sizeof(Y), sizeof(X))
Y._fields_ = [("b", c_int)]
class Z(X):
pass
self.assertEqual(sizeof(X), sizeof(c_int))
self.assertEqual(sizeof(Y), sizeof(c_int)*2)
self.assertEqual(sizeof(Z), sizeof(c_int))
self.assertEqual(X._fields_, [("a", c_int)])
self.assertEqual(Y._fields_, [("b", c_int)])
self.assertEqual(Z._fields_, [("a", c_int)])
class StructureTestCase(unittest.TestCase):
formats = {"c": c_char,
"b": c_byte,
"B": c_ubyte,
"h": c_short,
"H": c_ushort,
"i": c_int,
"I": c_uint,
"l": c_long,
"L": c_ulong,
"q": c_longlong,
"Q": c_ulonglong,
"f": c_float,
"d": c_double,
}
def test_simple_structs(self):
for code, tp in self.formats.items():
class X(Structure):
_fields_ = [("x", c_char),
("y", tp)]
self.assertEqual((sizeof(X), code),
(calcsize("c%c0%c" % (code, code)), code))
def test_unions(self):
for code, tp in self.formats.items():
class X(Union):
_fields_ = [("x", c_char),
("y", tp)]
self.assertEqual((sizeof(X), code),
(calcsize("%c" % (code)), code))
def test_struct_alignment(self):
class X(Structure):
_fields_ = [("x", c_char * 3)]
self.assertEqual(alignment(X), calcsize("s"))
self.assertEqual(sizeof(X), calcsize("3s"))
class Y(Structure):
_fields_ = [("x", c_char * 3),
("y", c_int)]
self.assertEqual(alignment(Y), calcsize("i"))
self.assertEqual(sizeof(Y), calcsize("3si"))
class SI(Structure):
_fields_ = [("a", X),
("b", Y)]
self.assertEqual(alignment(SI), max(alignment(Y), alignment(X)))
self.assertEqual(sizeof(SI), calcsize("3s0i 3si 0i"))
class IS(Structure):
_fields_ = [("b", Y),
("a", X)]
        self.assertEqual(alignment(IS), max(alignment(X), alignment(Y)))
self.assertEqual(sizeof(IS), calcsize("3si 3s 0i"))
class XX(Structure):
_fields_ = [("a", X),
("b", X)]
self.assertEqual(alignment(XX), alignment(X))
self.assertEqual(sizeof(XX), calcsize("3s 3s 0s"))
    def test_empty(self):
        # I had problems with these
        #
        # Although these are pathological cases: empty structures!
class X(Structure):
_fields_ = []
class Y(Union):
_fields_ = []
# Is this really the correct alignment, or should it be 0?
self.assertTrue(alignment(X) == alignment(Y) == 1)
self.assertTrue(sizeof(X) == sizeof(Y) == 0)
class XX(Structure):
_fields_ = [("a", X),
("b", X)]
self.assertEqual(alignment(XX), 1)
self.assertEqual(sizeof(XX), 0)
def test_fields(self):
        # Test the offset and size attributes of Structure/Union fields.
class X(Structure):
_fields_ = [("x", c_int),
("y", c_char)]
self.assertEqual(X.x.offset, 0)
self.assertEqual(X.x.size, sizeof(c_int))
self.assertEqual(X.y.offset, sizeof(c_int))
self.assertEqual(X.y.size, sizeof(c_char))
# readonly
self.assertRaises((TypeError, AttributeError), setattr, X.x, "offset", 92)
self.assertRaises((TypeError, AttributeError), setattr, X.x, "size", 92)
class X(Union):
_fields_ = [("x", c_int),
("y", c_char)]
self.assertEqual(X.x.offset, 0)
self.assertEqual(X.x.size, sizeof(c_int))
self.assertEqual(X.y.offset, 0)
self.assertEqual(X.y.size, sizeof(c_char))
# readonly
self.assertRaises((TypeError, AttributeError), setattr, X.x, "offset", 92)
self.assertRaises((TypeError, AttributeError), setattr, X.x, "size", 92)
# XXX Should we check nested data types also?
# offset is always relative to the class...
def test_packed(self):
class X(Structure):
_fields_ = [("a", c_byte),
("b", c_longlong)]
_pack_ = 1
self.assertEqual(sizeof(X), 9)
self.assertEqual(X.b.offset, 1)
class X(Structure):
_fields_ = [("a", c_byte),
("b", c_longlong)]
_pack_ = 2
self.assertEqual(sizeof(X), 10)
self.assertEqual(X.b.offset, 2)
class X(Structure):
_fields_ = [("a", c_byte),
("b", c_longlong)]
_pack_ = 4
self.assertEqual(sizeof(X), 12)
self.assertEqual(X.b.offset, 4)
import struct
longlong_size = struct.calcsize("q")
longlong_align = struct.calcsize("bq") - longlong_size
class X(Structure):
_fields_ = [("a", c_byte),
("b", c_longlong)]
_pack_ = 8
self.assertEqual(sizeof(X), longlong_align + longlong_size)
self.assertEqual(X.b.offset, min(8, longlong_align))
d = {"_fields_": [("a", "b"),
("b", "q")],
"_pack_": -1}
self.assertRaises(ValueError, type(Structure), "X", (Structure,), d)
def test_initializers(self):
class Person(Structure):
_fields_ = [("name", c_char*6),
("age", c_int)]
self.assertRaises(TypeError, Person, 42)
self.assertRaises(ValueError, Person, "asldkjaslkdjaslkdj")
self.assertRaises(TypeError, Person, "Name", "HI")
# short enough
self.assertEqual(Person("12345", 5).name, "12345")
# exact fit
self.assertEqual(Person("123456", 5).name, "123456")
# too long
self.assertRaises(ValueError, Person, "1234567", 5)
def test_conflicting_initializers(self):
class POINT(Structure):
_fields_ = [("x", c_int), ("y", c_int)]
# conflicting positional and keyword args
self.assertRaises(TypeError, POINT, 2, 3, x=4)
self.assertRaises(TypeError, POINT, 2, 3, y=4)
# too many initializers
self.assertRaises(TypeError, POINT, 2, 3, 4)
def test_keyword_initializers(self):
class POINT(Structure):
_fields_ = [("x", c_int), ("y", c_int)]
pt = POINT(1, 2)
self.assertEqual((pt.x, pt.y), (1, 2))
pt = POINT(y=2, x=1)
self.assertEqual((pt.x, pt.y), (1, 2))
def test_invalid_field_types(self):
class POINT(Structure):
pass
self.assertRaises(TypeError, setattr, POINT, "_fields_", [("x", 1), ("y", 2)])
def test_intarray_fields(self):
class SomeInts(Structure):
_fields_ = [("a", c_int * 4)]
# can use tuple to initialize array (but not list!)
self.assertEqual(SomeInts((1, 2)).a[:], [1, 2, 0, 0])
self.assertEqual(SomeInts((1, 2)).a[::], [1, 2, 0, 0])
self.assertEqual(SomeInts((1, 2)).a[::-1], [0, 0, 2, 1])
self.assertEqual(SomeInts((1, 2)).a[::2], [1, 0])
self.assertEqual(SomeInts((1, 2)).a[1:5:6], [2])
self.assertEqual(SomeInts((1, 2)).a[6:4:-1], [])
self.assertEqual(SomeInts((1, 2, 3, 4)).a[:], [1, 2, 3, 4])
self.assertEqual(SomeInts((1, 2, 3, 4)).a[::], [1, 2, 3, 4])
# too long
        # XXX Should this raise ValueError instead of RuntimeError?
self.assertRaises(RuntimeError, SomeInts, (1, 2, 3, 4, 5))
def test_nested_initializers(self):
# test initializing nested structures
class Phone(Structure):
_fields_ = [("areacode", c_char*6),
("number", c_char*12)]
class Person(Structure):
_fields_ = [("name", c_char * 12),
("phone", Phone),
("age", c_int)]
p = Person("Someone", ("1234", "5678"), 5)
self.assertEqual(p.name, "Someone")
self.assertEqual(p.phone.areacode, "1234")
self.assertEqual(p.phone.number, "5678")
self.assertEqual(p.age, 5)
def test_structures_with_wchar(self):
        try:
            c_wchar
        except NameError:
            self.skipTest("ctypes compiled without wchar support")  # no unicode
class PersonW(Structure):
_fields_ = [("name", c_wchar * 12),
("age", c_int)]
p = PersonW(u"Someone")
self.assertEqual(p.name, "Someone")
self.assertEqual(PersonW(u"1234567890").name, u"1234567890")
self.assertEqual(PersonW(u"12345678901").name, u"12345678901")
# exact fit
self.assertEqual(PersonW(u"123456789012").name, u"123456789012")
        # too long
self.assertRaises(ValueError, PersonW, u"1234567890123")
def test_init_errors(self):
class Phone(Structure):
_fields_ = [("areacode", c_char*6),
("number", c_char*12)]
class Person(Structure):
_fields_ = [("name", c_char * 12),
("phone", Phone),
("age", c_int)]
cls, msg = self.get_except(Person, "Someone", (1, 2))
self.assertEqual(cls, RuntimeError)
# In Python 2.5, Exception is a new-style class, and the repr changed
if issubclass(Exception, object):
self.assertEqual(msg,
"(Phone) <type 'exceptions.TypeError'>: "
"expected string or Unicode object, int found")
else:
self.assertEqual(msg,
"(Phone) exceptions.TypeError: "
"expected string or Unicode object, int found")
cls, msg = self.get_except(Person, "Someone", ("a", "b", "c"))
self.assertEqual(cls, RuntimeError)
if issubclass(Exception, object):
self.assertEqual(msg,
"(Phone) <type 'exceptions.TypeError'>: too many initializers")
else:
self.assertEqual(msg, "(Phone) exceptions.TypeError: too many initializers")
def get_except(self, func, *args):
try:
func(*args)
except Exception, detail:
return detail.__class__, str(detail)
## def test_subclass_creation(self):
## meta = type(Structure)
## # same as 'class X(Structure): pass'
## # fails, since we need either a _fields_ or a _abstract_ attribute
## cls, msg = self.get_except(meta, "X", (Structure,), {})
## self.assertEqual((cls, msg),
## (AttributeError, "class must define a '_fields_' attribute"))
def test_abstract_class(self):
class X(Structure):
_abstract_ = "something"
# try 'X()'
cls, msg = self.get_except(eval, "X()", locals())
self.assertEqual((cls, msg), (TypeError, "abstract class"))
def test_methods(self):
## class X(Structure):
## _fields_ = []
self.assertTrue("in_dll" in dir(type(Structure)))
self.assertTrue("from_address" in dir(type(Structure)))
self.assertTrue("in_dll" in dir(type(Structure)))
def test_positional_args(self):
# see also http://bugs.python.org/issue5042
class W(Structure):
_fields_ = [("a", c_int), ("b", c_int)]
class X(W):
_fields_ = [("c", c_int)]
class Y(X):
pass
class Z(Y):
_fields_ = [("d", c_int), ("e", c_int), ("f", c_int)]
z = Z(1, 2, 3, 4, 5, 6)
self.assertEqual((z.a, z.b, z.c, z.d, z.e, z.f),
(1, 2, 3, 4, 5, 6))
z = Z(1)
self.assertEqual((z.a, z.b, z.c, z.d, z.e, z.f),
(1, 0, 0, 0, 0, 0))
self.assertRaises(TypeError, lambda: Z(1, 2, 3, 4, 5, 6, 7))
class PointerMemberTestCase(unittest.TestCase):
def test(self):
# a Structure with a POINTER field
class S(Structure):
_fields_ = [("array", POINTER(c_int))]
s = S()
# We can assign arrays of the correct type
s.array = (c_int * 3)(1, 2, 3)
items = [s.array[i] for i in range(3)]
self.assertEqual(items, [1, 2, 3])
# The following are bugs, but are included here because the unittests
# also describe the current behaviour.
#
# This fails with SystemError: bad arg to internal function
# or with IndexError (with a patch I have)
s.array[0] = 42
items = [s.array[i] for i in range(3)]
self.assertEqual(items, [42, 2, 3])
s.array[0] = 1
## s.array[1] = 42
items = [s.array[i] for i in range(3)]
self.assertEqual(items, [1, 2, 3])
def test_none_to_pointer_fields(self):
class S(Structure):
_fields_ = [("x", c_int),
("p", POINTER(c_int))]
s = S()
s.x = 12345678
s.p = None
self.assertEqual(s.x, 12345678)
class TestRecursiveStructure(unittest.TestCase):
def test_contains_itself(self):
class Recursive(Structure):
pass
try:
Recursive._fields_ = [("next", Recursive)]
except AttributeError, details:
self.assertTrue("Structure or union cannot contain itself" in
str(details))
else:
self.fail("Structure or union cannot contain itself")
def test_vice_versa(self):
class First(Structure):
pass
class Second(Structure):
pass
First._fields_ = [("second", Second)]
try:
Second._fields_ = [("first", First)]
except AttributeError, details:
self.assertTrue("_fields_ is final" in
str(details))
else:
self.fail("AttributeError not raised")
if __name__ == '__main__':
unittest.main()
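# --- Editor's note: hedged illustration, not part of the original test file. ---
# The _pack_ tests above boil down to this: packing removes the natural
# alignment padding between fields (sizes shown assume a typical x86-64 ABI,
# Python 2 print syntax to match the file):
#
#     from ctypes import Structure, c_byte, c_longlong, sizeof
#
#     class Padded(Structure):
#         _fields_ = [("a", c_byte), ("b", c_longlong)]
#
#     class Packed(Structure):
#         _pack_ = 1
#         _fields_ = [("a", c_byte), ("b", c_longlong)]
#
#     print sizeof(Padded)   # typically 16: 7 bytes of padding after "a"
#     print sizeof(Packed)   # 9: c_byte + c_longlong, no padding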
|
bsd-2-clause
|