Dataset schema (one row per source file; ranges as reported by the dataset viewer):

| column | dtype | range / classes |
|---|---|---|
| repo_name | string | lengths 6–100 |
| path | string | lengths 4–294 |
| copies | string | lengths 1–5 |
| size | string | lengths 4–6 |
| content | string | lengths 606–896k |
| license | string | 15 classes |
| var_hash | int64 | -9,223,186,179,200,150,000 to 9,223,291,175B |
| doc_hash | int64 | -9,223,304,365,658,930,000 to 9,223,309,051B |
| line_mean | float64 | 3.5 to 99.8 |
| line_max | int64 | 13 to 999 |
| alpha_frac | float64 | 0.25 to 0.97 |
| autogenerated | bool | 1 class |
jakirkham/nanshe | nanshe/registerer.py | 3 | 5569 | """
The ``registerer`` module allows the registration algorithm to be run.
===============================================================================
Overview
===============================================================================
The ``main`` function starts the registration algorithm and can be called
externally. Example configuration files for the registerer are provided in
the examples_ directory and are named ``registerer``. Any attributes on the
raw dataset are copied to the registered dataset.
.. _examples: http://github.com/nanshe-org/nanshe/tree/master/examples
===============================================================================
API
===============================================================================
"""
__author__ = "John Kirkham <[email protected]>"
__date__ = "$Feb 20, 2015 13:00:51 EST$"
import itertools
import os
import h5py
from nanshe.util import iters, prof
from nanshe.io import hdf5, xjson
from nanshe.imp import registration
# Get the logger
trace_logger = prof.getTraceLogger(__name__)
@prof.log_call(trace_logger)
def main(*argv):
"""
Simple main function (like in C). Takes all arguments (as from
sys.argv) and returns an exit status.
Args:
argv(list): arguments (includes command line call).
Returns:
int: exit code (0 if success)
"""
# Only necessary if running main (normally if calling command line). No
# point in importing otherwise.
import argparse
argv = list(argv)
# Creates command line parser
parser = argparse.ArgumentParser(
description="Parses input from the command line " +
"for a registration job."
)
parser.add_argument("config_filename",
metavar="CONFIG_FILE",
type=str,
help="JSON file that provides configuration options " +
"for how to import TIFF(s)."
)
parser.add_argument("input_filenames",
metavar="INPUT_FILE",
type=str,
nargs=1,
help="HDF5 file to import (this should include a " +
"path to where the internal dataset should be " +
"stored)."
)
parser.add_argument("output_filenames",
metavar="OUTPUT_FILE",
type=str,
nargs=1,
help="HDF5 file to export (this should include a " +
"path to where the internal dataset should be " +
"stored)."
)
# Results of parsing arguments
# (ignore the first one as it is the command line call).
parsed_args = parser.parse_args(argv[1:])
# Go ahead and stuff in parameters with the other parsed_args
parsed_args.parameters = xjson.read_parameters(parsed_args.config_filename)
parsed_args.input_file_components = []
for each_input_filename in parsed_args.input_filenames:
parsed_args.input_file_components.append(
hdf5.serializers.split_hdf5_path(each_input_filename)
)
parsed_args.output_file_components = []
for each_output_filename in parsed_args.output_filenames:
parsed_args.output_file_components.append(
hdf5.serializers.split_hdf5_path(each_output_filename)
)
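    # Illustrative sketch of what split_hdf5_path presumably returns for a
    # combined "file/dataset" argument (inferred from usage below, not from
    # the serializers API docs):
    #     hdf5.serializers.split_hdf5_path("raw.h5/images")
    #     # -> ("raw.h5", "/images")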
for each_input_filename_components, each_output_filename_components in iters.izip(
parsed_args.input_file_components, parsed_args.output_file_components):
with h5py.File(each_input_filename_components[0], "r") as input_file:
with h5py.File(each_output_filename_components[0], "a") as output_file:
data = input_file[each_input_filename_components[1]]
result_filename = registration.register_mean_offsets(
data, to_truncate=True, **parsed_args.parameters
)
with h5py.File(result_filename, "r") as result_file:
result_file.copy(
"reg_frames",
output_file[os.path.dirname(each_output_filename_components[1])],
name=each_output_filename_components[1]
)
if parsed_args.parameters.get("include_shift", False):
result_file.copy(
"space_shift",
output_file[os.path.dirname(each_output_filename_components[1])],
name=each_output_filename_components[1] + "_shift"
)
# Copy all attributes from raw data to the final result.
output = output_file[
each_output_filename_components[1]
]
for each_attr_name in data.attrs:
output.attrs[each_attr_name] = data.attrs[each_attr_name]
                # Remove the intermediate result file. Only remove its
                # directory (below) if our input or output files are not
                # stored there.
                os.remove(result_filename)
in_out_dirnames = set(
os.path.dirname(os.path.abspath(_.filename)) for _ in [
input_file, output_file
]
)
result_dirname = os.path.dirname(result_filename)
if result_dirname not in in_out_dirnames:
os.rmdir(result_dirname)
    return 0
| bsd-3-clause | 8,532,911,601,324,335,000 | -6,934,552,699,141,148,000 | 36.375839 | 93 | 0.530795 | false |
diegocortassa/TACTIC | 3rd_party/site-packages/pytz/__init__.py | 5 | 34206 | '''
datetime.tzinfo timezone definitions generated from the
Olson timezone database:
ftp://elsie.nci.nih.gov/pub/tz*.tar.gz
See the datetime section of the Python Library Reference for information
on how to use these modules.
'''
import sys
import datetime
import os.path
from pytz.exceptions import AmbiguousTimeError
from pytz.exceptions import InvalidTimeError
from pytz.exceptions import NonExistentTimeError
from pytz.exceptions import UnknownTimeZoneError
from pytz.lazy import LazyDict, LazyList, LazySet
from pytz.tzinfo import unpickler
from pytz.tzfile import build_tzinfo
# The IANA (nee Olson) database is updated several times a year.
OLSON_VERSION = '2018c'
VERSION = '2018.3' # Switching to pip compatible version numbering.
__version__ = VERSION
OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling
__all__ = [
'timezone', 'utc', 'country_timezones', 'country_names',
'AmbiguousTimeError', 'InvalidTimeError',
'NonExistentTimeError', 'UnknownTimeZoneError',
'all_timezones', 'all_timezones_set',
'common_timezones', 'common_timezones_set',
]
try:
unicode
except NameError: # Python 3.x
# Python 3.x doesn't have unicode(), making writing code
# for Python 2.3 and Python 3.x a pain.
unicode = str
def ascii(s):
r"""
>>> ascii('Hello')
'Hello'
>>> ascii('\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
UnicodeEncodeError: ...
"""
if type(s) == bytes:
s = s.decode('ASCII')
else:
s.encode('ASCII') # Raise an exception if not ASCII
return s # But the string - not a byte string.
else: # Python 2.x
def ascii(s):
r"""
>>> ascii('Hello')
'Hello'
>>> ascii(u'Hello')
'Hello'
>>> ascii(u'\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
UnicodeEncodeError: ...
"""
return s.encode('ASCII')
def open_resource(name):
"""Open a resource from the zoneinfo subdir for reading.
    Uses the pkg_resources module if it is available and no standard file is
    found at the calculated location.
It is possible to specify different location for zoneinfo
subdir by using the PYTZ_TZDATADIR environment variable.
"""
name_parts = name.lstrip('/').split('/')
for part in name_parts:
if part == os.path.pardir or os.path.sep in part:
raise ValueError('Bad path segment: %r' % part)
zoneinfo_dir = os.environ.get('PYTZ_TZDATADIR', None)
if zoneinfo_dir is not None:
filename = os.path.join(zoneinfo_dir, *name_parts)
else:
filename = os.path.join(os.path.dirname(__file__),
'zoneinfo', *name_parts)
if not os.path.exists(filename):
# http://bugs.launchpad.net/bugs/383171 - we avoid using this
# unless absolutely necessary to help when a broken version of
# pkg_resources is installed.
try:
from pkg_resources import resource_stream
except ImportError:
resource_stream = None
if resource_stream is not None:
return resource_stream(__name__, 'zoneinfo/' + name)
return open(filename, 'rb')
def resource_exists(name):
"""Return true if the given resource exists"""
try:
open_resource(name).close()
return True
except IOError:
return False
_tzinfo_cache = {}
def timezone(zone):
r''' Return a datetime.tzinfo implementation for the given timezone
>>> from datetime import datetime, timedelta
>>> utc = timezone('UTC')
>>> eastern = timezone('US/Eastern')
>>> eastern.zone
'US/Eastern'
>>> timezone(unicode('US/Eastern')) is eastern
True
>>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc)
>>> loc_dt = utc_dt.astimezone(eastern)
>>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
>>> loc_dt.strftime(fmt)
'2002-10-27 01:00:00 EST (-0500)'
>>> (loc_dt - timedelta(minutes=10)).strftime(fmt)
'2002-10-27 00:50:00 EST (-0500)'
>>> eastern.normalize(loc_dt - timedelta(minutes=10)).strftime(fmt)
'2002-10-27 01:50:00 EDT (-0400)'
>>> (loc_dt + timedelta(minutes=10)).strftime(fmt)
'2002-10-27 01:10:00 EST (-0500)'
Raises UnknownTimeZoneError if passed an unknown zone.
>>> try:
... timezone('Asia/Shangri-La')
... except UnknownTimeZoneError:
... print('Unknown')
Unknown
>>> try:
... timezone(unicode('\N{TRADE MARK SIGN}'))
... except UnknownTimeZoneError:
... print('Unknown')
Unknown
'''
if zone.upper() == 'UTC':
return utc
try:
zone = ascii(zone)
except UnicodeEncodeError:
# All valid timezones are ASCII
raise UnknownTimeZoneError(zone)
zone = _unmunge_zone(zone)
if zone not in _tzinfo_cache:
if zone in all_timezones_set:
fp = open_resource(zone)
try:
_tzinfo_cache[zone] = build_tzinfo(zone, fp)
finally:
fp.close()
else:
raise UnknownTimeZoneError(zone)
return _tzinfo_cache[zone]
def _unmunge_zone(zone):
"""Undo the time zone name munging done by older versions of pytz."""
return zone.replace('_plus_', '+').replace('_minus_', '-')
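# e.g. (illustrative): _unmunge_zone('Etc/GMT_plus_5') returns 'Etc/GMT+5'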
ZERO = datetime.timedelta(0)
HOUR = datetime.timedelta(hours=1)
class UTC(datetime.tzinfo):
"""UTC
Optimized UTC implementation. It unpickles using the single module global
instance defined beneath this class declaration.
"""
zone = "UTC"
_utcoffset = ZERO
_dst = ZERO
_tzname = zone
def fromutc(self, dt):
if dt.tzinfo is None:
return self.localize(dt)
return super(utc.__class__, self).fromutc(dt)
def utcoffset(self, dt):
return ZERO
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return ZERO
def __reduce__(self):
return _UTC, ()
def localize(self, dt, is_dst=False):
'''Convert naive time to local time'''
if dt.tzinfo is not None:
raise ValueError('Not naive datetime (tzinfo is already set)')
return dt.replace(tzinfo=self)
def normalize(self, dt, is_dst=False):
'''Correct the timezone information on the given datetime'''
if dt.tzinfo is self:
return dt
if dt.tzinfo is None:
raise ValueError('Naive time - no tzinfo set')
return dt.astimezone(self)
def __repr__(self):
return "<UTC>"
def __str__(self):
return "UTC"
UTC = utc = UTC() # UTC is a singleton
def _UTC():
"""Factory function for utc unpickling.
Makes sure that unpickling a utc instance always returns the same
module global.
    These examples belong in the UTC class above, but it is obscured there;
    they could also go in the README.txt, but as we are not depending on
    Python 2.4, integrating the README.txt examples with the unit tests is
    not trivial.
>>> import datetime, pickle
>>> dt = datetime.datetime(2005, 3, 1, 14, 13, 21, tzinfo=utc)
>>> naive = dt.replace(tzinfo=None)
>>> p = pickle.dumps(dt, 1)
>>> naive_p = pickle.dumps(naive, 1)
>>> len(p) - len(naive_p)
17
>>> new = pickle.loads(p)
>>> new == dt
True
>>> new is dt
False
>>> new.tzinfo is dt.tzinfo
True
>>> utc is UTC is timezone('UTC')
True
>>> utc is timezone('GMT')
False
"""
return utc
_UTC.__safe_for_unpickling__ = True
def _p(*args):
"""Factory function for unpickling pytz tzinfo instances.
Just a wrapper around tzinfo.unpickler to save a few bytes in each pickle
by shortening the path.
"""
return unpickler(*args)
_p.__safe_for_unpickling__ = True
class _CountryTimezoneDict(LazyDict):
"""Map ISO 3166 country code to a list of timezone names commonly used
in that country.
iso3166_code is the two letter code used to identify the country.
>>> def print_list(list_of_strings):
... 'We use a helper so doctests work under Python 2.3 -> 3.x'
... for s in list_of_strings:
... print(s)
>>> print_list(country_timezones['nz'])
Pacific/Auckland
Pacific/Chatham
>>> print_list(country_timezones['ch'])
Europe/Zurich
>>> print_list(country_timezones['CH'])
Europe/Zurich
>>> print_list(country_timezones[unicode('ch')])
Europe/Zurich
>>> print_list(country_timezones['XXX'])
Traceback (most recent call last):
...
KeyError: 'XXX'
Previously, this information was exposed as a function rather than a
dictionary. This is still supported::
>>> print_list(country_timezones('nz'))
Pacific/Auckland
Pacific/Chatham
"""
def __call__(self, iso3166_code):
"""Backwards compatibility."""
return self[iso3166_code]
def _fill(self):
data = {}
zone_tab = open_resource('zone.tab')
try:
for line in zone_tab:
line = line.decode('UTF-8')
if line.startswith('#'):
continue
code, coordinates, zone = line.split(None, 4)[:3]
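                # zone.tab rows are whitespace-separated, roughly (illustrative):
                #     NZ    -3652+17446    Pacific/Auckland
                # Only the first three fields (code, coordinates, zone) are kept.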
if zone not in all_timezones_set:
continue
try:
data[code].append(zone)
except KeyError:
data[code] = [zone]
self.data = data
finally:
zone_tab.close()
country_timezones = _CountryTimezoneDict()
class _CountryNameDict(LazyDict):
'''Dictionary proving ISO3166 code -> English name.
>>> print(country_names['au'])
Australia
'''
def _fill(self):
data = {}
zone_tab = open_resource('iso3166.tab')
try:
for line in zone_tab.readlines():
line = line.decode('UTF-8')
if line.startswith('#'):
continue
code, name = line.split(None, 1)
data[code] = name.strip()
self.data = data
finally:
zone_tab.close()
country_names = _CountryNameDict()
# Time-zone info based solely on fixed offsets
class _FixedOffset(datetime.tzinfo):
zone = None # to match the standard pytz API
def __init__(self, minutes):
if abs(minutes) >= 1440:
raise ValueError("absolute offset is too large", minutes)
self._minutes = minutes
self._offset = datetime.timedelta(minutes=minutes)
def utcoffset(self, dt):
return self._offset
def __reduce__(self):
return FixedOffset, (self._minutes, )
def dst(self, dt):
return ZERO
def tzname(self, dt):
return None
def __repr__(self):
return 'pytz.FixedOffset(%d)' % self._minutes
def localize(self, dt, is_dst=False):
'''Convert naive time to local time'''
if dt.tzinfo is not None:
raise ValueError('Not naive datetime (tzinfo is already set)')
return dt.replace(tzinfo=self)
def normalize(self, dt, is_dst=False):
'''Correct the timezone information on the given datetime'''
if dt.tzinfo is self:
return dt
if dt.tzinfo is None:
raise ValueError('Naive time - no tzinfo set')
return dt.astimezone(self)
def FixedOffset(offset, _tzinfos={}):
"""return a fixed-offset timezone based off a number of minutes.
>>> one = FixedOffset(-330)
>>> one
pytz.FixedOffset(-330)
>>> one.utcoffset(datetime.datetime.now())
datetime.timedelta(-1, 66600)
>>> one.dst(datetime.datetime.now())
datetime.timedelta(0)
>>> two = FixedOffset(1380)
>>> two
pytz.FixedOffset(1380)
>>> two.utcoffset(datetime.datetime.now())
datetime.timedelta(0, 82800)
>>> two.dst(datetime.datetime.now())
datetime.timedelta(0)
    The datetime.timedelta must be between -1 and 1 day, exclusive.
>>> FixedOffset(1440)
Traceback (most recent call last):
...
ValueError: ('absolute offset is too large', 1440)
>>> FixedOffset(-1440)
Traceback (most recent call last):
...
ValueError: ('absolute offset is too large', -1440)
An offset of 0 is special-cased to return UTC.
>>> FixedOffset(0) is UTC
True
There should always be only one instance of a FixedOffset per timedelta.
This should be true for multiple creation calls.
>>> FixedOffset(-330) is one
True
>>> FixedOffset(1380) is two
True
It should also be true for pickling.
>>> import pickle
>>> pickle.loads(pickle.dumps(one)) is one
True
>>> pickle.loads(pickle.dumps(two)) is two
True
"""
if offset == 0:
return UTC
info = _tzinfos.get(offset)
if info is None:
        # We haven't seen this one before, so we need to save it.
        # Use setdefault to avoid a race condition and to make sure we
        # have only one instance.
info = _tzinfos.setdefault(offset, _FixedOffset(offset))
return info
FixedOffset.__safe_for_unpickling__ = True
def _test():
import doctest
sys.path.insert(0, os.pardir)
import pytz
return doctest.testmod(pytz)
if __name__ == '__main__':
_test()
all_timezones = \
['Africa/Abidjan',
'Africa/Accra',
'Africa/Addis_Ababa',
'Africa/Algiers',
'Africa/Asmara',
'Africa/Asmera',
'Africa/Bamako',
'Africa/Bangui',
'Africa/Banjul',
'Africa/Bissau',
'Africa/Blantyre',
'Africa/Brazzaville',
'Africa/Bujumbura',
'Africa/Cairo',
'Africa/Casablanca',
'Africa/Ceuta',
'Africa/Conakry',
'Africa/Dakar',
'Africa/Dar_es_Salaam',
'Africa/Djibouti',
'Africa/Douala',
'Africa/El_Aaiun',
'Africa/Freetown',
'Africa/Gaborone',
'Africa/Harare',
'Africa/Johannesburg',
'Africa/Juba',
'Africa/Kampala',
'Africa/Khartoum',
'Africa/Kigali',
'Africa/Kinshasa',
'Africa/Lagos',
'Africa/Libreville',
'Africa/Lome',
'Africa/Luanda',
'Africa/Lubumbashi',
'Africa/Lusaka',
'Africa/Malabo',
'Africa/Maputo',
'Africa/Maseru',
'Africa/Mbabane',
'Africa/Mogadishu',
'Africa/Monrovia',
'Africa/Nairobi',
'Africa/Ndjamena',
'Africa/Niamey',
'Africa/Nouakchott',
'Africa/Ouagadougou',
'Africa/Porto-Novo',
'Africa/Sao_Tome',
'Africa/Timbuktu',
'Africa/Tripoli',
'Africa/Tunis',
'Africa/Windhoek',
'America/Adak',
'America/Anchorage',
'America/Anguilla',
'America/Antigua',
'America/Araguaina',
'America/Argentina/Buenos_Aires',
'America/Argentina/Catamarca',
'America/Argentina/ComodRivadavia',
'America/Argentina/Cordoba',
'America/Argentina/Jujuy',
'America/Argentina/La_Rioja',
'America/Argentina/Mendoza',
'America/Argentina/Rio_Gallegos',
'America/Argentina/Salta',
'America/Argentina/San_Juan',
'America/Argentina/San_Luis',
'America/Argentina/Tucuman',
'America/Argentina/Ushuaia',
'America/Aruba',
'America/Asuncion',
'America/Atikokan',
'America/Atka',
'America/Bahia',
'America/Bahia_Banderas',
'America/Barbados',
'America/Belem',
'America/Belize',
'America/Blanc-Sablon',
'America/Boa_Vista',
'America/Bogota',
'America/Boise',
'America/Buenos_Aires',
'America/Cambridge_Bay',
'America/Campo_Grande',
'America/Cancun',
'America/Caracas',
'America/Catamarca',
'America/Cayenne',
'America/Cayman',
'America/Chicago',
'America/Chihuahua',
'America/Coral_Harbour',
'America/Cordoba',
'America/Costa_Rica',
'America/Creston',
'America/Cuiaba',
'America/Curacao',
'America/Danmarkshavn',
'America/Dawson',
'America/Dawson_Creek',
'America/Denver',
'America/Detroit',
'America/Dominica',
'America/Edmonton',
'America/Eirunepe',
'America/El_Salvador',
'America/Ensenada',
'America/Fort_Nelson',
'America/Fort_Wayne',
'America/Fortaleza',
'America/Glace_Bay',
'America/Godthab',
'America/Goose_Bay',
'America/Grand_Turk',
'America/Grenada',
'America/Guadeloupe',
'America/Guatemala',
'America/Guayaquil',
'America/Guyana',
'America/Halifax',
'America/Havana',
'America/Hermosillo',
'America/Indiana/Indianapolis',
'America/Indiana/Knox',
'America/Indiana/Marengo',
'America/Indiana/Petersburg',
'America/Indiana/Tell_City',
'America/Indiana/Vevay',
'America/Indiana/Vincennes',
'America/Indiana/Winamac',
'America/Indianapolis',
'America/Inuvik',
'America/Iqaluit',
'America/Jamaica',
'America/Jujuy',
'America/Juneau',
'America/Kentucky/Louisville',
'America/Kentucky/Monticello',
'America/Knox_IN',
'America/Kralendijk',
'America/La_Paz',
'America/Lima',
'America/Los_Angeles',
'America/Louisville',
'America/Lower_Princes',
'America/Maceio',
'America/Managua',
'America/Manaus',
'America/Marigot',
'America/Martinique',
'America/Matamoros',
'America/Mazatlan',
'America/Mendoza',
'America/Menominee',
'America/Merida',
'America/Metlakatla',
'America/Mexico_City',
'America/Miquelon',
'America/Moncton',
'America/Monterrey',
'America/Montevideo',
'America/Montreal',
'America/Montserrat',
'America/Nassau',
'America/New_York',
'America/Nipigon',
'America/Nome',
'America/Noronha',
'America/North_Dakota/Beulah',
'America/North_Dakota/Center',
'America/North_Dakota/New_Salem',
'America/Ojinaga',
'America/Panama',
'America/Pangnirtung',
'America/Paramaribo',
'America/Phoenix',
'America/Port-au-Prince',
'America/Port_of_Spain',
'America/Porto_Acre',
'America/Porto_Velho',
'America/Puerto_Rico',
'America/Punta_Arenas',
'America/Rainy_River',
'America/Rankin_Inlet',
'America/Recife',
'America/Regina',
'America/Resolute',
'America/Rio_Branco',
'America/Rosario',
'America/Santa_Isabel',
'America/Santarem',
'America/Santiago',
'America/Santo_Domingo',
'America/Sao_Paulo',
'America/Scoresbysund',
'America/Shiprock',
'America/Sitka',
'America/St_Barthelemy',
'America/St_Johns',
'America/St_Kitts',
'America/St_Lucia',
'America/St_Thomas',
'America/St_Vincent',
'America/Swift_Current',
'America/Tegucigalpa',
'America/Thule',
'America/Thunder_Bay',
'America/Tijuana',
'America/Toronto',
'America/Tortola',
'America/Vancouver',
'America/Virgin',
'America/Whitehorse',
'America/Winnipeg',
'America/Yakutat',
'America/Yellowknife',
'Antarctica/Casey',
'Antarctica/Davis',
'Antarctica/DumontDUrville',
'Antarctica/Macquarie',
'Antarctica/Mawson',
'Antarctica/McMurdo',
'Antarctica/Palmer',
'Antarctica/Rothera',
'Antarctica/South_Pole',
'Antarctica/Syowa',
'Antarctica/Troll',
'Antarctica/Vostok',
'Arctic/Longyearbyen',
'Asia/Aden',
'Asia/Almaty',
'Asia/Amman',
'Asia/Anadyr',
'Asia/Aqtau',
'Asia/Aqtobe',
'Asia/Ashgabat',
'Asia/Ashkhabad',
'Asia/Atyrau',
'Asia/Baghdad',
'Asia/Bahrain',
'Asia/Baku',
'Asia/Bangkok',
'Asia/Barnaul',
'Asia/Beirut',
'Asia/Bishkek',
'Asia/Brunei',
'Asia/Calcutta',
'Asia/Chita',
'Asia/Choibalsan',
'Asia/Chongqing',
'Asia/Chungking',
'Asia/Colombo',
'Asia/Dacca',
'Asia/Damascus',
'Asia/Dhaka',
'Asia/Dili',
'Asia/Dubai',
'Asia/Dushanbe',
'Asia/Famagusta',
'Asia/Gaza',
'Asia/Harbin',
'Asia/Hebron',
'Asia/Ho_Chi_Minh',
'Asia/Hong_Kong',
'Asia/Hovd',
'Asia/Irkutsk',
'Asia/Istanbul',
'Asia/Jakarta',
'Asia/Jayapura',
'Asia/Jerusalem',
'Asia/Kabul',
'Asia/Kamchatka',
'Asia/Karachi',
'Asia/Kashgar',
'Asia/Kathmandu',
'Asia/Katmandu',
'Asia/Khandyga',
'Asia/Kolkata',
'Asia/Krasnoyarsk',
'Asia/Kuala_Lumpur',
'Asia/Kuching',
'Asia/Kuwait',
'Asia/Macao',
'Asia/Macau',
'Asia/Magadan',
'Asia/Makassar',
'Asia/Manila',
'Asia/Muscat',
'Asia/Nicosia',
'Asia/Novokuznetsk',
'Asia/Novosibirsk',
'Asia/Omsk',
'Asia/Oral',
'Asia/Phnom_Penh',
'Asia/Pontianak',
'Asia/Pyongyang',
'Asia/Qatar',
'Asia/Qyzylorda',
'Asia/Rangoon',
'Asia/Riyadh',
'Asia/Saigon',
'Asia/Sakhalin',
'Asia/Samarkand',
'Asia/Seoul',
'Asia/Shanghai',
'Asia/Singapore',
'Asia/Srednekolymsk',
'Asia/Taipei',
'Asia/Tashkent',
'Asia/Tbilisi',
'Asia/Tehran',
'Asia/Tel_Aviv',
'Asia/Thimbu',
'Asia/Thimphu',
'Asia/Tokyo',
'Asia/Tomsk',
'Asia/Ujung_Pandang',
'Asia/Ulaanbaatar',
'Asia/Ulan_Bator',
'Asia/Urumqi',
'Asia/Ust-Nera',
'Asia/Vientiane',
'Asia/Vladivostok',
'Asia/Yakutsk',
'Asia/Yangon',
'Asia/Yekaterinburg',
'Asia/Yerevan',
'Atlantic/Azores',
'Atlantic/Bermuda',
'Atlantic/Canary',
'Atlantic/Cape_Verde',
'Atlantic/Faeroe',
'Atlantic/Faroe',
'Atlantic/Jan_Mayen',
'Atlantic/Madeira',
'Atlantic/Reykjavik',
'Atlantic/South_Georgia',
'Atlantic/St_Helena',
'Atlantic/Stanley',
'Australia/ACT',
'Australia/Adelaide',
'Australia/Brisbane',
'Australia/Broken_Hill',
'Australia/Canberra',
'Australia/Currie',
'Australia/Darwin',
'Australia/Eucla',
'Australia/Hobart',
'Australia/LHI',
'Australia/Lindeman',
'Australia/Lord_Howe',
'Australia/Melbourne',
'Australia/NSW',
'Australia/North',
'Australia/Perth',
'Australia/Queensland',
'Australia/South',
'Australia/Sydney',
'Australia/Tasmania',
'Australia/Victoria',
'Australia/West',
'Australia/Yancowinna',
'Brazil/Acre',
'Brazil/DeNoronha',
'Brazil/East',
'Brazil/West',
'CET',
'CST6CDT',
'Canada/Atlantic',
'Canada/Central',
'Canada/Eastern',
'Canada/Mountain',
'Canada/Newfoundland',
'Canada/Pacific',
'Canada/Saskatchewan',
'Canada/Yukon',
'Chile/Continental',
'Chile/EasterIsland',
'Cuba',
'EET',
'EST',
'EST5EDT',
'Egypt',
'Eire',
'Etc/GMT',
'Etc/GMT+0',
'Etc/GMT+1',
'Etc/GMT+10',
'Etc/GMT+11',
'Etc/GMT+12',
'Etc/GMT+2',
'Etc/GMT+3',
'Etc/GMT+4',
'Etc/GMT+5',
'Etc/GMT+6',
'Etc/GMT+7',
'Etc/GMT+8',
'Etc/GMT+9',
'Etc/GMT-0',
'Etc/GMT-1',
'Etc/GMT-10',
'Etc/GMT-11',
'Etc/GMT-12',
'Etc/GMT-13',
'Etc/GMT-14',
'Etc/GMT-2',
'Etc/GMT-3',
'Etc/GMT-4',
'Etc/GMT-5',
'Etc/GMT-6',
'Etc/GMT-7',
'Etc/GMT-8',
'Etc/GMT-9',
'Etc/GMT0',
'Etc/Greenwich',
'Etc/UCT',
'Etc/UTC',
'Etc/Universal',
'Etc/Zulu',
'Europe/Amsterdam',
'Europe/Andorra',
'Europe/Astrakhan',
'Europe/Athens',
'Europe/Belfast',
'Europe/Belgrade',
'Europe/Berlin',
'Europe/Bratislava',
'Europe/Brussels',
'Europe/Bucharest',
'Europe/Budapest',
'Europe/Busingen',
'Europe/Chisinau',
'Europe/Copenhagen',
'Europe/Dublin',
'Europe/Gibraltar',
'Europe/Guernsey',
'Europe/Helsinki',
'Europe/Isle_of_Man',
'Europe/Istanbul',
'Europe/Jersey',
'Europe/Kaliningrad',
'Europe/Kiev',
'Europe/Kirov',
'Europe/Lisbon',
'Europe/Ljubljana',
'Europe/London',
'Europe/Luxembourg',
'Europe/Madrid',
'Europe/Malta',
'Europe/Mariehamn',
'Europe/Minsk',
'Europe/Monaco',
'Europe/Moscow',
'Europe/Nicosia',
'Europe/Oslo',
'Europe/Paris',
'Europe/Podgorica',
'Europe/Prague',
'Europe/Riga',
'Europe/Rome',
'Europe/Samara',
'Europe/San_Marino',
'Europe/Sarajevo',
'Europe/Saratov',
'Europe/Simferopol',
'Europe/Skopje',
'Europe/Sofia',
'Europe/Stockholm',
'Europe/Tallinn',
'Europe/Tirane',
'Europe/Tiraspol',
'Europe/Ulyanovsk',
'Europe/Uzhgorod',
'Europe/Vaduz',
'Europe/Vatican',
'Europe/Vienna',
'Europe/Vilnius',
'Europe/Volgograd',
'Europe/Warsaw',
'Europe/Zagreb',
'Europe/Zaporozhye',
'Europe/Zurich',
'GB',
'GB-Eire',
'GMT',
'GMT+0',
'GMT-0',
'GMT0',
'Greenwich',
'HST',
'Hongkong',
'Iceland',
'Indian/Antananarivo',
'Indian/Chagos',
'Indian/Christmas',
'Indian/Cocos',
'Indian/Comoro',
'Indian/Kerguelen',
'Indian/Mahe',
'Indian/Maldives',
'Indian/Mauritius',
'Indian/Mayotte',
'Indian/Reunion',
'Iran',
'Israel',
'Jamaica',
'Japan',
'Kwajalein',
'Libya',
'MET',
'MST',
'MST7MDT',
'Mexico/BajaNorte',
'Mexico/BajaSur',
'Mexico/General',
'NZ',
'NZ-CHAT',
'Navajo',
'PRC',
'PST8PDT',
'Pacific/Apia',
'Pacific/Auckland',
'Pacific/Bougainville',
'Pacific/Chatham',
'Pacific/Chuuk',
'Pacific/Easter',
'Pacific/Efate',
'Pacific/Enderbury',
'Pacific/Fakaofo',
'Pacific/Fiji',
'Pacific/Funafuti',
'Pacific/Galapagos',
'Pacific/Gambier',
'Pacific/Guadalcanal',
'Pacific/Guam',
'Pacific/Honolulu',
'Pacific/Johnston',
'Pacific/Kiritimati',
'Pacific/Kosrae',
'Pacific/Kwajalein',
'Pacific/Majuro',
'Pacific/Marquesas',
'Pacific/Midway',
'Pacific/Nauru',
'Pacific/Niue',
'Pacific/Norfolk',
'Pacific/Noumea',
'Pacific/Pago_Pago',
'Pacific/Palau',
'Pacific/Pitcairn',
'Pacific/Pohnpei',
'Pacific/Ponape',
'Pacific/Port_Moresby',
'Pacific/Rarotonga',
'Pacific/Saipan',
'Pacific/Samoa',
'Pacific/Tahiti',
'Pacific/Tarawa',
'Pacific/Tongatapu',
'Pacific/Truk',
'Pacific/Wake',
'Pacific/Wallis',
'Pacific/Yap',
'Poland',
'Portugal',
'ROC',
'ROK',
'Singapore',
'Turkey',
'UCT',
'US/Alaska',
'US/Aleutian',
'US/Arizona',
'US/Central',
'US/East-Indiana',
'US/Eastern',
'US/Hawaii',
'US/Indiana-Starke',
'US/Michigan',
'US/Mountain',
'US/Pacific',
'US/Samoa',
'UTC',
'Universal',
'W-SU',
'WET',
'Zulu']
all_timezones = LazyList(
tz for tz in all_timezones if resource_exists(tz))
all_timezones_set = LazySet(all_timezones)
common_timezones = \
['Africa/Abidjan',
'Africa/Accra',
'Africa/Addis_Ababa',
'Africa/Algiers',
'Africa/Asmara',
'Africa/Bamako',
'Africa/Bangui',
'Africa/Banjul',
'Africa/Bissau',
'Africa/Blantyre',
'Africa/Brazzaville',
'Africa/Bujumbura',
'Africa/Cairo',
'Africa/Casablanca',
'Africa/Ceuta',
'Africa/Conakry',
'Africa/Dakar',
'Africa/Dar_es_Salaam',
'Africa/Djibouti',
'Africa/Douala',
'Africa/El_Aaiun',
'Africa/Freetown',
'Africa/Gaborone',
'Africa/Harare',
'Africa/Johannesburg',
'Africa/Juba',
'Africa/Kampala',
'Africa/Khartoum',
'Africa/Kigali',
'Africa/Kinshasa',
'Africa/Lagos',
'Africa/Libreville',
'Africa/Lome',
'Africa/Luanda',
'Africa/Lubumbashi',
'Africa/Lusaka',
'Africa/Malabo',
'Africa/Maputo',
'Africa/Maseru',
'Africa/Mbabane',
'Africa/Mogadishu',
'Africa/Monrovia',
'Africa/Nairobi',
'Africa/Ndjamena',
'Africa/Niamey',
'Africa/Nouakchott',
'Africa/Ouagadougou',
'Africa/Porto-Novo',
'Africa/Sao_Tome',
'Africa/Tripoli',
'Africa/Tunis',
'Africa/Windhoek',
'America/Adak',
'America/Anchorage',
'America/Anguilla',
'America/Antigua',
'America/Araguaina',
'America/Argentina/Buenos_Aires',
'America/Argentina/Catamarca',
'America/Argentina/Cordoba',
'America/Argentina/Jujuy',
'America/Argentina/La_Rioja',
'America/Argentina/Mendoza',
'America/Argentina/Rio_Gallegos',
'America/Argentina/Salta',
'America/Argentina/San_Juan',
'America/Argentina/San_Luis',
'America/Argentina/Tucuman',
'America/Argentina/Ushuaia',
'America/Aruba',
'America/Asuncion',
'America/Atikokan',
'America/Bahia',
'America/Bahia_Banderas',
'America/Barbados',
'America/Belem',
'America/Belize',
'America/Blanc-Sablon',
'America/Boa_Vista',
'America/Bogota',
'America/Boise',
'America/Cambridge_Bay',
'America/Campo_Grande',
'America/Cancun',
'America/Caracas',
'America/Cayenne',
'America/Cayman',
'America/Chicago',
'America/Chihuahua',
'America/Costa_Rica',
'America/Creston',
'America/Cuiaba',
'America/Curacao',
'America/Danmarkshavn',
'America/Dawson',
'America/Dawson_Creek',
'America/Denver',
'America/Detroit',
'America/Dominica',
'America/Edmonton',
'America/Eirunepe',
'America/El_Salvador',
'America/Fort_Nelson',
'America/Fortaleza',
'America/Glace_Bay',
'America/Godthab',
'America/Goose_Bay',
'America/Grand_Turk',
'America/Grenada',
'America/Guadeloupe',
'America/Guatemala',
'America/Guayaquil',
'America/Guyana',
'America/Halifax',
'America/Havana',
'America/Hermosillo',
'America/Indiana/Indianapolis',
'America/Indiana/Knox',
'America/Indiana/Marengo',
'America/Indiana/Petersburg',
'America/Indiana/Tell_City',
'America/Indiana/Vevay',
'America/Indiana/Vincennes',
'America/Indiana/Winamac',
'America/Inuvik',
'America/Iqaluit',
'America/Jamaica',
'America/Juneau',
'America/Kentucky/Louisville',
'America/Kentucky/Monticello',
'America/Kralendijk',
'America/La_Paz',
'America/Lima',
'America/Los_Angeles',
'America/Lower_Princes',
'America/Maceio',
'America/Managua',
'America/Manaus',
'America/Marigot',
'America/Martinique',
'America/Matamoros',
'America/Mazatlan',
'America/Menominee',
'America/Merida',
'America/Metlakatla',
'America/Mexico_City',
'America/Miquelon',
'America/Moncton',
'America/Monterrey',
'America/Montevideo',
'America/Montserrat',
'America/Nassau',
'America/New_York',
'America/Nipigon',
'America/Nome',
'America/Noronha',
'America/North_Dakota/Beulah',
'America/North_Dakota/Center',
'America/North_Dakota/New_Salem',
'America/Ojinaga',
'America/Panama',
'America/Pangnirtung',
'America/Paramaribo',
'America/Phoenix',
'America/Port-au-Prince',
'America/Port_of_Spain',
'America/Porto_Velho',
'America/Puerto_Rico',
'America/Punta_Arenas',
'America/Rainy_River',
'America/Rankin_Inlet',
'America/Recife',
'America/Regina',
'America/Resolute',
'America/Rio_Branco',
'America/Santarem',
'America/Santiago',
'America/Santo_Domingo',
'America/Sao_Paulo',
'America/Scoresbysund',
'America/Sitka',
'America/St_Barthelemy',
'America/St_Johns',
'America/St_Kitts',
'America/St_Lucia',
'America/St_Thomas',
'America/St_Vincent',
'America/Swift_Current',
'America/Tegucigalpa',
'America/Thule',
'America/Thunder_Bay',
'America/Tijuana',
'America/Toronto',
'America/Tortola',
'America/Vancouver',
'America/Whitehorse',
'America/Winnipeg',
'America/Yakutat',
'America/Yellowknife',
'Antarctica/Casey',
'Antarctica/Davis',
'Antarctica/DumontDUrville',
'Antarctica/Macquarie',
'Antarctica/Mawson',
'Antarctica/McMurdo',
'Antarctica/Palmer',
'Antarctica/Rothera',
'Antarctica/Syowa',
'Antarctica/Troll',
'Antarctica/Vostok',
'Arctic/Longyearbyen',
'Asia/Aden',
'Asia/Almaty',
'Asia/Amman',
'Asia/Anadyr',
'Asia/Aqtau',
'Asia/Aqtobe',
'Asia/Ashgabat',
'Asia/Atyrau',
'Asia/Baghdad',
'Asia/Bahrain',
'Asia/Baku',
'Asia/Bangkok',
'Asia/Barnaul',
'Asia/Beirut',
'Asia/Bishkek',
'Asia/Brunei',
'Asia/Chita',
'Asia/Choibalsan',
'Asia/Colombo',
'Asia/Damascus',
'Asia/Dhaka',
'Asia/Dili',
'Asia/Dubai',
'Asia/Dushanbe',
'Asia/Famagusta',
'Asia/Gaza',
'Asia/Hebron',
'Asia/Ho_Chi_Minh',
'Asia/Hong_Kong',
'Asia/Hovd',
'Asia/Irkutsk',
'Asia/Jakarta',
'Asia/Jayapura',
'Asia/Jerusalem',
'Asia/Kabul',
'Asia/Kamchatka',
'Asia/Karachi',
'Asia/Kathmandu',
'Asia/Khandyga',
'Asia/Kolkata',
'Asia/Krasnoyarsk',
'Asia/Kuala_Lumpur',
'Asia/Kuching',
'Asia/Kuwait',
'Asia/Macau',
'Asia/Magadan',
'Asia/Makassar',
'Asia/Manila',
'Asia/Muscat',
'Asia/Nicosia',
'Asia/Novokuznetsk',
'Asia/Novosibirsk',
'Asia/Omsk',
'Asia/Oral',
'Asia/Phnom_Penh',
'Asia/Pontianak',
'Asia/Pyongyang',
'Asia/Qatar',
'Asia/Qyzylorda',
'Asia/Riyadh',
'Asia/Sakhalin',
'Asia/Samarkand',
'Asia/Seoul',
'Asia/Shanghai',
'Asia/Singapore',
'Asia/Srednekolymsk',
'Asia/Taipei',
'Asia/Tashkent',
'Asia/Tbilisi',
'Asia/Tehran',
'Asia/Thimphu',
'Asia/Tokyo',
'Asia/Tomsk',
'Asia/Ulaanbaatar',
'Asia/Urumqi',
'Asia/Ust-Nera',
'Asia/Vientiane',
'Asia/Vladivostok',
'Asia/Yakutsk',
'Asia/Yangon',
'Asia/Yekaterinburg',
'Asia/Yerevan',
'Atlantic/Azores',
'Atlantic/Bermuda',
'Atlantic/Canary',
'Atlantic/Cape_Verde',
'Atlantic/Faroe',
'Atlantic/Madeira',
'Atlantic/Reykjavik',
'Atlantic/South_Georgia',
'Atlantic/St_Helena',
'Atlantic/Stanley',
'Australia/Adelaide',
'Australia/Brisbane',
'Australia/Broken_Hill',
'Australia/Currie',
'Australia/Darwin',
'Australia/Eucla',
'Australia/Hobart',
'Australia/Lindeman',
'Australia/Lord_Howe',
'Australia/Melbourne',
'Australia/Perth',
'Australia/Sydney',
'Canada/Atlantic',
'Canada/Central',
'Canada/Eastern',
'Canada/Mountain',
'Canada/Newfoundland',
'Canada/Pacific',
'Europe/Amsterdam',
'Europe/Andorra',
'Europe/Astrakhan',
'Europe/Athens',
'Europe/Belgrade',
'Europe/Berlin',
'Europe/Bratislava',
'Europe/Brussels',
'Europe/Bucharest',
'Europe/Budapest',
'Europe/Busingen',
'Europe/Chisinau',
'Europe/Copenhagen',
'Europe/Dublin',
'Europe/Gibraltar',
'Europe/Guernsey',
'Europe/Helsinki',
'Europe/Isle_of_Man',
'Europe/Istanbul',
'Europe/Jersey',
'Europe/Kaliningrad',
'Europe/Kiev',
'Europe/Kirov',
'Europe/Lisbon',
'Europe/Ljubljana',
'Europe/London',
'Europe/Luxembourg',
'Europe/Madrid',
'Europe/Malta',
'Europe/Mariehamn',
'Europe/Minsk',
'Europe/Monaco',
'Europe/Moscow',
'Europe/Oslo',
'Europe/Paris',
'Europe/Podgorica',
'Europe/Prague',
'Europe/Riga',
'Europe/Rome',
'Europe/Samara',
'Europe/San_Marino',
'Europe/Sarajevo',
'Europe/Saratov',
'Europe/Simferopol',
'Europe/Skopje',
'Europe/Sofia',
'Europe/Stockholm',
'Europe/Tallinn',
'Europe/Tirane',
'Europe/Ulyanovsk',
'Europe/Uzhgorod',
'Europe/Vaduz',
'Europe/Vatican',
'Europe/Vienna',
'Europe/Vilnius',
'Europe/Volgograd',
'Europe/Warsaw',
'Europe/Zagreb',
'Europe/Zaporozhye',
'Europe/Zurich',
'GMT',
'Indian/Antananarivo',
'Indian/Chagos',
'Indian/Christmas',
'Indian/Cocos',
'Indian/Comoro',
'Indian/Kerguelen',
'Indian/Mahe',
'Indian/Maldives',
'Indian/Mauritius',
'Indian/Mayotte',
'Indian/Reunion',
'Pacific/Apia',
'Pacific/Auckland',
'Pacific/Bougainville',
'Pacific/Chatham',
'Pacific/Chuuk',
'Pacific/Easter',
'Pacific/Efate',
'Pacific/Enderbury',
'Pacific/Fakaofo',
'Pacific/Fiji',
'Pacific/Funafuti',
'Pacific/Galapagos',
'Pacific/Gambier',
'Pacific/Guadalcanal',
'Pacific/Guam',
'Pacific/Honolulu',
'Pacific/Kiritimati',
'Pacific/Kosrae',
'Pacific/Kwajalein',
'Pacific/Majuro',
'Pacific/Marquesas',
'Pacific/Midway',
'Pacific/Nauru',
'Pacific/Niue',
'Pacific/Norfolk',
'Pacific/Noumea',
'Pacific/Pago_Pago',
'Pacific/Palau',
'Pacific/Pitcairn',
'Pacific/Pohnpei',
'Pacific/Port_Moresby',
'Pacific/Rarotonga',
'Pacific/Saipan',
'Pacific/Tahiti',
'Pacific/Tarawa',
'Pacific/Tongatapu',
'Pacific/Wake',
'Pacific/Wallis',
'US/Alaska',
'US/Arizona',
'US/Central',
'US/Eastern',
'US/Hawaii',
'US/Mountain',
'US/Pacific',
'UTC']
common_timezones = LazyList(
tz for tz in common_timezones if tz in all_timezones)
common_timezones_set = LazySet(common_timezones)
| epl-1.0 | -2,416,979,886,403,022,000 | 8,744,079,230,191,129,000 | 21.371485 | 77 | 0.649798 | false |
EvanK/ansible | lib/ansible/modules/net_tools/nios/nios_naptr_record.py | 68 | 5884 | #!/usr/bin/python
# Copyright (c) 2018 Red Hat, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
---
module: nios_naptr_record
version_added: "2.7"
author: "Blair Rampling (@brampling)"
short_description: Configure Infoblox NIOS NAPTR records
description:
- Adds and/or removes instances of NAPTR record objects from
Infoblox NIOS servers. This module manages NIOS C(record:naptr) objects
using the Infoblox WAPI interface over REST.
requirements:
- infoblox_client
extends_documentation_fragment: nios
options:
name:
description:
- Specifies the fully qualified hostname to add or remove from
the system
required: true
view:
description:
- Sets the DNS view to associate this a record with. The DNS
view must already be configured on the system
required: true
default: default
aliases:
- dns_view
order:
description:
- Configures the order (0-65535) for this NAPTR record. This parameter
specifies the order in which the NAPTR rules are applied when
multiple rules are present.
required: true
preference:
description:
- Configures the preference (0-65535) for this NAPTR record. The
preference field determines the order NAPTR records are processed
when multiple records with the same order parameter are present.
required: true
replacement:
description:
- Configures the replacement field for this NAPTR record.
For nonterminal NAPTR records, this field specifies the
next domain name to look up.
required: true
services:
description:
- Configures the services field (128 characters maximum) for this
NAPTR record. The services field contains protocol and service
identifiers, such as "http+E2U" or "SIPS+D2T".
required: false
flags:
description:
- Configures the flags field for this NAPTR record. These control the
interpretation of the fields for an NAPTR record object. Supported
values for the flags field are "U", "S", "P" and "A".
required: false
regexp:
description:
- Configures the regexp field for this NAPTR record. This is the
regular expression-based rewriting rule of the NAPTR record. This
should be a POSIX compliant regular expression, including the
substitution rule and flags. Refer to RFC 2915 for the field syntax
details.
required: false
ttl:
description:
- Configures the TTL to be associated with this NAPTR record
extattrs:
description:
- Allows for the configuration of Extensible Attributes on the
instance of the object. This argument accepts a set of key / value
pairs for configuration.
comment:
description:
- Configures a text string comment to be associated with the instance
of this object. The provided text string will be configured on the
object instance.
state:
description:
- Configures the intended state of the instance of the object on
the NIOS server. When this value is set to C(present), the object
is configured on the device and when this value is set to C(absent)
the value is removed (if necessary) from the device.
default: present
choices:
- present
- absent
'''
EXAMPLES = '''
- name: configure a NAPTR record
nios_naptr_record:
name: '*.subscriber-100.ansiblezone.com'
order: 1000
preference: 10
replacement: replacement1.network.ansiblezone.com
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: add a comment to an existing NAPTR record
nios_naptr_record:
name: '*.subscriber-100.ansiblezone.com'
order: 1000
preference: 10
replacement: replacement1.network.ansiblezone.com
comment: this is a test comment
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: remove a NAPTR record from the system
nios_naptr_record:
name: '*.subscriber-100.ansiblezone.com'
order: 1000
preference: 10
replacement: replacement1.network.ansiblezone.com
state: absent
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
'''
RETURN = ''' # '''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
from ansible.module_utils.net_tools.nios.api import WapiModule
def main():
''' Main entry point for module execution
'''
ib_spec = dict(
name=dict(required=True, ib_req=True),
view=dict(default='default', aliases=['dns_view'], ib_req=True),
order=dict(type='int', ib_req=True),
preference=dict(type='int', ib_req=True),
replacement=dict(ib_req=True),
services=dict(),
flags=dict(),
regexp=dict(),
ttl=dict(type='int'),
extattrs=dict(type='dict'),
comment=dict(),
)
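    # ib_spec mirrors the WAPI record:naptr object's fields; ib_req=True
    # appears to mark the arguments used to identify an existing record (an
    # assumption based on usage here, not on WAPI documentation).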
argument_spec = dict(
provider=dict(required=True),
state=dict(default='present', choices=['present', 'absent'])
)
argument_spec.update(ib_spec)
argument_spec.update(WapiModule.provider_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
wapi = WapiModule(module)
result = wapi.run('record:naptr', ib_spec)
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | -8,091,377,778,447,952,000 | -6,486,759,474,799,776,000 | 29.806283 | 92 | 0.669103 | false |
elinebakker/paparazzi | sw/tools/calibration/report_imu_scaled.py | 24 | 4378 | #! /usr/bin/env python
# This file is part of Paparazzi.
#
# Paparazzi is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# Paparazzi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Paparazzi; see the file COPYING. If not, write to
# the Free Software Foundation, 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
#
import sys
import os
from optparse import OptionParser
import calibration_utils
def main():
usage = "usage: %prog [options] log_filename.data" + "\n" + "Run %prog --help to list the options."
parser = OptionParser(usage)
parser.add_option("-i", "--id", dest="ac_id",
action="store",
help="aircraft id to use")
parser.add_option("-p", "--plot",
help="Show sensor plots",
action="store_true", dest="plot")
parser.add_option("-s", "--start", dest="start",
action="store",
type=int, default=0,
help="start time in seconds")
parser.add_option("-e", "--end", dest="end",
action="store",
type=int, default=36000,
help="end time in seconds")
parser.add_option("-v", "--verbose",
action="store_true", dest="verbose")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error("incorrect number of arguments")
else:
if os.path.isfile(args[0]):
filename = args[0]
else:
print(args[0] + " not found")
sys.exit(1)
ac_ids = calibration_utils.get_ids_in_log(filename)
if options.ac_id is None:
if len(ac_ids) == 1:
options.ac_id = ac_ids[0]
else:
parser.error("More than one aircraft id found in log file. Specify the id to use.")
if options.verbose:
print("Using aircraft id "+options.ac_id)
if not filename.endswith(".data"):
parser.error("Please specify a *.data log file")
if options.verbose:
print("reading file "+filename+" for aircraft "+options.ac_id+" and scaled sensors")
#Moved these checks to the command line parser above
#
#if options.start is None:
# options.start = 0
#if options.end is None:
# options.end = 36000
# read scaled sensor measurements from log file
# TBD: Eventually populate the sensor attributes/values with data found in the messages.xml file
sensor_names = [ "ACCEL", "GYRO", "MAG" ]
sensor_attrs = [ [0.0009766, "m/s2", "ax", "ay", "az"], [0.0139882, "deg/s", "gp", "gq", "gr"], [0.0004883, "unit", "mx", "my", "mz"] ]
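    # Assumption (not verified against messages.xml): the leading float in
    # each attribute list is the LSB scale of the raw fixed-point data, e.g.
    # 0.0009766 ~= 2**-10 m/s^2 per count for the accelerometer.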
for sensor_name in sensor_names:
measurements = calibration_utils.read_log_scaled(options.ac_id, filename, sensor_name, options.start, options.end)
if len(measurements) > 0:
if options.verbose:
print("found "+str(len(measurements))+" records")
calibration_utils.print_imu_scaled(sensor_name, measurements, sensor_attrs[sensor_names.index(sensor_name)])
if options.plot:
calibration_utils.plot_imu_scaled(sensor_name, measurements, sensor_attrs[sensor_names.index(sensor_name)])
calibration_utils.plot_imu_scaled_fft(sensor_name, measurements, sensor_attrs[sensor_names.index(sensor_name)])
else:
print("Warning: found zero IMU_"+sensor_name+"_SCALED measurements for aircraft with id "+options.ac_id+" in log file!")
#sys.exit(1)
print("")
# coefficient = calibration_utils.estimate_mag_current_relation(measurements)
# print("")
# print("<define name= \"MAG_X_CURRENT_COEF\" value=\""+str(coefficient[0])+"\"/>")
# print("<define name= \"MAG_Y_CURRENT_COEF\" value=\""+str(coefficient[1])+"\"/>")
# print("<define name= \"MAG_Z_CURRENT_COEF\" value=\""+str(coefficient[2])+"\"/>")
if __name__ == "__main__":
main()
| gpl-2.0 | -1,391,390,899,974,022,400 | -5,406,816,703,940,953,000 | 39.165138 | 140 | 0.607355 | false |
jaimahajan1997/sympy | sympy/functions/special/gamma_functions.py | 22 | 32460 | from __future__ import print_function, division
from sympy.core import Add, S, sympify, oo, pi, Dummy
from sympy.core.function import Function, ArgumentIndexError
from sympy.core.numbers import Rational
from sympy.core.power import Pow
from sympy.core.compatibility import range
from .zeta_functions import zeta
from .error_functions import erf, erfc
from sympy.functions.elementary.exponential import exp, log
from sympy.functions.elementary.integers import ceiling, floor
from sympy.functions.elementary.miscellaneous import sqrt
from sympy.functions.combinatorial.numbers import bernoulli, harmonic
from sympy.functions.combinatorial.factorials import factorial, rf, RisingFactorial
###############################################################################
############################ COMPLETE GAMMA FUNCTION ##########################
###############################################################################
class gamma(Function):
r"""
The gamma function
.. math::
        \Gamma(x) := \int^{\infty}_{0} t^{x-1} e^{-t} \mathrm{d}t.
    The ``gamma`` function implements the function which passes through the
    values of the factorial function, i.e. `\Gamma(n) = (n - 1)!` when n is
    a positive integer. More generally, `\Gamma(z)` is defined in the whole
    complex plane except at the non-positive integers, where it has simple poles.
Examples
========
>>> from sympy import S, I, pi, oo, gamma
>>> from sympy.abc import x
Several special values are known:
>>> gamma(1)
1
>>> gamma(4)
6
>>> gamma(S(3)/2)
sqrt(pi)/2
The Gamma function obeys the mirror symmetry:
>>> from sympy import conjugate
>>> conjugate(gamma(x))
gamma(conjugate(x))
Differentiation with respect to x is supported:
>>> from sympy import diff
>>> diff(gamma(x), x)
gamma(x)*polygamma(0, x)
Series expansion is also supported:
>>> from sympy import series
>>> series(gamma(x), x, 0, 3)
1/x - EulerGamma + x*(EulerGamma**2/2 + pi**2/12) + x**2*(-EulerGamma*pi**2/12 + polygamma(2, 1)/6 - EulerGamma**3/6) + O(x**3)
We can numerically evaluate the gamma function to arbitrary precision
on the whole complex plane:
>>> gamma(pi).evalf(40)
2.288037795340032417959588909060233922890
>>> gamma(1+I).evalf(20)
0.49801566811835604271 - 0.15494982830181068512*I
See Also
========
lowergamma: Lower incomplete gamma function.
uppergamma: Upper incomplete gamma function.
polygamma: Polygamma function.
loggamma: Log Gamma function.
digamma: Digamma function.
trigamma: Trigamma function.
sympy.functions.special.beta_functions.beta: Euler Beta function.
References
==========
.. [1] http://en.wikipedia.org/wiki/Gamma_function
.. [2] http://dlmf.nist.gov/5
.. [3] http://mathworld.wolfram.com/GammaFunction.html
.. [4] http://functions.wolfram.com/GammaBetaErf/Gamma/
"""
unbranched = True
def fdiff(self, argindex=1):
if argindex == 1:
return self.func(self.args[0])*polygamma(0, self.args[0])
else:
raise ArgumentIndexError(self, argindex)
@classmethod
def eval(cls, arg):
if arg.is_Number:
if arg is S.NaN:
return S.NaN
elif arg is S.Infinity:
return S.Infinity
elif arg.is_Integer:
if arg.is_positive:
return factorial(arg - 1)
else:
return S.ComplexInfinity
elif arg.is_Rational:
if arg.q == 2:
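                    # Half-integer arguments use
                    # Gamma(n + 1/2) = (2n - 1)!!/2**n * sqrt(pi); the
                    # negative half-integers follow from the reciprocal relation.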
n = abs(arg.p) // arg.q
if arg.is_positive:
k, coeff = n, S.One
else:
n = k = n + 1
if n & 1 == 0:
coeff = S.One
else:
coeff = S.NegativeOne
for i in range(3, 2*k, 2):
coeff *= i
if arg.is_positive:
return coeff*sqrt(S.Pi) / 2**n
else:
return 2**n*sqrt(S.Pi) / coeff
if arg.is_integer and arg.is_nonpositive:
return S.ComplexInfinity
def _eval_expand_func(self, **hints):
arg = self.args[0]
if arg.is_Rational:
if abs(arg.p) > arg.q:
x = Dummy('x')
n = arg.p // arg.q
p = arg.p - n*arg.q
return self.func(x + n)._eval_expand_func().subs(x, Rational(p, arg.q))
if arg.is_Add:
coeff, tail = arg.as_coeff_add()
if coeff and coeff.q != 1:
intpart = floor(coeff)
tail = (coeff - intpart,) + tail
coeff = intpart
tail = arg._new_rawargs(*tail, reeval=False)
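            # uses the identity Gamma(x + n) = RisingFactorial(x, n)*Gamma(x)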
return self.func(tail)*RisingFactorial(tail, coeff)
return self.func(*self.args)
def _eval_conjugate(self):
return self.func(self.args[0].conjugate())
def _eval_is_real(self):
x = self.args[0]
if x.is_positive or x.is_noninteger:
return True
def _eval_is_positive(self):
x = self.args[0]
if x.is_positive:
return True
elif x.is_noninteger:
return floor(x).is_even
def _eval_rewrite_as_tractable(self, z):
return exp(loggamma(z))
def _eval_rewrite_as_factorial(self, z):
return factorial(z - 1)
def _eval_nseries(self, x, n, logx):
x0 = self.args[0].limit(x, 0)
if not (x0.is_Integer and x0 <= 0):
return super(gamma, self)._eval_nseries(x, n, logx)
t = self.args[0] - x0
return (self.func(t + 1)/rf(self.args[0], -x0 + 1))._eval_nseries(x, n, logx)
def _latex(self, printer, exp=None):
if len(self.args) != 1:
raise ValueError("Args length should be 1")
aa = printer._print(self.args[0])
if exp:
return r'\Gamma^{%s}{\left(%s \right)}' % (printer._print(exp), aa)
else:
return r'\Gamma{\left(%s \right)}' % aa
@staticmethod
def _latex_no_arg(printer):
return r'\Gamma'
###############################################################################
################## LOWER and UPPER INCOMPLETE GAMMA FUNCTIONS #################
###############################################################################
class lowergamma(Function):
r"""
The lower incomplete gamma function.
It can be defined as the meromorphic continuation of
.. math::
\gamma(s, x) := \int_0^x t^{s-1} e^{-t} \mathrm{d}t = \Gamma(s) - \Gamma(s, x).
This can be shown to be the same as
.. math::
\gamma(s, x) = \frac{x^s}{s} {}_1F_1\left({s \atop s+1} \middle| -x\right),
where :math:`{}_1F_1` is the (confluent) hypergeometric function.
Examples
========
>>> from sympy import lowergamma, S
>>> from sympy.abc import s, x
>>> lowergamma(s, x)
lowergamma(s, x)
>>> lowergamma(3, x)
-x**2*exp(-x) - 2*x*exp(-x) + 2 - 2*exp(-x)
>>> lowergamma(-S(1)/2, x)
-2*sqrt(pi)*erf(sqrt(x)) - 2*exp(-x)/sqrt(x)
See Also
========
gamma: Gamma function.
uppergamma: Upper incomplete gamma function.
polygamma: Polygamma function.
loggamma: Log Gamma function.
digamma: Digamma function.
trigamma: Trigamma function.
sympy.functions.special.beta_functions.beta: Euler Beta function.
References
==========
.. [1] http://en.wikipedia.org/wiki/Incomplete_gamma_function#Lower_incomplete_Gamma_function
.. [2] Abramowitz, Milton; Stegun, Irene A., eds. (1965), Chapter 6, Section 5,
Handbook of Mathematical Functions with Formulas, Graphs, and Mathematical Tables
.. [3] http://dlmf.nist.gov/8
.. [4] http://functions.wolfram.com/GammaBetaErf/Gamma2/
.. [5] http://functions.wolfram.com/GammaBetaErf/Gamma3/
"""
def fdiff(self, argindex=2):
from sympy import meijerg, unpolarify
if argindex == 2:
a, z = self.args
return exp(-unpolarify(z))*z**(a - 1)
elif argindex == 1:
a, z = self.args
return gamma(a)*digamma(a) - log(z)*uppergamma(a, z) \
- meijerg([], [1, 1], [0, 0, a], [], z)
else:
raise ArgumentIndexError(self, argindex)
@classmethod
def eval(cls, a, x):
# For lack of a better place, we use this one to extract branching
# information. The following can be
        # found in the literature (cf. references given above), albeit scattered:
# 1) For fixed x != 0, lowergamma(s, x) is an entire function of s
# 2) For fixed positive integers s, lowergamma(s, x) is an entire
# function of x.
# 3) For fixed non-positive integers s,
# lowergamma(s, exp(I*2*pi*n)*x) =
# 2*pi*I*n*(-1)**(-s)/factorial(-s) + lowergamma(s, x)
# (this follows from lowergamma(s, x).diff(x) = x**(s-1)*exp(-x)).
# 4) For fixed non-integral s,
# lowergamma(s, x) = x**s*gamma(s)*lowergamma_unbranched(s, x),
# where lowergamma_unbranched(s, x) is an entire function (in fact
# of both s and x), i.e.
# lowergamma(s, exp(2*I*pi*n)*x) = exp(2*pi*I*n*a)*lowergamma(a, x)
from sympy import unpolarify, I
nx, n = x.extract_branch_factor()
if a.is_integer and a.is_positive:
nx = unpolarify(x)
if nx != x:
return lowergamma(a, nx)
elif a.is_integer and a.is_nonpositive:
if n != 0:
return 2*pi*I*n*(-1)**(-a)/factorial(-a) + lowergamma(a, nx)
elif n != 0:
return exp(2*pi*I*n*a)*lowergamma(a, nx)
# Special values.
if a.is_Number:
# TODO this should be non-recursive
if a is S.One:
return S.One - exp(-x)
elif a is S.Half:
return sqrt(pi)*erf(sqrt(x))
elif a.is_Integer or (2*a).is_Integer:
b = a - 1
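                # recurrence: lowergamma(s + 1, x) = s*lowergamma(s, x) - x**s*exp(-x)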
if b.is_positive:
return b*cls(b, x) - x**b * exp(-x)
if not a.is_Integer:
return (cls(a + 1, x) + x**a * exp(-x))/a
def _eval_evalf(self, prec):
from mpmath import mp, workprec
from sympy import Expr
a = self.args[0]._to_mpmath(prec)
z = self.args[1]._to_mpmath(prec)
with workprec(prec):
res = mp.gammainc(a, 0, z)
return Expr._from_mpmath(res, prec)
def _eval_conjugate(self):
z = self.args[1]
if not z in (S.Zero, S.NegativeInfinity):
return self.func(self.args[0].conjugate(), z.conjugate())
def _eval_rewrite_as_uppergamma(self, s, x):
return gamma(s) - uppergamma(s, x)
def _eval_rewrite_as_expint(self, s, x):
from sympy import expint
if s.is_integer and s.is_nonpositive:
return self
return self.rewrite(uppergamma).rewrite(expint)
@staticmethod
def _latex_no_arg(printer):
return r'\gamma'
class uppergamma(Function):
r"""
The upper incomplete gamma function.
It can be defined as the meromorphic continuation of
.. math::
\Gamma(s, x) := \int_x^\infty t^{s-1} e^{-t} \mathrm{d}t = \Gamma(s) - \gamma(s, x).
where `\gamma(s, x)` is the lower incomplete gamma function,
:class:`lowergamma`. This can be shown to be the same as
.. math::
\Gamma(s, x) = \Gamma(s) - \frac{x^s}{s} {}_1F_1\left({s \atop s+1} \middle| -x\right),
where :math:`{}_1F_1` is the (confluent) hypergeometric function.
The upper incomplete gamma function is also essentially equivalent to the
generalized exponential integral:
.. math::
\operatorname{E}_{n}(x) = \int_{1}^{\infty}{\frac{e^{-xt}}{t^n} \, dt} = x^{n-1}\Gamma(1-n,x).
Examples
========
>>> from sympy import uppergamma, S
>>> from sympy.abc import s, x
>>> uppergamma(s, x)
uppergamma(s, x)
>>> uppergamma(3, x)
x**2*exp(-x) + 2*x*exp(-x) + 2*exp(-x)
>>> uppergamma(-S(1)/2, x)
-2*sqrt(pi)*erfc(sqrt(x)) + 2*exp(-x)/sqrt(x)
>>> uppergamma(-2, x)
expint(3, x)/x**2
See Also
========
gamma: Gamma function.
lowergamma: Lower incomplete gamma function.
polygamma: Polygamma function.
loggamma: Log Gamma function.
digamma: Digamma function.
trigamma: Trigamma function.
sympy.functions.special.beta_functions.beta: Euler Beta function.
References
==========
.. [1] http://en.wikipedia.org/wiki/Incomplete_gamma_function#Upper_incomplete_Gamma_function
.. [2] Abramowitz, Milton; Stegun, Irene A., eds. (1965), Chapter 6, Section 5,
Handbook of Mathematical Functions with Formulas, Graphs, and Mathematical Tables
.. [3] http://dlmf.nist.gov/8
.. [4] http://functions.wolfram.com/GammaBetaErf/Gamma2/
.. [5] http://functions.wolfram.com/GammaBetaErf/Gamma3/
.. [6] http://en.wikipedia.org/wiki/Exponential_integral#Relation_with_other_functions
"""
def fdiff(self, argindex=2):
from sympy import meijerg, unpolarify
if argindex == 2:
a, z = self.args
return -exp(-unpolarify(z))*z**(a - 1)
elif argindex == 1:
a, z = self.args
return uppergamma(a, z)*log(z) + meijerg([], [1, 1], [0, 0, a], [], z)
else:
raise ArgumentIndexError(self, argindex)
def _eval_evalf(self, prec):
from mpmath import mp, workprec
from sympy import Expr
a = self.args[0]._to_mpmath(prec)
z = self.args[1]._to_mpmath(prec)
with workprec(prec):
res = mp.gammainc(a, z, mp.inf)
return Expr._from_mpmath(res, prec)
@classmethod
def eval(cls, a, z):
from sympy import unpolarify, I, expint
if z.is_Number:
if z is S.NaN:
return S.NaN
elif z is S.Infinity:
return S.Zero
elif z is S.Zero:
# TODO: Holds only for Re(a) > 0:
return gamma(a)
# We extract branching information here. C/f lowergamma.
nx, n = z.extract_branch_factor()
if a.is_integer and (a > 0) == True:
nx = unpolarify(z)
if z != nx:
return uppergamma(a, nx)
elif a.is_integer and (a <= 0) == True:
if n != 0:
return -2*pi*I*n*(-1)**(-a)/factorial(-a) + uppergamma(a, nx)
elif n != 0:
return gamma(a)*(1 - exp(2*pi*I*n*a)) + exp(2*pi*I*n*a)*uppergamma(a, nx)
# Special values.
if a.is_Number:
# TODO this should be non-recursive
if a is S.One:
return exp(-z)
elif a is S.Half:
return sqrt(pi)*erfc(sqrt(z))
elif a.is_Integer or (2*a).is_Integer:
b = a - 1
if b.is_positive:
return b*cls(b, z) + z**b * exp(-z)
elif b.is_Integer:
return expint(-b, z)*unpolarify(z)**(b + 1)
if not a.is_Integer:
return (cls(a + 1, z) - z**a * exp(-z))/a
def _eval_conjugate(self):
z = self.args[1]
        if z not in (S.Zero, S.NegativeInfinity):
return self.func(self.args[0].conjugate(), z.conjugate())
def _eval_rewrite_as_lowergamma(self, s, x):
return gamma(s) - lowergamma(s, x)
def _eval_rewrite_as_expint(self, s, x):
from sympy import expint
return expint(1 - s, x)*x**s
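# Illustrative sketch (not part of the original source): the rewrite rules
# above make the two incomplete gamma pieces recombine to the complete gamma
# function. The helper name is hypothetical and never called by the module.
def _demo_incomplete_gamma_split():  # pragma: no cover - illustrative only
    from sympy import Symbol
    s = Symbol("s", positive=True)
    x = Symbol("x", positive=True)
    # lowergamma(s, x).rewrite(uppergamma) == gamma(s) - uppergamma(s, x),
    # so adding uppergamma(s, x) back cancels to gamma(s).
    return lowergamma(s, x).rewrite(uppergamma) + uppergamma(s, x) == gamma(s)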
###############################################################################
###################### POLYGAMMA and LOGGAMMA FUNCTIONS #######################
###############################################################################
class polygamma(Function):
r"""
The function ``polygamma(n, z)`` returns ``log(gamma(z)).diff(n + 1)``.
It is a meromorphic function on `\mathbb{C}` and defined as the (n+1)-th
derivative of the logarithm of the gamma function:
.. math::
\psi^{(n)} (z) := \frac{\mathrm{d}^{n+1}}{\mathrm{d} z^{n+1}} \log\Gamma(z).
Examples
========
Several special values are known:
>>> from sympy import S, polygamma
>>> polygamma(0, 1)
-EulerGamma
>>> polygamma(0, 1/S(2))
-2*log(2) - EulerGamma
>>> polygamma(0, 1/S(3))
-3*log(3)/2 - sqrt(3)*pi/6 - EulerGamma
>>> polygamma(0, 1/S(4))
-3*log(2) - pi/2 - EulerGamma
>>> polygamma(0, 2)
-EulerGamma + 1
>>> polygamma(0, 23)
-EulerGamma + 19093197/5173168
>>> from sympy import oo, I
>>> polygamma(0, oo)
oo
>>> polygamma(0, -oo)
oo
>>> polygamma(0, I*oo)
oo
>>> polygamma(0, -I*oo)
oo
Differentiation with respect to x is supported:
>>> from sympy import Symbol, diff
>>> x = Symbol("x")
>>> diff(polygamma(0, x), x)
polygamma(1, x)
>>> diff(polygamma(0, x), x, 2)
polygamma(2, x)
>>> diff(polygamma(0, x), x, 3)
polygamma(3, x)
>>> diff(polygamma(1, x), x)
polygamma(2, x)
>>> diff(polygamma(1, x), x, 2)
polygamma(3, x)
>>> diff(polygamma(2, x), x)
polygamma(3, x)
>>> diff(polygamma(2, x), x, 2)
polygamma(4, x)
>>> n = Symbol("n")
>>> diff(polygamma(n, x), x)
polygamma(n + 1, x)
>>> diff(polygamma(n, x), x, 2)
polygamma(n + 2, x)
We can rewrite polygamma functions in terms of harmonic numbers:
>>> from sympy import harmonic
>>> polygamma(0, x).rewrite(harmonic)
harmonic(x - 1) - EulerGamma
>>> polygamma(2, x).rewrite(harmonic)
2*harmonic(x - 1, 3) - 2*zeta(3)
>>> ni = Symbol("n", integer=True)
>>> polygamma(ni, x).rewrite(harmonic)
(-1)**(n + 1)*(-harmonic(x - 1, n + 1) + zeta(n + 1))*factorial(n)
See Also
========
gamma: Gamma function.
lowergamma: Lower incomplete gamma function.
uppergamma: Upper incomplete gamma function.
loggamma: Log Gamma function.
digamma: Digamma function.
trigamma: Trigamma function.
sympy.functions.special.beta_functions.beta: Euler Beta function.
References
==========
.. [1] http://en.wikipedia.org/wiki/Polygamma_function
.. [2] http://mathworld.wolfram.com/PolygammaFunction.html
.. [3] http://functions.wolfram.com/GammaBetaErf/PolyGamma/
.. [4] http://functions.wolfram.com/GammaBetaErf/PolyGamma2/
"""
def fdiff(self, argindex=2):
if argindex == 2:
n, z = self.args[:2]
return polygamma(n + 1, z)
else:
raise ArgumentIndexError(self, argindex)
def _eval_is_positive(self):
if self.args[1].is_positive and (self.args[0] > 0) == True:
return self.args[0].is_odd
def _eval_is_negative(self):
if self.args[1].is_positive and (self.args[0] > 0) == True:
return self.args[0].is_even
def _eval_is_real(self):
return self.args[0].is_real
def _eval_aseries(self, n, args0, x, logx):
from sympy import Order
if args0[1] != oo or not \
(self.args[0].is_Integer and self.args[0].is_nonnegative):
return super(polygamma, self)._eval_aseries(n, args0, x, logx)
z = self.args[1]
N = self.args[0]
if N == 0:
# digamma function series
# Abramowitz & Stegun, p. 259, 6.3.18
r = log(z) - 1/(2*z)
o = None
if n < 2:
o = Order(1/z, x)
else:
m = ceiling((n + 1)//2)
l = [bernoulli(2*k) / (2*k*z**(2*k)) for k in range(1, m)]
r -= Add(*l)
o = Order(1/z**(2*m), x)
return r._eval_nseries(x, n, logx) + o
else:
# proper polygamma function
# Abramowitz & Stegun, p. 260, 6.4.10
# We return terms to order higher than O(x**n) on purpose
# -- otherwise we would not be able to return any terms for
# quite a long time!
fac = gamma(N)
e0 = fac + N*fac/(2*z)
m = ceiling((n + 1)//2)
for k in range(1, m):
fac = fac*(2*k + N - 1)*(2*k + N - 2) / ((2*k)*(2*k - 1))
e0 += bernoulli(2*k)*fac/z**(2*k)
o = Order(1/z**(2*m), x)
if n == 0:
o = Order(1/z, x)
elif n == 1:
o = Order(1/z**2, x)
r = e0._eval_nseries(z, n, logx) + o
return (-1 * (-1/z)**N * r)._eval_nseries(x, n, logx)
@classmethod
def eval(cls, n, z):
n, z = list(map(sympify, (n, z)))
from sympy import unpolarify
if n.is_integer:
if n.is_nonnegative:
nz = unpolarify(z)
if z != nz:
return polygamma(n, nz)
if n == -1:
return loggamma(z)
else:
if z.is_Number:
if z is S.NaN:
return S.NaN
elif z is S.Infinity:
if n.is_Number:
if n is S.Zero:
return S.Infinity
else:
return S.Zero
elif z.is_Integer:
if z.is_nonpositive:
return S.ComplexInfinity
else:
if n is S.Zero:
return -S.EulerGamma + harmonic(z - 1, 1)
elif n.is_odd:
return (-1)**(n + 1)*factorial(n)*zeta(n + 1, z)
if n == 0:
if z is S.NaN:
return S.NaN
elif z.is_Rational:
# TODO actually *any* n/m can be done, but that is messy
lookup = {S(1)/2: -2*log(2) - S.EulerGamma,
S(1)/3: -S.Pi/2/sqrt(3) - 3*log(3)/2 - S.EulerGamma,
S(1)/4: -S.Pi/2 - 3*log(2) - S.EulerGamma,
S(3)/4: -3*log(2) - S.EulerGamma + S.Pi/2,
S(2)/3: -3*log(3)/2 + S.Pi/2/sqrt(3) - S.EulerGamma}
if z > 0:
n = floor(z)
z0 = z - n
if z0 in lookup:
return lookup[z0] + Add(*[1/(z0 + k) for k in range(n)])
elif z < 0:
n = floor(1 - z)
z0 = z + n
if z0 in lookup:
return lookup[z0] - Add(*[1/(z0 - 1 - k) for k in range(n)])
elif z in (S.Infinity, S.NegativeInfinity):
return S.Infinity
else:
t = z.extract_multiplicatively(S.ImaginaryUnit)
if t in (S.Infinity, S.NegativeInfinity):
return S.Infinity
# TODO n == 1 also can do some rational z
def _eval_expand_func(self, **hints):
n, z = self.args
if n.is_Integer and n.is_nonnegative:
if z.is_Add:
coeff = z.args[0]
if coeff.is_Integer:
e = -(n + 1)
if coeff > 0:
tail = Add(*[Pow(
z - i, e) for i in range(1, int(coeff) + 1)])
else:
tail = -Add(*[Pow(
z + i, e) for i in range(0, int(-coeff))])
return polygamma(n, z - coeff) + (-1)**n*factorial(n)*tail
elif z.is_Mul:
coeff, z = z.as_two_terms()
if coeff.is_Integer and coeff.is_positive:
tail = [ polygamma(n, z + Rational(
i, coeff)) for i in range(0, int(coeff)) ]
if n == 0:
return Add(*tail)/coeff + log(coeff)
else:
return Add(*tail)/coeff**(n + 1)
z *= coeff
return polygamma(n, z)
def _eval_rewrite_as_zeta(self, n, z):
if n >= S.One:
return (-1)**(n + 1)*factorial(n)*zeta(n + 1, z)
else:
return self
def _eval_rewrite_as_harmonic(self, n, z):
if n.is_integer:
if n == S.Zero:
return harmonic(z - 1) - S.EulerGamma
else:
return S.NegativeOne**(n+1) * factorial(n) * (zeta(n+1) - harmonic(z-1, n+1))
def _eval_as_leading_term(self, x):
from sympy import Order
n, z = [a.as_leading_term(x) for a in self.args]
o = Order(z, x)
if n == 0 and o.contains(1/x):
return o.getn() * log(x)
else:
return self.func(n, z)
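# Illustrative sketch (not from the original source): the Add-shift recurrence
# applied by polygamma._eval_expand_func above, reached through the public
# expand_func interface. The helper is hypothetical and never called.
def _demo_polygamma_expand():  # pragma: no cover - illustrative only
    from sympy import Symbol, expand_func
    x = Symbol("x")
    # digamma recurrence: polygamma(0, x + 1) == polygamma(0, x) + 1/x
    return expand_func(polygamma(0, x + 1)) == polygamma(0, x) + 1/x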
class loggamma(Function):
r"""
The ``loggamma`` function implements the logarithm of the
    gamma function, i.e. `\log\Gamma(x)`.
Examples
========
Several special values are known. For numerical integral
arguments we have:
>>> from sympy import loggamma
>>> loggamma(-2)
oo
>>> loggamma(0)
oo
>>> loggamma(1)
0
>>> loggamma(2)
0
>>> loggamma(3)
log(2)
and for symbolic values:
>>> from sympy import Symbol
>>> n = Symbol("n", integer=True, positive=True)
>>> loggamma(n)
log(gamma(n))
>>> loggamma(-n)
oo
for half-integral values:
>>> from sympy import S, pi
>>> loggamma(S(5)/2)
log(3*sqrt(pi)/4)
>>> loggamma(n/2)
log(2**(-n + 1)*sqrt(pi)*gamma(n)/gamma(n/2 + 1/2))
and general rational arguments:
>>> from sympy import expand_func
>>> L = loggamma(S(16)/3)
>>> expand_func(L).doit()
-5*log(3) + loggamma(1/3) + log(4) + log(7) + log(10) + log(13)
>>> L = loggamma(S(19)/4)
>>> expand_func(L).doit()
-4*log(4) + loggamma(3/4) + log(3) + log(7) + log(11) + log(15)
>>> L = loggamma(S(23)/7)
>>> expand_func(L).doit()
-3*log(7) + log(2) + loggamma(2/7) + log(9) + log(16)
The loggamma function has the following limits towards infinity:
>>> from sympy import oo
>>> loggamma(oo)
oo
>>> loggamma(-oo)
zoo
The loggamma function obeys the mirror symmetry
if `x \in \mathbb{C} \setminus \{-\infty, 0\}`:
>>> from sympy.abc import x
>>> from sympy import conjugate
>>> conjugate(loggamma(x))
loggamma(conjugate(x))
Differentiation with respect to x is supported:
>>> from sympy import diff
>>> diff(loggamma(x), x)
polygamma(0, x)
Series expansion is also supported:
>>> from sympy import series
>>> series(loggamma(x), x, 0, 4)
-log(x) - EulerGamma*x + pi**2*x**2/12 + x**3*polygamma(2, 1)/6 + O(x**4)
    We can numerically evaluate the ``loggamma`` function to arbitrary precision
on the whole complex plane:
>>> from sympy import I
>>> loggamma(5).evalf(30)
3.17805383034794561964694160130
>>> loggamma(I).evalf(20)
-0.65092319930185633889 - 1.8724366472624298171*I
See Also
========
gamma: Gamma function.
lowergamma: Lower incomplete gamma function.
uppergamma: Upper incomplete gamma function.
polygamma: Polygamma function.
digamma: Digamma function.
trigamma: Trigamma function.
sympy.functions.special.beta_functions.beta: Euler Beta function.
References
==========
.. [1] http://en.wikipedia.org/wiki/Gamma_function
.. [2] http://dlmf.nist.gov/5
.. [3] http://mathworld.wolfram.com/LogGammaFunction.html
.. [4] http://functions.wolfram.com/GammaBetaErf/LogGamma/
"""
@classmethod
def eval(cls, z):
z = sympify(z)
if z.is_integer:
if z.is_nonpositive:
return S.Infinity
elif z.is_positive:
return log(gamma(z))
elif z.is_rational:
p, q = z.as_numer_denom()
# Half-integral values:
if p.is_positive and q == 2:
return log(sqrt(S.Pi) * 2**(1 - p) * gamma(p) / gamma((p + 1)*S.Half))
if z is S.Infinity:
return S.Infinity
elif abs(z) is S.Infinity:
return S.ComplexInfinity
if z is S.NaN:
return S.NaN
def _eval_expand_func(self, **hints):
from sympy import Sum
z = self.args[0]
if z.is_Rational:
p, q = z.as_numer_denom()
# General rational arguments (u + p/q)
# Split z as n + p/q with p < q
n = p // q
p = p - n*q
if p.is_positive and q.is_positive and p < q:
k = Dummy("k")
if n.is_positive:
return loggamma(p / q) - n*log(q) + Sum(log((k - 1)*q + p), (k, 1, n))
elif n.is_negative:
return loggamma(p / q) - n*log(q) + S.Pi*S.ImaginaryUnit*n - Sum(log(k*q - p), (k, 1, -n))
elif n.is_zero:
return loggamma(p / q)
return self
def _eval_nseries(self, x, n, logx=None):
x0 = self.args[0].limit(x, 0)
if x0 is S.Zero:
f = self._eval_rewrite_as_intractable(*self.args)
return f._eval_nseries(x, n, logx)
return super(loggamma, self)._eval_nseries(x, n, logx)
def _eval_aseries(self, n, args0, x, logx):
from sympy import Order
if args0[0] != oo:
return super(loggamma, self)._eval_aseries(n, args0, x, logx)
z = self.args[0]
m = min(n, ceiling((n + S(1))/2))
r = log(z)*(z - S(1)/2) - z + log(2*pi)/2
l = [bernoulli(2*k) / (2*k*(2*k - 1)*z**(2*k - 1)) for k in range(1, m)]
o = None
if m == 0:
o = Order(1, x)
else:
o = Order(1/z**(2*m - 1), x)
# It is very inefficient to first add the order and then do the nseries
return (r + Add(*l))._eval_nseries(x, n, logx) + o
def _eval_rewrite_as_intractable(self, z):
return log(gamma(z))
def _eval_is_real(self):
return self.args[0].is_real
def _eval_conjugate(self):
z = self.args[0]
        if z not in (S.Zero, S.NegativeInfinity):
return self.func(z.conjugate())
def fdiff(self, argindex=1):
if argindex == 1:
return polygamma(0, self.args[0])
else:
raise ArgumentIndexError(self, argindex)
def _sage_(self):
import sage.all as sage
return sage.log_gamma(self.args[0]._sage_())
def digamma(x):
r"""
    The digamma function is the first derivative of the loggamma function, i.e.:
    .. math::
        \psi(z) := \frac{\mathrm{d}}{\mathrm{d} z} \log\Gamma(z)
            = \frac{\Gamma'(z)}{\Gamma(z)}
In this case, ``digamma(z) = polygamma(0, z)``.
See Also
========
gamma: Gamma function.
lowergamma: Lower incomplete gamma function.
uppergamma: Upper incomplete gamma function.
polygamma: Polygamma function.
loggamma: Log Gamma function.
trigamma: Trigamma function.
sympy.functions.special.beta_functions.beta: Euler Beta function.
References
==========
.. [1] http://en.wikipedia.org/wiki/Digamma_function
.. [2] http://mathworld.wolfram.com/DigammaFunction.html
.. [3] http://functions.wolfram.com/GammaBetaErf/PolyGamma2/
"""
return polygamma(0, x)
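# Illustrative sketch (not from the original source): since digamma(x) is just
# polygamma(0, x), differentiation chains through polygamma.fdiff above.
def _demo_digamma_derivative():  # pragma: no cover - illustrative only
    from sympy import Symbol, diff
    x = Symbol("x")
    return diff(digamma(x), x) == polygamma(1, x)  # True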
def trigamma(x):
r"""
    The trigamma function is the second derivative of the loggamma function, i.e.:
.. math::
\psi^{(1)}(z) := \frac{\mathrm{d}^{2}}{\mathrm{d} z^{2}} \log\Gamma(z).
In this case, ``trigamma(z) = polygamma(1, z)``.
See Also
========
gamma: Gamma function.
lowergamma: Lower incomplete gamma function.
uppergamma: Upper incomplete gamma function.
polygamma: Polygamma function.
loggamma: Log Gamma function.
digamma: Digamma function.
sympy.functions.special.beta_functions.beta: Euler Beta function.
References
==========
.. [1] http://en.wikipedia.org/wiki/Trigamma_function
.. [2] http://mathworld.wolfram.com/TrigammaFunction.html
.. [3] http://functions.wolfram.com/GammaBetaErf/PolyGamma2/
"""
return polygamma(1, x)
| bsd-3-clause | -3,181,680,509,414,293,500 | 6,936,937,617,660,353,000 | 31.39521 | 131 | 0.516697 | false |
synctree/synctree-awsebcli | ebcli/operations/deployops.py | 1 | 1403 | # Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from ..lib import elasticbeanstalk, aws
from ..core import io
from . import commonops
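# Hedged usage sketch (not part of the original module): how deploy() below is
# typically invoked. The application and environment names are hypothetical,
# a configured AWS account is assumed, and the helper is never called here.
def _example_deploy_usage():  # pragma: no cover - illustrative only
    deploy('my-app', 'my-env', version=None, label='v42',
           message='deploy build v42', timeout=10)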
def deploy(app_name, env_name, version, label, message, staged=False,
timeout=5):
region_name = aws.get_region_name()
io.log_info('Deploying code to ' + env_name + " in region " + (region_name or 'default'))
if version:
app_version_label = version
else:
# Create app version
app_version_label = commonops.create_app_version(
app_name, label=label, message=message, staged=staged)
# swap env to new app version
request_id = elasticbeanstalk.update_env_application_version(
env_name, app_version_label)
commonops.wait_for_success_events(request_id,
timeout_in_minutes=timeout,
can_abort=True) | apache-2.0 | -4,556,731,000,948,679,700 | 6,590,282,424,158,412,000 | 35.947368 | 93 | 0.669993 | false |
diego-d5000/MisValesMd | env/lib/python2.7/site-packages/MySQLdb/times.py | 76 | 3488 | """times module
This module provides some Date and Time classes for dealing with MySQL data.
Use Python datetime module to handle date and time columns."""
import math
from time import localtime
from datetime import date, datetime, time, timedelta
from _mysql import string_literal
Date = date
Time = time
TimeDelta = timedelta
Timestamp = datetime
DateTimeDeltaType = timedelta
DateTimeType = datetime
def DateFromTicks(ticks):
"""Convert UNIX ticks into a date instance."""
return date(*localtime(ticks)[:3])
def TimeFromTicks(ticks):
"""Convert UNIX ticks into a time instance."""
return time(*localtime(ticks)[3:6])
def TimestampFromTicks(ticks):
"""Convert UNIX ticks into a datetime instance."""
return datetime(*localtime(ticks)[:6])
format_TIME = format_DATE = str
def format_TIMEDELTA(v):
seconds = int(v.seconds) % 60
minutes = int(v.seconds / 60) % 60
hours = int(v.seconds / 3600) % 24
return '%d %d:%d:%d' % (v.days, hours, minutes, seconds)
def format_TIMESTAMP(d):
return d.isoformat(" ")
def DateTime_or_None(s):
if ' ' in s:
sep = ' '
elif 'T' in s:
sep = 'T'
else:
return Date_or_None(s)
try:
d, t = s.split(sep, 1)
if '.' in t:
t, ms = t.split('.',1)
ms = ms.ljust(6, '0')
else:
ms = 0
return datetime(*[ int(x) for x in d.split('-')+t.split(':')+[ms] ])
except (SystemExit, KeyboardInterrupt):
raise
except:
return Date_or_None(s)
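# Illustrative sketch (not part of the original module): what the converter
# above yields for a well-formed and for a malformed MySQL DATETIME string.
def _demo_datetime_conversion():  # pragma: no cover - illustrative only
    assert DateTime_or_None("2001-02-03 04:05:06.5") == \
        datetime(2001, 2, 3, 4, 5, 6, 500000)
    assert DateTime_or_None("not-a-date") is None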
def TimeDelta_or_None(s):
try:
h, m, s = s.split(':')
if '.' in s:
s, ms = s.split('.')
ms = ms.ljust(6, '0')
else:
ms = 0
h, m, s, ms = int(h), int(m), int(s), int(ms)
td = timedelta(hours=abs(h), minutes=m, seconds=s,
microseconds=ms)
if h < 0:
return -td
else:
return td
except ValueError:
# unpacking or int/float conversion failed
return None
def Time_or_None(s):
try:
h, m, s = s.split(':')
if '.' in s:
s, ms = s.split('.')
ms = ms.ljust(6, '0')
else:
ms = 0
h, m, s, ms = int(h), int(m), int(s), int(ms)
return time(hour=h, minute=m, second=s,
microsecond=ms)
except ValueError:
return None
def Date_or_None(s):
try:
return date(*[ int(x) for x in s.split('-',2)])
except (SystemExit, KeyboardInterrupt):
raise
except:
return None
def DateTime2literal(d, c):
"""Format a DateTime object as an ISO timestamp."""
return string_literal(format_TIMESTAMP(d),c)
def DateTimeDelta2literal(d, c):
"""Format a DateTimeDelta object as a time."""
return string_literal(format_TIMEDELTA(d),c)
def mysql_timestamp_converter(s):
"""Convert a MySQL TIMESTAMP to a Timestamp object."""
# MySQL>4.1 returns TIMESTAMP in the same format as DATETIME
if s[4] == '-': return DateTime_or_None(s)
s = s + "0"*(14-len(s)) # padding
parts = map(int, filter(None, (s[:4],s[4:6],s[6:8],
s[8:10],s[10:12],s[12:14])))
try:
return Timestamp(*parts)
except (SystemExit, KeyboardInterrupt):
raise
except:
return None
| mit | -6,225,257,277,810,086,000 | -2,242,819,044,597,594,000 | 25.68254 | 76 | 0.540998 | false |
Esri/ArcREST | samples/update_user_password.py | 5 | 1203 | """
Update a user's password
version 3.5.x
Python 2/3
"""
from __future__ import print_function
from arcresthelper import securityhandlerhelper
import arcrest
if __name__ == "__main__":
    username = ''  # Username
proxy_port = None
proxy_url = None
securityinfo = {}
securityinfo['security_type'] = 'Portal'#LDAP, NTLM, OAuth, Portal, PKI, ArcGIS
securityinfo['username'] = "" #User Name
securityinfo['password'] = "" #password
securityinfo['org_url'] = "https://www.arcgis.com"
securityinfo['proxy_url'] = proxy_url
securityinfo['proxy_port'] = proxy_port
securityinfo['referer_url'] = None
securityinfo['token_url'] = None
securityinfo['certificatefile'] = None
securityinfo['keyfile'] = None
securityinfo['client_id'] = None
securityinfo['secret_id'] = None
shh = securityhandlerhelper.securityhandlerhelper(securityinfo=securityinfo)
if shh.valid == False:
print (shh.message)
else:
admin = arcrest.manageorg.Administration(securityHandler=shh.securityhandler, initialize=True)
user = admin.community.users.user(str(username).strip())
print (user.update(password="1234testtest"))
| apache-2.0 | 3,310,622,357,313,473,500 | -1,008,464,417,877,914,200 | 30.657895 | 102 | 0.673317 | false |
ghchinoy/tensorflow | tensorflow/contrib/timeseries/examples/known_anomaly.py | 24 | 7880 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Example of using an exogenous feature to ignore a known anomaly."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
from os import path
import numpy as np
import tensorflow as tf
try:
import matplotlib # pylint: disable=g-import-not-at-top
matplotlib.use("TkAgg") # Need Tk for interactive plots.
from matplotlib import pyplot # pylint: disable=g-import-not-at-top
HAS_MATPLOTLIB = True
except ImportError:
# Plotting requires matplotlib, but the unit test running this code may
# execute in an environment without it (i.e. matplotlib is not a build
# dependency). We'd still like to test the TensorFlow-dependent parts of this
# example, namely train_and_predict.
HAS_MATPLOTLIB = False
_MODULE_PATH = path.dirname(__file__)
_DATA_FILE = path.join(_MODULE_PATH, "data/changepoints.csv")
def state_space_estimator(exogenous_feature_columns):
"""Constructs a StructuralEnsembleRegressor."""
def _exogenous_update_condition(times, features):
del times # unused
# Make exogenous updates sparse by setting an update condition. This in
# effect allows missing exogenous features: if the condition evaluates to
# False, no update is performed. Otherwise we sometimes end up with "leaky"
# updates which add unnecessary uncertainty to the model even when there is
# no changepoint.
return tf.equal(tf.squeeze(features["is_changepoint"], axis=-1), "yes")
return (
tf.contrib.timeseries.StructuralEnsembleRegressor(
periodicities=12,
# Extract a smooth period by constraining the number of latent values
# being cycled between.
cycle_num_latent_values=3,
num_features=1,
exogenous_feature_columns=exogenous_feature_columns,
exogenous_update_condition=_exogenous_update_condition),
# Use truncated backpropagation with a window size of 64, batching
# together 4 of these windows (random offsets) per training step. Training
# with exogenous features often requires somewhat larger windows.
4, 64)
def autoregressive_estimator(exogenous_feature_columns):
input_window_size = 8
output_window_size = 2
return (
tf.contrib.timeseries.ARRegressor(
periodicities=12,
num_features=1,
input_window_size=input_window_size,
output_window_size=output_window_size,
exogenous_feature_columns=exogenous_feature_columns),
64, input_window_size + output_window_size)
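# Illustrative sketch (not part of the original example): what the exogenous
# update condition used by state_space_estimator computes for a small batch.
# Assumes the TF 1.x graph/session runtime used throughout this file.
def _demo_update_condition():  # pragma: no cover - illustrative only
  features = {"is_changepoint": tf.constant([["no"], ["yes"], ["no"]])}
  condition = tf.equal(tf.squeeze(features["is_changepoint"], axis=-1), "yes")
  with tf.Session() as session:
    return session.run(condition)  # -> array([False, True, False])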
def train_and_evaluate_exogenous(
estimator_fn, csv_file_name=_DATA_FILE, train_steps=300):
"""Training, evaluating, and predicting on a series with changepoints."""
# Indicate the format of our exogenous feature, in this case a string
# representing a boolean value.
string_feature = tf.feature_column.categorical_column_with_vocabulary_list(
key="is_changepoint", vocabulary_list=["no", "yes"])
# Specify the way this feature is presented to the model, here using a one-hot
# encoding.
one_hot_feature = tf.feature_column.indicator_column(
categorical_column=string_feature)
estimator, batch_size, window_size = estimator_fn(
exogenous_feature_columns=[one_hot_feature])
reader = tf.contrib.timeseries.CSVReader(
csv_file_name,
# Indicate the format of our CSV file. First we have two standard columns,
# one for times and one for values. The third column is a custom exogenous
# feature indicating whether each timestep is a changepoint. The
# changepoint feature name must match the string_feature column name
# above.
column_names=(tf.contrib.timeseries.TrainEvalFeatures.TIMES,
tf.contrib.timeseries.TrainEvalFeatures.VALUES,
"is_changepoint"),
# Indicate dtypes for our features.
column_dtypes=(tf.int64, tf.float32, tf.string),
# This CSV has a header line; here we just ignore it.
skip_header_lines=1)
train_input_fn = tf.contrib.timeseries.RandomWindowInputFn(
reader, batch_size=batch_size, window_size=window_size)
estimator.train(input_fn=train_input_fn, steps=train_steps)
evaluation_input_fn = tf.contrib.timeseries.WholeDatasetInputFn(reader)
evaluation = estimator.evaluate(input_fn=evaluation_input_fn, steps=1)
# Create an input_fn for prediction, with a simulated changepoint. Since all
# of the anomalies in the training data are explained by the exogenous
# feature, we should get relatively confident predictions before the indicated
# changepoint (since we are telling the model that no changepoint exists at
# those times) and relatively uncertain predictions after.
(predictions,) = tuple(estimator.predict(
input_fn=tf.contrib.timeseries.predict_continuation_input_fn(
evaluation, steps=100,
exogenous_features={
"is_changepoint": [["no"] * 49 + ["yes"] + ["no"] * 50]})))
times = evaluation["times"][0]
observed = evaluation["observed"][0, :, 0]
mean = np.squeeze(np.concatenate(
[evaluation["mean"][0], predictions["mean"]], axis=0))
variance = np.squeeze(np.concatenate(
[evaluation["covariance"][0], predictions["covariance"]], axis=0))
all_times = np.concatenate([times, predictions["times"]], axis=0)
upper_limit = mean + np.sqrt(variance)
lower_limit = mean - np.sqrt(variance)
# Indicate the locations of the changepoints for plotting vertical lines.
anomaly_locations = []
with open(csv_file_name, "r") as csv_file:
csv_reader = csv.DictReader(csv_file)
for row in csv_reader:
if row["is_changepoint"] == "yes":
anomaly_locations.append(int(row["time"]))
anomaly_locations.append(predictions["times"][49])
return (times, observed, all_times, mean, upper_limit, lower_limit,
anomaly_locations)
def make_plot(name, training_times, observed, all_times, mean,
upper_limit, lower_limit, anomaly_locations):
"""Plot the time series and anomalies in a new figure."""
pyplot.figure()
pyplot.plot(training_times, observed, "b", label="training series")
pyplot.plot(all_times, mean, "r", label="forecast")
pyplot.axvline(anomaly_locations[0], linestyle="dotted", label="changepoints")
for anomaly_location in anomaly_locations[1:]:
pyplot.axvline(anomaly_location, linestyle="dotted")
  pyplot.fill_between(all_times, lower_limit, upper_limit, color="grey",
                      alpha=0.2)
pyplot.axvline(training_times[-1], color="k", linestyle="--")
pyplot.xlabel("time")
pyplot.ylabel("observations")
pyplot.legend(loc=0)
pyplot.title(name)
def main(unused_argv):
if not HAS_MATPLOTLIB:
raise ImportError(
"Please install matplotlib to generate a plot from this example.")
make_plot("Ignoring a known anomaly (state space)",
*train_and_evaluate_exogenous(
estimator_fn=state_space_estimator))
make_plot("Ignoring a known anomaly (autoregressive)",
*train_and_evaluate_exogenous(
estimator_fn=autoregressive_estimator, train_steps=3000))
pyplot.show()
if __name__ == "__main__":
tf.app.run(main=main)
| apache-2.0 | 5,503,126,570,237,869,000 | 7,564,743,554,624,709,000 | 42.777778 | 80 | 0.698858 | false |
infoxchange/lettuce | tests/integration/lib/Django-1.3/django/utils/log.py | 152 | 3494 | import logging
import sys
from django.core import mail
# Make sure a NullHandler is available
# This was added in Python 2.7/3.2
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Make sure that dictConfig is available
# This was added in Python 2.7/3.2
try:
from logging.config import dictConfig
except ImportError:
from django.utils.dictconfig import dictConfig
if sys.version_info < (2, 5):
class LoggerCompat(object):
def __init__(self, logger):
self._logger = logger
def __getattr__(self, name):
val = getattr(self._logger, name)
if callable(val):
def _wrapper(*args, **kwargs):
# Python 2.4 logging module doesn't support 'extra' parameter to
# methods of Logger
kwargs.pop('extra', None)
return val(*args, **kwargs)
return _wrapper
else:
return val
def getLogger(name=None):
return LoggerCompat(logging.getLogger(name=name))
else:
getLogger = logging.getLogger
# Ensure the creation of the Django logger
# with a null handler. This ensures we don't get any
# 'No handlers could be found for logger "django"' messages
logger = getLogger('django')
if not logger.handlers:
logger.addHandler(NullHandler())
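# Illustrative sketch (not part of the original module): a minimal dictConfig
# mapping that routes ERROR records from 'django.request' to the
# AdminEmailHandler defined below; the handler and logger names follow
# Django's default conventions, but this dict itself is hypothetical.
_EXAMPLE_LOGGING_CONFIG = {
    'version': 1,
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler',
        },
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    },
}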
class AdminEmailHandler(logging.Handler):
    """An exception log handler that e-mails log entries to site admins.
    If the request is passed as the first argument to the log record,
    request data will be provided in the email report.
    """
    def __init__(self, include_html=False):
        logging.Handler.__init__(self)
        self.include_html = include_html
def emit(self, record):
import traceback
from django.conf import settings
from django.views.debug import ExceptionReporter
try:
if sys.version_info < (2,5):
# A nasty workaround required because Python 2.4's logging
# module doesn't support passing in extra context.
# For this handler, the only extra data we need is the
# request, and that's in the top stack frame.
request = record.exc_info[2].tb_frame.f_locals['request']
else:
request = record.request
subject = '%s (%s IP): %s' % (
record.levelname,
(request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS and 'internal' or 'EXTERNAL'),
record.msg
)
request_repr = repr(request)
except:
subject = '%s: %s' % (
record.levelname,
record.msg
)
request = None
request_repr = "Request repr() unavailable"
if record.exc_info:
exc_info = record.exc_info
stack_trace = '\n'.join(traceback.format_exception(*record.exc_info))
else:
exc_info = (None, record.msg, None)
stack_trace = 'No stack trace available'
message = "%s\n\n%s" % (stack_trace, request_repr)
reporter = ExceptionReporter(request, is_email=True, *exc_info)
html_message = self.include_html and reporter.get_traceback_html() or None
mail.mail_admins(subject, message, fail_silently=True,
html_message=html_message)
| gpl-3.0 | 4,944,236,443,692,133,000 | -7,632,333,629,803,290,000 | 33.594059 | 104 | 0.589868 | false |
haniehrajabi/ryu | ryu/app/simple_isolation.py | 22 | 14069 | # Copyright (C) 2011 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011, 2012 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
MAC address based isolation logic.
"""
import logging
import struct
from ryu.app.rest_nw_id import NW_ID_UNKNOWN, NW_ID_EXTERNAL
from ryu.base import app_manager
from ryu.exception import MacAddressDuplicated
from ryu.exception import PortUnknown
from ryu.controller import dpset
from ryu.controller import mac_to_network
from ryu.controller import mac_to_port
from ryu.controller import network
from ryu.controller import ofp_event
from ryu.controller.handler import MAIN_DISPATCHER
from ryu.controller.handler import CONFIG_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import nx_match
from ryu.lib.mac import haddr_to_str
from ryu.lib import mac
class SimpleIsolation(app_manager.RyuApp):
_CONTEXTS = {
'network': network.Network,
'dpset': dpset.DPSet,
}
def __init__(self, *args, **kwargs):
super(SimpleIsolation, self).__init__(*args, **kwargs)
self.nw = kwargs['network']
self.dpset = kwargs['dpset']
self.mac2port = mac_to_port.MacToPortTable()
self.mac2net = mac_to_network.MacToNetwork(self.nw)
@set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)
def switch_features_handler(self, ev):
msg = ev.msg
datapath = msg.datapath
datapath.send_delete_all_flows()
datapath.send_barrier()
self.mac2port.dpid_add(ev.msg.datapath_id)
self.nw.add_datapath(ev.msg)
@staticmethod
def _modflow_and_send_packet(msg, src, dst, actions):
datapath = msg.datapath
ofproto = datapath.ofproto
#
# install flow and then send packet
#
rule = nx_match.ClsRule()
rule.set_in_port(msg.in_port)
rule.set_dl_dst(dst)
rule.set_dl_src(src)
datapath.send_flow_mod(
rule=rule, cookie=0, command=datapath.ofproto.OFPFC_ADD,
idle_timeout=0, hard_timeout=0,
priority=ofproto.OFP_DEFAULT_PRIORITY,
buffer_id=ofproto.OFP_NO_BUFFER, out_port=ofproto.OFPP_NONE,
flags=ofproto.OFPFF_SEND_FLOW_REM, actions=actions)
datapath.send_packet_out(msg.buffer_id, msg.in_port, actions)
def _forward_to_nw_id(self, msg, src, dst, nw_id, out_port):
assert out_port is not None
datapath = msg.datapath
if not self.nw.same_network(datapath.id, nw_id, out_port,
NW_ID_EXTERNAL):
self.logger.debug('packet is blocked src %s dst %s '
'from %d to %d on datapath %d',
haddr_to_str(src), haddr_to_str(dst),
msg.in_port, out_port, datapath.id)
return
self.logger.debug("learned dpid %s in_port %d out_port "
"%d src %s dst %s",
datapath.id, msg.in_port, out_port,
haddr_to_str(src), haddr_to_str(dst))
actions = [datapath.ofproto_parser.OFPActionOutput(out_port)]
self._modflow_and_send_packet(msg, src, dst, actions)
def _flood_to_nw_id(self, msg, src, dst, nw_id):
datapath = msg.datapath
actions = []
self.logger.debug("dpid %s in_port %d src %s dst %s ports %s",
datapath.id, msg.in_port,
haddr_to_str(src), haddr_to_str(dst),
self.nw.dpids.get(datapath.id, {}).items())
for port_no in self.nw.filter_ports(datapath.id, msg.in_port,
nw_id, NW_ID_EXTERNAL):
self.logger.debug("port_no %s", port_no)
actions.append(datapath.ofproto_parser.OFPActionOutput(port_no))
self._modflow_and_send_packet(msg, src, dst, actions)
def _learned_mac_or_flood_to_nw_id(self, msg, src, dst,
dst_nw_id, out_port):
if out_port is not None:
self._forward_to_nw_id(msg, src, dst, dst_nw_id, out_port)
else:
self._flood_to_nw_id(msg, src, dst, dst_nw_id)
def _modflow_and_drop_packet(self, msg, src, dst):
self._modflow_and_send_packet(msg, src, dst, [])
def _drop_packet(self, msg):
datapath = msg.datapath
datapath.send_packet_out(msg.buffer_id, msg.in_port, [])
@set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
def packet_in_handler(self, ev):
# self.logger.debug('packet in ev %s msg %s', ev, ev.msg)
msg = ev.msg
datapath = msg.datapath
ofproto = datapath.ofproto
dst, src, _eth_type = struct.unpack_from('!6s6sH', buffer(msg.data), 0)
try:
port_nw_id = self.nw.get_network(datapath.id, msg.in_port)
except PortUnknown:
port_nw_id = NW_ID_UNKNOWN
if port_nw_id != NW_ID_UNKNOWN:
# Here it is assumed that the
# (port <-> network id)/(mac <-> network id) relationship
# is stable once the port is created. The port will be destroyed
# before assigning new network id to the given port.
# This is correct nova-network/nova-compute.
try:
# allow external -> known nw id change
self.mac2net.add_mac(src, port_nw_id, NW_ID_EXTERNAL)
except MacAddressDuplicated:
self.logger.warn('mac address %s is already in use.'
' So (dpid %s, port %s) can not use it',
haddr_to_str(src), datapath.id, msg.in_port)
#
# should we install drop action pro-actively for future?
#
self._drop_packet(msg)
return
old_port = self.mac2port.port_add(datapath.id, msg.in_port, src)
if old_port is not None and old_port != msg.in_port:
# We really overwrite already learned mac address.
# So discard already installed stale flow entry which conflicts
# new port.
rule = nx_match.ClsRule()
rule.set_dl_dst(src)
datapath.send_flow_mod(rule=rule,
cookie=0,
command=ofproto.OFPFC_DELETE,
idle_timeout=0,
hard_timeout=0,
priority=ofproto.OFP_DEFAULT_PRIORITY,
out_port=old_port)
# to make sure the old flow entries are purged.
datapath.send_barrier()
src_nw_id = self.mac2net.get_network(src, NW_ID_UNKNOWN)
dst_nw_id = self.mac2net.get_network(dst, NW_ID_UNKNOWN)
# we handle multicast packet as same as broadcast
broadcast = (dst == mac.BROADCAST) or mac.is_multicast(dst)
out_port = self.mac2port.port_get(datapath.id, dst)
#
# there are several combinations:
# in_port: known nw_id, external, unknown nw,
# src mac: known nw_id, external, unknown nw,
# dst mac: known nw_id, external, unknown nw, and broadcast/multicast
# where known nw_id: is quantum network id
# external: means that these ports are connected to outside
# unknown nw: means that we don't know this port is bounded to
# specific nw_id or external
# broadcast: the destination mac address is broadcast address
# (or multicast address)
#
# Can the following logic be refined/shortened?
#
# When NW_ID_UNKNOWN is found, registering ports might be delayed.
# So just drop only this packet and not install flow entry.
# It is expected that when next packet arrives, the port is registers
# with some network id
if port_nw_id != NW_ID_EXTERNAL and port_nw_id != NW_ID_UNKNOWN:
if broadcast:
# flood to all ports of external or src_nw_id
self._flood_to_nw_id(msg, src, dst, src_nw_id)
elif src_nw_id == NW_ID_EXTERNAL:
self._modflow_and_drop_packet(msg, src, dst)
return
elif src_nw_id == NW_ID_UNKNOWN:
self._drop_packet(msg)
return
else:
# src_nw_id != NW_ID_EXTERNAL and src_nw_id != NW_ID_UNKNOWN:
#
# try learned mac check if the port is net_id
# or
# flood to all ports of external or src_nw_id
self._learned_mac_or_flood_to_nw_id(msg, src, dst,
src_nw_id, out_port)
elif port_nw_id == NW_ID_EXTERNAL:
if src_nw_id != NW_ID_EXTERNAL and src_nw_id != NW_ID_UNKNOWN:
if broadcast:
# flood to all ports of external or src_nw_id
self._flood_to_nw_id(msg, src, dst, src_nw_id)
elif (dst_nw_id != NW_ID_EXTERNAL and
dst_nw_id != NW_ID_UNKNOWN):
if src_nw_id == dst_nw_id:
# try learned mac
# check if the port is external or same net_id
# or
# flood to all ports of external or src_nw_id
self._learned_mac_or_flood_to_nw_id(msg, src, dst,
src_nw_id,
out_port)
else:
# should not occur?
self.logger.debug("should this case happen?")
self._drop_packet(msg)
elif dst_nw_id == NW_ID_EXTERNAL:
# try learned mac
# or
# flood to all ports of external or src_nw_id
self._learned_mac_or_flood_to_nw_id(msg, src, dst,
src_nw_id, out_port)
else:
assert dst_nw_id == NW_ID_UNKNOWN
self.logger.debug("Unknown dst_nw_id")
self._drop_packet(msg)
elif src_nw_id == NW_ID_EXTERNAL:
self._modflow_and_drop_packet(msg, src, dst)
else:
# should not occur?
assert src_nw_id == NW_ID_UNKNOWN
self._drop_packet(msg)
else:
# drop packets
assert port_nw_id == NW_ID_UNKNOWN
self._drop_packet(msg)
# self.logger.debug("Unknown port_nw_id")
def _port_add(self, ev):
#
# delete flows entries that matches with
# dl_dst == broadcast/multicast
# and dl_src = network id if network id of this port is known
# to send broadcast packet to this newly added port.
#
# Openflow v1.0 doesn't support masked match of dl_dst,
# so delete all flow entries. It's inefficient, though.
#
msg = ev.msg
datapath = msg.datapath
datapath.send_delete_all_flows()
datapath.send_barrier()
self.nw.port_added(datapath, msg.desc.port_no)
def _port_del(self, ev):
# free mac addresses associated to this VM port,
# and delete related flow entries for later reuse of mac address
dps_needs_barrier = set()
msg = ev.msg
datapath = msg.datapath
datapath_id = datapath.id
port_no = msg.desc.port_no
rule = nx_match.ClsRule()
rule.set_in_port(port_no)
datapath.send_flow_del(rule=rule, cookie=0)
rule = nx_match.ClsRule()
datapath.send_flow_del(rule=rule, cookie=0, out_port=port_no)
dps_needs_barrier.add(datapath)
try:
port_nw_id = self.nw.get_network(datapath_id, port_no)
except PortUnknown:
# race condition between rest api delete port
# and openflow port deletion ofp_event
pass
else:
if port_nw_id in (NW_ID_UNKNOWN, NW_ID_EXTERNAL):
datapath.send_barrier()
return
for mac_ in self.mac2port.mac_list(datapath_id, port_no):
for (_dpid, dp) in self.dpset.get_all():
if self.mac2port.port_get(dp.id, mac_) is None:
continue
rule = nx_match.ClsRule()
rule.set_dl_src(mac_)
dp.send_flow_del(rule=rule, cookie=0)
rule = nx_match.ClsRule()
rule.set_dl_dst(mac_)
dp.send_flow_del(rule=rule, cookie=0)
dps_needs_barrier.add(dp)
self.mac2port.mac_del(dp.id, mac_)
self.mac2net.del_mac(mac_)
self.nw.port_deleted(datapath.id, port_no)
for dp in dps_needs_barrier:
dp.send_barrier()
@set_ev_cls(ofp_event.EventOFPPortStatus, MAIN_DISPATCHER)
def port_status_handler(self, ev):
msg = ev.msg
reason = msg.reason
ofproto = msg.datapath.ofproto
if reason == ofproto.OFPPR_ADD:
self._port_add(ev)
elif reason == ofproto.OFPPR_DELETE:
self._port_del(ev)
else:
assert reason == ofproto.OFPPR_MODIFY
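# Illustrative sketch (not part of the original module): the MAC-learning
# calls used by packet_in_handler above, with made-up datapath id, port and
# address. The helper is never invoked by the application itself.
def _demo_mac_learning():  # pragma: no cover - illustrative only
    table = mac_to_port.MacToPortTable()
    table.dpid_add(0x1)
    src = mac.haddr_to_bin('00:11:22:33:44:55')
    table.port_add(0x1, 1, src)        # learn src on port 1
    return table.port_get(0x1, src)    # -> 1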
| apache-2.0 | -1,695,023,865,596,354,300 | -4,888,485,052,047,860,000 | 39.082621 | 79 | 0.54844 | false |
ezequielpereira/Time-Line | libs64/wx/lib/pubsub/core/topicexc.py | 9 | 3557 | '''
:copyright: Copyright 2006-2009 by Oliver Schoenborn, all rights reserved.
:license: BSD, see LICENSE.txt for details.
'''
from topicutils import stringize
class ListenerNotValidatable(RuntimeError):
'''
Raised when an attempt is made to validate a listener relative to a
topic that doesn't have (yet) a Listener Protocol Specification.
'''
def __init__(self):
msg = 'Topics args not set yet, cannot validate listener'
RuntimeError.__init__(self, msg)
class UndefinedTopic(RuntimeError):
'''
Raised when an attempt is made to retrieve a Topic object
for a topic name that hasn't yet been created.
'''
def __init__(self, topicName, msgFormat=None):
if msgFormat is None:
msgFormat = 'Topic "%s" doesn\'t exist'
RuntimeError.__init__(self, msgFormat % topicName)
class UndefinedSubtopic(UndefinedTopic):
'''
Raised when an attempt is made to retrieve a Topic object
for a subtopic name that hasn't yet been created within
its parent topic.
'''
def __init__(self, parentName, subName):
msgFormat = 'Topic "%s" doesn\'t have "%%s" as subtopic' % parentName
UndefinedTopic.__init__(self, subName, msgFormat)
class ListenerSpecIncomplete(RuntimeError):
'''
Raised when an attempt is made to create a topic for which
a specification is not available, but pub.setTopicUnspecifiedFatal()
was called.
'''
def __init__(self, topicNameTuple):
msg = "No topic specification for topic '%s'." \
% stringize(topicNameTuple)
RuntimeError.__init__(self, msg +
" See pub.getOrCreateTopic(), pub.addTopicDefnProvider(), and/or pub.setTopicUnspecifiedFatal()")
class ListenerSpecInvalid(RuntimeError):
'''
Raised when an attempt is made to define a topic's Listener Protocol
Specification to something that is not valid.
The argument names that are invalid can be put in the 'args' list,
and the msg should say what is the problem and contain "%s" for the
args, such as ListenerSpecInvalid('duplicate args %s', ('arg1', 'arg2')).
'''
def __init__(self, msg, args):
argsMsg = msg % ','.join(args)
RuntimeError.__init__(self, 'Invalid listener spec: ' + argsMsg)
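# Illustrative sketch (not part of the original module): the message produced
# by ListenerSpecInvalid for a duplicate-argument report.
def _demo_listener_spec_invalid():  # pragma: no cover - illustrative only
    exc = ListenerSpecInvalid('duplicate args %s', ('arg1', 'arg2'))
    return str(exc)  # 'Invalid listener spec: duplicate args arg1,arg2'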
class ExcHandlerError(RuntimeError):
'''
When an exception gets raised within some listener during a
sendMessage(), the registered handler (see pub.setListenerExcHandler())
gets called (via its __call__ method) and the send operation can
resume on remaining listeners. However, if the handler itself
raises an exception while it is being called, the send operation
must be aborted: an ExcHandlerError exception gets raised.
'''
def __init__(self, badExcListenerID, topicObj, origExc=None):
'''The badExcListenerID is the name of the listener that raised
the original exception that handler was attempting to handle.
The topicObj is the pub.Topic object for the topic of the
sendMessage that had an exception raised.
The origExc is currently not used. '''
self.badExcListenerID = badExcListenerID
import traceback
self.exc = traceback.format_exc()
msg = 'The exception handler registered with pubsub raised an ' \
+ 'exception, *while* handling an exception raised by listener ' \
+ ' "%s" of topic "%s"):\n%s' \
% (self.badExcListenerID, topicObj.getName(), self.exc)
RuntimeError.__init__(self, msg)
| gpl-3.0 | -4,759,660,190,083,119,000 | -8,844,088,765,639,684,000 | 36.052083 | 109 | 0.66826 | false |
naliboff/dealii | contrib/python-bindings/tests/cell_accessor_wrapper.py | 17 | 3314 | # ---------------------------------------------------------------------
#
# Copyright (C) 2016 by the deal.II authors
#
# This file is part of the deal.II library.
#
# The deal.II library is free software; you can use it, redistribute
# it, and/or modify it under the terms of the GNU Lesser General
# Public License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# The full text of the license can be found in the file LICENSE at
# the top level of the deal.II distribution.
#
# ---------------------------------------------------------------------
import unittest
from PyDealII.Debug import *
class TestCellAccessorWrapper(unittest.TestCase):
def setUp(self):
self.triangulation = Triangulation('2D')
self.triangulation.generate_hyper_cube()
self.triangulation.refine_global(1)
def test_material_id(self):
material_id = 0
for cell in self.triangulation.active_cells():
cell.material_id = material_id
material_id += 1
material_id = 0
for cell in self.triangulation.active_cells():
self.assertEqual(cell.material_id, material_id)
material_id += 1
def test_manifold_id(self):
manifold_id = 0
for cell in self.triangulation.active_cells():
cell.manifold_id = manifold_id
manifold_id += 1
manifold_id = 0
for cell in self.triangulation.active_cells():
self.assertEqual(cell.manifold_id, manifold_id)
manifold_id += 1
def test_refine_flag(self):
index = 0
refine_flags = ['no_refinement', 'cut_x', 'cut_y', 'cut_xy']
for cell in self.triangulation.active_cells():
cell.refine_flag = refine_flags[index]
index += 1
index = 0
for cell in self.triangulation.active_cells():
self.assertEqual(cell.refine_flag, refine_flags[index])
index += 1
def test_coarsen_flag(self):
coarsen_flag = True
for cell in self.triangulation.active_cells():
cell.coarsen_flag = coarsen_flag
coarsen_flag = not coarsen_flag
coarsen_flag = True
for cell in self.triangulation.active_cells():
self.assertEqual(cell.coarsen_flag, coarsen_flag)
coarsen_flag = not coarsen_flag
def test_barycenter(self):
centers = [[0.25, 0.25], [0.75, 0.25], [0.25, 0.75], [0.75, 0.75]]
index = 0
for cell in self.triangulation.active_cells():
barycenter = cell.barycenter()
self.assertEqual(barycenter.x, centers[index][0])
self.assertEqual(barycenter.y, centers[index][1])
index += 1
def test_move_vertex(self):
point = Point([0.6, 0.6])
for cell in self.triangulation.active_cells():
cell.set_vertex(3, point)
vertex = cell.get_vertex(3)
break
vertices = [3, 2, 1, 0]
index = 0
for cell in self.triangulation.active_cells():
vertex = cell.get_vertex(vertices[index])
self.assertEqual(vertex.x, point.x)
self.assertEqual(vertex.y, point.y)
index += 1
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | -3,051,897,620,308,393,000 | -4,450,192,900,022,259,000 | 34.634409 | 74 | 0.57755 | false |
maciekcc/tensorflow | tensorflow/tools/test/system_info_lib.py | 101 | 4760 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Library for getting system information during TensorFlow tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import glob
import multiprocessing
import platform
import re
import socket
# pylint: disable=g-bad-import-order
# Note: cpuinfo and psutil are not installed for you in the TensorFlow
# OSS tree. They are installable via pip.
import cpuinfo
import psutil
# pylint: enable=g-bad-import-order
from tensorflow.core.util import test_log_pb2
from tensorflow.python.client import device_lib
from tensorflow.python.framework import errors
from tensorflow.python.platform import gfile
from tensorflow.tools.test import gpu_info_lib
def gather_machine_configuration():
"""Gather Machine Configuration. This is the top level fn of this library."""
config = test_log_pb2.MachineConfiguration()
config.cpu_info.CopyFrom(gather_cpu_info())
config.platform_info.CopyFrom(gather_platform_info())
# gather_available_device_info must come before gather_gpu_devices
# because the latter may access libcudart directly, which confuses
# TensorFlow StreamExecutor.
for d in gather_available_device_info():
config.available_device_info.add().CopyFrom(d)
for gpu in gpu_info_lib.gather_gpu_devices():
config.device_info.add().Pack(gpu)
config.memory_info.CopyFrom(gather_memory_info())
config.hostname = gather_hostname()
return config
def gather_hostname():
return socket.gethostname()
def gather_memory_info():
"""Gather memory info."""
mem_info = test_log_pb2.MemoryInfo()
vmem = psutil.virtual_memory()
mem_info.total = vmem.total
mem_info.available = vmem.available
return mem_info
def gather_cpu_info():
"""Gather CPU Information. Assumes all CPUs are the same."""
cpu_info = test_log_pb2.CPUInfo()
cpu_info.num_cores = multiprocessing.cpu_count()
# Gather num_cores_allowed
try:
with gfile.GFile('/proc/self/status', 'rb') as fh:
nc = re.search(r'(?m)^Cpus_allowed:\s*(.*)$', fh.read())
if nc: # e.g. 'ff' => 8, 'fff' => 12
cpu_info.num_cores_allowed = (
bin(int(nc.group(1).replace(',', ''), 16)).count('1'))
except errors.OpError:
pass
finally:
if cpu_info.num_cores_allowed == 0:
cpu_info.num_cores_allowed = cpu_info.num_cores
# Gather the rest
info = cpuinfo.get_cpu_info()
cpu_info.cpu_info = info['brand']
cpu_info.num_cores = info['count']
cpu_info.mhz_per_cpu = info['hz_advertised_raw'][0] / 1.0e6
l2_cache_size = re.match(r'(\d+)', str(info.get('l2_cache_size', '')))
if l2_cache_size:
# If a value is returned, it's in KB
cpu_info.cache_size['L2'] = int(l2_cache_size.group(0)) * 1024
# Try to get the CPU governor
try:
cpu_governors = set([
gfile.GFile(f, 'r').readline().rstrip()
for f in glob.glob(
'/sys/devices/system/cpu/cpu*/cpufreq/scaling_governor')
])
if cpu_governors:
if len(cpu_governors) > 1:
cpu_info.cpu_governor = 'mixed'
else:
cpu_info.cpu_governor = list(cpu_governors)[0]
except errors.OpError:
pass
return cpu_info
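# Illustrative sketch (not part of the original module): the Cpus_allowed
# hex-mask decoding used above, applied to standalone mask strings.
def _demo_cpus_allowed_mask():  # pragma: no cover - illustrative only
  # 'ff' -> 8 usable cores, 'fff' -> 12; commas in the mask are ignored.
  return [bin(int(m.replace(',', ''), 16)).count('1') for m in ('ff', 'fff')]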
def gather_available_device_info():
"""Gather list of devices available to TensorFlow.
Returns:
A list of test_log_pb2.AvailableDeviceInfo messages.
"""
device_info_list = []
devices = device_lib.list_local_devices()
for d in devices:
device_info = test_log_pb2.AvailableDeviceInfo()
device_info.name = d.name
device_info.type = d.device_type
device_info.memory_limit = d.memory_limit
device_info.physical_description = d.physical_device_desc
device_info_list.append(device_info)
return device_info_list
def gather_platform_info():
"""Gather platform info."""
platform_info = test_log_pb2.PlatformInfo()
(platform_info.bits, platform_info.linkage) = platform.architecture()
platform_info.machine = platform.machine()
platform_info.release = platform.release()
platform_info.system = platform.system()
platform_info.version = platform.version()
return platform_info
| apache-2.0 | -7,959,145,161,722,186,000 | 1,114,009,975,324,332,800 | 30.733333 | 80 | 0.693277 | false |
t794104/ansible | lib/ansible/modules/network/fortios/fortios_firewall_policy6.py | 24 | 37823 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2018 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# the lib use python logging can get it if the following is set in your
# Ansible config.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_firewall_policy6
short_description: Configure IPv6 policies in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS by
allowing the user to configure firewall feature and policy6 category.
      Examples include all options and need to be adjusted to datasources before usage.
Tested with FOS v6.0.2
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate ip address.
required: true
username:
description:
- FortiOS or FortiGate username.
required: true
password:
description:
- FortiOS or FortiGate password.
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS
protocol
type: bool
default: false
firewall_policy6:
description:
- Configure IPv6 policies.
default: null
suboptions:
state:
description:
- Indicates whether to create or remove the object
choices:
- present
- absent
action:
description:
- Policy action (allow/deny/ipsec).
choices:
- accept
- deny
- ipsec
app-category:
description:
- Application category ID list.
suboptions:
id:
description:
- Category IDs.
required: true
app-group:
description:
- Application group names.
suboptions:
name:
description:
- Application group names. Source application.group.name.
required: true
application:
description:
- Application ID list.
suboptions:
id:
description:
- Application IDs.
required: true
application-list:
description:
- Name of an existing Application list. Source application.list.name.
av-profile:
description:
- Name of an existing Antivirus profile. Source antivirus.profile.name.
comments:
description:
- Comment.
custom-log-fields:
description:
- Log field index numbers to append custom log fields to log messages for this policy.
suboptions:
field-id:
description:
- Custom log field. Source log.custom-field.id.
required: true
devices:
description:
- Names of devices or device groups that can be matched by the policy.
suboptions:
name:
description:
- Device or group name. Source user.device.alias user.device-group.name user.device-category.name.
required: true
diffserv-forward:
description:
- Enable to change packet's DiffServ values to the specified diffservcode-forward value.
choices:
- enable
- disable
diffserv-reverse:
description:
- Enable to change packet's reverse (reply) DiffServ values to the specified diffservcode-rev value.
choices:
- enable
- disable
diffservcode-forward:
description:
- Change packet's DiffServ to this value.
diffservcode-rev:
description:
- Change packet's reverse (reply) DiffServ to this value.
dlp-sensor:
description:
- Name of an existing DLP sensor. Source dlp.sensor.name.
dscp-match:
description:
- Enable DSCP check.
choices:
- enable
- disable
dscp-negate:
description:
- Enable negated DSCP match.
choices:
- enable
- disable
dscp-value:
description:
- DSCP value.
dsri:
description:
- Enable DSRI to ignore HTTP server responses.
choices:
- enable
- disable
dstaddr:
description:
- Destination address and address group names.
suboptions:
name:
description:
- Address name. Source firewall.address6.name firewall.addrgrp6.name firewall.vip6.name firewall.vipgrp6.name.
required: true
dstaddr-negate:
description:
- When enabled dstaddr specifies what the destination address must NOT be.
choices:
- enable
- disable
dstintf:
description:
- Outgoing (egress) interface.
suboptions:
name:
description:
- Interface name. Source system.interface.name system.zone.name.
required: true
firewall-session-dirty:
description:
- How to handle sessions if the configuration of this firewall policy changes.
choices:
- check-all
- check-new
fixedport:
description:
- Enable to prevent source NAT from changing a session's source port.
choices:
- enable
- disable
global-label:
description:
- Label for the policy that appears when the GUI is in Global View mode.
groups:
description:
- Names of user groups that can authenticate with this policy.
suboptions:
name:
description:
- Group name. Source user.group.name.
required: true
icap-profile:
description:
- Name of an existing ICAP profile. Source icap.profile.name.
inbound:
description:
- "Policy-based IPsec VPN: only traffic from the remote network can initiate a VPN."
choices:
- enable
- disable
ippool:
description:
- Enable to use IP Pools for source NAT.
choices:
- enable
- disable
ips-sensor:
description:
- Name of an existing IPS sensor. Source ips.sensor.name.
label:
description:
- Label for the policy that appears when the GUI is in Section View mode.
logtraffic:
description:
- Enable or disable logging. Log all sessions or security profile sessions.
choices:
- all
- utm
- disable
logtraffic-start:
description:
- Record logs when a session starts and ends.
choices:
- enable
- disable
name:
description:
- Policy name.
nat:
description:
- Enable/disable source NAT.
choices:
- enable
- disable
natinbound:
description:
- "Policy-based IPsec VPN: apply destination NAT to inbound traffic."
choices:
- enable
- disable
natoutbound:
description:
- "Policy-based IPsec VPN: apply source NAT to outbound traffic."
choices:
- enable
- disable
outbound:
description:
- "Policy-based IPsec VPN: only traffic from the internal network can initiate a VPN."
choices:
- enable
- disable
per-ip-shaper:
description:
- Per-IP traffic shaper. Source firewall.shaper.per-ip-shaper.name.
policyid:
description:
- Policy ID.
required: true
poolname:
description:
- IP Pool names.
suboptions:
name:
description:
- IP pool name. Source firewall.ippool6.name.
required: true
profile-group:
description:
- Name of profile group. Source firewall.profile-group.name.
profile-protocol-options:
description:
- Name of an existing Protocol options profile. Source firewall.profile-protocol-options.name.
profile-type:
description:
- Determine whether the firewall policy allows security profile groups or single profiles only.
choices:
- single
- group
replacemsg-override-group:
description:
- Override the default replacement message group for this policy. Source system.replacemsg-group.name.
rsso:
description:
- Enable/disable RADIUS single sign-on (RSSO).
choices:
- enable
- disable
schedule:
description:
- Schedule name. Source firewall.schedule.onetime.name firewall.schedule.recurring.name firewall.schedule.group.name.
send-deny-packet:
description:
- Enable/disable return of deny-packet.
choices:
- enable
- disable
service:
description:
- Service and service group names.
suboptions:
name:
description:
                        - Service name. Source firewall.service.custom.name firewall.service.group.name.
required: true
service-negate:
description:
                - When enabled, service specifies what the service must NOT be.
choices:
- enable
- disable
session-ttl:
description:
- Session TTL in seconds for sessions accepted by this policy. 0 means use the system default session TTL.
spamfilter-profile:
description:
- Name of an existing Spam filter profile. Source spamfilter.profile.name.
srcaddr:
description:
- Source address and address group names.
suboptions:
name:
description:
- Address name. Source firewall.address6.name firewall.addrgrp6.name.
required: true
srcaddr-negate:
description:
                - When enabled, srcaddr specifies what the source address must NOT be.
choices:
- enable
- disable
srcintf:
description:
- Incoming (ingress) interface.
suboptions:
name:
description:
- Interface name. Source system.zone.name system.interface.name.
required: true
ssh-filter-profile:
description:
- Name of an existing SSH filter profile. Source ssh-filter.profile.name.
ssl-mirror:
description:
- Enable to copy decrypted SSL traffic to a FortiGate interface (called SSL mirroring).
choices:
- enable
- disable
ssl-mirror-intf:
description:
- SSL mirror interface name.
suboptions:
name:
description:
- Interface name. Source system.zone.name system.interface.name.
required: true
ssl-ssh-profile:
description:
- Name of an existing SSL SSH profile. Source firewall.ssl-ssh-profile.name.
status:
description:
- Enable or disable this policy.
choices:
- enable
- disable
tcp-mss-receiver:
description:
- Receiver TCP maximum segment size (MSS).
tcp-mss-sender:
description:
- Sender TCP maximum segment size (MSS).
tcp-session-without-syn:
description:
- Enable/disable creation of TCP session without SYN flag.
choices:
- all
- data-only
- disable
timeout-send-rst:
description:
- Enable/disable sending RST packets when TCP sessions expire.
choices:
- enable
- disable
traffic-shaper:
description:
            - Traffic shaper. Source firewall.shaper.traffic-shaper.name.
traffic-shaper-reverse:
description:
- Reverse traffic shaper. Source firewall.shaper.traffic-shaper.name.
url-category:
description:
- URL category ID list.
suboptions:
id:
description:
- URL category ID.
required: true
users:
description:
- Names of individual users that can authenticate with this policy.
suboptions:
name:
description:
- Names of individual users that can authenticate with this policy. Source user.local.name.
required: true
utm-status:
description:
- Enable AV/web/ips protection profile.
choices:
- enable
- disable
uuid:
description:
- Universally Unique Identifier (UUID; automatically assigned but can be manually reset).
vlan-cos-fwd:
description:
- "VLAN forward direction user priority: 255 passthrough, 0 lowest, 7 highest"
vlan-cos-rev:
description:
- "VLAN reverse direction user priority: 255 passthrough, 0 lowest, 7 highest"
vlan-filter:
description:
- Set VLAN filters.
voip-profile:
description:
- Name of an existing VoIP profile. Source voip.profile.name.
vpntunnel:
description:
- "Policy-based IPsec VPN: name of the IPsec VPN Phase 1. Source vpn.ipsec.phase1.name vpn.ipsec.manualkey.name."
webfilter-profile:
description:
- Name of an existing Web filter profile. Source webfilter.profile.name.
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
tasks:
- name: Configure IPv6 policies.
fortios_firewall_policy6:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
firewall_policy6:
state: "present"
action: "accept"
app-category:
-
id: "5"
app-group:
-
name: "default_name_7 (source application.group.name)"
application:
-
id: "9"
application-list: "<your_own_value> (source application.list.name)"
av-profile: "<your_own_value> (source antivirus.profile.name)"
comments: "<your_own_value>"
custom-log-fields:
-
field-id: "<your_own_value> (source log.custom-field.id)"
devices:
-
name: "default_name_16 (source user.device.alias user.device-group.name user.device-category.name)"
diffserv-forward: "enable"
diffserv-reverse: "enable"
diffservcode-forward: "<your_own_value>"
diffservcode-rev: "<your_own_value>"
dlp-sensor: "<your_own_value> (source dlp.sensor.name)"
dscp-match: "enable"
dscp-negate: "enable"
dscp-value: "<your_own_value>"
dsri: "enable"
dstaddr:
-
name: "default_name_27 (source firewall.address6.name firewall.addrgrp6.name firewall.vip6.name firewall.vipgrp6.name)"
dstaddr-negate: "enable"
dstintf:
-
name: "default_name_30 (source system.interface.name system.zone.name)"
firewall-session-dirty: "check-all"
fixedport: "enable"
global-label: "<your_own_value>"
groups:
-
name: "default_name_35 (source user.group.name)"
icap-profile: "<your_own_value> (source icap.profile.name)"
inbound: "enable"
ippool: "enable"
ips-sensor: "<your_own_value> (source ips.sensor.name)"
label: "<your_own_value>"
logtraffic: "all"
logtraffic-start: "enable"
name: "default_name_43"
nat: "enable"
natinbound: "enable"
natoutbound: "enable"
outbound: "enable"
per-ip-shaper: "<your_own_value> (source firewall.shaper.per-ip-shaper.name)"
policyid: "49"
poolname:
-
name: "default_name_51 (source firewall.ippool6.name)"
profile-group: "<your_own_value> (source firewall.profile-group.name)"
profile-protocol-options: "<your_own_value> (source firewall.profile-protocol-options.name)"
profile-type: "single"
replacemsg-override-group: "<your_own_value> (source system.replacemsg-group.name)"
rsso: "enable"
schedule: "<your_own_value> (source firewall.schedule.onetime.name firewall.schedule.recurring.name firewall.schedule.group.name)"
send-deny-packet: "enable"
service:
-
name: "default_name_60 (source firewall.service.custom.name firewall.service.group.name)"
service-negate: "enable"
session-ttl: "62"
spamfilter-profile: "<your_own_value> (source spamfilter.profile.name)"
srcaddr:
-
name: "default_name_65 (source firewall.address6.name firewall.addrgrp6.name)"
srcaddr-negate: "enable"
srcintf:
-
name: "default_name_68 (source system.zone.name system.interface.name)"
ssh-filter-profile: "<your_own_value> (source ssh-filter.profile.name)"
ssl-mirror: "enable"
ssl-mirror-intf:
-
name: "default_name_72 (source system.zone.name system.interface.name)"
ssl-ssh-profile: "<your_own_value> (source firewall.ssl-ssh-profile.name)"
status: "enable"
tcp-mss-receiver: "75"
tcp-mss-sender: "76"
tcp-session-without-syn: "all"
timeout-send-rst: "enable"
traffic-shaper: "<your_own_value> (source firewall.shaper.traffic-shaper.name)"
traffic-shaper-reverse: "<your_own_value> (source firewall.shaper.traffic-shaper.name)"
url-category:
-
id: "82"
users:
-
name: "default_name_84 (source user.local.name)"
utm-status: "enable"
uuid: "<your_own_value>"
vlan-cos-fwd: "87"
vlan-cos-rev: "88"
vlan-filter: "<your_own_value>"
voip-profile: "<your_own_value> (source voip.profile.name)"
vpntunnel: "<your_own_value> (source vpn.ipsec.phase1.name vpn.ipsec.manualkey.name)"
webfilter-profile: "<your_own_value> (source webfilter.profile.name)"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "key1"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
fos = None
def login(data):
host = data['host']
username = data['username']
password = data['password']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password)
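# Note: 'https' is parsed as a bool by AnsibleModule; when it is present and
# False, the API client is switched to plain HTTP, which is only sensible
# against lab devices.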
def filter_firewall_policy6_data(json):
option_list = ['action', 'app-category', 'app-group',
'application', 'application-list', 'av-profile',
'comments', 'custom-log-fields', 'devices',
'diffserv-forward', 'diffserv-reverse', 'diffservcode-forward',
'diffservcode-rev', 'dlp-sensor', 'dscp-match',
'dscp-negate', 'dscp-value', 'dsri',
'dstaddr', 'dstaddr-negate', 'dstintf',
'firewall-session-dirty', 'fixedport', 'global-label',
'groups', 'icap-profile', 'inbound',
'ippool', 'ips-sensor', 'label',
'logtraffic', 'logtraffic-start', 'name',
'nat', 'natinbound', 'natoutbound',
'outbound', 'per-ip-shaper', 'policyid',
'poolname', 'profile-group', 'profile-protocol-options',
'profile-type', 'replacemsg-override-group', 'rsso',
'schedule', 'send-deny-packet', 'service',
'service-negate', 'session-ttl', 'spamfilter-profile',
'srcaddr', 'srcaddr-negate', 'srcintf',
'ssh-filter-profile', 'ssl-mirror', 'ssl-mirror-intf',
'ssl-ssh-profile', 'status', 'tcp-mss-receiver',
'tcp-mss-sender', 'tcp-session-without-syn', 'timeout-send-rst',
'traffic-shaper', 'traffic-shaper-reverse', 'url-category',
'users', 'utm-status', 'uuid',
'vlan-cos-fwd', 'vlan-cos-rev', 'vlan-filter',
'voip-profile', 'vpntunnel', 'webfilter-profile']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
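# Illustrative behaviour of the filter above (hypothetical input): keys that
# are absent or None are dropped, and keys outside option_list (e.g. 'state')
# are ignored:
#   filter_firewall_policy6_data({'policyid': 1, 'comments': None,
#                                 'state': 'present'})  ->  {'policyid': 1}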
def firewall_policy6(data, fos):
vdom = data['vdom']
firewall_policy6_data = data['firewall_policy6']
filtered_data = filter_firewall_policy6_data(firewall_policy6_data)
if firewall_policy6_data['state'] == "present":
return fos.set('firewall',
'policy6',
data=filtered_data,
vdom=vdom)
elif firewall_policy6_data['state'] == "absent":
return fos.delete('firewall',
'policy6',
mkey=filtered_data['policyid'],
vdom=vdom)
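# Illustrative flow: with state == "present" the filtered payload is pushed
# with fos.set(); with state == "absent" only 'policyid' is used, as the mkey
# for fos.delete().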
def fortios_firewall(data, fos):
    login(data)
    methodlist = ['firewall_policy6']
    resp = None
    for method in methodlist:
        if data[method]:
            resp = eval(method)(data, fos)
            break
    fos.logout()
    if resp is None:
        # Guard against an unbound 'resp' when no configuration section was supplied.
        return True, False, {'status': 'error'}
    return not resp['status'] == "success", resp['status'] == "success", resp
def main():
fields = {
"host": {"required": True, "type": "str"},
"username": {"required": True, "type": "str"},
"password": {"required": False, "type": "str", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": "False"},
"firewall_policy6": {
"required": False, "type": "dict",
"options": {
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"action": {"required": False, "type": "str",
"choices": ["accept", "deny", "ipsec"]},
"app-category": {"required": False, "type": "list",
"options": {
"id": {"required": True, "type": "int"}
}},
"app-group": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"application": {"required": False, "type": "list",
"options": {
"id": {"required": True, "type": "int"}
}},
"application-list": {"required": False, "type": "str"},
"av-profile": {"required": False, "type": "str"},
"comments": {"required": False, "type": "str"},
"custom-log-fields": {"required": False, "type": "list",
"options": {
"field-id": {"required": True, "type": "str"}
}},
"devices": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"diffserv-forward": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"diffserv-reverse": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"diffservcode-forward": {"required": False, "type": "str"},
"diffservcode-rev": {"required": False, "type": "str"},
"dlp-sensor": {"required": False, "type": "str"},
"dscp-match": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dscp-negate": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dscp-value": {"required": False, "type": "str"},
"dsri": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dstaddr": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"dstaddr-negate": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dstintf": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"firewall-session-dirty": {"required": False, "type": "str",
"choices": ["check-all", "check-new"]},
"fixedport": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"global-label": {"required": False, "type": "str"},
"groups": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"icap-profile": {"required": False, "type": "str"},
"inbound": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ippool": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ips-sensor": {"required": False, "type": "str"},
"label": {"required": False, "type": "str"},
"logtraffic": {"required": False, "type": "str",
"choices": ["all", "utm", "disable"]},
"logtraffic-start": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"name": {"required": False, "type": "str"},
"nat": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"natinbound": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"natoutbound": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"outbound": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"per-ip-shaper": {"required": False, "type": "str"},
"policyid": {"required": True, "type": "int"},
"poolname": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"profile-group": {"required": False, "type": "str"},
"profile-protocol-options": {"required": False, "type": "str"},
"profile-type": {"required": False, "type": "str",
"choices": ["single", "group"]},
"replacemsg-override-group": {"required": False, "type": "str"},
"rsso": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"schedule": {"required": False, "type": "str"},
"send-deny-packet": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"service": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"service-negate": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"session-ttl": {"required": False, "type": "int"},
"spamfilter-profile": {"required": False, "type": "str"},
"srcaddr": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"srcaddr-negate": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"srcintf": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"ssh-filter-profile": {"required": False, "type": "str"},
"ssl-mirror": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ssl-mirror-intf": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"ssl-ssh-profile": {"required": False, "type": "str"},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"tcp-mss-receiver": {"required": False, "type": "int"},
"tcp-mss-sender": {"required": False, "type": "int"},
"tcp-session-without-syn": {"required": False, "type": "str",
"choices": ["all", "data-only", "disable"]},
"timeout-send-rst": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"traffic-shaper": {"required": False, "type": "str"},
"traffic-shaper-reverse": {"required": False, "type": "str"},
"url-category": {"required": False, "type": "list",
"options": {
"id": {"required": True, "type": "int"}
}},
"users": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"utm-status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"uuid": {"required": False, "type": "str"},
"vlan-cos-fwd": {"required": False, "type": "int"},
"vlan-cos-rev": {"required": False, "type": "int"},
"vlan-filter": {"required": False, "type": "str"},
"voip-profile": {"required": False, "type": "str"},
"vpntunnel": {"required": False, "type": "str"},
"webfilter-profile": {"required": False, "type": "str"}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
global fos
fos = FortiOSAPI()
is_error, has_changed, result = fortios_firewall(module.params, fos)
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| gpl-3.0 | -3,181,267,092,349,761,000 | -8,084,517,396,985,949,000 | 40.246456 | 138 | 0.474235 | false |
skjena/Assemblyx86 | triMatMult/result.py | 9 | 1238 | class Result(object):
"""
a wrapper to contain the results of a test
@testName: the name of the test run
@correct: True if the output of matched the solution; False otherwise
@timeTaken is either
the number of seconds it took the program to run
'Timed Out' if the program took too long to complete
'Crashed' if the program encountered some fatal error
"""
def __init__(self, testName, correct, timeTaken):
"""
@testName: the name of the test run
@correct: True if the output of matched the solution; False otherwise
@timeTaken is either
the number of seconds it took the program to run
'Timed Out' if the program took too long to complete
'Crashed' if the program encountered some fatal error
"""
self.testName = testName
self.correct = correct
self.timeTaken = timeTaken
#end init
def __repr__(self):
if type(self.timeTaken) == str:
format_str = 'Test: {!s} | Correct: {!s} | Time Taken: {!s}'
else:
format_str = 'Test: {!s} | Correct: {!s} | Time Taken: {:.3f}'
s = format_str.format(self.testName, self.correct, self.timeTaken)
return s
def __str__(self):
return self.__repr__()
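# Minimal usage sketch (names are illustrative):
#   r = Result('matmul_small', True, 0.123)
#   print(r)   # -> Test: matmul_small | Correct: True | Time Taken: 0.123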
| gpl-2.0 | -7,486,726,393,763,602,000 | -2,159,934,898,450,057,200 | 33.371429 | 74 | 0.634895 | false |
ryfeus/lambda-packs | Sklearn_scipy_numpy/source/scipy/io/matlab/tests/test_streams.py | 109 | 5442 | """ Testing
"""
from __future__ import division, print_function, absolute_import
import os
import sys
import zlib
from io import BytesIO
if sys.version_info[0] >= 3:
cStringIO = BytesIO
else:
from cStringIO import StringIO as cStringIO
from tempfile import mkstemp
import numpy as np
from numpy.testing import (assert_, assert_equal, assert_raises,
run_module_suite)
from scipy.io.matlab.streams import make_stream, \
GenericStream, cStringStream, FileStream, ZlibInputStream, \
_read_into, _read_string
fs = None
gs = None
cs = None
fname = None
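# These module-level handles are (re)initialized by setup() below: 'fs' is a
# real file on disk, 'gs' a BytesIO, and 'cs' a cStringIO (Python 2 only);
# each wraps the same 8-byte payload b'a\x00string'.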
def setup():
val = b'a\x00string'
global fs, gs, cs, fname
fd, fname = mkstemp()
fs = os.fdopen(fd, 'wb')
fs.write(val)
fs.close()
fs = open(fname, 'rb')
gs = BytesIO(val)
cs = cStringIO(val)
def teardown():
global fname, fs
fs.close()
del fs
os.unlink(fname)
def test_make_stream():
global fs, gs, cs
# test stream initialization
assert_(isinstance(make_stream(gs), GenericStream))
if sys.version_info[0] < 3:
assert_(isinstance(make_stream(cs), cStringStream))
assert_(isinstance(make_stream(fs), FileStream))
def test_tell_seek():
global fs, gs, cs
for s in (fs, gs, cs):
st = make_stream(s)
res = st.seek(0)
yield assert_equal, res, 0
yield assert_equal, st.tell(), 0
res = st.seek(5)
yield assert_equal, res, 0
yield assert_equal, st.tell(), 5
res = st.seek(2, 1)
yield assert_equal, res, 0
yield assert_equal, st.tell(), 7
res = st.seek(-2, 2)
yield assert_equal, res, 0
yield assert_equal, st.tell(), 6
def test_read():
global fs, gs, cs
for s in (fs, gs, cs):
st = make_stream(s)
st.seek(0)
res = st.read(-1)
yield assert_equal, res, b'a\x00string'
st.seek(0)
res = st.read(4)
yield assert_equal, res, b'a\x00st'
# read into
st.seek(0)
res = _read_into(st, 4)
yield assert_equal, res, b'a\x00st'
res = _read_into(st, 4)
yield assert_equal, res, b'ring'
yield assert_raises, IOError, _read_into, st, 2
# read alloc
st.seek(0)
res = _read_string(st, 4)
yield assert_equal, res, b'a\x00st'
res = _read_string(st, 4)
yield assert_equal, res, b'ring'
yield assert_raises, IOError, _read_string, st, 2
class TestZlibInputStream(object):
def _get_data(self, size):
data = np.random.randint(0, 256, size).astype(np.uint8).tostring()
compressed_data = zlib.compress(data)
stream = BytesIO(compressed_data)
return stream, len(compressed_data), data
def test_read(self):
block_size = 131072
SIZES = [0, 1, 10, block_size//2, block_size-1,
block_size, block_size+1, 2*block_size-1]
READ_SIZES = [block_size//2, block_size-1,
block_size, block_size+1]
def check(size, read_size):
compressed_stream, compressed_data_len, data = self._get_data(size)
stream = ZlibInputStream(compressed_stream, compressed_data_len)
data2 = b''
so_far = 0
while True:
block = stream.read(min(read_size,
size - so_far))
if not block:
break
so_far += len(block)
data2 += block
assert_equal(data, data2)
for size in SIZES:
for read_size in READ_SIZES:
yield check, size, read_size
def test_read_max_length(self):
size = 1234
data = np.random.randint(0, 256, size).astype(np.uint8).tostring()
compressed_data = zlib.compress(data)
compressed_stream = BytesIO(compressed_data + b"abbacaca")
stream = ZlibInputStream(compressed_stream, len(compressed_data))
stream.read(len(data))
assert_equal(compressed_stream.tell(), len(compressed_data))
assert_raises(IOError, stream.read, 1)
def test_seek(self):
compressed_stream, compressed_data_len, data = self._get_data(1024)
stream = ZlibInputStream(compressed_stream, compressed_data_len)
stream.seek(123)
p = 123
assert_equal(stream.tell(), p)
d1 = stream.read(11)
assert_equal(d1, data[p:p+11])
stream.seek(321, 1)
p = 123+11+321
assert_equal(stream.tell(), p)
d2 = stream.read(21)
assert_equal(d2, data[p:p+21])
stream.seek(641, 0)
p = 641
assert_equal(stream.tell(), p)
d3 = stream.read(11)
assert_equal(d3, data[p:p+11])
assert_raises(IOError, stream.seek, 10, 2)
assert_raises(IOError, stream.seek, -1, 1)
assert_raises(ValueError, stream.seek, 1, 123)
stream.seek(10000, 1)
assert_raises(IOError, stream.read, 12)
def test_all_data_read(self):
compressed_stream, compressed_data_len, data = self._get_data(1024)
stream = ZlibInputStream(compressed_stream, compressed_data_len)
assert_(not stream.all_data_read())
stream.seek(512)
assert_(not stream.all_data_read())
stream.seek(1024)
assert_(stream.all_data_read())
if __name__ == "__main__":
run_module_suite()
| mit | -1,108,837,555,137,061,400 | 1,937,433,805,373,402,600 | 26.907692 | 79 | 0.574605 | false |
ArvinDevel/incubator-pulsar | dashboard/django/stats/migrations/0001_initial.py | 13 | 11195 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-21 21:20
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
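    # A sketch of applying this initial migration, assuming a standard Django
    # project with the 'stats' app installed:
    #   python manage.py migrate stats 0001_initial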
operations = [
migrations.CreateModel(
name='ActiveBroker',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.BigIntegerField(db_index=True)),
],
),
migrations.CreateModel(
name='Broker',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('url', models.URLField(db_index=True)),
],
),
migrations.CreateModel(
name='Bundle',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.BigIntegerField(db_index=True)),
('range', models.CharField(max_length=200)),
('broker', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.Broker')),
],
),
migrations.CreateModel(
name='Cluster',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200, unique=True)),
('serviceUrl', models.URLField()),
],
),
migrations.CreateModel(
name='Consumer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.BigIntegerField(db_index=True)),
('address', models.CharField(max_length=64, null=True)),
('availablePermits', models.IntegerField(default=0)),
('connectedSince', models.DateTimeField(null=True)),
('consumerName', models.CharField(max_length=64, null=True)),
('msgRateOut', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('msgRateRedeliver', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('msgThroughputOut', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('unackedMessages', models.BigIntegerField(default=0)),
('blockedConsumerOnUnackedMsgs', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='LatestTimestamp',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=10, unique=True)),
('timestamp', models.BigIntegerField(default=0)),
],
),
migrations.CreateModel(
name='Namespace',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200, unique=True)),
('clusters', models.ManyToManyField(to='stats.Cluster')),
],
),
migrations.CreateModel(
name='Property',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200, unique=True)),
],
options={
'verbose_name_plural': 'properties',
},
),
migrations.CreateModel(
name='Replication',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.BigIntegerField(db_index=True)),
('msgRateIn', models.DecimalField(decimal_places=1, max_digits=12)),
('msgThroughputIn', models.DecimalField(decimal_places=1, max_digits=12)),
('msgRateOut', models.DecimalField(decimal_places=1, max_digits=12)),
('msgThroughputOut', models.DecimalField(decimal_places=1, max_digits=12)),
('msgRateExpired', models.DecimalField(decimal_places=1, max_digits=12)),
('replicationBacklog', models.BigIntegerField(default=0)),
('connected', models.BooleanField(default=False)),
('replicationDelayInSeconds', models.IntegerField(default=0)),
('inboundConnectedSince', models.DateTimeField(null=True)),
('outboundConnectedSince', models.DateTimeField(null=True)),
('local_cluster', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.Cluster')),
('remote_cluster', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='remote_cluster', to='stats.Cluster')),
],
),
migrations.CreateModel(
name='Subscription',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('timestamp', models.BigIntegerField(db_index=True)),
('msgBacklog', models.BigIntegerField(default=0)),
('msgRateExpired', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('msgRateOut', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('msgRateRedeliver', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('msgThroughputOut', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('subscriptionType', models.CharField(choices=[('N', 'Not connected'), ('E', 'Exclusive'), ('S', 'Shared'), ('F', 'Failover')], default='N', max_length=1)),
('unackedMessages', models.BigIntegerField(default=0)),
('namespace', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.Namespace')),
],
),
migrations.CreateModel(
name='Topic',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=1024)),
('timestamp', models.BigIntegerField(db_index=True)),
('averageMsgSize', models.IntegerField(default=0)),
('msgRateIn', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('msgRateOut', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('msgThroughputIn', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('msgThroughputOut', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('pendingAddEntriesCount', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('producerCount', models.IntegerField(default=0)),
('subscriptionCount', models.IntegerField(default=0)),
('consumerCount', models.IntegerField(default=0)),
('storageSize', models.BigIntegerField(default=0)),
('backlog', models.BigIntegerField(default=0)),
('localRateIn', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('localRateOut', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('localThroughputIn', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('localThroughputOut', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('replicationRateIn', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('replicationRateOut', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('replicationThroughputIn', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('replicationThroughputOut', models.DecimalField(decimal_places=1, default=0, max_digits=12)),
('replicationBacklog', models.BigIntegerField(default=0)),
('active_broker', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.ActiveBroker')),
('broker', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.Broker')),
('bundle', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.Bundle')),
('cluster', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.Cluster')),
('namespace', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.Namespace')),
],
),
migrations.AddField(
model_name='subscription',
name='topic',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.Topic'),
),
migrations.AddField(
model_name='replication',
name='topic',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.Topic'),
),
migrations.AddField(
model_name='namespace',
name='property',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.Property'),
),
migrations.AddField(
model_name='consumer',
name='subscription',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.Subscription'),
),
migrations.AddField(
model_name='bundle',
name='cluster',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.Cluster'),
),
migrations.AddField(
model_name='bundle',
name='namespace',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.Namespace'),
),
migrations.AddField(
model_name='broker',
name='cluster',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.Cluster'),
),
migrations.AddField(
model_name='activebroker',
name='broker',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='stats.Broker'),
),
migrations.AlterIndexTogether(
name='topic',
index_together=set([('name', 'cluster', 'timestamp')]),
),
]
| apache-2.0 | 8,639,376,888,896,042,000 | 8,631,579,368,015,213,000 | 54.420792 | 172 | 0.58812 | false |
lorisercole/thermocepstrum | thermocepstrum/i_o/read_lammps_dump.py | 1 | 19682 | # -*- coding: utf-8 -*-
################################################################################
###
### ReadLAMMPSDump - v0.1.8 - May 03, 2018
###
################################################################################
###
### a package to read LAMMPS Dump files
### (it assumes that the data column names and the number of atoms do not change)
###
################################################################################
## example:
## import read_lammps_dump as rd
## data = rd.LAMMPS_Dump(filename)
##
import numpy as np
from time import time
from thermocepstrum.utils import log
def is_string(string):
try:
float(string)
except ValueError:
return True
return False
def is_vector_variable(string):
bracket = string.rfind('[')
if (bracket == -1):
bracket = 0
return bracket
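# Illustrative: is_vector_variable('c_flux[2]') returns 6 (the position of
# '['), while is_vector_variable('Temp') returns 0, which is falsy.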
def file_length(filename):
i = -1
with open(filename) as f:
for i, l in enumerate(f, 1):
pass
return i
def get_volume(filename):
f = open(filename, 'r')
line = f.readline()
while (line):
if 'BOX BOUNDS' in line:
xlo, xhi = list(map(float, f.readline().split()))
ylo, yhi = list(map(float, f.readline().split()))
zlo, zhi = list(map(float, f.readline().split()))
break
line = f.readline()
f.close()
volume = (xhi - xlo) * (yhi - ylo) * (zhi - zlo)
return volume
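# Sketch of the dump header that get_volume()/get_natoms() parse (values are
# illustrative):
#   ITEM: NUMBER OF ATOMS
#   108
#   ITEM: BOX BOUNDS pp pp pp
#   0.0 10.0
#   0.0 10.0
#   0.0 10.0
# With those bounds the volume is (10-0)**3 = 1000.0.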
def get_natoms(filename):
f = open(filename, 'r')
line = f.readline()
while (line):
if 'NUMBER OF ATOMS' in line:
natoms = int(f.readline())
break
line = f.readline()
f.close()
return natoms
class LAMMPS_Dump(object):
"""
A LAMMPS_Dump file that can be read in blocks.
example:
traj = LAMMPS_Dump(filename, preload=False) -->> do not preload list of steps (suggested if the file is big)
      traj.read_timesteps(10, start_step=0, select_ckeys=['id', 'xu', 'yu', 'vu'])   -->>  Read first 10 timesteps, only the specified columns
      traj.read_timesteps(10, select_ckeys=['id', 'xu', 'yu', 'vu'])   -->>  Read the next 10 timesteps, only the specified columns (DELTA_TIMESTEP is assumed)
traj.read_timesteps((10,30)) -->> Read from TIMESTEP 10 to 30
traj.read_timesteps((10,30,2)) -->> Read every 2 steps from TIMESTEP 10 to 30
print(traj.data)
"""
def __init__(self, *args, **kwargs):
#*******
if (len(args) > 0):
self.filename = args[0]
if (len(args) == 2):
self.select_ckeys = args[1]
else:
self.select_ckeys = None
else:
raise ValueError('No file given.')
group_vectors = kwargs.get('group_vectors', True)
preload_timesteps = kwargs.get('preload', True)
self._quiet = kwargs.get('quiet', False)
self._GUI = kwargs.get('GUI', False)
if self._GUI:
from ipywidgets import FloatProgress
from IPython.display import display
global FloatProgress, display
self._open_file()
self._read_ckeys(group_vectors, preload_timesteps)
self.ckey = None
#self.MAX_NSTEPS = data_length(self.filename)
#log.write_log("Data length = ", self.MAX_NSTEPS)
return
def __repr__(self):
msg = 'LAMMPS_Dump:\n' + \
' filename: {}\n'.format(self.filename) + \
' all_ckeys: {}\n'.format(self.all_ckeys) + \
' select_ckeys: {}\n'.format(self.select_ckeys) + \
' used ckey: {}\n'.format(self.ckey) + \
' all_timesteps: {}\n'.format(self.all_timesteps) + \
' select_timesteps: {}\n'.format(self.select_timesteps) + \
' used timesteps: {}\n'.format(self.timestep) + \
' start pos: {}\n'.format(self._start_byte) + \
' current pos: {}\n'.format(self.file.tell()) + \
' FIRST TIMESTEP: {}\n'.format(self.FIRST_TIMESTEP) + \
' LAST TIMESTEP: {}\n'.format(self.LAST_TIMESTEP) + \
' DELTA TIMESTEP: {}\n'.format(self.DELTA_TIMESTEP) + \
' current step: {}\n'.format(self.current_timestep)
return msg
def _open_file(self):
"""Open the file."""
try:
self.file = open(self.filename, 'r')
except:
raise ValueError('File does not exist.')
return
def _read_ckeys(self, group_vectors=True, preload_timesteps=True):
"""Read the column keys. If group_vectors=True the vector ckeys are grouped togheter"""
self._start_byte = self.file.tell()
self.all_ckeys = {}
self.all_timesteps = []
self.preload_timesteps = preload_timesteps
while True:
line = self.file.readline()
if len(line) == 0: # EOF
raise RuntimeError('Reached EOF, no ckeys found.')
values = np.array(line.split())
if (values[0] == 'ITEM:'):
if (values[1] == 'TIMESTEP'):
self.current_timestep = int(self.file.readline())
self.FIRST_TIMESTEP = self.current_timestep
self.all_timesteps.append(self.current_timestep)
                # optional:
elif ((values[1] == 'NUMBER') and values[2] == 'OF' and values[3] == 'ATOMS'):
self.NATOMS = int(self.file.readline())
elif ((values[1] == 'BOX') and values[2] == 'BOUNDS'):
self.BOX_BOUNDS_TYPE = values[3:6]
xbox = self.file.readline().split()
ybox = self.file.readline().split()
zbox = self.file.readline().split()
self.BOX_BOUNDS = np.array([xbox, ybox, zbox], dtype='float')
elif (values[1] == 'ATOMS'):
for i in range(2, len(values)):
if group_vectors:
bracket = is_vector_variable(values[i]) # get position of left square bracket
else:
bracket = 0
if (bracket == 0): # the variable is a scalar
key = values[i]
if (key[:2] == 'c_'): # remove 'c_' if present
key = key[2:]
self.all_ckeys[key] = [i - 2] # -2 offset
else: # the variable is a vector
key = values[i][:bracket] # name of vector
if (key[:2] == 'c_'): # remove 'c_' if present
key = key[2:]
vecidx = int(values[i][bracket + 1:-1]) # current index
if key in self.all_ckeys: # if this vector is already defined, add this component
if (vecidx > self.all_ckeys[key].size):
                                    self.all_ckeys[key] = np.resize(self.all_ckeys[key], vecidx)
self.all_ckeys[key][vecidx - 1] = i - 2 # -2 offset!
else: # if it is not, define a vector
self.all_ckeys[key] = np.array([0] * vecidx)
self.all_ckeys[key][-1] = i - 2 # -2 offset!
#self._start_byte = self.file.tell()
break
#else:
# self.header += line
if self.preload_timesteps:
# get the list of time steps
while True:
line = self.file.readline()
if len(line) == 0: # EOF
break
if (line == 'ITEM: TIMESTEP\n'):
self.current_timestep = int(self.file.readline())
self.all_timesteps.append(self.current_timestep)
self.LAST_TIMESTEP = self.all_timesteps[-1]
self.DELTA_TIMESTEP = self.all_timesteps[1] - self.FIRST_TIMESTEP
self.TOT_TIMESTEPS = len(self.all_timesteps)
self.all_timesteps = np.array(self.all_timesteps)
else:
log.write_log(' ** No timesteps pre-loaded. Be careful in the selection. **')
# get the first 2 timesteps
while (len(self.all_timesteps) < 2):
line = self.file.readline()
if len(line) == 0: # EOF
break
if (line == 'ITEM: TIMESTEP\n'):
self.current_timestep = int(self.file.readline())
self.all_timesteps.append(self.current_timestep)
self.LAST_TIMESTEP = None
self.DELTA_TIMESTEP = self.all_timesteps[1] - self.FIRST_TIMESTEP
self.TOT_TIMESTEPS = None
self.all_timesteps = None
# go back to the first timestep
self.gototimestep(0) # compute_first = True
self._start_byte = 0
log.write_log(' all_ckeys = ', self.all_ckeys)
log.write_log(' TOT_TIMESTEPS = ', self.TOT_TIMESTEPS)
log.write_log(' FIRST_TIMESTEP = ', self.FIRST_TIMESTEP)
log.write_log(' DELTA_TIMESTEP = ', self.DELTA_TIMESTEP)
log.write_log(' LAST_TIMESTEP = ', self.LAST_TIMESTEP)
log.write_log(' all_timesteps = ', self.all_timesteps)
return
def _set_ckey(self, select_ckeys=None):
"""
Set the ckeys to read from the selected, checking the available ones.
If select_ckeys is not passed, then use the already selected ones, or all the available ones if no selection
was previously made.
"""
if select_ckeys is not None:
self.select_ckeys = select_ckeys
self.ckey = {}
if self.select_ckeys is None: # take all ckeys
self.ckey = self.all_ckeys
else:
for key in self.select_ckeys: # take only the selected ckeys
value = self.all_ckeys.get(key, None)
if value is not None:
self.ckey[key] = value[:] # copy all indexes (up to max dimension for vectors)
else:
log.write_log('Warning: ', key, 'key not found.')
if (len(self.ckey) == 0):
raise KeyError('No ckey set. Check selected keys.')
else:
if not self._quiet:
log.write_log(' ckey = ', self.ckey)
return
def _set_timesteps(self, selection, start_step=-1):
"""Set the timesteps to read from the selected, checking the available ones.
INPUT: N --> Read the next N steps (DELTA_TIMESTEP is assumed)
N, start_step=30 --> Read N steps from the TIMESTEP 30
if compute_first=True, read the current step as well
(10,30) --> Read from TIMESTEP 10 to 30
(10,30,2) --> Read every 2 steps from TIMESTEP 10 to 30"""
if (start_step == -1):
if self._compute_current_step:
start_step = self.current_timestep
else:
start_step = self.current_timestep + self.DELTA_TIMESTEP
elif (start_step == 0):
start_step = self.FIRST_TIMESTEP
if np.isscalar(selection) or (len(selection) == 1): # select N steps from start one
first = start_step
last = self.DELTA_TIMESTEP * selection + start_step
step = None
elif (len(selection) == 2):
first = selection[0]
last = selection[1]
step = None
elif (len(selection) == 3):
first = selection[0]
last = selection[1]
step = selection[2]
if step is None:
step = self.DELTA_TIMESTEP
elif (step % self.DELTA_TIMESTEP != 0):
log.write_log('Warning: step is not a multiple of the detected DELTA_TIMESTEP. You may get errors.')
if (first % step != 0):
first += step - first % step # round first step to the next in the list
self.timestep = []
self.select_timesteps = np.arange(first, last, step) # selected timesteps
if self.preload_timesteps:
for step in self.select_timesteps:
if step in self.all_timesteps:
self.timestep.append(step) # make list of available selected-timesteps
else:
log.write_log('Warning: timestep # {:d} not found.'.format(step))
else:
self.timestep = self.select_timesteps # use all the selected (be careful)
self.nsteps = len(self.timestep) # number of available steps
if (self.nsteps == 0):
raise ValueError('No timestep set. Check selected timesteps.')
else:
if not self._quiet:
log.write_log(' nsteps = ', self.nsteps)
log.write_log(' timestep = ', self.timestep)
return
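    # Illustrative selections (assuming DELTA_TIMESTEP = 10): selection=(10, 30)
    # reads steps 10 and 20 (the upper bound is excluded, as with np.arange),
    # while selection=(0, 100, 50) reads steps 0 and 50.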
def _initialize_dic(self):
"""Initialize the data dictionary once the ckeys and timesteps have been set."""
if self.ckey is None:
raise ValueError('ckey not set.')
if self.timestep is None:
raise ValueError('timestep not set.')
self.data = [dict() for i in range(self.nsteps)]
for istep in range(self.nsteps):
for key, idx in self.ckey.items():
if (key == 'element'): # this should be improved
self.data[istep][key] = np.zeros((self.NATOMS, len(idx)), dtype='S8')
else:
self.data[istep][key] = np.zeros((self.NATOMS, len(idx)), dtype='float64')
return
def _gototimestep(self, start_step, fast_check=True):
"""
Go to the start_step-th line in the time series (assumes step=1).
start_step = -1 --> ignore, continue from current step
0 --> go to FIRST timestep
N --> go to N-th timestep
        fast_check  = True --> assumes the TIMESTEP values are monotonically increasing.
                               If start_step is passed over without being found, stop.
"""
if (start_step >= 0):
if (start_step <= self.current_timestep):
# or (self.current_timestep == -1): # if start_step is before/equal the current step
self.file.seek(self._start_byte) # --> start over
if (start_step == 0): # or (self.current_timestep == -1):
goto_step = self.FIRST_TIMESTEP
else:
goto_step = start_step
# search until start_step is found ***** MAY BE IMPROVED KNOWING THE N OF LINES TO SKIP ******
while True:
line = self.file.readline()
if len(line) == 0: # EOF
raise EOFError('Warning (gototimestep): reached EOF. Timestep {} NOT FOUND.'.format(goto_step))
if (line == 'ITEM: TIMESTEP\n'):
self.current_timestep = int(self.file.readline())
if (self.current_timestep == goto_step):
while (self.file.readline().find('ITEM: ATOMS') < 0): # jump to the data part
pass
break
if (fast_check) and (self.current_timestep > goto_step):
raise Warning(
'Warning (gototimestep): Timestep {} NOT FOUND up to current_step = {}. (To force check the whole trajectory set fast_check=False)'
.format(goto_step, self.current_timestep))
else:
pass
return
def gototimestep(self, start_step, fast_check=True):
"""
Go to the start_step-th line in the time series (assumes step=1).
start_step = -1 --> ignore, continue from current step
0 --> go to FIRST timestep
N --> go to N-th timestep
        fast_check  = True --> assumes the TIMESTEP values are monotonically increasing.
                               If start_step is passed over without being found, stop.
"""
## user-called function
self._compute_current_step = True
self._gototimestep(start_step, fast_check)
return
def read_timesteps(self, selection, start_step=-1, select_ckeys=None, fast_check=True):
"""
Read selected keys of file, within the provided range.
Examples:
          read_timesteps(10, start_step=0, select_ckeys=['id', 'xu', 'yu', 'vu'])   -->>  Read first 10 timesteps, only the specified columns
          read_timesteps(10, select_ckeys=['id', 'xu', 'yu', 'vu'])   -->>  Read the next 10 timesteps, only the specified columns (DELTA_TIMESTEP is assumed)
read_timesteps((10,30)) -->> Read from TIMESTEP 10 to 30
read_timesteps((10,30,2)) -->> Read every 2 steps from TIMESTEP 10 to 30
"""
if self._GUI:
progbar = FloatProgress(min=0, max=100)
display(progbar)
start_time = time()
self._set_ckey(select_ckeys) # set the ckeys to read --> ckey
self._set_timesteps(selection, start_step) # set the timesteps to read --> timestep
self._initialize_dic() # allocate dictionary --> data
# extract the steps from the file
progbar_step = max(1000, int(0.005 * self.nsteps))
atomid_col = self.all_ckeys['id'][0]
for istep, step in enumerate(self.timestep):
self._gototimestep(step, fast_check) # jump to the desired step,
self.data[istep]['TIMESTEP'] = step
for nat in range(self.NATOMS): # read data (may be unsorted)
line = self.file.readline()
if len(line) == 0: # EOF
raise EOFError('Warning: reached EOF.')
values = np.array(line.split())
for key, idx in self.ckey.items(): # save the selected columns
atomid = int(values[atomid_col]) - 1 # current atom index (in LAMMPS it starts from 1)
if (key == 'element'): # this should be improved
self.data[istep][key][atomid, :] = np.array(list(map(str, values[idx])))
else:
self.data[istep][key][atomid, :] = np.array(list(map(float, values[idx])))
if ((istep + 1) % progbar_step == 0):
if self._GUI:
progbar.value = float(istep + 1) / self.nsteps * 100.
progbar.description = '%g %%' % progbar.value
else:
log.write_log(' step = {:9d} - {:6.2f}% completed'.format(istep + 1,
float(istep + 1) / self.nsteps * 100.))
if self._GUI:
progbar.close()
# check number of steps read, keep an even number of steps
if (istep + 1 < self.nsteps): # (should never happen)
if (istep == 0):
log.write_log('WARNING: no step read.')
return
else:
                log.write_log('Warning: fewer steps read.')
self.nsteps = istep + 1
if not self._quiet:
log.write_log(' ( %d ) steps read.' % (self.nsteps))
log.write_log('DONE. Elapsed time: ', time() - start_time, 'seconds')
self._compute_current_step = False # next time do not compute the current_step
return self.data
| gpl-3.0 | -789,466,276,920,837,400 | 903,329,000,829,441,300 | 44.878788 | 160 | 0.511381 | false |
coruus/pyasn1-modules | tools/pkcs10dump.py | 26 | 1109 | #!/usr/bin/python
#
# Read ASN.1/PEM X.509 certificate requests (PKCS#10 format) on stdin,
# parse each into plain text, then build substrate from it
#
from pyasn1.codec.der import decoder, encoder
from pyasn1_modules import rfc2314, pem
import sys
if len(sys.argv) != 1:
print("""Usage:
$ cat certificateRequest.pem | %s""" % sys.argv[0])
sys.exit(-1)
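# A quick way to produce test input, assuming a stock OpenSSL install
# (the key is discarded; only the PEM request reaches stdout):
#   openssl req -new -newkey rsa:2048 -nodes -keyout /dev/null \
#       -subj '/CN=test' 2>/dev/null | python pkcs10dump.py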
certType = rfc2314.CertificationRequest()
certCnt = 0
while True:
idx, substrate = pem.readPemBlocksFromFile(
sys.stdin, ('-----BEGIN CERTIFICATE REQUEST-----',
'-----END CERTIFICATE REQUEST-----')
)
if not substrate:
break
cert, rest = decoder.decode(substrate, asn1Spec=certType)
    if rest:
        substrate = substrate[:-len(rest)]
print(cert.prettyPrint())
assert encoder.encode(cert, defMode=False) == substrate or \
encoder.encode(cert, defMode=True) == substrate, \
'cert recode fails'
certCnt = certCnt + 1
print('*** %s PEM certificate request(s) de/serialized' % certCnt)
| bsd-2-clause | -1,296,181,284,599,166,000 | 5,274,375,254,876,001,000 | 27.435897 | 72 | 0.602344 | false |
petecummings/django-cms | cms/tests/test_publisher.py | 32 | 44420 | # -*- coding: utf-8 -*-
from __future__ import with_statement
from djangocms_text_ckeditor.models import Text
from django.contrib.auth import get_user_model
from django.contrib.sites.models import Site
from django.core.cache import cache
from django.core.management.base import CommandError
from django.core.management import call_command
from django.core.urlresolvers import reverse
from cms.api import create_page, add_plugin, create_title
from cms.constants import PUBLISHER_STATE_PENDING, PUBLISHER_STATE_DEFAULT, PUBLISHER_STATE_DIRTY
from cms.management.commands.subcommands.publisher_publish import PublishCommand
from cms.models import CMSPlugin, Title
from cms.models.pagemodel import Page
from cms.plugin_pool import plugin_pool
from cms.test_utils.testcases import CMSTestCase as TestCase
from cms.test_utils.util.context_managers import StdoutOverride
from cms.test_utils.util.fuzzy_int import FuzzyInt
from cms.utils.conf import get_cms_setting
from cms.utils.i18n import force_language
from cms.utils.urlutils import admin_reverse
class PublisherCommandTests(TestCase):
"""
Tests for the publish command
"""
def test_command_line_should_raise_without_superuser(self):
with self.assertRaises(CommandError):
com = PublishCommand()
com.handle_noargs()
def test_command_line_publishes_zero_pages_on_empty_db(self):
# we need to create a superuser (the db is empty)
get_user_model().objects.create_superuser('djangocms', '[email protected]', '123456')
pages_from_output = 0
published_from_output = 0
with StdoutOverride() as buffer:
# Now we don't expect it to raise, but we need to redirect IO
call_command('cms', 'publisher_publish')
lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work
for line in lines:
if 'Total' in line:
pages_from_output = int(line.split(':')[1])
elif 'Published' in line:
published_from_output = int(line.split(':')[1])
self.assertEqual(pages_from_output, 0)
self.assertEqual(published_from_output, 0)
def test_command_line_ignores_draft_page(self):
# we need to create a superuser (the db is empty)
get_user_model().objects.create_superuser('djangocms', '[email protected]', '123456')
create_page("The page!", "nav_playground.html", "en", published=False)
pages_from_output = 0
published_from_output = 0
with StdoutOverride() as buffer:
# Now we don't expect it to raise, but we need to redirect IO
call_command('cms', 'publisher_publish')
lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work
for line in lines:
if 'Total' in line:
pages_from_output = int(line.split(':')[1])
elif 'Published' in line:
published_from_output = int(line.split(':')[1])
self.assertEqual(pages_from_output, 0)
self.assertEqual(published_from_output, 0)
self.assertEqual(Page.objects.public().count(), 0)
def test_command_line_publishes_draft_page(self):
# we need to create a superuser (the db is empty)
get_user_model().objects.create_superuser('djangocms', '[email protected]', '123456')
create_page("The page!", "nav_playground.html", "en", published=False)
pages_from_output = 0
published_from_output = 0
with StdoutOverride() as buffer:
# Now we don't expect it to raise, but we need to redirect IO
call_command('cms', 'publisher_publish', include_unpublished=True)
lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work
for line in lines:
if 'Total' in line:
pages_from_output = int(line.split(':')[1])
elif 'Published' in line:
published_from_output = int(line.split(':')[1])
self.assertEqual(pages_from_output, 1)
self.assertEqual(published_from_output, 1)
self.assertEqual(Page.objects.public().count(), 1)
def test_command_line_publishes_selected_language(self):
# we need to create a superuser (the db is empty)
get_user_model().objects.create_superuser('djangocms', '[email protected]', '123456')
page = create_page("en title", "nav_playground.html", "en")
title = create_title('de', 'de title', page)
title.published = True
title.save()
title = create_title('fr', 'fr title', page)
title.published = True
title.save()
pages_from_output = 0
published_from_output = 0
with StdoutOverride() as buffer:
# Now we don't expect it to raise, but we need to redirect IO
call_command('cms', 'publisher_publish', language='de')
lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work
for line in lines:
if 'Total' in line:
pages_from_output = int(line.split(':')[1])
elif 'Published' in line:
published_from_output = int(line.split(':')[1])
self.assertEqual(pages_from_output, 1)
self.assertEqual(published_from_output, 1)
self.assertEqual(Page.objects.public().count(), 1)
public = Page.objects.public()[0]
languages = sorted(public.title_set.values_list('language', flat=True))
self.assertEqual(languages, ['de'])
def test_command_line_publishes_selected_language_drafts(self):
# we need to create a superuser (the db is empty)
get_user_model().objects.create_superuser('djangocms', '[email protected]', '123456')
page = create_page("en title", "nav_playground.html", "en")
title = create_title('de', 'de title', page)
title.published = False
title.save()
title = create_title('fr', 'fr title', page)
title.published = False
title.save()
pages_from_output = 0
published_from_output = 0
with StdoutOverride() as buffer:
# Now we don't expect it to raise, but we need to redirect IO
call_command('cms', 'publisher_publish', language='de', include_unpublished=True)
lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work
for line in lines:
if 'Total' in line:
pages_from_output = int(line.split(':')[1])
elif 'Published' in line:
published_from_output = int(line.split(':')[1])
self.assertEqual(pages_from_output, 1)
self.assertEqual(published_from_output, 1)
self.assertEqual(Page.objects.public().count(), 1)
public = Page.objects.public()[0]
languages = sorted(public.title_set.values_list('language', flat=True))
self.assertEqual(languages, ['de'])
def test_table_name_patching(self):
"""
This tests the plugin models patching when publishing from the command line
"""
User = get_user_model()
User.objects.create_superuser('djangocms', '[email protected]', '123456')
create_page("The page!", "nav_playground.html", "en", published=True)
draft = Page.objects.drafts()[0]
draft.reverse_id = 'a_test' # we have to change *something*
draft.save()
add_plugin(draft.placeholders.get(slot=u"body"),
u"TextPlugin", u"en", body="Test content")
draft.publish('en')
add_plugin(draft.placeholders.get(slot=u"body"),
u"TextPlugin", u"en", body="Test content")
# Manually undoing table name patching
Text._meta.db_table = 'djangocms_text_ckeditor_text'
plugin_pool.patched = False
with StdoutOverride():
# Now we don't expect it to raise, but we need to redirect IO
call_command('cms', 'publisher_publish')
not_drafts = len(Page.objects.filter(publisher_is_draft=False))
drafts = len(Page.objects.filter(publisher_is_draft=True))
self.assertEqual(not_drafts, 1)
self.assertEqual(drafts, 1)
def test_command_line_publishes_one_page(self):
"""
Publisher always creates two Page objects for every CMS page,
one is_draft and one is_public.
The public version of the page can be either published or not.
        This test intentionally mixes manager methods and manual
        filters; exercising both helps test the managers.
"""
# we need to create a superuser (the db is empty)
get_user_model().objects.create_superuser('djangocms', '[email protected]', '123456')
# Now, let's create a page. That actually creates 2 Page objects
create_page("The page!", "nav_playground.html", "en", published=True)
draft = Page.objects.drafts()[0]
draft.reverse_id = 'a_test' # we have to change *something*
draft.save()
pages_from_output = 0
published_from_output = 0
with StdoutOverride() as buffer:
# Now we don't expect it to raise, but we need to redirect IO
call_command('cms', 'publisher_publish')
lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work
for line in lines:
if 'Total' in line:
pages_from_output = int(line.split(':')[1])
elif 'Published' in line:
published_from_output = int(line.split(':')[1])
self.assertEqual(pages_from_output, 1)
self.assertEqual(published_from_output, 1)
# Sanity check the database (we should have one draft and one public)
not_drafts = len(Page.objects.filter(publisher_is_draft=False))
drafts = len(Page.objects.filter(publisher_is_draft=True))
self.assertEqual(not_drafts, 1)
self.assertEqual(drafts, 1)
# Now check that the non-draft has the attribute we set to the draft.
non_draft = Page.objects.public()[0]
self.assertEqual(non_draft.reverse_id, 'a_test')
def test_command_line_publish_multiple_languages(self):
# we need to create a superuser (the db is empty)
get_user_model().objects.create_superuser('djangocms', '[email protected]', '123456')
# Create a draft page with two published titles
page = create_page(u"The page!", "nav_playground.html", "en", published=False)
title = create_title('de', 'ja', page)
title.published = True
title.save()
title = create_title('fr', 'non', page)
title.published = True
title.save()
with StdoutOverride():
# Now we don't expect it to raise, but we need to redirect IO
call_command('cms', 'publisher_publish')
public = Page.objects.public()[0]
languages = sorted(public.title_set.values_list('language', flat=True))
self.assertEqual(languages, ['de', 'fr'])
def test_command_line_publish_one_site(self):
get_user_model().objects.create_superuser('djangocms', '[email protected]', '123456')
siteA = Site.objects.create(domain='a.example.com', name='a.example.com')
siteB = Site.objects.create(domain='b.example.com', name='b.example.com')
#example.com
create_page(u"example.com homepage", "nav_playground.html", "en", published=True)
#a.example.com
create_page(u"a.example.com homepage", "nav_playground.html", "de", site=siteA, published=True)
#b.example.com
create_page(u"b.example.com homepage", "nav_playground.html", "de", site=siteB, published=True)
create_page(u"b.example.com about", "nav_playground.html", "nl", site=siteB, published=True)
        pages_from_output = 0
        published_from_output = 0
        with StdoutOverride() as buffer:
# Now we don't expect it to raise, but we need to redirect IO
call_command('cms', 'publisher_publish', site=siteB.id)
lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work
for line in lines:
if 'Total' in line:
pages_from_output = int(line.split(':')[1])
elif 'Published' in line:
published_from_output = int(line.split(':')[1])
self.assertEqual(pages_from_output, 2)
self.assertEqual(published_from_output, 2)
def test_command_line_publish_multiple_languages_check_count(self):
"""
Publishing one page with multiple languages still counts
as one page. This test case checks whether it works
as expected.
"""
# we need to create a superuser (the db is empty)
get_user_model().objects.create_superuser('djangocms', '[email protected]', '123456')
# Now, let's create a page with 2 languages.
page = create_page("en title", "nav_playground.html", "en", published=True)
create_title("de", "de title", page)
page.publish("de")
pages_from_output = 0
published_from_output = 0
with StdoutOverride() as buffer:
# Now we don't expect it to raise, but we need to redirect IO
call_command('cms', 'publisher_publish')
lines = buffer.getvalue().split('\n') #NB: readlines() doesn't work
for line in lines:
if 'Total' in line:
pages_from_output = int(line.split(':')[1])
elif 'Published' in line:
published_from_output = int(line.split(':')[1])
self.assertEqual(pages_from_output, 1)
self.assertEqual(published_from_output, 1)
def tearDown(self):
plugin_pool.patched = False
plugin_pool.set_plugin_meta()
class PublishingTests(TestCase):
def create_page(self, title=None, **kwargs):
return create_page(title or self._testMethodName,
"nav_playground.html", "en", **kwargs)
def test_publish_home(self):
name = self._testMethodName
page = self.create_page(name, published=False)
self.assertFalse(page.publisher_public_id)
self.assertEqual(Page.objects.all().count(), 1)
superuser = self.get_superuser()
with self.login_user_context(superuser):
response = self.client.post(admin_reverse("cms_page_publish_page", args=[page.pk, 'en']))
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], "http://testserver/en/?%s" % get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF'))
def test_publish_single(self):
name = self._testMethodName
page = self.create_page(name, published=False)
self.assertFalse(page.is_published('en'))
drafts = Page.objects.drafts()
public = Page.objects.public()
published = Page.objects.public().published("en")
self.assertObjectExist(drafts, title_set__title=name)
self.assertObjectDoesNotExist(public, title_set__title=name)
self.assertObjectDoesNotExist(published, title_set__title=name)
page.publish("en")
drafts = Page.objects.drafts()
public = Page.objects.public()
published = Page.objects.public().published("en")
self.assertTrue(page.is_published('en'))
self.assertEqual(page.get_publisher_state("en"), PUBLISHER_STATE_DEFAULT)
self.assertIsNotNone(page.publisher_public)
self.assertTrue(page.publisher_public_id)
self.assertObjectExist(drafts, title_set__title=name)
self.assertObjectExist(public, title_set__title=name)
self.assertObjectExist(published, title_set__title=name)
page = Page.objects.get(pk=page.pk)
self.assertEqual(page.get_publisher_state("en"), 0)
def test_publish_admin(self):
page = self.create_page("test_admin", published=False)
superuser = self.get_superuser()
with self.login_user_context(superuser):
response = self.client.post(admin_reverse("cms_page_publish_page", args=[page.pk, 'en']))
self.assertEqual(response.status_code, 302)
page = Page.objects.get(pk=page.pk)
self.assertEqual(page.get_publisher_state('en'), 0)
def test_publish_wrong_lang(self):
page = self.create_page("test_admin", published=False)
superuser = self.get_superuser()
with self.settings(
LANGUAGES=(('de', 'de'), ('en', 'en')),
CMS_LANGUAGES={1: [{'code': 'en', 'name': 'en', 'fallbacks': ['fr', 'de'], 'public': True}]}
):
with self.login_user_context(superuser):
with force_language('de'):
response = self.client.post(admin_reverse("cms_page_publish_page", args=[page.pk, 'en']))
self.assertEqual(response.status_code, 302)
page = Page.objects.get(pk=page.pk)
def test_publish_child_first(self):
parent = self.create_page('parent', published=False)
child = self.create_page('child', published=False, parent=parent)
parent = parent.reload()
self.assertFalse(parent.is_published('en'))
self.assertFalse(child.is_published('en'))
drafts = Page.objects.drafts()
public = Page.objects.public()
published = Page.objects.public().published('en')
for name in ('parent', 'child'):
self.assertObjectExist(drafts, title_set__title=name)
self.assertObjectDoesNotExist(public, title_set__title=name)
self.assertObjectDoesNotExist(published, title_set__title=name)
child.publish("en")
child = child.reload()
self.assertTrue(child.is_published("en"))
self.assertEqual(child.get_publisher_state('en'), PUBLISHER_STATE_PENDING)
self.assertIsNone(child.publisher_public)
        # Since the parent is not published, the public state is otherwise unchanged
for name in ('parent', 'child'):
self.assertObjectExist(drafts, title_set__title=name)
self.assertObjectDoesNotExist(public, title_set__title=name)
self.assertObjectDoesNotExist(published, title_set__title=name)
parent.publish("en")
drafts = Page.objects.drafts()
public = Page.objects.public()
published = Page.objects.public().published('en')
# Cascade publish for all pending descendants
for name in ('parent', 'child'):
self.assertObjectExist(drafts, title_set__title=name)
page = drafts.get(title_set__title=name)
self.assertTrue(page.is_published("en"), name)
self.assertEqual(page.get_publisher_state('en'), PUBLISHER_STATE_DEFAULT, name)
self.assertIsNotNone(page.publisher_public, name)
self.assertTrue(page.publisher_public.is_published('en'), name)
self.assertObjectExist(public, title_set__title=name)
self.assertObjectExist(published, title_set__title=name)
def test_simple_publisher(self):
"""
Creates the stuff needed for these tests.
Please keep this up-to-date (the docstring!)
A
/ \
B C
"""
# Create a simple tree of 3 pages
pageA = create_page("Page A", "nav_playground.html", "en",
published=True)
pageB = create_page("Page B", "nav_playground.html", "en", parent=pageA,
published=True)
pageC = create_page("Page C", "nav_playground.html", "en", parent=pageA,
published=False)
# Assert A and B are published, C unpublished
self.assertTrue(pageA.publisher_public_id)
self.assertTrue(pageB.publisher_public_id)
self.assertTrue(not pageC.publisher_public_id)
self.assertEqual(len(Page.objects.public().published("en")), 2)
# Let's publish C now.
pageC.publish("en")
# Assert all are published
self.assertTrue(pageA.publisher_public_id)
self.assertTrue(pageB.publisher_public_id)
self.assertTrue(pageC.publisher_public_id)
self.assertEqual(len(Page.objects.public().published("en")), 3)
def test_i18n_publishing(self):
page = self.create_page('parent', published=True)
self.assertEqual(Title.objects.all().count(), 2)
create_title("de", "vater", page)
self.assertEqual(Title.objects.all().count(), 3)
self.assertEqual(Title.objects.filter(published=True).count(), 2)
page.publish('de')
self.assertEqual(Title.objects.all().count(), 4)
self.assertEqual(Title.objects.filter(published=True).count(), 4)
def test_publish_ordering(self):
page = self.create_page('parent', published=True)
pageA = self.create_page('pageA', parent=page, published=True)
pageC = self.create_page('pageC', parent=page, published=True)
pageB = self.create_page('pageB', parent=page, published=True)
page = page.reload()
pageB.move_page(pageA, 'right')
pageB.publish("en")
# pageC needs reload since B has swapped places with it
pageC.reload().publish("en")
pageA.publish('en')
drafts = Page.objects.drafts().order_by('path')
draft_titles = [(p.get_title('en'), p.path) for p in drafts]
self.assertEqual([('parent', "0001"),
('pageA', "00010001"),
('pageB', "00010002"),
('pageC', "00010003")], draft_titles)
public = Page.objects.public().order_by('path')
public_titles = [(p.get_title('en'), p.path) for p in public]
self.assertEqual([('parent', "0002"),
('pageA', "00020001"),
('pageB', "00020002"),
('pageC', "00020003")], public_titles)
page.publish('en')
drafts = Page.objects.drafts().order_by('path')
draft_titles = [(p.get_title('en'), p.path) for p in drafts]
self.assertEqual([('parent', "0001"),
('pageA', "00010001"),
('pageB', "00010002"),
('pageC', "00010003")], draft_titles)
public = Page.objects.public().order_by('path')
public_titles = [(p.get_title('en'), p.path) for p in public]
self.assertEqual([('parent', "0002"),
('pageA', "00020001"),
('pageB', "00020002"),
('pageC', "00020003")], public_titles)
def test_publish_ordering2(self):
page = self.create_page('parent', published=False)
pageA = self.create_page('pageA', published=False)
pageC = self.create_page('pageC', published=False, parent=pageA)
pageB = self.create_page('pageB', published=False, parent=pageA)
page = page.reload()
pageA.publish('en')
pageB.publish('en')
pageC.publish('en')
page.publish('en')
drafts = Page.objects.filter(publisher_is_draft=True).order_by('path')
publics = Page.objects.filter(publisher_is_draft=False).order_by('path')
        for x, draft in enumerate(drafts):
            self.assertEqual(draft.publisher_public_id, publics[x].pk)
def test_unpublish_unpublish(self):
name = self._testMethodName
page = self.create_page(name, published=True)
drafts = Page.objects.drafts()
published = Page.objects.public().published("en")
self.assertObjectExist(drafts, title_set__title=name)
self.assertObjectExist(published, title_set__title=name)
page.unpublish('en')
self.assertFalse(page.is_published('en'))
self.assertObjectExist(drafts, title_set__title=name)
self.assertObjectDoesNotExist(published, title_set__title=name)
page.publish('en')
self.assertTrue(page.publisher_public_id)
self.assertObjectExist(drafts, title_set__title=name)
self.assertObjectExist(published, title_set__title=name)
def test_delete_title_unpublish(self):
page = self.create_page('test', published=True)
sub_page = self.create_page('test2', published=True, parent=page)
self.assertTrue(sub_page.publisher_public.is_published('en'))
page.title_set.all().delete()
self.assertFalse(sub_page.publisher_public.is_published('en', force_reload=True))
def test_modify_child_while_pending(self):
home = self.create_page("Home", published=True, in_navigation=True)
child = self.create_page("Child", published=True, parent=home,
in_navigation=False)
home = home.reload()
home.unpublish('en')
self.assertEqual(Title.objects.count(), 4)
child = child.reload()
self.assertFalse(child.publisher_public.is_published('en'))
self.assertFalse(child.in_navigation)
self.assertFalse(child.publisher_public.in_navigation)
child.in_navigation = True
child.save()
child.publish('en')
child = self.reload(child)
self.assertEqual(Title.objects.count(), 4)
self.assertTrue(child.is_published('en'))
self.assertFalse(child.publisher_public.is_published('en'))
self.assertTrue(child.in_navigation)
self.assertTrue(child.publisher_public.in_navigation)
self.assertEqual(child.get_publisher_state('en'), PUBLISHER_STATE_PENDING)
home.publish('en')
child = self.reload(child)
self.assertTrue(child.is_published('en'))
self.assertTrue(child.publisher_public_id)
self.assertTrue(child.publisher_public.in_navigation)
self.assertEqual(child.get_publisher_state('en'), PUBLISHER_STATE_DEFAULT)
def test_republish_with_descendants(self):
home = self.create_page("Home", published=True)
child = self.create_page("Child", published=True, parent=home)
gc = self.create_page("GC", published=True, parent=child)
self.assertTrue(child.is_published("en"))
self.assertTrue(gc.is_published('en'))
home = home.reload()
home.unpublish('en')
child = self.reload(child)
gc = self.reload(gc)
self.assertTrue(child.is_published("en"))
self.assertTrue(gc.is_published("en"))
self.assertFalse(child.publisher_public.is_published("en"))
self.assertFalse(gc.publisher_public.is_published('en'))
self.assertEqual(child.get_publisher_state('en'), PUBLISHER_STATE_PENDING)
self.assertEqual(gc.get_publisher_state('en'), PUBLISHER_STATE_PENDING)
home.publish('en')
child = self.reload(child)
gc = self.reload(gc)
self.assertTrue(child.publisher_public_id)
self.assertTrue(gc.is_published('en'))
self.assertTrue(child.is_published('en'))
self.assertTrue(gc.publisher_public_id)
self.assertEqual(child.get_publisher_state('en'), PUBLISHER_STATE_DEFAULT)
self.assertEqual(gc.get_publisher_state('en'), PUBLISHER_STATE_DEFAULT)
def test_republish_with_dirty_children(self):
home = self.create_page("Home", published=True)
dirty1 = self.create_page("Dirty1", published=True, parent=home)
dirty2 = self.create_page("Dirty2", published=True, parent=home)
home = self.reload(home)
dirty1 = self.reload(dirty1)
dirty2 = self.reload(dirty2)
dirty1.in_navigation = True
dirty1.save()
home.unpublish('en')
dirty2.in_navigation = True
dirty2.save()
dirty1 = self.reload(dirty1)
dirty2 = self.reload(dirty2)
        self.assertTrue(dirty1.is_published("en"))
self.assertTrue(dirty2.publisher_public_id)
self.assertEqual(dirty1.get_publisher_state("en"), PUBLISHER_STATE_DIRTY)
self.assertEqual(dirty2.get_publisher_state("en"), PUBLISHER_STATE_DIRTY)
home = self.reload(home)
with self.assertNumQueries(FuzzyInt(0, 100)):
home.publish('en')
dirty1 = self.reload(dirty1)
dirty2 = self.reload(dirty2)
self.assertTrue(dirty1.is_published("en"))
self.assertTrue(dirty2.is_published("en"))
self.assertTrue(dirty1.publisher_public.is_published("en"))
self.assertTrue(dirty2.publisher_public.is_published("en"))
self.assertEqual(dirty1.get_publisher_state("en"), PUBLISHER_STATE_DIRTY)
self.assertEqual(dirty2.get_publisher_state("en"), PUBLISHER_STATE_DIRTY)
def test_republish_with_unpublished_child(self):
"""
Unpub1 was never published, and unpub2 has been unpublished after the
fact. None of the grandchildren should become published.
"""
home = self.create_page("Home", published=True)
unpub1 = self.create_page("Unpub1", published=False, parent=home)
unpub2 = self.create_page("Unpub2", published=True, parent=home)
gc1 = self.create_page("GC1", published=True, parent=unpub1)
gc2 = self.create_page("GC2", published=True, parent=unpub2)
self.assertFalse(gc1.publisher_public_id)
        self.assertTrue(gc2.publisher_public_id)
self.assertTrue(gc1.is_published('en'))
self.assertTrue(gc2.is_published('en'))
home.unpublish('en')
unpub1 = self.reload(unpub1)
unpub2.unpublish('en') # Just marks this as not published
for page in (unpub1, unpub2):
self.assertFalse(page.is_published('en'), page)
self.assertEqual(page.get_publisher_state("en"), PUBLISHER_STATE_DIRTY)
self.assertIsNone(unpub1.publisher_public)
self.assertIsNotNone(unpub2.publisher_public)
self.assertFalse(unpub2.publisher_public.is_published('en'))
gc1 = self.reload(gc1)
gc2 = self.reload(gc2)
for page in (gc1, gc2):
self.assertTrue(page.is_published('en'))
self.assertEqual(page.get_publisher_state('en'), PUBLISHER_STATE_PENDING)
self.assertIsNone(gc1.publisher_public)
self.assertIsNotNone(gc2.publisher_public)
self.assertFalse(gc2.publisher_public.is_published('en'))
def test_unpublish_with_descendants(self):
page = self.create_page("Page", published=True)
child = self.create_page("Child", parent=page, published=True)
self.create_page("Grandchild", parent=child, published=True)
page = page.reload()
child.reload()
drafts = Page.objects.drafts()
public = Page.objects.public()
published = Page.objects.public().published("en")
self.assertEqual(published.count(), 3)
self.assertEqual(page.get_descendant_count(), 2)
base = reverse('pages-root')
for url in (base, base + 'child/', base + 'child/grandchild/'):
response = self.client.get(url)
self.assertEqual(response.status_code, 200, url)
for title in ('Page', 'Child', 'Grandchild'):
self.assertObjectExist(drafts, title_set__title=title)
self.assertObjectExist(public, title_set__title=title)
self.assertObjectExist(published, title_set__title=title)
item = drafts.get(title_set__title=title)
self.assertTrue(item.publisher_public_id)
self.assertEqual(item.get_publisher_state('en'), PUBLISHER_STATE_DEFAULT)
self.assertTrue(page.unpublish('en'), 'Unpublish was not successful')
self.assertFalse(page.is_published('en'))
cache.clear()
for url in (base, base + 'child/', base + 'child/grandchild/'):
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
for title in ('Page', 'Child', 'Grandchild'):
self.assertObjectExist(drafts, title_set__title=title)
self.assertObjectExist(public, title_set__title=title)
self.assertObjectDoesNotExist(published, title_set__title=title)
item = drafts.get(title_set__title=title)
if title == 'Page':
self.assertFalse(item.is_published("en"))
self.assertFalse(item.publisher_public.is_published("en"))
                # Not sure what the proper state of these is after unpublish
#self.assertEqual(page.publisher_state, PUBLISHER_STATE_DEFAULT)
self.assertTrue(page.is_dirty('en'))
else:
# The changes to the published subpages are simply that the
# published flag of the PUBLIC instance goes to false, and the
# publisher state is set to mark waiting for parent
self.assertTrue(item.is_published('en'), title)
self.assertFalse(item.publisher_public.is_published('en'), title)
self.assertEqual(item.get_publisher_state('en'), PUBLISHER_STATE_PENDING,
title)
self.assertTrue(item.is_dirty('en'), title)
def test_unpublish_with_dirty_descendants(self):
page = self.create_page("Page", published=True)
child = self.create_page("Child", parent=page, published=True)
gchild = self.create_page("Grandchild", parent=child, published=True)
child.in_navigation = True
child.save()
self.assertTrue(child.is_dirty("en"))
self.assertFalse(gchild.is_dirty('en'))
self.assertTrue(child.publisher_public.is_published('en'))
self.assertTrue(gchild.publisher_public.is_published('en'))
page.unpublish('en')
child = self.reload(child)
gchild = self.reload(gchild)
# Descendants become dirty after unpublish
self.assertTrue(child.is_dirty('en'))
self.assertTrue(gchild.is_dirty('en'))
# However, their public version is still removed no matter what
self.assertFalse(child.publisher_public.is_published('en'))
self.assertFalse(gchild.publisher_public.is_published('en'))
def test_prepublish_descendants(self):
page = self.create_page("Page", published=True)
child = self.create_page("Child", parent=page, published=False)
gchild2 = self.create_page("Grandchild2", parent=child, published=False)
self.create_page("Grandchild3", parent=child, published=True)
gchild = self.create_page("Grandchild", published=True)
gchild = gchild.reload()
child = child.reload()
gchild.move_page(target=child, position='last-child')
gchild.reload()
gchild.publish('en')
self.assertFalse(child.is_published('en'))
self.assertTrue(gchild.is_published('en'))
self.assertEqual(gchild.get_publisher_state('en'), PUBLISHER_STATE_PENDING)
child = child.reload()
child.publish('en')
gchild2 = gchild2.reload()
gchild2.publish('en')
self.assertTrue(child.is_published("en"))
self.assertTrue(gchild.is_published("en"))
self.assertEqual(gchild.get_publisher_state('en', force_reload=True), PUBLISHER_STATE_DEFAULT)
gchild = gchild.reload()
gchild2 = gchild2.reload()
self.assertEqual(gchild.path[4:], gchild.publisher_public.path[4:])
self.assertEqual(gchild.depth, gchild.publisher_public.depth)
def test_republish_multiple_root(self):
# TODO: The paths do not match expected behaviour
home = self.create_page("Page", published=True)
other = self.create_page("Another Page", published=True)
child = self.create_page("Child", published=True, parent=home)
child2 = self.create_page("Child", published=True, parent=other)
        self.assertEqual(Page.objects.filter(is_home=True).count(), 2)
self.assertTrue(home.is_home)
home = home.reload()
self.assertTrue(home.publisher_public.is_home)
root = reverse('pages-root')
self.assertEqual(home.get_absolute_url(), root)
self.assertEqual(home.get_public_object().get_absolute_url(), root)
self.assertEqual(child.get_absolute_url(), root + 'child/')
self.assertEqual(child.get_public_object().get_absolute_url(), root + 'child/')
self.assertEqual(other.get_absolute_url(), root + 'another-page/')
self.assertEqual(other.get_public_object().get_absolute_url(), root + 'another-page/')
self.assertEqual(child2.get_absolute_url(), root + 'another-page/child/')
self.assertEqual(child2.get_public_object().get_absolute_url(), root + 'another-page/child/')
home = self.reload(home)
home.unpublish('en')
home = self.reload(home)
other = self.reload(other)
child = self.reload(child)
child2 = self.reload(child2)
self.assertFalse(home.is_home)
self.assertFalse(home.publisher_public.is_home)
self.assertTrue(other.is_home)
self.assertTrue(other.publisher_public.is_home)
self.assertEqual(other.get_absolute_url(), root)
self.assertEqual(other.get_public_object().get_absolute_url(), root)
self.assertEqual(home.get_absolute_url(), root + 'page/')
self.assertEqual(home.get_public_object().get_absolute_url(), root + 'page/')
self.assertEqual(child.get_absolute_url(), root + 'page/child/')
self.assertEqual(child.get_public_object().get_absolute_url(), root + 'page/child/')
self.assertEqual(child2.get_absolute_url(), root + 'child/')
self.assertEqual(child2.get_public_object().get_absolute_url(), root + 'child/')
home.publish('en')
home = self.reload(home)
other = self.reload(other)
child = self.reload(child)
child2 = self.reload(child2)
self.assertTrue(home.is_home)
self.assertTrue(home.publisher_public.is_home)
self.assertEqual(home.get_absolute_url(), root)
self.assertEqual(home.get_public_object().get_absolute_url(), root)
self.assertEqual(child.get_absolute_url(), root + 'child/')
self.assertEqual(child.get_public_object().get_absolute_url(), root + 'child/')
self.assertEqual(other.get_absolute_url(), root + 'another-page/')
self.assertEqual(other.get_public_object().get_absolute_url(), root + 'another-page/')
self.assertEqual(child2.get_absolute_url(), root + 'another-page/child/')
self.assertEqual(child2.get_public_object().get_absolute_url(), root + 'another-page/child/')
def test_revert_contents(self):
user = self.get_superuser()
page = create_page("Page", "nav_playground.html", "en", published=True,
created_by=user)
placeholder = page.placeholders.get(slot=u"body")
deleted_plugin = add_plugin(placeholder, u"TextPlugin", u"en", body="Deleted content")
text_plugin = add_plugin(placeholder, u"TextPlugin", u"en", body="Public content")
page.publish('en')
# Modify and delete plugins
text_plugin.body = "<p>Draft content</p>"
text_plugin.save()
deleted_plugin.delete()
self.assertEqual(CMSPlugin.objects.count(), 3)
# Now let's revert and restore
page.revert('en')
self.assertEqual(page.get_publisher_state("en"), PUBLISHER_STATE_DEFAULT)
self.assertEqual(CMSPlugin.objects.count(), 4)
plugins = CMSPlugin.objects.filter(placeholder__page=page)
self.assertEqual(plugins.count(), 2)
plugins = [plugin.get_plugin_instance()[0] for plugin in plugins]
self.assertEqual(plugins[0].body, "Deleted content")
self.assertEqual(plugins[1].body, "Public content")
def test_revert_move(self):
parent = create_page("Parent", "nav_playground.html", "en", published=True)
parent_url = parent.get_absolute_url()
page = create_page("Page", "nav_playground.html", "en", published=True,
parent=parent)
other = create_page("Other", "nav_playground.html", "en", published=True)
other_url = other.get_absolute_url()
child = create_page("Child", "nav_playground.html", "en", published=True,
parent=page)
parent = parent.reload()
page = page.reload()
self.assertEqual(page.get_absolute_url(), parent_url + "page/")
self.assertEqual(child.get_absolute_url(), parent_url + "page/child/")
# Now let's move it (and the child)
page.move_page(other)
page = self.reload(page)
child = self.reload(child)
self.assertEqual(page.get_absolute_url(), other_url + "page/")
self.assertEqual(child.get_absolute_url(), other_url + "page/child/")
# Public version changed the url as well
self.assertEqual(page.publisher_public.get_absolute_url(), other_url + "page/")
self.assertEqual(child.publisher_public.get_absolute_url(), other_url + "page/child/")
def test_publish_works_with_descendants(self):
"""
For help understanding what this tests for, see:
http://articles.sitepoint.com/print/hierarchical-data-database
Creates this published structure:
home
/ \
item1 item2
/ \
subitem1 subitem2
"""
home_page = create_page("home", "nav_playground.html", "en",
published=True, in_navigation=False)
create_page("item1", "nav_playground.html", "en", parent=home_page,
published=True)
item2 = create_page("item2", "nav_playground.html", "en", parent=home_page,
published=True)
create_page("subitem1", "nav_playground.html", "en", parent=item2,
published=True)
create_page("subitem2", "nav_playground.html", "en", parent=item2,
published=True)
item2 = item2.reload()
not_drafts = list(Page.objects.filter(publisher_is_draft=False).order_by('path'))
drafts = list(Page.objects.filter(publisher_is_draft=True).order_by('path'))
self.assertEqual(len(not_drafts), 5)
self.assertEqual(len(drafts), 5)
for idx, draft in enumerate(drafts):
public = not_drafts[idx]
# Check that a node doesn't become a root node magically
self.assertEqual(bool(public.parent_id), bool(draft.parent_id))
if public.parent:
self.assertEqual(public.path[0:4], public.parent.path[0:4])
self.assertTrue(public.parent in public.get_ancestors())
self.assertTrue(public in public.parent.get_descendants())
self.assertTrue(public in public.parent.get_children())
if draft.parent:
# Same principle for the draft tree
self.assertEqual(draft.path[0:4], draft.parent.path[0:4])
self.assertTrue(draft.parent in draft.get_ancestors())
self.assertTrue(draft in draft.parent.get_descendants())
self.assertTrue(draft in draft.parent.get_children())
# Now call publish again. The structure should not change.
item2.publish('en')
not_drafts = list(Page.objects.filter(publisher_is_draft=False).order_by('path'))
drafts = list(Page.objects.filter(publisher_is_draft=True).order_by('path'))
self.assertEqual(len(not_drafts), 5)
self.assertEqual(len(drafts), 5)
for idx, draft in enumerate(drafts):
public = not_drafts[idx]
# Check that a node doesn't become a root node magically
self.assertEqual(bool(public.parent_id), bool(draft.parent_id))
self.assertEqual(public.numchild, draft.numchild)
if public.parent:
self.assertEqual(public.path[0:4], public.parent.path[0:4])
self.assertTrue(public.parent in public.get_ancestors())
self.assertTrue(public in public.parent.get_descendants())
self.assertTrue(public in public.parent.get_children())
if draft.parent:
self.assertEqual(draft.path[0:4], draft.parent.path[0:4])
self.assertTrue(draft.parent in draft.get_ancestors())
self.assertTrue(draft in draft.parent.get_descendants())
self.assertTrue(draft in draft.parent.get_children())
license: bsd-3-clause
repo_name: KamranMackey/CloudBot
path: plugins/wordnik.py

import re
import random
import requests
import urllib.parse
from cloudbot import hook
from cloudbot.util import web
API_URL = 'http://api.wordnik.com/v4/'
WEB_URL = 'https://www.wordnik.com/words/{}'
ATTRIB_NAMES = {
'ahd-legacy': 'AHD/Wordnik',
'century': 'Century/Wordnik',
'wiktionary': 'Wiktionary/Wordnik',
'gcide': 'GCIDE/Wordnik',
'wordnet': 'Wordnet/Wordnik'
}
def sanitize(text):
return urllib.parse.quote(text.translate({ord('\\'):None, ord('/'):None}))
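# For example (illustrative): sanitize("foo/bar baz") drops the slash and
# percent-encodes the space, returning "foobar%20baz".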
@hook.on_start()
def load_key(bot):
global api_key
api_key = bot.config.get("api_keys", {}).get("wordnik", None)
@hook.command("define", "dictionary")
def define(text):
"""<word> -- Returns a dictionary definition from Wordnik for <word>."""
if not api_key:
return "This command requires an API key from wordnik.com."
word = sanitize(text)
url = API_URL + "word.json/{}/definitions".format(word)
params = {
'api_key': api_key,
'limit': 1
}
json = requests.get(url, params=params).json()
if json:
data = json[0]
data['url'] = web.try_shorten(WEB_URL.format(data['word']))
data['attrib'] = ATTRIB_NAMES[data['sourceDictionary']]
return "\x02{word}\x02: {text} - {url} ({attrib})".format(**data)
else:
return "I could not find a definition for \x02{}\x02.".format(word)
@hook.command("wordusage", "wordexample", "usage")
def word_usage(text):
"""<word> -- Returns an example sentence showing the usage of <word>."""
if not api_key:
return "This command requires an API key from wordnik.com."
word = sanitize(text)
url = API_URL + "word.json/{}/examples".format(word)
params = {
'api_key': api_key,
'limit': 10
}
json = requests.get(url, params=params).json()
if json:
out = "\x02{}\x02: ".format(word)
example = random.choice(json['examples'])
out += "{} ".format(example['text'])
return out
else:
return "I could not find any usage examples for \x02{}\x02.".format(word)
@hook.command("pronounce", "sounditout")
def pronounce(text):
"""<word> -- Returns instructions on how to pronounce <word> with an audio example."""
if not api_key:
return "This command requires an API key from wordnik.com."
word = sanitize(text)
url = API_URL + "word.json/{}/pronunciations".format(word)
params = {
'api_key': api_key,
'limit': 5
}
json = requests.get(url, params=params).json()
if json:
out = "\x02{}\x02: ".format(word)
out += " • ".join([i['raw'] for i in json])
else:
return "Sorry, I don't know how to pronounce \x02{}\x02.".format(word)
url = API_URL + "word.json/{}/audio".format(word)
params = {
'api_key': api_key,
'limit': 1,
'useCanonical': 'false'
}
json = requests.get(url, params=params).json()
if json:
url = web.try_shorten(json[0]['fileUrl'])
out += " - {}".format(url)
return out
@hook.command()
def synonym(text):
"""<word> -- Returns a list of synonyms for <word>."""
if not api_key:
return "This command requires an API key from wordnik.com."
word = sanitize(text)
url = API_URL + "word.json/{}/relatedWords".format(word)
params = {
'api_key': api_key,
'relationshipTypes': 'synonym',
'limitPerRelationshipType': 5
}
json = requests.get(url, params=params).json()
if json:
out = "\x02{}\x02: ".format(word)
out += " • ".join(json[0]['words'])
return out
else:
return "Sorry, I couldn't find any synonyms for \x02{}\x02.".format(word)
@hook.command()
def antonym(text):
"""<word> -- Returns a list of antonyms for <word>."""
if not api_key:
return "This command requires an API key from wordnik.com."
word = sanitize(text)
url = API_URL + "word.json/{}/relatedWords".format(word)
params = {
'api_key': api_key,
'relationshipTypes': 'antonym',
'limitPerRelationshipType': 5,
'useCanonical': 'false'
}
json = requests.get(url, params=params).json()
if json:
out = "\x02{}\x02: ".format(word)
out += " • ".join(json[0]['words'])
return out
else:
return "Sorry, I couldn't find any antonyms for \x02{}\x02.".format(word)
# word of the day
@hook.command("word", "wordoftheday", autohelp=False)
def wordoftheday(text, conn):
"""returns the word of the day. To see past word of the day enter use the format yyyy-MM-dd. The specified date must be after 2009-08-10."""
if not api_key:
return "This command requires an API key from wordnik.com."
match = re.search(r'(\d\d\d\d-\d\d-\d\d)', text)
date = ""
if match:
date = match.group(1)
url = API_URL + "words.json/wordOfTheDay"
if date:
params = {
'api_key': api_key,
'date': date
}
day = date
else:
params = {
'api_key': api_key,
}
day = "today"
json = requests.get(url, params=params).json()
if json:
word = json['word']
note = json['note']
pos = json['definitions'][0]['partOfSpeech']
definition = json['definitions'][0]['text']
out = "The word for \x02{}\x02 is \x02{}\x02: ".format(day, word)
out += "\x0305({})\x0305 ".format(pos)
out += "\x0310{}\x0310 ".format(note)
out += "\x02Definition:\x02 \x0303{}\x0303".format(definition)
return out
else:
return "Sorry I couldn't find the word of the day, check out this awesome otter instead {}".format(
"http://i.imgur.com/pkuWlWx.gif")
# random word
@hook.command("wordrandom", "randomword", autohelp=False)
def random_word(conn):
"""Grabs a random word from wordnik.com"""
if not api_key:
return "This command requires an API key from wordnik.com."
url = API_URL + "words.json/randomWord"
params = {
'api_key': api_key,
'hasDictionarydef': 'true',
'vulgar': 'true'
}
json = requests.get(url, params=params).json()
if json:
word = json['word']
return "Your random word is \x02{}\x02.".format(word)
else:
return "There was a problem contacting the Wordnik API."
license: gpl-3.0
repo_name: sserrot/champion_relationships
path: venv/Lib/site-packages/pip/_vendor/ipaddress.py

# Copyright 2007 Google Inc.
# Licensed to PSF under a Contributor Agreement.
"""A fast, lightweight IPv4/IPv6 manipulation library in Python.
This library is used to create/poke/manipulate IPv4 and IPv6 addresses
and networks.
"""
from __future__ import unicode_literals
import itertools
import struct
__version__ = '1.0.23'
# Compatibility functions
_compat_int_types = (int,)
try:
_compat_int_types = (int, long)
except NameError:
pass
try:
_compat_str = unicode
except NameError:
_compat_str = str
assert bytes != str
if b'\0'[0] == 0: # Python 3 semantics
def _compat_bytes_to_byte_vals(byt):
return byt
else:
def _compat_bytes_to_byte_vals(byt):
return [struct.unpack(b'!B', b)[0] for b in byt]
try:
_compat_int_from_byte_vals = int.from_bytes
except AttributeError:
def _compat_int_from_byte_vals(bytvals, endianess):
assert endianess == 'big'
res = 0
for bv in bytvals:
assert isinstance(bv, _compat_int_types)
res = (res << 8) + bv
return res
def _compat_to_bytes(intval, length, endianess):
assert isinstance(intval, _compat_int_types)
assert endianess == 'big'
if length == 4:
if intval < 0 or intval >= 2 ** 32:
raise struct.error("integer out of range for 'I' format code")
return struct.pack(b'!I', intval)
elif length == 16:
if intval < 0 or intval >= 2 ** 128:
raise struct.error("integer out of range for 'QQ' format code")
return struct.pack(b'!QQ', intval >> 64, intval & 0xffffffffffffffff)
else:
raise NotImplementedError()
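# For example (illustrative): _compat_to_bytes(3232235777, 4, 'big')
# returns b'\xc0\xa8\x01\x01', the packed form of 192.168.1.1.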
if hasattr(int, 'bit_length'):
# Not int.bit_length , since that won't work in 2.7 where long exists
def _compat_bit_length(i):
return i.bit_length()
else:
def _compat_bit_length(i):
for res in itertools.count():
if i >> res == 0:
return res
def _compat_range(start, end, step=1):
assert step > 0
i = start
while i < end:
yield i
i += step
class _TotalOrderingMixin(object):
__slots__ = ()
# Helper that derives the other comparison operations from
# __lt__ and __eq__
# We avoid functools.total_ordering because it doesn't handle
# NotImplemented correctly yet (http://bugs.python.org/issue10042)
def __eq__(self, other):
raise NotImplementedError
def __ne__(self, other):
equal = self.__eq__(other)
if equal is NotImplemented:
return NotImplemented
return not equal
def __lt__(self, other):
raise NotImplementedError
def __le__(self, other):
less = self.__lt__(other)
if less is NotImplemented or not less:
return self.__eq__(other)
return less
def __gt__(self, other):
less = self.__lt__(other)
if less is NotImplemented:
return NotImplemented
equal = self.__eq__(other)
if equal is NotImplemented:
return NotImplemented
return not (less or equal)
def __ge__(self, other):
less = self.__lt__(other)
if less is NotImplemented:
return NotImplemented
return not less
IPV4LENGTH = 32
IPV6LENGTH = 128
class AddressValueError(ValueError):
"""A Value Error related to the address."""
class NetmaskValueError(ValueError):
"""A Value Error related to the netmask."""
def ip_address(address):
"""Take an IP string/int and return an object of the correct type.
Args:
address: A string or integer, the IP address. Either IPv4 or
IPv6 addresses may be supplied; integers less than 2**32 will
be considered to be IPv4 by default.
Returns:
An IPv4Address or IPv6Address object.
Raises:
ValueError: if the *address* passed isn't either a v4 or a v6
address
"""
try:
return IPv4Address(address)
except (AddressValueError, NetmaskValueError):
pass
try:
return IPv6Address(address)
except (AddressValueError, NetmaskValueError):
pass
if isinstance(address, bytes):
raise AddressValueError(
'%r does not appear to be an IPv4 or IPv6 address. '
'Did you pass in a bytes (str in Python 2) instead of'
' a unicode object?' % address)
raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
address)
def ip_network(address, strict=True):
"""Take an IP string/int and return an object of the correct type.
Args:
address: A string or integer, the IP network. Either IPv4 or
IPv6 networks may be supplied; integers less than 2**32 will
be considered to be IPv4 by default.
Returns:
An IPv4Network or IPv6Network object.
Raises:
ValueError: if the string passed isn't either a v4 or a v6
address. Or if the network has host bits set.
"""
try:
return IPv4Network(address, strict)
except (AddressValueError, NetmaskValueError):
pass
try:
return IPv6Network(address, strict)
except (AddressValueError, NetmaskValueError):
pass
if isinstance(address, bytes):
raise AddressValueError(
'%r does not appear to be an IPv4 or IPv6 network. '
'Did you pass in a bytes (str in Python 2) instead of'
' a unicode object?' % address)
raise ValueError('%r does not appear to be an IPv4 or IPv6 network' %
address)
def ip_interface(address):
"""Take an IP string/int and return an object of the correct type.
Args:
address: A string or integer, the IP address. Either IPv4 or
IPv6 addresses may be supplied; integers less than 2**32 will
be considered to be IPv4 by default.
Returns:
An IPv4Interface or IPv6Interface object.
Raises:
ValueError: if the string passed isn't either a v4 or a v6
address.
Notes:
The IPv?Interface classes describe an Address on a particular
Network, so they're basically a combination of both the Address
and Network classes.
"""
try:
return IPv4Interface(address)
except (AddressValueError, NetmaskValueError):
pass
try:
return IPv6Interface(address)
except (AddressValueError, NetmaskValueError):
pass
raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' %
address)
def v4_int_to_packed(address):
"""Represent an address as 4 packed bytes in network (big-endian) order.
Args:
address: An integer representation of an IPv4 IP address.
Returns:
The integer address packed as 4 bytes in network (big-endian) order.
Raises:
ValueError: If the integer is negative or too large to be an
IPv4 IP address.
"""
try:
return _compat_to_bytes(address, 4, 'big')
except (struct.error, OverflowError):
raise ValueError("Address negative or too large for IPv4")
def v6_int_to_packed(address):
"""Represent an address as 16 packed bytes in network (big-endian) order.
Args:
address: An integer representation of an IPv6 IP address.
Returns:
The integer address packed as 16 bytes in network (big-endian) order.
"""
try:
return _compat_to_bytes(address, 16, 'big')
except (struct.error, OverflowError):
raise ValueError("Address negative or too large for IPv6")
def _split_optional_netmask(address):
"""Helper to split the netmask and raise AddressValueError if needed"""
addr = _compat_str(address).split('/')
if len(addr) > 2:
raise AddressValueError("Only one '/' permitted in %r" % address)
return addr
def _find_address_range(addresses):
"""Find a sequence of sorted deduplicated IPv#Address.
Args:
addresses: a list of IPv#Address objects.
Yields:
A tuple containing the first and last IP addresses in the sequence.
"""
it = iter(addresses)
first = last = next(it)
for ip in it:
if ip._ip != last._ip + 1:
yield first, last
first = ip
last = ip
yield first, last
def _count_righthand_zero_bits(number, bits):
"""Count the number of zero bits on the right hand side.
Args:
number: an integer.
bits: maximum number of bits to count.
Returns:
The number of zero bits on the right hand side of the number.
"""
if number == 0:
return bits
return min(bits, _compat_bit_length(~number & (number - 1)))
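# For example (illustrative, values chosen for demonstration):
# _count_righthand_zero_bits(0b101000, 8) returns 3, since the three
# low-order bits are zero; an input of 0 returns `bits` itself.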
def summarize_address_range(first, last):
"""Summarize a network range given the first and last IP addresses.
Example:
>>> list(summarize_address_range(IPv4Address('192.0.2.0'),
... IPv4Address('192.0.2.130')))
... #doctest: +NORMALIZE_WHITESPACE
[IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
IPv4Network('192.0.2.130/32')]
Args:
first: the first IPv4Address or IPv6Address in the range.
last: the last IPv4Address or IPv6Address in the range.
Returns:
An iterator of the summarized IPv(4|6) network objects.
Raise:
TypeError:
If the first and last objects are not IP addresses.
If the first and last objects are not the same version.
ValueError:
If the last object is not greater than the first.
If the version of the first address is not 4 or 6.
"""
if (not (isinstance(first, _BaseAddress) and
isinstance(last, _BaseAddress))):
raise TypeError('first and last must be IP addresses, not networks')
if first.version != last.version:
raise TypeError("%s and %s are not of the same version" % (
first, last))
if first > last:
raise ValueError('last IP address must be greater than first')
if first.version == 4:
ip = IPv4Network
elif first.version == 6:
ip = IPv6Network
else:
raise ValueError('unknown IP version')
ip_bits = first._max_prefixlen
first_int = first._ip
last_int = last._ip
while first_int <= last_int:
nbits = min(_count_righthand_zero_bits(first_int, ip_bits),
_compat_bit_length(last_int - first_int + 1) - 1)
net = ip((first_int, ip_bits - nbits))
yield net
first_int += 1 << nbits
if first_int - 1 == ip._ALL_ONES:
break
def _collapse_addresses_internal(addresses):
"""Loops through the addresses, collapsing concurrent netblocks.
Example:
ip1 = IPv4Network('192.0.2.0/26')
ip2 = IPv4Network('192.0.2.64/26')
ip3 = IPv4Network('192.0.2.128/26')
ip4 = IPv4Network('192.0.2.192/26')
_collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
[IPv4Network('192.0.2.0/24')]
This shouldn't be called directly; it is called via
collapse_addresses([]).
Args:
addresses: A list of IPv4Network's or IPv6Network's
Returns:
A list of IPv4Network's or IPv6Network's depending on what we were
passed.
"""
# First merge
to_merge = list(addresses)
subnets = {}
while to_merge:
net = to_merge.pop()
supernet = net.supernet()
existing = subnets.get(supernet)
if existing is None:
subnets[supernet] = net
elif existing != net:
# Merge consecutive subnets
del subnets[supernet]
to_merge.append(supernet)
# Then iterate over resulting networks, skipping subsumed subnets
last = None
for net in sorted(subnets.values()):
if last is not None:
# Since they are sorted,
# last.network_address <= net.network_address is a given.
if last.broadcast_address >= net.broadcast_address:
continue
yield net
last = net
def collapse_addresses(addresses):
"""Collapse a list of IP objects.
Example:
collapse_addresses([IPv4Network('192.0.2.0/25'),
IPv4Network('192.0.2.128/25')]) ->
[IPv4Network('192.0.2.0/24')]
Args:
addresses: An iterator of IPv4Network or IPv6Network objects.
Returns:
An iterator of the collapsed IPv(4|6)Network objects.
Raises:
TypeError: If passed a list of mixed version objects.
"""
addrs = []
ips = []
nets = []
# split IP addresses and networks
for ip in addresses:
if isinstance(ip, _BaseAddress):
if ips and ips[-1]._version != ip._version:
raise TypeError("%s and %s are not of the same version" % (
ip, ips[-1]))
ips.append(ip)
elif ip._prefixlen == ip._max_prefixlen:
if ips and ips[-1]._version != ip._version:
raise TypeError("%s and %s are not of the same version" % (
ip, ips[-1]))
try:
ips.append(ip.ip)
except AttributeError:
ips.append(ip.network_address)
else:
if nets and nets[-1]._version != ip._version:
raise TypeError("%s and %s are not of the same version" % (
ip, nets[-1]))
nets.append(ip)
# sort and dedup
ips = sorted(set(ips))
# find consecutive address ranges in the sorted sequence and summarize them
if ips:
for first, last in _find_address_range(ips):
addrs.extend(summarize_address_range(first, last))
return _collapse_addresses_internal(addrs + nets)
def get_mixed_type_key(obj):
"""Return a key suitable for sorting between networks and addresses.
Address and Network objects are not sortable by default; they're
fundamentally different so the expression
IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24')
doesn't make any sense. There are some times however, where you may wish
to have ipaddress sort these for you anyway. If you need to do this, you
can use this function as the key= argument to sorted().
Args:
obj: either a Network or Address object.
Returns:
appropriate key.
"""
if isinstance(obj, _BaseNetwork):
return obj._get_networks_key()
elif isinstance(obj, _BaseAddress):
return obj._get_address_key()
return NotImplemented
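# Illustrative usage (example values only):
#   sorted([ip_network('192.0.2.0/24'), ip_address('192.0.2.1')],
#          key=get_mixed_type_key)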
class _IPAddressBase(_TotalOrderingMixin):
"""The mother class."""
__slots__ = ()
@property
def exploded(self):
"""Return the longhand version of the IP address as a string."""
return self._explode_shorthand_ip_string()
@property
def compressed(self):
"""Return the shorthand version of the IP address as a string."""
return _compat_str(self)
@property
def reverse_pointer(self):
"""The name of the reverse DNS pointer for the IP address, e.g.:
>>> ipaddress.ip_address("127.0.0.1").reverse_pointer
'1.0.0.127.in-addr.arpa'
>>> ipaddress.ip_address("2001:db8::1").reverse_pointer
'1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'
"""
return self._reverse_pointer()
@property
def version(self):
msg = '%200s has no version specified' % (type(self),)
raise NotImplementedError(msg)
def _check_int_address(self, address):
if address < 0:
msg = "%d (< 0) is not permitted as an IPv%d address"
raise AddressValueError(msg % (address, self._version))
if address > self._ALL_ONES:
msg = "%d (>= 2**%d) is not permitted as an IPv%d address"
raise AddressValueError(msg % (address, self._max_prefixlen,
self._version))
def _check_packed_address(self, address, expected_len):
address_len = len(address)
if address_len != expected_len:
msg = (
'%r (len %d != %d) is not permitted as an IPv%d address. '
'Did you pass in a bytes (str in Python 2) instead of'
' a unicode object?')
raise AddressValueError(msg % (address, address_len,
expected_len, self._version))
@classmethod
def _ip_int_from_prefix(cls, prefixlen):
"""Turn the prefix length into a bitwise netmask
Args:
prefixlen: An integer, the prefix length.
Returns:
An integer.
"""
return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)
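    # For example (illustrative): for an IPv4 class, a prefix length of
    # 24 yields 0xffffff00, i.e. the netmask 255.255.255.0.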
@classmethod
def _prefix_from_ip_int(cls, ip_int):
"""Return prefix length from the bitwise netmask.
Args:
ip_int: An integer, the netmask in expanded bitwise format
Returns:
An integer, the prefix length.
Raises:
ValueError: If the input intermingles zeroes & ones
"""
trailing_zeroes = _count_righthand_zero_bits(ip_int,
cls._max_prefixlen)
prefixlen = cls._max_prefixlen - trailing_zeroes
leading_ones = ip_int >> trailing_zeroes
all_ones = (1 << prefixlen) - 1
if leading_ones != all_ones:
byteslen = cls._max_prefixlen // 8
details = _compat_to_bytes(ip_int, byteslen, 'big')
msg = 'Netmask pattern %r mixes zeroes & ones'
raise ValueError(msg % details)
return prefixlen
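    # For example (illustrative): 0xffffff00 maps back to a prefix length
    # of 24, while a mixed pattern such as 0xff00ff00 raises ValueError.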
@classmethod
def _report_invalid_netmask(cls, netmask_str):
msg = '%r is not a valid netmask' % netmask_str
raise NetmaskValueError(msg)
@classmethod
def _prefix_from_prefix_string(cls, prefixlen_str):
"""Return prefix length from a numeric string
Args:
prefixlen_str: The string to be converted
Returns:
An integer, the prefix length.
Raises:
NetmaskValueError: If the input is not a valid netmask
"""
# int allows a leading +/- as well as surrounding whitespace,
# so we ensure that isn't the case
if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
cls._report_invalid_netmask(prefixlen_str)
try:
prefixlen = int(prefixlen_str)
except ValueError:
cls._report_invalid_netmask(prefixlen_str)
if not (0 <= prefixlen <= cls._max_prefixlen):
cls._report_invalid_netmask(prefixlen_str)
return prefixlen
@classmethod
def _prefix_from_ip_string(cls, ip_str):
"""Turn a netmask/hostmask string into a prefix length
Args:
ip_str: The netmask/hostmask to be converted
Returns:
An integer, the prefix length.
Raises:
NetmaskValueError: If the input is not a valid netmask/hostmask
"""
# Parse the netmask/hostmask like an IP address.
try:
ip_int = cls._ip_int_from_string(ip_str)
except AddressValueError:
cls._report_invalid_netmask(ip_str)
# Try matching a netmask (this would be /1*0*/ as a bitwise regexp).
# Note that the two ambiguous cases (all-ones and all-zeroes) are
# treated as netmasks.
try:
return cls._prefix_from_ip_int(ip_int)
except ValueError:
pass
# Invert the bits, and try matching a /0+1+/ hostmask instead.
ip_int ^= cls._ALL_ONES
try:
return cls._prefix_from_ip_int(ip_int)
except ValueError:
cls._report_invalid_netmask(ip_str)
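    # For example (illustrative): both the netmask '255.255.255.0' and
    # the hostmask '0.0.0.255' parse to a prefix length of 24.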
def __reduce__(self):
return self.__class__, (_compat_str(self),)
class _BaseAddress(_IPAddressBase):
"""A generic IP object.
This IP class contains the version independent methods which are
used by single IP addresses.
"""
__slots__ = ()
def __int__(self):
return self._ip
def __eq__(self, other):
try:
return (self._ip == other._ip and
self._version == other._version)
except AttributeError:
return NotImplemented
def __lt__(self, other):
if not isinstance(other, _IPAddressBase):
return NotImplemented
if not isinstance(other, _BaseAddress):
raise TypeError('%s and %s are not of the same type' % (
self, other))
if self._version != other._version:
raise TypeError('%s and %s are not of the same version' % (
self, other))
if self._ip != other._ip:
return self._ip < other._ip
return False
# Shorthand for Integer addition and subtraction. This is not
# meant to ever support addition/subtraction of addresses.
def __add__(self, other):
if not isinstance(other, _compat_int_types):
return NotImplemented
return self.__class__(int(self) + other)
def __sub__(self, other):
if not isinstance(other, _compat_int_types):
return NotImplemented
return self.__class__(int(self) - other)
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, _compat_str(self))
def __str__(self):
return _compat_str(self._string_from_ip_int(self._ip))
def __hash__(self):
return hash(hex(int(self._ip)))
def _get_address_key(self):
return (self._version, self)
def __reduce__(self):
return self.__class__, (self._ip,)
class _BaseNetwork(_IPAddressBase):
"""A generic IP network object.
This IP class contains the version independent methods which are
used by networks.
"""
def __init__(self, address):
self._cache = {}
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, _compat_str(self))
def __str__(self):
return '%s/%d' % (self.network_address, self.prefixlen)
def hosts(self):
"""Generate Iterator over usable hosts in a network.
This is like __iter__ except it doesn't return the network
or broadcast addresses.
"""
network = int(self.network_address)
broadcast = int(self.broadcast_address)
for x in _compat_range(network + 1, broadcast):
yield self._address_class(x)
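    # For example (illustrative): IPv4Network('192.0.2.0/30').hosts()
    # yields 192.0.2.1 and 192.0.2.2 only.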
def __iter__(self):
network = int(self.network_address)
broadcast = int(self.broadcast_address)
for x in _compat_range(network, broadcast + 1):
yield self._address_class(x)
def __getitem__(self, n):
network = int(self.network_address)
broadcast = int(self.broadcast_address)
if n >= 0:
if network + n > broadcast:
raise IndexError('address out of range')
return self._address_class(network + n)
else:
n += 1
if broadcast + n < network:
raise IndexError('address out of range')
return self._address_class(broadcast + n)
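    # For example (illustrative): with net = IPv4Network('192.0.2.0/24'),
    # net[0] is the network address 192.0.2.0 and net[-1] is the
    # broadcast address 192.0.2.255.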
def __lt__(self, other):
if not isinstance(other, _IPAddressBase):
return NotImplemented
if not isinstance(other, _BaseNetwork):
raise TypeError('%s and %s are not of the same type' % (
self, other))
if self._version != other._version:
raise TypeError('%s and %s are not of the same version' % (
self, other))
if self.network_address != other.network_address:
return self.network_address < other.network_address
if self.netmask != other.netmask:
return self.netmask < other.netmask
return False
def __eq__(self, other):
try:
return (self._version == other._version and
self.network_address == other.network_address and
int(self.netmask) == int(other.netmask))
except AttributeError:
return NotImplemented
def __hash__(self):
return hash(int(self.network_address) ^ int(self.netmask))
def __contains__(self, other):
# always false if one is v4 and the other is v6.
if self._version != other._version:
return False
# dealing with another network.
if isinstance(other, _BaseNetwork):
return False
# dealing with another address
else:
# address
return (int(self.network_address) <= int(other._ip) <=
int(self.broadcast_address))
def overlaps(self, other):
"""Tell if self is partly contained in other."""
return self.network_address in other or (
self.broadcast_address in other or (
other.network_address in self or (
other.broadcast_address in self)))
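    # For example (illustrative): IPv4Network('192.0.2.0/24') overlaps
    # IPv4Network('192.0.2.128/25'); disjoint networks return False.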
@property
def broadcast_address(self):
x = self._cache.get('broadcast_address')
if x is None:
x = self._address_class(int(self.network_address) |
int(self.hostmask))
self._cache['broadcast_address'] = x
return x
@property
def hostmask(self):
x = self._cache.get('hostmask')
if x is None:
x = self._address_class(int(self.netmask) ^ self._ALL_ONES)
self._cache['hostmask'] = x
return x
@property
def with_prefixlen(self):
return '%s/%d' % (self.network_address, self._prefixlen)
@property
def with_netmask(self):
return '%s/%s' % (self.network_address, self.netmask)
@property
def with_hostmask(self):
return '%s/%s' % (self.network_address, self.hostmask)
@property
def num_addresses(self):
"""Number of hosts in the current subnet."""
return int(self.broadcast_address) - int(self.network_address) + 1
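    # For example (illustrative): IPv4Network('192.0.2.0/24').num_addresses
    # is 256, counting the network and broadcast addresses.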
@property
def _address_class(self):
# Returning bare address objects (rather than interfaces) allows for
# more consistent behaviour across the network address, broadcast
# address and individual host addresses.
msg = '%200s has no associated address class' % (type(self),)
raise NotImplementedError(msg)
@property
def prefixlen(self):
return self._prefixlen
def address_exclude(self, other):
"""Remove an address from a larger block.
For example:
addr1 = ip_network('192.0.2.0/28')
addr2 = ip_network('192.0.2.1/32')
list(addr1.address_exclude(addr2)) =
[IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]
or IPv6:
addr1 = ip_network('2001:db8::1/32')
addr2 = ip_network('2001:db8::1/128')
list(addr1.address_exclude(addr2)) =
[ip_network('2001:db8::1/128'),
ip_network('2001:db8::2/127'),
ip_network('2001:db8::4/126'),
ip_network('2001:db8::8/125'),
...
ip_network('2001:db8:8000::/33')]
Args:
other: An IPv4Network or IPv6Network object of the same type.
Returns:
An iterator of the IPv(4|6)Network objects which is self
minus other.
Raises:
TypeError: If self and other are of differing address
versions, or if other is not a network object.
ValueError: If other is not completely contained by self.
"""
if not self._version == other._version:
raise TypeError("%s and %s are not of the same version" % (
self, other))
if not isinstance(other, _BaseNetwork):
raise TypeError("%s is not a network object" % other)
if not other.subnet_of(self):
raise ValueError('%s not contained in %s' % (other, self))
if other == self:
return
# Make sure we're comparing the network of other.
other = other.__class__('%s/%s' % (other.network_address,
other.prefixlen))
s1, s2 = self.subnets()
while s1 != other and s2 != other:
if other.subnet_of(s1):
yield s2
s1, s2 = s1.subnets()
elif other.subnet_of(s2):
yield s1
s1, s2 = s2.subnets()
else:
# If we got here, there's a bug somewhere.
raise AssertionError('Error performing exclusion: '
's1: %s s2: %s other: %s' %
(s1, s2, other))
if s1 == other:
yield s2
elif s2 == other:
yield s1
else:
# If we got here, there's a bug somewhere.
raise AssertionError('Error performing exclusion: '
's1: %s s2: %s other: %s' %
(s1, s2, other))
def compare_networks(self, other):
"""Compare two IP objects.
This is only concerned about the comparison of the integer
representation of the network addresses. This means that the
host bits aren't considered at all in this method. If you want
to compare host bits, you can easily enough do a
'HostA._ip < HostB._ip'
Args:
other: An IP object.
Returns:
If the IP versions of self and other are the same, returns:
-1 if self < other:
eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
IPv6Network('2001:db8::1000/124') <
IPv6Network('2001:db8::2000/124')
0 if self == other
eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
IPv6Network('2001:db8::1000/124') ==
IPv6Network('2001:db8::1000/124')
1 if self > other
eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
IPv6Network('2001:db8::2000/124') >
IPv6Network('2001:db8::1000/124')
Raises:
TypeError if the IP versions are different.
"""
# does this need to raise a ValueError?
if self._version != other._version:
raise TypeError('%s and %s are not of the same type' % (
self, other))
# self._version == other._version below here:
if self.network_address < other.network_address:
return -1
if self.network_address > other.network_address:
return 1
# self.network_address == other.network_address below here:
if self.netmask < other.netmask:
return -1
if self.netmask > other.netmask:
return 1
return 0
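    # Usage sketch (editorial addition; assumes the module-level
    # ip_network() factory defined earlier in this file):
    #   ip_network('192.0.2.0/25').compare_networks(
    #       ip_network('192.0.2.128/25'))   # -> -1, lower network address
    #   ip_network('192.0.2.0/24').compare_networks(
    #       ip_network('192.0.2.0/25'))     # -> -1, same address, shorter mask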
def _get_networks_key(self):
"""Network-only key function.
Returns an object that identifies this address' network and
netmask. This function is a suitable "key" argument for sorted()
and list.sort().
"""
return (self._version, self.network_address, self.netmask)
def subnets(self, prefixlen_diff=1, new_prefix=None):
"""The subnets which join to make the current subnet.
In the case that self contains only one IP
(self._prefixlen == 32 for IPv4 or self._prefixlen == 128
for IPv6), yield an iterator with just ourself.
Args:
prefixlen_diff: An integer, the amount the prefix length
should be increased by. This should not be set if
new_prefix is also set.
new_prefix: The desired new prefix length. This must be a
              larger number (i.e. a smaller network) than the existing prefix.
This should not be set if prefixlen_diff is also set.
Returns:
An iterator of IPv(4|6) objects.
Raises:
ValueError: The prefixlen_diff is too small or too large.
OR
prefixlen_diff and new_prefix are both set or new_prefix
is a smaller number than the current prefix (smaller
number means a larger network)
"""
if self._prefixlen == self._max_prefixlen:
yield self
return
if new_prefix is not None:
if new_prefix < self._prefixlen:
raise ValueError('new prefix must be longer')
if prefixlen_diff != 1:
raise ValueError('cannot set prefixlen_diff and new_prefix')
prefixlen_diff = new_prefix - self._prefixlen
if prefixlen_diff < 0:
raise ValueError('prefix length diff must be > 0')
new_prefixlen = self._prefixlen + prefixlen_diff
if new_prefixlen > self._max_prefixlen:
raise ValueError(
'prefix length diff %d is invalid for netblock %s' % (
new_prefixlen, self))
start = int(self.network_address)
end = int(self.broadcast_address) + 1
step = (int(self.hostmask) + 1) >> prefixlen_diff
for new_addr in _compat_range(start, end, step):
current = self.__class__((new_addr, new_prefixlen))
yield current
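    # Usage sketch (editorial addition):
    #   list(ip_network('192.0.2.0/24').subnets())
    #     -> [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/25')]
    #   list(ip_network('192.0.2.0/24').subnets(new_prefix=26))
    #     -> the four /26 blocks 192.0.2.0, .64, .128 and .192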
def supernet(self, prefixlen_diff=1, new_prefix=None):
"""The supernet containing the current network.
Args:
prefixlen_diff: An integer, the amount the prefix length of
the network should be decreased by. For example, given a
/24 network and a prefixlen_diff of 3, a supernet with a
/21 netmask is returned.
Returns:
An IPv4 network object.
Raises:
ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have
a negative prefix length.
OR
If prefixlen_diff and new_prefix are both set or new_prefix is a
larger number than the current prefix (larger number means a
smaller network)
"""
if self._prefixlen == 0:
return self
if new_prefix is not None:
if new_prefix > self._prefixlen:
raise ValueError('new prefix must be shorter')
if prefixlen_diff != 1:
raise ValueError('cannot set prefixlen_diff and new_prefix')
prefixlen_diff = self._prefixlen - new_prefix
new_prefixlen = self.prefixlen - prefixlen_diff
if new_prefixlen < 0:
raise ValueError(
'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
(self.prefixlen, prefixlen_diff))
return self.__class__((
int(self.network_address) & (int(self.netmask) << prefixlen_diff),
new_prefixlen))
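    # Usage sketch (editorial addition):
    #   ip_network('192.0.2.0/24').supernet()                  # -> 192.0.2.0/23
    #   ip_network('192.0.2.0/24').supernet(prefixlen_diff=3)  # -> 192.0.0.0/21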
@property
def is_multicast(self):
"""Test if the address is reserved for multicast use.
Returns:
A boolean, True if the address is a multicast address.
See RFC 2373 2.7 for details.
"""
return (self.network_address.is_multicast and
self.broadcast_address.is_multicast)
@staticmethod
def _is_subnet_of(a, b):
try:
# Always false if one is v4 and the other is v6.
if a._version != b._version:
raise TypeError(
"%s and %s are not of the same version" % (a, b))
return (b.network_address <= a.network_address and
b.broadcast_address >= a.broadcast_address)
except AttributeError:
raise TypeError("Unable to test subnet containment "
"between %s and %s" % (a, b))
def subnet_of(self, other):
"""Return True if this network is a subnet of other."""
return self._is_subnet_of(self, other)
def supernet_of(self, other):
"""Return True if this network is a supernet of other."""
return self._is_subnet_of(other, self)
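    # Usage sketch (editorial addition):
    #   a = ip_network('192.0.2.0/24')
    #   b = ip_network('192.0.2.128/25')
    #   b.subnet_of(a)      # -> True
    #   a.supernet_of(b)    # -> True
    # Mixing IPv4 and IPv6 operands raises TypeError via _is_subnet_of().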
@property
def is_reserved(self):
"""Test if the address is otherwise IETF reserved.
Returns:
A boolean, True if the address is within one of the
reserved IPv6 Network ranges.
"""
return (self.network_address.is_reserved and
self.broadcast_address.is_reserved)
@property
def is_link_local(self):
"""Test if the address is reserved for link-local.
Returns:
A boolean, True if the address is reserved per RFC 4291.
"""
return (self.network_address.is_link_local and
self.broadcast_address.is_link_local)
@property
def is_private(self):
"""Test if this address is allocated for private networks.
Returns:
A boolean, True if the address is reserved per
iana-ipv4-special-registry or iana-ipv6-special-registry.
"""
return (self.network_address.is_private and
self.broadcast_address.is_private)
@property
def is_global(self):
"""Test if this address is allocated for public networks.
Returns:
A boolean, True if the address is not reserved per
iana-ipv4-special-registry or iana-ipv6-special-registry.
"""
return not self.is_private
@property
def is_unspecified(self):
"""Test if the address is unspecified.
Returns:
A boolean, True if this is the unspecified address as defined in
RFC 2373 2.5.2.
"""
return (self.network_address.is_unspecified and
self.broadcast_address.is_unspecified)
@property
def is_loopback(self):
"""Test if the address is a loopback address.
Returns:
A boolean, True if the address is a loopback address as defined in
RFC 2373 2.5.3.
"""
return (self.network_address.is_loopback and
self.broadcast_address.is_loopback)
class _BaseV4(object):
"""Base IPv4 object.
The following methods are used by IPv4 objects in both single IP
addresses and networks.
"""
__slots__ = ()
_version = 4
# Equivalent to 255.255.255.255 or 32 bits of 1's.
_ALL_ONES = (2 ** IPV4LENGTH) - 1
_DECIMAL_DIGITS = frozenset('0123456789')
# the valid octets for host and netmasks. only useful for IPv4.
_valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0])
_max_prefixlen = IPV4LENGTH
# There are only a handful of valid v4 netmasks, so we cache them all
# when constructed (see _make_netmask()).
_netmask_cache = {}
def _explode_shorthand_ip_string(self):
return _compat_str(self)
@classmethod
def _make_netmask(cls, arg):
"""Make a (netmask, prefix_len) tuple from the given argument.
Argument can be:
- an integer (the prefix length)
- a string representing the prefix length (e.g. "24")
- a string representing the prefix netmask (e.g. "255.255.255.0")
"""
if arg not in cls._netmask_cache:
if isinstance(arg, _compat_int_types):
prefixlen = arg
else:
try:
# Check for a netmask in prefix length form
prefixlen = cls._prefix_from_prefix_string(arg)
except NetmaskValueError:
# Check for a netmask or hostmask in dotted-quad form.
# This may raise NetmaskValueError.
prefixlen = cls._prefix_from_ip_string(arg)
netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen))
cls._netmask_cache[arg] = netmask, prefixlen
return cls._netmask_cache[arg]
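    # Usage sketch (editorial addition): all three accepted argument forms
    # resolve to the same cached (netmask, prefixlen) tuple:
    #   IPv4Network._make_netmask(24)
    #   IPv4Network._make_netmask('24')
    #   IPv4Network._make_netmask('255.255.255.0')
    #     -> (IPv4Address('255.255.255.0'), 24)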
@classmethod
def _ip_int_from_string(cls, ip_str):
"""Turn the given IP string into an integer for comparison.
Args:
ip_str: A string, the IP ip_str.
Returns:
The IP ip_str as an integer.
Raises:
AddressValueError: if ip_str isn't a valid IPv4 Address.
"""
if not ip_str:
raise AddressValueError('Address cannot be empty')
octets = ip_str.split('.')
if len(octets) != 4:
raise AddressValueError("Expected 4 octets in %r" % ip_str)
try:
return _compat_int_from_byte_vals(
map(cls._parse_octet, octets), 'big')
except ValueError as exc:
raise AddressValueError("%s in %r" % (exc, ip_str))
@classmethod
def _parse_octet(cls, octet_str):
"""Convert a decimal octet into an integer.
Args:
octet_str: A string, the number to parse.
Returns:
The octet as an integer.
Raises:
ValueError: if the octet isn't strictly a decimal from [0..255].
"""
if not octet_str:
raise ValueError("Empty octet not permitted")
# Whitelist the characters, since int() allows a lot of bizarre stuff.
if not cls._DECIMAL_DIGITS.issuperset(octet_str):
msg = "Only decimal digits permitted in %r"
raise ValueError(msg % octet_str)
# We do the length check second, since the invalid character error
# is likely to be more informative for the user
if len(octet_str) > 3:
msg = "At most 3 characters permitted in %r"
raise ValueError(msg % octet_str)
# Convert to integer (we know digits are legal)
octet_int = int(octet_str, 10)
# Any octets that look like they *might* be written in octal,
# and which don't look exactly the same in both octal and
# decimal are rejected as ambiguous
if octet_int > 7 and octet_str[0] == '0':
msg = "Ambiguous (octal/decimal) value in %r not permitted"
raise ValueError(msg % octet_str)
if octet_int > 255:
raise ValueError("Octet %d (> 255) not permitted" % octet_int)
return octet_int
@classmethod
def _string_from_ip_int(cls, ip_int):
"""Turns a 32-bit integer into dotted decimal notation.
Args:
ip_int: An integer, the IP address.
Returns:
The IP address as a string in dotted decimal notation.
"""
return '.'.join(_compat_str(struct.unpack(b'!B', b)[0]
if isinstance(b, bytes)
else b)
for b in _compat_to_bytes(ip_int, 4, 'big'))
def _is_hostmask(self, ip_str):
"""Test if the IP string is a hostmask (rather than a netmask).
Args:
ip_str: A string, the potential hostmask.
Returns:
A boolean, True if the IP string is a hostmask.
"""
bits = ip_str.split('.')
try:
parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
except ValueError:
return False
if len(parts) != len(bits):
return False
if parts[0] < parts[-1]:
return True
return False
def _reverse_pointer(self):
"""Return the reverse DNS pointer name for the IPv4 address.
This implements the method described in RFC1035 3.5.
"""
reverse_octets = _compat_str(self).split('.')[::-1]
return '.'.join(reverse_octets) + '.in-addr.arpa'
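    # Usage sketch (editorial addition; assumes the public reverse_pointer
    # property defined on _IPAddressBase earlier in this file):
    #   IPv4Address('192.0.2.1').reverse_pointer
    #     -> '1.2.0.192.in-addr.arpa'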
@property
def max_prefixlen(self):
return self._max_prefixlen
@property
def version(self):
return self._version
class IPv4Address(_BaseV4, _BaseAddress):
"""Represent and manipulate single IPv4 Addresses."""
__slots__ = ('_ip', '__weakref__')
def __init__(self, address):
"""
Args:
address: A string or integer representing the IP
Additionally, an integer can be passed, so
IPv4Address('192.0.2.1') == IPv4Address(3221225985).
or, more generally
IPv4Address(int(IPv4Address('192.0.2.1'))) ==
IPv4Address('192.0.2.1')
Raises:
AddressValueError: If ipaddress isn't a valid IPv4 address.
"""
# Efficient constructor from integer.
if isinstance(address, _compat_int_types):
self._check_int_address(address)
self._ip = address
return
# Constructing from a packed address
if isinstance(address, bytes):
self._check_packed_address(address, 4)
bvs = _compat_bytes_to_byte_vals(address)
self._ip = _compat_int_from_byte_vals(bvs, 'big')
return
# Assume input argument to be string or any object representation
# which converts into a formatted IP string.
addr_str = _compat_str(address)
if '/' in addr_str:
raise AddressValueError("Unexpected '/' in %r" % address)
self._ip = self._ip_int_from_string(addr_str)
@property
def packed(self):
"""The binary representation of this address."""
return v4_int_to_packed(self._ip)
@property
def is_reserved(self):
"""Test if the address is otherwise IETF reserved.
Returns:
A boolean, True if the address is within the
reserved IPv4 Network range.
"""
return self in self._constants._reserved_network
@property
def is_private(self):
"""Test if this address is allocated for private networks.
Returns:
A boolean, True if the address is reserved per
iana-ipv4-special-registry.
"""
return any(self in net for net in self._constants._private_networks)
@property
def is_global(self):
return (
self not in self._constants._public_network and
not self.is_private)
@property
def is_multicast(self):
"""Test if the address is reserved for multicast use.
Returns:
A boolean, True if the address is multicast.
See RFC 3171 for details.
"""
return self in self._constants._multicast_network
@property
def is_unspecified(self):
"""Test if the address is unspecified.
Returns:
A boolean, True if this is the unspecified address as defined in
RFC 5735 3.
"""
return self == self._constants._unspecified_address
@property
def is_loopback(self):
"""Test if the address is a loopback address.
Returns:
A boolean, True if the address is a loopback per RFC 3330.
"""
return self in self._constants._loopback_network
@property
def is_link_local(self):
"""Test if the address is reserved for link-local.
Returns:
A boolean, True if the address is link-local per RFC 3927.
"""
return self in self._constants._linklocal_network
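# Usage sketch (editorial addition): the classification properties above
# consult the _IPv4Constants registry attached at the end of the IPv4
# section below, e.g.:
#   IPv4Address('10.0.0.1').is_private      # -> True
#   IPv4Address('127.0.0.1').is_loopback    # -> True
#   IPv4Address('224.0.0.1').is_multicast   # -> True
#   IPv4Address('8.8.8.8').is_global        # -> True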
class IPv4Interface(IPv4Address):
def __init__(self, address):
if isinstance(address, (bytes, _compat_int_types)):
IPv4Address.__init__(self, address)
self.network = IPv4Network(self._ip)
self._prefixlen = self._max_prefixlen
return
if isinstance(address, tuple):
IPv4Address.__init__(self, address[0])
if len(address) > 1:
self._prefixlen = int(address[1])
else:
self._prefixlen = self._max_prefixlen
self.network = IPv4Network(address, strict=False)
self.netmask = self.network.netmask
self.hostmask = self.network.hostmask
return
addr = _split_optional_netmask(address)
IPv4Address.__init__(self, addr[0])
self.network = IPv4Network(address, strict=False)
self._prefixlen = self.network._prefixlen
self.netmask = self.network.netmask
self.hostmask = self.network.hostmask
def __str__(self):
return '%s/%d' % (self._string_from_ip_int(self._ip),
self.network.prefixlen)
def __eq__(self, other):
address_equal = IPv4Address.__eq__(self, other)
if not address_equal or address_equal is NotImplemented:
return address_equal
try:
return self.network == other.network
except AttributeError:
# An interface with an associated network is NOT the
# same as an unassociated address. That's why the hash
# takes the extra info into account.
return False
def __lt__(self, other):
address_less = IPv4Address.__lt__(self, other)
if address_less is NotImplemented:
return NotImplemented
try:
return (self.network < other.network or
self.network == other.network and address_less)
except AttributeError:
# We *do* allow addresses and interfaces to be sorted. The
# unassociated address is considered less than all interfaces.
return False
def __hash__(self):
return self._ip ^ self._prefixlen ^ int(self.network.network_address)
__reduce__ = _IPAddressBase.__reduce__
@property
def ip(self):
return IPv4Address(self._ip)
@property
def with_prefixlen(self):
return '%s/%s' % (self._string_from_ip_int(self._ip),
self._prefixlen)
@property
def with_netmask(self):
return '%s/%s' % (self._string_from_ip_int(self._ip),
self.netmask)
@property
def with_hostmask(self):
return '%s/%s' % (self._string_from_ip_int(self._ip),
self.hostmask)
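# Usage sketch (editorial addition):
#   iface = IPv4Interface('192.0.2.5/24')
#   iface.ip              # -> IPv4Address('192.0.2.5')
#   iface.network         # -> IPv4Network('192.0.2.0/24')
#   iface.with_netmask    # -> '192.0.2.5/255.255.255.0'
#   iface.with_hostmask   # -> '192.0.2.5/0.0.0.255'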
class IPv4Network(_BaseV4, _BaseNetwork):
"""This class represents and manipulates 32-bit IPv4 network + addresses..
Attributes: [examples for IPv4Network('192.0.2.0/27')]
.network_address: IPv4Address('192.0.2.0')
.hostmask: IPv4Address('0.0.0.31')
        .broadcast_address: IPv4Address('192.0.2.31')
.netmask: IPv4Address('255.255.255.224')
.prefixlen: 27
"""
# Class to use when creating address objects
_address_class = IPv4Address
def __init__(self, address, strict=True):
"""Instantiate a new IPv4 network object.
Args:
address: A string or integer representing the IP [& network].
'192.0.2.0/24'
'192.0.2.0/255.255.255.0'
'192.0.0.2/0.0.0.255'
are all functionally the same in IPv4. Similarly,
'192.0.2.1'
'192.0.2.1/255.255.255.255'
'192.0.2.1/32'
are also functionally equivalent. That is to say, failing to
provide a subnetmask will create an object with a mask of /32.
If the mask (portion after the / in the argument) is given in
dotted quad form, it is treated as a netmask if it starts with a
non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
starts with a zero field (e.g. 0.255.255.255 == /8), with the
single exception of an all-zero mask which is treated as a
netmask == /0. If no mask is given, a default of /32 is used.
Additionally, an integer can be passed, so
IPv4Network('192.0.2.1') == IPv4Network(3221225985)
or, more generally
IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
IPv4Interface('192.0.2.1')
Raises:
AddressValueError: If ipaddress isn't a valid IPv4 address.
NetmaskValueError: If the netmask isn't valid for
an IPv4 address.
ValueError: If strict is True and a network address is not
supplied.
"""
_BaseNetwork.__init__(self, address)
# Constructing from a packed address or integer
if isinstance(address, (_compat_int_types, bytes)):
self.network_address = IPv4Address(address)
self.netmask, self._prefixlen = self._make_netmask(
self._max_prefixlen)
# fixme: address/network test here.
return
if isinstance(address, tuple):
if len(address) > 1:
arg = address[1]
else:
# We weren't given an address[1]
arg = self._max_prefixlen
self.network_address = IPv4Address(address[0])
self.netmask, self._prefixlen = self._make_netmask(arg)
packed = int(self.network_address)
if packed & int(self.netmask) != packed:
if strict:
raise ValueError('%s has host bits set' % self)
else:
self.network_address = IPv4Address(packed &
int(self.netmask))
return
# Assume input argument to be string or any object representation
# which converts into a formatted IP prefix string.
addr = _split_optional_netmask(address)
self.network_address = IPv4Address(self._ip_int_from_string(addr[0]))
if len(addr) == 2:
arg = addr[1]
else:
arg = self._max_prefixlen
self.netmask, self._prefixlen = self._make_netmask(arg)
if strict:
if (IPv4Address(int(self.network_address) & int(self.netmask)) !=
self.network_address):
raise ValueError('%s has host bits set' % self)
self.network_address = IPv4Address(int(self.network_address) &
int(self.netmask))
if self._prefixlen == (self._max_prefixlen - 1):
self.hosts = self.__iter__
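    # Usage sketch (editorial addition):
    #   IPv4Network('192.0.2.1/24')                 # raises ValueError
    #   IPv4Network('192.0.2.1/24', strict=False)   # -> IPv4Network('192.0.2.0/24')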
@property
def is_global(self):
"""Test if this address is allocated for public networks.
Returns:
A boolean, True if the address is not reserved per
iana-ipv4-special-registry.
"""
return (not (self.network_address in IPv4Network('100.64.0.0/10') and
self.broadcast_address in IPv4Network('100.64.0.0/10')) and
not self.is_private)
class _IPv4Constants(object):
_linklocal_network = IPv4Network('169.254.0.0/16')
_loopback_network = IPv4Network('127.0.0.0/8')
_multicast_network = IPv4Network('224.0.0.0/4')
_public_network = IPv4Network('100.64.0.0/10')
_private_networks = [
IPv4Network('0.0.0.0/8'),
IPv4Network('10.0.0.0/8'),
IPv4Network('127.0.0.0/8'),
IPv4Network('169.254.0.0/16'),
IPv4Network('172.16.0.0/12'),
IPv4Network('192.0.0.0/29'),
IPv4Network('192.0.0.170/31'),
IPv4Network('192.0.2.0/24'),
IPv4Network('192.168.0.0/16'),
IPv4Network('198.18.0.0/15'),
IPv4Network('198.51.100.0/24'),
IPv4Network('203.0.113.0/24'),
IPv4Network('240.0.0.0/4'),
IPv4Network('255.255.255.255/32'),
]
_reserved_network = IPv4Network('240.0.0.0/4')
_unspecified_address = IPv4Address('0.0.0.0')
IPv4Address._constants = _IPv4Constants
class _BaseV6(object):
"""Base IPv6 object.
The following methods are used by IPv6 objects in both single IP
addresses and networks.
"""
__slots__ = ()
_version = 6
_ALL_ONES = (2 ** IPV6LENGTH) - 1
_HEXTET_COUNT = 8
_HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')
_max_prefixlen = IPV6LENGTH
# There are only a bunch of valid v6 netmasks, so we cache them all
# when constructed (see _make_netmask()).
_netmask_cache = {}
@classmethod
def _make_netmask(cls, arg):
"""Make a (netmask, prefix_len) tuple from the given argument.
Argument can be:
- an integer (the prefix length)
- a string representing the prefix length (e.g. "24")
- a string representing the prefix netmask (e.g. "255.255.255.0")
"""
if arg not in cls._netmask_cache:
if isinstance(arg, _compat_int_types):
prefixlen = arg
else:
prefixlen = cls._prefix_from_prefix_string(arg)
netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen))
cls._netmask_cache[arg] = netmask, prefixlen
return cls._netmask_cache[arg]
@classmethod
def _ip_int_from_string(cls, ip_str):
"""Turn an IPv6 ip_str into an integer.
Args:
ip_str: A string, the IPv6 ip_str.
Returns:
An int, the IPv6 address
Raises:
AddressValueError: if ip_str isn't a valid IPv6 Address.
"""
if not ip_str:
raise AddressValueError('Address cannot be empty')
parts = ip_str.split(':')
# An IPv6 address needs at least 2 colons (3 parts).
_min_parts = 3
if len(parts) < _min_parts:
msg = "At least %d parts expected in %r" % (_min_parts, ip_str)
raise AddressValueError(msg)
# If the address has an IPv4-style suffix, convert it to hexadecimal.
if '.' in parts[-1]:
try:
ipv4_int = IPv4Address(parts.pop())._ip
except AddressValueError as exc:
raise AddressValueError("%s in %r" % (exc, ip_str))
parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
parts.append('%x' % (ipv4_int & 0xFFFF))
# An IPv6 address can't have more than 8 colons (9 parts).
# The extra colon comes from using the "::" notation for a single
# leading or trailing zero part.
_max_parts = cls._HEXTET_COUNT + 1
if len(parts) > _max_parts:
msg = "At most %d colons permitted in %r" % (
_max_parts - 1, ip_str)
raise AddressValueError(msg)
# Disregarding the endpoints, find '::' with nothing in between.
# This indicates that a run of zeroes has been skipped.
skip_index = None
for i in _compat_range(1, len(parts) - 1):
if not parts[i]:
if skip_index is not None:
# Can't have more than one '::'
msg = "At most one '::' permitted in %r" % ip_str
raise AddressValueError(msg)
skip_index = i
# parts_hi is the number of parts to copy from above/before the '::'
# parts_lo is the number of parts to copy from below/after the '::'
if skip_index is not None:
# If we found a '::', then check if it also covers the endpoints.
parts_hi = skip_index
parts_lo = len(parts) - skip_index - 1
if not parts[0]:
parts_hi -= 1
if parts_hi:
msg = "Leading ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # ^: requires ^::
if not parts[-1]:
parts_lo -= 1
if parts_lo:
msg = "Trailing ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # :$ requires ::$
parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo)
if parts_skipped < 1:
msg = "Expected at most %d other parts with '::' in %r"
raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str))
else:
# Otherwise, allocate the entire address to parts_hi. The
# endpoints could still be empty, but _parse_hextet() will check
# for that.
if len(parts) != cls._HEXTET_COUNT:
msg = "Exactly %d parts expected without '::' in %r"
raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str))
if not parts[0]:
msg = "Leading ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # ^: requires ^::
if not parts[-1]:
msg = "Trailing ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # :$ requires ::$
parts_hi = len(parts)
parts_lo = 0
parts_skipped = 0
try:
# Now, parse the hextets into a 128-bit integer.
ip_int = 0
for i in range(parts_hi):
ip_int <<= 16
ip_int |= cls._parse_hextet(parts[i])
ip_int <<= 16 * parts_skipped
for i in range(-parts_lo, 0):
ip_int <<= 16
ip_int |= cls._parse_hextet(parts[i])
return ip_int
except ValueError as exc:
raise AddressValueError("%s in %r" % (exc, ip_str))
@classmethod
def _parse_hextet(cls, hextet_str):
"""Convert an IPv6 hextet string into an integer.
Args:
hextet_str: A string, the number to parse.
Returns:
The hextet as an integer.
Raises:
ValueError: if the input isn't strictly a hex number from
[0..FFFF].
"""
# Whitelist the characters, since int() allows a lot of bizarre stuff.
if not cls._HEX_DIGITS.issuperset(hextet_str):
raise ValueError("Only hex digits permitted in %r" % hextet_str)
# We do the length check second, since the invalid character error
# is likely to be more informative for the user
if len(hextet_str) > 4:
msg = "At most 4 characters permitted in %r"
raise ValueError(msg % hextet_str)
# Length check means we can skip checking the integer value
return int(hextet_str, 16)
@classmethod
def _compress_hextets(cls, hextets):
"""Compresses a list of hextets.
Compresses a list of strings, replacing the longest continuous
sequence of "0" in the list with "" and adding empty strings at
        the beginning or at the end of the list such that subsequently
calling ":".join(hextets) will produce the compressed version of
the IPv6 address.
Args:
hextets: A list of strings, the hextets to compress.
Returns:
A list of strings.
"""
best_doublecolon_start = -1
best_doublecolon_len = 0
doublecolon_start = -1
doublecolon_len = 0
for index, hextet in enumerate(hextets):
if hextet == '0':
doublecolon_len += 1
if doublecolon_start == -1:
# Start of a sequence of zeros.
doublecolon_start = index
if doublecolon_len > best_doublecolon_len:
# This is the longest sequence of zeros so far.
best_doublecolon_len = doublecolon_len
best_doublecolon_start = doublecolon_start
else:
doublecolon_len = 0
doublecolon_start = -1
if best_doublecolon_len > 1:
best_doublecolon_end = (best_doublecolon_start +
best_doublecolon_len)
# For zeros at the end of the address.
if best_doublecolon_end == len(hextets):
hextets += ['']
hextets[best_doublecolon_start:best_doublecolon_end] = ['']
# For zeros at the beginning of the address.
if best_doublecolon_start == 0:
hextets = [''] + hextets
return hextets
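    # Usage sketch (editorial addition):
    #   _BaseV6._compress_hextets(
    #       ['2001', 'db8', '0', '0', '0', '0', '0', '1'])
    #     -> ['2001', 'db8', '', '1']   # ':'.join(...) == '2001:db8::1'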
@classmethod
def _string_from_ip_int(cls, ip_int=None):
"""Turns a 128-bit integer into hexadecimal notation.
Args:
ip_int: An integer, the IP address.
Returns:
A string, the hexadecimal representation of the address.
Raises:
ValueError: The address is bigger than 128 bits of all ones.
"""
if ip_int is None:
ip_int = int(cls._ip)
if ip_int > cls._ALL_ONES:
raise ValueError('IPv6 address is too large')
hex_str = '%032x' % ip_int
hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)]
hextets = cls._compress_hextets(hextets)
return ':'.join(hextets)
def _explode_shorthand_ip_string(self):
"""Expand a shortened IPv6 address.
Args:
ip_str: A string, the IPv6 address.
Returns:
A string, the expanded IPv6 address.
"""
if isinstance(self, IPv6Network):
ip_str = _compat_str(self.network_address)
elif isinstance(self, IPv6Interface):
ip_str = _compat_str(self.ip)
else:
ip_str = _compat_str(self)
ip_int = self._ip_int_from_string(ip_str)
hex_str = '%032x' % ip_int
parts = [hex_str[x:x + 4] for x in range(0, 32, 4)]
if isinstance(self, (_BaseNetwork, IPv6Interface)):
return '%s/%d' % (':'.join(parts), self._prefixlen)
return ':'.join(parts)
def _reverse_pointer(self):
"""Return the reverse DNS pointer name for the IPv6 address.
This implements the method described in RFC3596 2.5.
"""
reverse_chars = self.exploded[::-1].replace(':', '')
return '.'.join(reverse_chars) + '.ip6.arpa'
@property
def max_prefixlen(self):
return self._max_prefixlen
@property
def version(self):
return self._version
class IPv6Address(_BaseV6, _BaseAddress):
"""Represent and manipulate single IPv6 Addresses."""
__slots__ = ('_ip', '__weakref__')
def __init__(self, address):
"""Instantiate a new IPv6 address object.
Args:
address: A string or integer representing the IP
Additionally, an integer can be passed, so
IPv6Address('2001:db8::') ==
IPv6Address(42540766411282592856903984951653826560)
or, more generally
IPv6Address(int(IPv6Address('2001:db8::'))) ==
IPv6Address('2001:db8::')
Raises:
AddressValueError: If address isn't a valid IPv6 address.
"""
# Efficient constructor from integer.
if isinstance(address, _compat_int_types):
self._check_int_address(address)
self._ip = address
return
# Constructing from a packed address
if isinstance(address, bytes):
self._check_packed_address(address, 16)
bvs = _compat_bytes_to_byte_vals(address)
self._ip = _compat_int_from_byte_vals(bvs, 'big')
return
# Assume input argument to be string or any object representation
# which converts into a formatted IP string.
addr_str = _compat_str(address)
if '/' in addr_str:
raise AddressValueError("Unexpected '/' in %r" % address)
self._ip = self._ip_int_from_string(addr_str)
@property
def packed(self):
"""The binary representation of this address."""
return v6_int_to_packed(self._ip)
@property
def is_multicast(self):
"""Test if the address is reserved for multicast use.
Returns:
A boolean, True if the address is a multicast address.
See RFC 2373 2.7 for details.
"""
return self in self._constants._multicast_network
@property
def is_reserved(self):
"""Test if the address is otherwise IETF reserved.
Returns:
A boolean, True if the address is within one of the
reserved IPv6 Network ranges.
"""
return any(self in x for x in self._constants._reserved_networks)
@property
def is_link_local(self):
"""Test if the address is reserved for link-local.
Returns:
A boolean, True if the address is reserved per RFC 4291.
"""
return self in self._constants._linklocal_network
@property
def is_site_local(self):
"""Test if the address is reserved for site-local.
Note that the site-local address space has been deprecated by RFC 3879.
Use is_private to test if this address is in the space of unique local
addresses as defined by RFC 4193.
Returns:
A boolean, True if the address is reserved per RFC 3513 2.5.6.
"""
return self in self._constants._sitelocal_network
@property
def is_private(self):
"""Test if this address is allocated for private networks.
Returns:
A boolean, True if the address is reserved per
iana-ipv6-special-registry.
"""
return any(self in net for net in self._constants._private_networks)
@property
def is_global(self):
"""Test if this address is allocated for public networks.
Returns:
A boolean, true if the address is not reserved per
iana-ipv6-special-registry.
"""
return not self.is_private
@property
def is_unspecified(self):
"""Test if the address is unspecified.
Returns:
A boolean, True if this is the unspecified address as defined in
RFC 2373 2.5.2.
"""
return self._ip == 0
@property
def is_loopback(self):
"""Test if the address is a loopback address.
Returns:
A boolean, True if the address is a loopback address as defined in
RFC 2373 2.5.3.
"""
return self._ip == 1
@property
def ipv4_mapped(self):
"""Return the IPv4 mapped address.
Returns:
If the IPv6 address is a v4 mapped address, return the
IPv4 mapped address. Return None otherwise.
"""
if (self._ip >> 32) != 0xFFFF:
return None
return IPv4Address(self._ip & 0xFFFFFFFF)
@property
def teredo(self):
"""Tuple of embedded teredo IPs.
Returns:
Tuple of the (server, client) IPs or None if the address
doesn't appear to be a teredo address (doesn't start with
2001::/32)
"""
if (self._ip >> 96) != 0x20010000:
return None
return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF),
IPv4Address(~self._ip & 0xFFFFFFFF))
@property
def sixtofour(self):
"""Return the IPv4 6to4 embedded address.
Returns:
The IPv4 6to4-embedded address if present or None if the
address doesn't appear to contain a 6to4 embedded address.
"""
if (self._ip >> 112) != 0x2002:
return None
return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
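# Usage sketch (editorial addition; the Teredo value is a commonly cited
# example address):
#   IPv6Address('2002:c000:204::').sixtofour
#     -> IPv4Address('192.0.2.4')
#   IPv6Address('2001:0:4136:e378:8000:63bf:3fff:fdd2').teredo
#     -> (IPv4Address('65.54.227.120'), IPv4Address('192.0.2.45'))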
class IPv6Interface(IPv6Address):
def __init__(self, address):
if isinstance(address, (bytes, _compat_int_types)):
IPv6Address.__init__(self, address)
self.network = IPv6Network(self._ip)
self._prefixlen = self._max_prefixlen
return
if isinstance(address, tuple):
IPv6Address.__init__(self, address[0])
if len(address) > 1:
self._prefixlen = int(address[1])
else:
self._prefixlen = self._max_prefixlen
self.network = IPv6Network(address, strict=False)
self.netmask = self.network.netmask
self.hostmask = self.network.hostmask
return
addr = _split_optional_netmask(address)
IPv6Address.__init__(self, addr[0])
self.network = IPv6Network(address, strict=False)
self.netmask = self.network.netmask
self._prefixlen = self.network._prefixlen
self.hostmask = self.network.hostmask
def __str__(self):
return '%s/%d' % (self._string_from_ip_int(self._ip),
self.network.prefixlen)
def __eq__(self, other):
address_equal = IPv6Address.__eq__(self, other)
if not address_equal or address_equal is NotImplemented:
return address_equal
try:
return self.network == other.network
except AttributeError:
# An interface with an associated network is NOT the
# same as an unassociated address. That's why the hash
# takes the extra info into account.
return False
def __lt__(self, other):
address_less = IPv6Address.__lt__(self, other)
if address_less is NotImplemented:
return NotImplemented
try:
return (self.network < other.network or
self.network == other.network and address_less)
except AttributeError:
# We *do* allow addresses and interfaces to be sorted. The
# unassociated address is considered less than all interfaces.
return False
def __hash__(self):
return self._ip ^ self._prefixlen ^ int(self.network.network_address)
__reduce__ = _IPAddressBase.__reduce__
@property
def ip(self):
return IPv6Address(self._ip)
@property
def with_prefixlen(self):
return '%s/%s' % (self._string_from_ip_int(self._ip),
self._prefixlen)
@property
def with_netmask(self):
return '%s/%s' % (self._string_from_ip_int(self._ip),
self.netmask)
@property
def with_hostmask(self):
return '%s/%s' % (self._string_from_ip_int(self._ip),
self.hostmask)
@property
def is_unspecified(self):
return self._ip == 0 and self.network.is_unspecified
@property
def is_loopback(self):
return self._ip == 1 and self.network.is_loopback
class IPv6Network(_BaseV6, _BaseNetwork):
"""This class represents and manipulates 128-bit IPv6 networks.
    Attributes: [examples for IPv6Network('2001:db8::1000/124')]
.network_address: IPv6Address('2001:db8::1000')
.hostmask: IPv6Address('::f')
.broadcast_address: IPv6Address('2001:db8::100f')
.netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
.prefixlen: 124
"""
# Class to use when creating address objects
_address_class = IPv6Address
def __init__(self, address, strict=True):
"""Instantiate a new IPv6 Network object.
Args:
address: A string or integer representing the IPv6 network or the
IP and prefix/netmask.
'2001:db8::/128'
'2001:db8:0000:0000:0000:0000:0000:0000/128'
'2001:db8::'
are all functionally the same in IPv6. That is to say,
failing to provide a subnetmask will create an object with
a mask of /128.
Additionally, an integer can be passed, so
IPv6Network('2001:db8::') ==
IPv6Network(42540766411282592856903984951653826560)
or, more generally
IPv6Network(int(IPv6Network('2001:db8::'))) ==
IPv6Network('2001:db8::')
strict: A boolean. If true, ensure that we have been passed
            a true network address, eg, 2001:db8::1000/124 and not an
IP address on a network, eg, 2001:db8::1/124.
Raises:
AddressValueError: If address isn't a valid IPv6 address.
NetmaskValueError: If the netmask isn't valid for
an IPv6 address.
ValueError: If strict was True and a network address was not
supplied.
"""
_BaseNetwork.__init__(self, address)
# Efficient constructor from integer or packed address
if isinstance(address, (bytes, _compat_int_types)):
self.network_address = IPv6Address(address)
self.netmask, self._prefixlen = self._make_netmask(
self._max_prefixlen)
return
if isinstance(address, tuple):
if len(address) > 1:
arg = address[1]
else:
arg = self._max_prefixlen
self.netmask, self._prefixlen = self._make_netmask(arg)
self.network_address = IPv6Address(address[0])
packed = int(self.network_address)
if packed & int(self.netmask) != packed:
if strict:
raise ValueError('%s has host bits set' % self)
else:
self.network_address = IPv6Address(packed &
int(self.netmask))
return
# Assume input argument to be string or any object representation
# which converts into a formatted IP prefix string.
addr = _split_optional_netmask(address)
self.network_address = IPv6Address(self._ip_int_from_string(addr[0]))
if len(addr) == 2:
arg = addr[1]
else:
arg = self._max_prefixlen
self.netmask, self._prefixlen = self._make_netmask(arg)
if strict:
if (IPv6Address(int(self.network_address) & int(self.netmask)) !=
self.network_address):
raise ValueError('%s has host bits set' % self)
self.network_address = IPv6Address(int(self.network_address) &
int(self.netmask))
if self._prefixlen == (self._max_prefixlen - 1):
self.hosts = self.__iter__
def hosts(self):
"""Generate Iterator over usable hosts in a network.
This is like __iter__ except it doesn't return the
Subnet-Router anycast address.
"""
network = int(self.network_address)
broadcast = int(self.broadcast_address)
for x in _compat_range(network + 1, broadcast + 1):
yield self._address_class(x)
@property
def is_site_local(self):
"""Test if the address is reserved for site-local.
Note that the site-local address space has been deprecated by RFC 3879.
Use is_private to test if this address is in the space of unique local
addresses as defined by RFC 4193.
Returns:
A boolean, True if the address is reserved per RFC 3513 2.5.6.
"""
return (self.network_address.is_site_local and
self.broadcast_address.is_site_local)
class _IPv6Constants(object):
_linklocal_network = IPv6Network('fe80::/10')
_multicast_network = IPv6Network('ff00::/8')
_private_networks = [
IPv6Network('::1/128'),
IPv6Network('::/128'),
IPv6Network('::ffff:0:0/96'),
IPv6Network('100::/64'),
IPv6Network('2001::/23'),
IPv6Network('2001:2::/48'),
IPv6Network('2001:db8::/32'),
IPv6Network('2001:10::/28'),
IPv6Network('fc00::/7'),
IPv6Network('fe80::/10'),
]
_reserved_networks = [
IPv6Network('::/8'), IPv6Network('100::/8'),
IPv6Network('200::/7'), IPv6Network('400::/6'),
IPv6Network('800::/5'), IPv6Network('1000::/4'),
IPv6Network('4000::/3'), IPv6Network('6000::/3'),
IPv6Network('8000::/3'), IPv6Network('A000::/3'),
IPv6Network('C000::/3'), IPv6Network('E000::/4'),
IPv6Network('F000::/5'), IPv6Network('F800::/6'),
IPv6Network('FE00::/9'),
]
_sitelocal_network = IPv6Network('fec0::/10')
IPv6Address._constants = _IPv6Constants
| mit | 7,370,209,632,763,728,000 | 6,302,655,720,969,769,000 | 32.006198 | 86 | 0.56815 | false |
zooba/PTVS | Python/Templates/Django/ProjectTemplates/Python/Web/StarterDjangoProject/project-wsgi.py | 10 | 1121 | """
WSGI config for $safeprojectname$ project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
For more information, visit
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault(
'DJANGO_SETTINGS_MODULE',
'$safeprojectname$.settings')
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
| apache-2.0 | -7,009,153,533,128,749,000 | 6,229,161,649,782,577,000 | 37.655172 | 79 | 0.789474 | false |
leesavide/pythonista-docs | Documentation/matplotlib/mpl_examples/api/custom_scale_example.py | 9 | 6401 | from __future__ import unicode_literals
import numpy as np
from numpy import ma
from matplotlib import scale as mscale
from matplotlib import transforms as mtransforms
from matplotlib.ticker import Formatter, FixedLocator
class MercatorLatitudeScale(mscale.ScaleBase):
"""
Scales data in range -pi/2 to pi/2 (-90 to 90 degrees) using
the system used to scale latitudes in a Mercator projection.
The scale function:
ln(tan(y) + sec(y))
The inverse scale function:
atan(sinh(y))
Since the Mercator scale tends to infinity at +/- 90 degrees,
there is user-defined threshold, above and below which nothing
will be plotted. This defaults to +/- 85 degrees.
source:
http://en.wikipedia.org/wiki/Mercator_projection
"""
# The scale class must have a member ``name`` that defines the
# string used to select the scale. For example,
# ``gca().set_yscale("mercator")`` would be used to select this
# scale.
name = 'mercator'
def __init__(self, axis, **kwargs):
"""
Any keyword arguments passed to ``set_xscale`` and
``set_yscale`` will be passed along to the scale's
constructor.
thresh: The degree above which to crop the data.
"""
mscale.ScaleBase.__init__(self)
thresh = kwargs.pop("thresh", (85 / 180.0) * np.pi)
if thresh >= np.pi / 2.0:
raise ValueError("thresh must be less than pi/2")
self.thresh = thresh
def get_transform(self):
"""
Override this method to return a new instance that does the
actual transformation of the data.
The MercatorLatitudeTransform class is defined below as a
nested class of this one.
"""
return self.MercatorLatitudeTransform(self.thresh)
def set_default_locators_and_formatters(self, axis):
"""
Override to set up the locators and formatters to use with the
scale. This is only required if the scale requires custom
locators and formatters. Writing custom locators and
formatters is rather outside the scope of this example, but
there are many helpful examples in ``ticker.py``.
In our case, the Mercator example uses a fixed locator from
        -90 to 90 degrees and a custom formatter class to convert
the radians to degrees and put a degree symbol after the
value::
"""
class DegreeFormatter(Formatter):
def __call__(self, x, pos=None):
# \u00b0 : degree symbol
return "%d\u00b0" % ((x / np.pi) * 180.0)
deg2rad = np.pi / 180.0
axis.set_major_locator(FixedLocator(
np.arange(-90, 90, 10) * deg2rad))
axis.set_major_formatter(DegreeFormatter())
axis.set_minor_formatter(DegreeFormatter())
def limit_range_for_scale(self, vmin, vmax, minpos):
"""
Override to limit the bounds of the axis to the domain of the
transform. In the case of Mercator, the bounds should be
limited to the threshold that was passed in. Unlike the
autoscaling provided by the tick locators, this range limiting
will always be adhered to, whether the axis range is set
manually, determined automatically or changed through panning
and zooming.
"""
return max(vmin, -self.thresh), min(vmax, self.thresh)
class MercatorLatitudeTransform(mtransforms.Transform):
# There are two value members that must be defined.
# ``input_dims`` and ``output_dims`` specify number of input
# dimensions and output dimensions to the transformation.
# These are used by the transformation framework to do some
# error checking and prevent incompatible transformations from
# being connected together. When defining transforms for a
# scale, which are, by definition, separable and have only one
# dimension, these members should always be set to 1.
input_dims = 1
output_dims = 1
is_separable = True
def __init__(self, thresh):
mtransforms.Transform.__init__(self)
self.thresh = thresh
def transform_non_affine(self, a):
"""
This transform takes an Nx1 ``numpy`` array and returns a
transformed copy. Since the range of the Mercator scale
is limited by the user-specified threshold, the input
array must be masked to contain only valid values.
``matplotlib`` will handle masked arrays and remove the
out-of-range data from the plot. Importantly, the
``transform`` method *must* return an array that is the
same shape as the input array, since these values need to
remain synchronized with values in the other dimension.
"""
masked = ma.masked_where((a < -self.thresh) | (a > self.thresh), a)
if masked.mask.any():
return ma.log(np.abs(ma.tan(masked) + 1.0 / ma.cos(masked)))
else:
return np.log(np.abs(np.tan(a) + 1.0 / np.cos(a)))
def inverted(self):
"""
Override this method so matplotlib knows how to get the
inverse transform for this transform.
"""
return MercatorLatitudeScale.InvertedMercatorLatitudeTransform(self.thresh)
class InvertedMercatorLatitudeTransform(mtransforms.Transform):
input_dims = 1
output_dims = 1
is_separable = True
def __init__(self, thresh):
mtransforms.Transform.__init__(self)
self.thresh = thresh
def transform_non_affine(self, a):
return np.arctan(np.sinh(a))
def inverted(self):
return MercatorLatitudeScale.MercatorLatitudeTransform(self.thresh)
# Now that the Scale class has been defined, it must be registered so
# that ``matplotlib`` can find it.
mscale.register_scale(MercatorLatitudeScale)
if __name__ == '__main__':
import matplotlib.pyplot as plt
t = np.arange(-180.0, 180.0, 0.1)
s = t / 360.0 * np.pi
plt.plot(t, s, '-', lw=2)
plt.gca().set_yscale('mercator')
plt.xlabel('Longitude')
plt.ylabel('Latitude')
plt.title('Mercator: Projection of the Oppressor')
plt.grid(True)
plt.show()
| apache-2.0 | 2,825,477,210,719,256,000 | 607,979,865,881,057,200 | 36 | 87 | 0.628496 | false |
strahlex/machinekit | src/emc/usr_intf/gscreen/keybindings.py | 28 | 3632 | # Gscreen is Copyright (c) 20013 Chris Morley
#
# Gscreen is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Gscreen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# This holds/converts the generic function keyword to the actual function call name
# it returns this name so Gscreen can call the function to actually do something.
# you can add or change these
class Keycalls:
def __init__(self):
self.ESTOP = 'on_keycall_ESTOP'
self.POWER = 'on_keycall_POWER'
self.ABORT = 'on_keycall_ABORT'
self.XPOS = 'on_keycall_XPOS'
self.XNEG = 'on_keycall_XNEG'
self.YPOS = 'on_keycall_YPOS'
self.YNEG = 'on_keycall_YNEG'
self.ZPOS = 'on_keycall_ZPOS'
self.ZNEG = 'on_keycall_ZNEG'
self.APOS = 'on_keycall_APOS'
self.ANEG = 'on_keycall_ANEG'
self.INCREMENTS = 'on_keycall_INCREMENTS'
self.TEST = 'on_keycall_INCREMENTS'
def __getitem__(self, item):
return getattr(self, item)
def __setitem__(self, item, value):
return setattr(self, item, value)
# This holds/converts the actual keypress (keyname = gtk.gdk.keyval_name(event.keyval))
# to a generic function keyword
# you can add or change these.
class Keybinding:
def __init__(self):
self.F1 = 'ESTOP'
self.F2 = 'POWER'
self.Escape = 'ABORT'
self.Up = 'YPOS'
self.Down = 'YNEG'
self.Right = 'XPOS'
self.Left = 'XNEG'
self.Page_Up = 'ZPOS'
self.Page_Down = 'ZNEG'
self.bracketleft = 'APOS'
self.bracketright = 'ANEG'
self.i = 'INCREMENTS'
self.I = 'INCREMENTS'
def __getitem__(self, item):
return getattr(self, item)
def __setitem__(self, item, value):
return setattr(self, item, value)
# These is the public methods for key conversion to function call name.
# get_call and get_binding are for confirmation of a call or binding entry.
# convert() takes a key string (from gtk.gdk.keyval_name(event.keyval)) and converts it to a function call string or returns None
# add_call and add_binding allow adding or changing calls or bindings
# add_conversion() does both at the same time
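# Example sketch (editorial addition): the typical conversion chain,
#   k = Keylookup()
#   k.convert('F1')         # -> 'on_keycall_ESTOP'
#   k.convert('Page_Up')    # -> 'on_keycall_ZPOS'
#   k.convert('unknown')    # -> None (no binding defined)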
class Keylookup:
def __init__(self):
self.keycall = Keycalls()
self.keybinding = Keybinding()
def get_call(self,binding):
try:
return self.keycall[binding]
except:
print "No key function call"
return None
def get_binding(self,key):
try:
return self.keybinding[key]
except:
print "No key binding"
return None
def convert(self,key):
try:
b = self.keybinding[key]
return self.keycall[b]
except:
return None
def add_binding(self,key,binding):
try:
self.keybinding[key] = binding
except:
print "Binding for key %s could not be added"% key
def add_call(self,binding,function):
try:
self.keycall[binding] = function
except:
print "Binding %s could not be added"% binding
def add_conversion(self,key,binding,function):
self.add_binding(key,binding)
self.add_call(binding,function)
| lgpl-2.1 | 2,081,090,418,237,782,800 | 8,748,659,073,804,405,000 | 33.264151 | 129 | 0.628304 | false |
LEXmono/q | urllib3/contrib/ntlmpool.py | 312 | 4478 | """
NTLM authenticating pool, contributed by erikcederstran
Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
"""
from __future__ import absolute_import
from logging import getLogger
from ntlm import ntlm
from .. import HTTPSConnectionPool
from ..packages.six.moves.http_client import HTTPSConnection
log = getLogger(__name__)
class NTLMConnectionPool(HTTPSConnectionPool):
"""
Implements an NTLM authentication version of an urllib3 connection pool
"""
scheme = 'https'
def __init__(self, user, pw, authurl, *args, **kwargs):
"""
authurl is a random URL on the server that is protected by NTLM.
user is the Windows user, probably in the DOMAIN\\username format.
pw is the password for the user.
"""
super(NTLMConnectionPool, self).__init__(*args, **kwargs)
self.authurl = authurl
self.rawuser = user
user_parts = user.split('\\', 1)
self.domain = user_parts[0].upper()
self.user = user_parts[1]
self.pw = pw
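    # Usage sketch (editorial addition; host and credentials are
    # illustrative). The NTLM handshake itself happens lazily in
    # _new_conn() below:
    #   pool = NTLMConnectionPool('DOMAIN\\user', 'secret', authurl='/',
    #                             host='server.example')
    #   response = pool.urlopen('GET', '/')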
def _new_conn(self):
# Performs the NTLM handshake that secures the connection. The socket
# must be kept open while requests are performed.
self.num_connections += 1
log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s',
self.num_connections, self.host, self.authurl)
headers = {}
headers['Connection'] = 'Keep-Alive'
req_header = 'Authorization'
resp_header = 'www-authenticate'
conn = HTTPSConnection(host=self.host, port=self.port)
# Send negotiation message
headers[req_header] = (
'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
log.debug('Request headers: %s', headers)
conn.request('GET', self.authurl, None, headers)
res = conn.getresponse()
reshdr = dict(res.getheaders())
log.debug('Response status: %s %s', res.status, res.reason)
log.debug('Response headers: %s', reshdr)
log.debug('Response data: %s [...]', res.read(100))
# Remove the reference to the socket, so that it can not be closed by
# the response object (we want to keep the socket open)
res.fp = None
# Server should respond with a challenge message
auth_header_values = reshdr[resp_header].split(', ')
auth_header_value = None
for s in auth_header_values:
if s[:5] == 'NTLM ':
auth_header_value = s[5:]
if auth_header_value is None:
raise Exception('Unexpected %s response header: %s' %
(resp_header, reshdr[resp_header]))
# Send authentication message
ServerChallenge, NegotiateFlags = \
ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
self.user,
self.domain,
self.pw,
NegotiateFlags)
headers[req_header] = 'NTLM %s' % auth_msg
log.debug('Request headers: %s', headers)
conn.request('GET', self.authurl, None, headers)
res = conn.getresponse()
log.debug('Response status: %s %s', res.status, res.reason)
log.debug('Response headers: %s', dict(res.getheaders()))
log.debug('Response data: %s [...]', res.read()[:100])
if res.status != 200:
if res.status == 401:
raise Exception('Server rejected request: wrong '
'username or password')
raise Exception('Wrong server response: %s %s' %
(res.status, res.reason))
res.fp = None
log.debug('Connection established')
return conn
def urlopen(self, method, url, body=None, headers=None, retries=3,
redirect=True, assert_same_host=True):
if headers is None:
headers = {}
headers['Connection'] = 'Keep-Alive'
return super(NTLMConnectionPool, self).urlopen(method, url, body,
headers, retries,
redirect,
assert_same_host)
| apache-2.0 | 4,658,138,623,685,956,000 | 1,515,095,862,449,454,300 | 38.982143 | 77 | 0.553149 | false |
MAECProject/MAECProject.github.io | documentation/idioms/av_classification/maec_av_classification.py | 1 | 1818 | # Code for MAEC AV Classification Idiom
from maec.package.package import Package
from maec.package.malware_subject import MalwareSubject
from maec.package.analysis import Analysis
from maec.bundle.bundle import Bundle
from maec.bundle.av_classification import AVClassification
from cybox.core import Object
from cybox.objects.win_executable_file_object import WinExecutableFile
# Set up the necessary Package, Malware Subject, Analysis Bundle Instances
p = Package()
ms = MalwareSubject()
b = Bundle()
a = Analysis()
# Set the Malware_Instance_Object_Attributes on the Malware Subject
ms.malware_instance_object_attributes = Object()
ms.malware_instance_object_attributes.properties = WinExecutableFile()
ms.malware_instance_object_attributes.properties.add_hash("076e5b2bae0b4b3a3d81c85610b95cd4")
ms.malware_instance_object_attributes.properties.add_hash("4484e08903744ceeaedd8f5e1bfc06b2c4688e76")
# Populate the Analysis with the metadata relating to the Analysis that was performed
a.method = "static"
a.type_ = "triage"
a.set_findings_bundle(b.id_)
# Set the requisite attributes on the Bundle
b.defined_subject = False
b.content_type = "static analysis tool output"
# Create the AV Classifications
av1 = AVClassification()
av1.name = "Microsoft"
av1.classification_name = "PWS:Win32/Zbot.gen!B"
av2 = AVClassification()
av2.name = "Symantec"
av2.classification_name = "Backdoor.Paproxy"
av3 = AVClassification()
av3.name = "TrendMicro"
av3.classification_name = "TSPY_ZBOT.TD"
# Add the AV classifications to the Bundle
b.add_av_classification(av1)
b.add_av_classification(av2)
b.add_av_classification(av3)
# Build up the full Package/Malware Subject/Analysis/Bundle hierarchy
p.add_malware_subject(ms)
ms.add_analysis(a)
ms.add_findings_bundle(b)
# Output the built up Package to XML
print p.to_xml()
| bsd-3-clause | 8,539,474,669,377,452,000 | 3,039,984,413,890,775,000 | 32.666667 | 101 | 0.79703 | false |
halvertoluke/edx-platform | common/djangoapps/student/tests/test_reset_password.py | 34 | 11864 | """
Test the various password reset flows
"""
import json
import re
import unittest
from django.core.cache import cache
from django.conf import settings
from django.test import TestCase
from django.test.client import RequestFactory
from django.contrib.auth.models import User
from django.contrib.auth.hashers import UNUSABLE_PASSWORD_PREFIX
from django.contrib.auth.tokens import default_token_generator
from django.utils.encoding import force_bytes, force_text
from django.utils.http import urlsafe_base64_encode, base36_to_int, int_to_base36
from mock import Mock, patch
import ddt
from student.views import password_reset, password_reset_confirm_wrapper, SETTING_CHANGE_INITIATED
from student.tests.factories import UserFactory
from student.tests.test_email import mock_render_to_string
from util.testing import EventTestMixin
from .test_microsite import fake_microsite_get_value
@ddt.ddt
class ResetPasswordTests(EventTestMixin, TestCase):
""" Tests that clicking reset password sends email, and doesn't activate the user
"""
request_factory = RequestFactory()
def setUp(self):
super(ResetPasswordTests, self).setUp('student.views.tracker')
self.user = UserFactory.create()
self.user.is_active = False
self.user.save()
self.token = default_token_generator.make_token(self.user)
self.uidb36 = int_to_base36(self.user.id)
self.user_bad_passwd = UserFactory.create()
self.user_bad_passwd.is_active = False
self.user_bad_passwd.password = UNUSABLE_PASSWORD_PREFIX
self.user_bad_passwd.save()
def uidb36_to_uidb64(self, uidb36=None):
""" Converts uidb36 into uidb64 """
return force_text(urlsafe_base64_encode(force_bytes(base36_to_int(uidb36 or self.uidb36))))
@patch('student.views.render_to_string', Mock(side_effect=mock_render_to_string, autospec=True))
def test_user_bad_password_reset(self):
"""Tests password reset behavior for user with password marked UNUSABLE_PASSWORD_PREFIX"""
bad_pwd_req = self.request_factory.post('/password_reset/', {'email': self.user_bad_passwd.email})
bad_pwd_resp = password_reset(bad_pwd_req)
# If they've got an unusable password, we return a successful response code
self.assertEquals(bad_pwd_resp.status_code, 200)
obj = json.loads(bad_pwd_resp.content)
self.assertEquals(obj, {
'success': True,
'value': "('registration/password_reset_done.html', [])",
})
self.assert_no_events_were_emitted()
@patch('student.views.render_to_string', Mock(side_effect=mock_render_to_string, autospec=True))
def test_nonexist_email_password_reset(self):
"""Now test the exception cases with of reset_password called with invalid email."""
bad_email_req = self.request_factory.post('/password_reset/', {'email': self.user.email + "makeItFail"})
bad_email_resp = password_reset(bad_email_req)
# Note: even if the email is bad, we return a successful response code
# This prevents someone potentially trying to "brute-force" find out which
# emails are and aren't registered with edX
self.assertEquals(bad_email_resp.status_code, 200)
obj = json.loads(bad_email_resp.content)
self.assertEquals(obj, {
'success': True,
'value': "('registration/password_reset_done.html', [])",
})
self.assert_no_events_were_emitted()
@patch('student.views.render_to_string', Mock(side_effect=mock_render_to_string, autospec=True))
def test_password_reset_ratelimited(self):
""" Try (and fail) resetting password 30 times in a row on an non-existant email address """
cache.clear()
for i in xrange(30):
good_req = self.request_factory.post('/password_reset/', {
'email': 'thisdoesnotexist{0}@foo.com'.format(i)
})
good_resp = password_reset(good_req)
self.assertEquals(good_resp.status_code, 200)
# then the rate limiter should kick in and give a HttpForbidden response
bad_req = self.request_factory.post('/password_reset/', {'email': '[email protected]'})
bad_resp = password_reset(bad_req)
self.assertEquals(bad_resp.status_code, 403)
self.assert_no_events_were_emitted()
cache.clear()
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
@patch('django.core.mail.send_mail')
@patch('student.views.render_to_string', Mock(side_effect=mock_render_to_string, autospec=True))
def test_reset_password_email(self, send_email):
"""Tests contents of reset password email, and that user is not active"""
good_req = self.request_factory.post('/password_reset/', {'email': self.user.email})
good_req.user = self.user
good_resp = password_reset(good_req)
self.assertEquals(good_resp.status_code, 200)
obj = json.loads(good_resp.content)
self.assertEquals(obj, {
'success': True,
'value': "('registration/password_reset_done.html', [])",
})
(subject, msg, from_addr, to_addrs) = send_email.call_args[0]
self.assertIn("Password reset", subject)
self.assertIn("You're receiving this e-mail because you requested a password reset", msg)
self.assertEquals(from_addr, settings.DEFAULT_FROM_EMAIL)
self.assertEquals(len(to_addrs), 1)
self.assertIn(self.user.email, to_addrs)
self.assert_event_emitted(
SETTING_CHANGE_INITIATED, user_id=self.user.id, setting=u'password', old=None, new=None,
)
#test that the user is not active
self.user = User.objects.get(pk=self.user.pk)
self.assertFalse(self.user.is_active)
re.search(r'password_reset_confirm/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/', msg).groupdict()
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
@patch('django.core.mail.send_mail')
@ddt.data((False, 'http://'), (True, 'https://'))
@ddt.unpack
def test_reset_password_email_https(self, is_secure, protocol, send_email):
"""
Tests that the right url protocol is included in the reset password link
"""
req = self.request_factory.post(
'/password_reset/', {'email': self.user.email}
)
req.is_secure = Mock(return_value=is_secure)
req.user = self.user
password_reset(req)
_, msg, _, _ = send_email.call_args[0]
expected_msg = "Please go to the following page and choose a new password:\n\n" + protocol
self.assertIn(expected_msg, msg)
self.assert_event_emitted(
SETTING_CHANGE_INITIATED, user_id=self.user.id, setting=u'password', old=None, new=None
)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
@patch('django.core.mail.send_mail')
@ddt.data(('Crazy Awesome Site', 'Crazy Awesome Site'), (None, 'edX'))
@ddt.unpack
def test_reset_password_email_domain(self, domain_override, platform_name, send_email):
"""
Tests that the right url domain and platform name is included in
the reset password email
"""
with patch("django.conf.settings.PLATFORM_NAME", platform_name):
req = self.request_factory.post(
'/password_reset/', {'email': self.user.email}
)
req.get_host = Mock(return_value=domain_override)
req.user = self.user
password_reset(req)
_, msg, _, _ = send_email.call_args[0]
reset_msg = "you requested a password reset for your user account at {}"
if domain_override:
reset_msg = reset_msg.format(domain_override)
else:
reset_msg = reset_msg.format(settings.SITE_NAME)
self.assertIn(reset_msg, msg)
sign_off = "The {} Team".format(platform_name)
self.assertIn(sign_off, msg)
self.assert_event_emitted(
SETTING_CHANGE_INITIATED, user_id=self.user.id, setting=u'password', old=None, new=None
)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
@patch("microsite_configuration.microsite.get_value", fake_microsite_get_value)
@patch('django.core.mail.send_mail')
def test_reset_password_email_microsite(self, send_email):
"""
Tests that the right url domain and platform name is included in
the reset password email
"""
req = self.request_factory.post(
'/password_reset/', {'email': self.user.email}
)
req.get_host = Mock(return_value=None)
req.user = self.user
password_reset(req)
_, msg, from_addr, _ = send_email.call_args[0]
reset_msg = "you requested a password reset for your user account at openedx.localhost"
self.assertIn(reset_msg, msg)
self.assert_event_emitted(
SETTING_CHANGE_INITIATED, user_id=self.user.id, setting=u'password', old=None, new=None
)
self.assertEqual(from_addr, "[email protected]")
@patch('student.views.password_reset_confirm')
def test_reset_password_bad_token(self, reset_confirm):
"""Tests bad token and uidb36 in password reset"""
bad_reset_req = self.request_factory.get('/password_reset_confirm/NO-OP/')
password_reset_confirm_wrapper(bad_reset_req, 'NO', 'OP')
confirm_kwargs = reset_confirm.call_args[1]
self.assertEquals(confirm_kwargs['uidb64'], self.uidb36_to_uidb64('NO'))
self.assertEquals(confirm_kwargs['token'], 'OP')
self.user = User.objects.get(pk=self.user.pk)
self.assertFalse(self.user.is_active)
@patch('student.views.password_reset_confirm')
def test_reset_password_good_token(self, reset_confirm):
"""Tests good token and uidb36 in password reset"""
good_reset_req = self.request_factory.get('/password_reset_confirm/{0}-{1}/'.format(self.uidb36, self.token))
password_reset_confirm_wrapper(good_reset_req, self.uidb36, self.token)
confirm_kwargs = reset_confirm.call_args[1]
self.assertEquals(confirm_kwargs['uidb64'], self.uidb36_to_uidb64())
self.assertEquals(confirm_kwargs['token'], self.token)
self.user = User.objects.get(pk=self.user.pk)
self.assertTrue(self.user.is_active)
@patch('student.views.password_reset_confirm')
@patch("microsite_configuration.microsite.get_value", fake_microsite_get_value)
def test_reset_password_good_token_microsite(self, reset_confirm):
"""Tests password reset confirmation page for micro site"""
good_reset_req = self.request_factory.get('/password_reset_confirm/{0}-{1}/'.format(self.uidb36, self.token))
password_reset_confirm_wrapper(good_reset_req, self.uidb36, self.token)
confirm_kwargs = reset_confirm.call_args[1]
self.assertEquals(confirm_kwargs['extra_context']['platform_name'], 'Fake University')
@patch('student.views.password_reset_confirm')
def test_reset_password_with_reused_password(self, reset_confirm):
"""Tests good token and uidb36 in password reset"""
good_reset_req = self.request_factory.get('/password_reset_confirm/{0}-{1}/'.format(self.uidb36, self.token))
password_reset_confirm_wrapper(good_reset_req, self.uidb36, self.token)
confirm_kwargs = reset_confirm.call_args[1]
self.assertEquals(confirm_kwargs['uidb64'], self.uidb36_to_uidb64())
self.assertEquals(confirm_kwargs['token'], self.token)
self.user = User.objects.get(pk=self.user.pk)
self.assertTrue(self.user.is_active)
| agpl-3.0 | -6,638,949,522,384,647,000 | -8,531,300,563,244,310,000 | 43.939394 | 117 | 0.656608 | false |
christiandev/l10n-brazil | __unported__/l10n_br_account_product/sped/nfe/validator/txt.py | 2 | 12103 | # -*- encoding: utf-8 -*-
###############################################################################
# #
# Copyright (C) 2012 Renato Lima - Akretion #
# #
#This program is free software: you can redistribute it and/or modify #
#it under the terms of the GNU Affero General Public License as published by #
#the Free Software Foundation, either version 3 of the License, or #
#(at your option) any later version. #
# #
#This program is distributed in the hope that it will be useful, #
#but WITHOUT ANY WARRANTY; without even the implied warranty of #
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
#GNU Affero General Public License for more details. #
# #
#You should have received a copy of the GNU Affero General Public License #
#along with this program. If not, see <http://www.gnu.org/licenses/>. #
###############################################################################
from openerp import pooler
from openerp.osv import orm
from openerp.tools.translate import _
def validate(cr, uid, ids, context=None):
strErro = u''
pool = pooler.get_pool(cr.dbname)
if context is None:
context = {}
for inv in pool.get('account.invoice').browse(cr, uid, ids):
#Nota fiscal
if inv.issuer == '1' or inv.fiscal_type == 'service' or \
not inv.fiscal_document_electronic:
continue
if not inv.document_serie_id:
strErro = u'Nota Fiscal - Série da nota fiscal\n'
if not inv.fiscal_document_id:
strErro += u'Nota Fiscal - Tipo de documento fiscal\n'
if not inv.document_serie_id.internal_sequence_id:
strErro += u'Nota Fiscal - Número da nota fiscal, a série deve ter uma sequencia interna\n'
#Emitente
if not inv.company_id.partner_id.legal_name:
strErro += u'Emitente - Razão Social\n'
if not inv.company_id.partner_id.name:
strErro += u'Emitente - Fantasia\n'
if not inv.company_id.partner_id.cnpj_cpf:
strErro += u'Emitente - CNPJ/CPF\n'
if not inv.company_id.partner_id.street:
strErro += u'Emitente / Endereço - Logradouro\n'
if not inv.company_id.partner_id.number:
strErro += u'Emitente / Endereço - Número\n'
if not inv.company_id.partner_id.zip:
strErro += u'Emitente / Endereço - CEP\n'
if not inv.company_id.cnae_main_id:
strErro += u'Emitente / CNAE Principal\n'
if not inv.company_id.partner_id.inscr_est:
strErro += u'Emitente / Inscrição Estadual\n'
if not inv.company_id.partner_id.state_id:
strErro += u'Emitente / Endereço - Estado\n'
else:
if not inv.company_id.partner_id.state_id.ibge_code:
strErro += u'Emitente / Endereço - Código do IBGE do estado\n'
if not inv.company_id.partner_id.state_id.name:
strErro += u'Emitente / Endereço - Nome do estado\n'
if not inv.company_id.partner_id.l10n_br_city_id:
strErro += u'Emitente / Endereço - município\n'
else:
if not inv.company_id.partner_id.l10n_br_city_id.name:
strErro += u'Emitente / Endereço - Nome do município\n'
if not inv.company_id.partner_id.l10n_br_city_id.ibge_code:
strErro += u'Emitente / Endereço - Código do IBGE do município\n'
if not inv.company_id.partner_id.country_id:
strErro += u'Emitente / Endereço - país\n'
else:
if not inv.company_id.partner_id.country_id.name:
strErro += u'Emitente / Endereço - Nome do país\n'
if not inv.company_id.partner_id.country_id.bc_code:
strErro += u'Emitente / Endereço - Código do BC do país\n'
#Destinatário
if inv.partner_id.is_company and not inv.partner_id.legal_name:
strErro += u'Destinatário - Razão Social\n'
if inv.partner_id.country_id.id == inv.company_id.partner_id.country_id.id:
if not inv.partner_id.cnpj_cpf:
strErro += u'Destinatário - CNPJ/CPF\n'
if not inv.partner_id.street:
strErro += u'Destinatário / Endereço - Logradouro\n'
if not inv.partner_id.number:
strErro += u'Destinatário / Endereço - Número\n'
if inv.partner_id.country_id.id == inv.company_id.partner_id.country_id.id:
if not inv.partner_id.zip:
strErro += u'Destinatário / Endereço - CEP\n'
if inv.partner_id.country_id.id == inv.company_id.partner_id.country_id.id:
if not inv.partner_id.state_id:
strErro += u'Destinatário / Endereço - Estado\n'
else:
if not inv.partner_id.state_id.ibge_code:
strErro += u'Destinatário / Endereço - Código do IBGE do estado\n'
if not inv.partner_id.state_id.name:
strErro += u'Destinatário / Endereço - Nome do estado\n'
if inv.partner_id.country_id.id == inv.company_id.partner_id.country_id.id:
if not inv.partner_id.l10n_br_city_id:
strErro += u'Destinatário / Endereço - Município\n'
else:
if not inv.partner_id.l10n_br_city_id.name:
strErro += u'Destinatário / Endereço - Nome do município\n'
if not inv.partner_id.l10n_br_city_id.ibge_code:
strErro += u'Destinatário / Endereço - Código do IBGE do município\n'
if not inv.partner_id.country_id:
strErro += u'Destinatário / Endereço - País\n'
else:
if not inv.partner_id.country_id.name:
strErro += u'Destinatário / Endereço - Nome do país\n'
if not inv.partner_id.country_id.bc_code:
strErro += u'Destinatário / Endereço - Código do BC do país\n'
#endereco de entrega
if inv.partner_shipping_id:
if inv.partner_id.id != inv.partner_shipping_id.id:
if not inv.partner_shipping_id.street:
strErro += u'Destinatário / Endereço de Entrega - Logradouro\n'
if not inv.partner_shipping_id.number:
strErro += u'Destinatário / Endereço de Entrega - Número\n'
if not inv.partner_shipping_id.zip:
strErro += u'Destinatário / Endereço de Entrega - CEP\n'
if not inv.partner_shipping_id.state_id:
strErro += u'Destinatário / Endereço de Entrega - Estado\n'
else:
if not inv.partner_shipping_id.state_id.ibge_code:
strErro += u'Destinatário / Endereço de Entrega - Código do IBGE do estado\n'
if not inv.partner_shipping_id.state_id.name:
strErro += u'Destinatário / Endereço de Entrega - Nome do estado\n'
if not inv.partner_shipping_id.l10n_br_city_id:
strErro += u'Destinatário / Endereço - Município\n'
else:
if not inv.partner_shipping_id.l10n_br_city_id.name:
strErro += u'Destinatário / Endereço de Entrega - Nome do município\n'
if not inv.partner_shipping_id.l10n_br_city_id.ibge_code:
strErro += u'Destinatário / Endereço de Entrega - Código do IBGE do município\n'
if not inv.partner_shipping_id.country_id:
strErro += u'Destinatário / Endereço de Entrega - País\n'
else:
if not inv.partner_shipping_id.country_id.name:
strErro += u'Destinatário / Endereço de Entrega - Nome do país\n'
if not inv.partner_shipping_id.country_id.bc_code:
strErro += u'Destinatário / Endereço de Entrega - Código do BC do país\n'
#produtos
for inv_line in inv.invoice_line:
if inv_line.product_id:
if not inv_line.product_id.default_code:
strErro += u'Produtos e Serviços: %s, Qtde: %s - Referência/Código do produto\n' % (inv_line.product_id.name, inv_line.quantity)
if not inv_line.product_id.name:
strErro += u'Produtos e Serviços: %s - %s, Qtde: %s - Nome do produto\n' % (inv_line.product_id.default_code, inv_line.product_id.name, inv_line.quantity)
if not inv_line.cfop_id:
strErro += u'Produtos e Serviços: %s - %s, Qtde: %s - CFOP\n' % (inv_line.product_id.default_code, inv_line.product_id.name, inv_line.quantity)
else:
if not inv_line.cfop_id.code:
strErro += u'Produtos e Serviços: %s - %s, Qtde: %s - Código do CFOP\n' % (inv_line.product_id.default_code, inv_line.product_id.name, inv_line.quantity)
if not inv_line.uos_id:
strErro += u'Produtos e Serviços: %s - %s, Qtde: %s - Unidade de medida\n' % (inv_line.product_id.default_code, inv_line.product_id.name, inv_line.quantity)
if not inv_line.quantity:
strErro += u'Produtos e Serviços: %s - %s, Qtde: %s - Quantidade\n' % (inv_line.product_id.default_code, inv_line.product_id.name, inv_line.quantity)
#Se for Documento Fiscal de Produto
if inv.fiscal_type == 'product':
if not inv_line.fiscal_classification_id:
strErro += u'Produtos e Serviços: %s - %s, Qtde: %s - Classificação Fiscal(NCM)\n' % (inv_line.product_id.default_code, inv_line.product_id.name, inv_line.quantity)
if not inv_line.price_unit:
strErro += u'Produtos e Serviços: %s - %s, Qtde: %s - Preco unitario\n' % (inv_line.product_id.default_code, inv_line.product_id.name, inv_line.quantity)
if inv_line.product_type == 'product':
if not inv_line.icms_cst_id:
strErro += u'Produtos e Serviços: %s - %s, Qtde: %s - CST do ICMS\n' % (inv_line.product_id.default_code, inv_line.product_id.name, inv_line.quantity)
if not inv_line.ipi_cst_id:
strErro += u'Produtos e Serviços: %s - %s, Qtde: %s - CST do IPI\n' % (inv_line.product_id.default_code, inv_line.product_id.name, inv_line.quantity)
if inv_line.product_type == 'service':
if not inv_line.issqn_type:
strErro += u'Produtos e Serviços: %s - %s, Qtde: %s - Tipo do ISSQN\n' % (inv_line.product_id.default_code, inv_line.product_id.name, inv_line.quantity)
if not inv_line.service_type_id:
strErro += u'Produtos e Serviços: %s - %s, Qtde: %s - Tipo do Serviço\n' % (inv_line.product_id.default_code, inv_line.product_id.name, inv_line.quantity)
if not inv_line.pis_cst_id:
strErro += u'Produtos e Serviços: %s - %s, Qtde: %s - CST do PIS\n' % (inv_line.product_id.default_code, inv_line.product_id.name, inv_line.quantity)
if not inv_line.cofins_cst_id:
strErro += u'Produtos e Serviços: %s - %s, Qtde: %s - CST do COFINS\n' % (inv_line.product_id.default_code, inv_line.product_id.name, inv_line.quantity)
if strErro:
raise orm.except_orm(
_('Error !'), ("Error Validating NFE:\n '%s'") % (strErro, ))
return True
| agpl-3.0 | 834,232,554,838,592,000 | 9,171,956,236,636,623,000 | 50.874459 | 188 | 0.550864 | false |
arthurdarcet/riverrun | riverrun/http/utils.py | 1 | 1593 | import bson
import cherrypy
import functools
import json
import logging
import traceback
logger = logging.getLogger(__name__)
def json_exposed(fn):
@cherrypy.expose
@functools.wraps(fn)
def wrapper(*args, **kwargs):
try:
code = 200
value = fn(*args, **kwargs)
except cherrypy.HTTPError as e:
code = e.code
value = {'status': e.code, 'error': e.reason}
except Exception as e:
msg = '{}: {}'.format(e.__class__.__qualname__, e)
logger.error(msg)
logger.debug(traceback.format_exc())
code = 500
value = {'status': 500, 'error': msg}
cherrypy.response.headers['Content-Type'] = 'application/json'
cherrypy.response.status = code
return json.dumps(value).encode('utf-8')
return wrapper
def paginated(fn):
@functools.wraps(fn)
def wrapper(*args, page=0, **kwargs):
try:
page = int(page)
except TypeError:
raise cherrypy.NotFound()
else:
return fn(*args, **kwargs).skip(page * 30).limit(30)
return wrapper
class _LogManager(cherrypy._cplogging.LogManager):
def __init__(self):
self.error_log = logging.getLogger('cherrypy.error')
self.access_log = logging.getLogger('cherrypy.access')
self.access_log_format = '{h}, {s} "{r}"'
class BaseApp:
def mount(self):
app = cherrypy.Application(self, self.mount_to, getattr(self, 'config', {'/': {}}))
app.log = _LogManager()
cherrypy.tree.mount(app)
| mit | -2,622,728,074,597,176,000 | -4,353,124,877,633,092,000 | 28.5 | 91 | 0.578154 | false |
tow/dnspython | tests/test_rrset.py | 59 | 2282 | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import dns.rrset
class RRsetTestCase(unittest.TestCase):
def testEqual1(self):
r1 = dns.rrset.from_text('foo', 300, 'in', 'a', '10.0.0.1', '10.0.0.2')
r2 = dns.rrset.from_text('FOO', 300, 'in', 'a', '10.0.0.2', '10.0.0.1')
self.failUnless(r1 == r2)
def testEqual2(self):
r1 = dns.rrset.from_text('foo', 300, 'in', 'a', '10.0.0.1', '10.0.0.2')
r2 = dns.rrset.from_text('FOO', 600, 'in', 'a', '10.0.0.2', '10.0.0.1')
self.failUnless(r1 == r2)
def testNotEqual1(self):
r1 = dns.rrset.from_text('fooa', 30, 'in', 'a', '10.0.0.1', '10.0.0.2')
r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
self.failUnless(r1 != r2)
def testNotEqual2(self):
r1 = dns.rrset.from_text('foo', 30, 'in', 'a', '10.0.0.1', '10.0.0.3')
r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
self.failUnless(r1 != r2)
def testNotEqual3(self):
r1 = dns.rrset.from_text('foo', 30, 'in', 'a', '10.0.0.1', '10.0.0.2',
'10.0.0.3')
r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
self.failUnless(r1 != r2)
def testNotEqual4(self):
r1 = dns.rrset.from_text('foo', 30, 'in', 'a', '10.0.0.1')
r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
self.failUnless(r1 != r2)
if __name__ == '__main__':
unittest.main()
| isc | -8,946,726,623,541,176,000 | -117,451,774,656,938,460 | 41.259259 | 79 | 0.595968 | false |
tudorvio/tempest | tempest/api/volume/test_volumes_list.py | 10 | 9131 | # Copyright 2012 OpenStack Foundation
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import operator
from oslo_log import log as logging
from testtools import matchers
from tempest.api.volume import base
from tempest.common.utils import data_utils
from tempest import test
LOG = logging.getLogger(__name__)
class VolumesV2ListTestJSON(base.BaseVolumeTest):
"""
This test creates a number of 1G volumes. To run successfully,
ensure that the backing file for the volume group that Nova uses
has space for at least 3 1G volumes!
If you are running a Devstack environment, ensure that the
VOLUME_BACKING_FILE_SIZE is at least 4G in your localrc
"""
VOLUME_FIELDS = ('id', 'name')
def assertVolumesIn(self, fetched_list, expected_list, fields=None):
if fields:
expected_list = map(operator.itemgetter(*fields), expected_list)
fetched_list = map(operator.itemgetter(*fields), fetched_list)
missing_vols = [v for v in expected_list if v not in fetched_list]
if len(missing_vols) == 0:
return
def str_vol(vol):
return "%s:%s" % (vol['id'], vol[self.name])
raw_msg = "Could not find volumes %s in expected list %s; fetched %s"
self.fail(raw_msg % ([str_vol(v) for v in missing_vols],
[str_vol(v) for v in expected_list],
[str_vol(v) for v in fetched_list]))
@classmethod
def setup_clients(cls):
super(VolumesV2ListTestJSON, cls).setup_clients()
cls.client = cls.volumes_client
@classmethod
def resource_setup(cls):
super(VolumesV2ListTestJSON, cls).resource_setup()
cls.name = cls.VOLUME_FIELDS[1]
# Create 3 test volumes
cls.volume_list = []
cls.volume_id_list = []
cls.metadata = {'Type': 'work'}
for i in range(3):
volume = cls.create_volume(metadata=cls.metadata)
volume = cls.client.show_volume(volume['id'])
cls.volume_list.append(volume)
cls.volume_id_list.append(volume['id'])
@classmethod
def resource_cleanup(cls):
# Delete the created volumes
for volid in cls.volume_id_list:
cls.client.delete_volume(volid)
cls.client.wait_for_resource_deletion(volid)
super(VolumesV2ListTestJSON, cls).resource_cleanup()
def _list_by_param_value_and_assert(self, params, with_detail=False):
"""
Perform list or list_details action with given params
and validates result.
"""
if with_detail:
fetched_vol_list = \
self.client.list_volumes(detail=True, params=params)
else:
fetched_vol_list = self.client.list_volumes(params=params)
# Validating params of fetched volumes
# In v2, only list detail view includes items in params.
# In v1, list view and list detail view are same. So the
# following check should be run when 'with_detail' is True
# or v1 tests.
if with_detail or self._api_version == 1:
for volume in fetched_vol_list:
for key in params:
msg = "Failed to list volumes %s by %s" % \
('details' if with_detail else '', key)
if key == 'metadata':
self.assertThat(
volume[key].items(),
matchers.ContainsAll(params[key].items()),
msg)
else:
self.assertEqual(params[key], volume[key], msg)
@test.attr(type='smoke')
@test.idempotent_id('0b6ddd39-b948-471f-8038-4787978747c4')
def test_volume_list(self):
# Get a list of Volumes
# Fetch all volumes
fetched_list = self.client.list_volumes()
self.assertVolumesIn(fetched_list, self.volume_list,
fields=self.VOLUME_FIELDS)
@test.idempotent_id('adcbb5a7-5ad8-4b61-bd10-5380e111a877')
def test_volume_list_with_details(self):
# Get a list of Volumes with details
# Fetch all Volumes
fetched_list = self.client.list_volumes(detail=True)
self.assertVolumesIn(fetched_list, self.volume_list)
@test.idempotent_id('a28e8da4-0b56-472f-87a8-0f4d3f819c02')
def test_volume_list_by_name(self):
volume = self.volume_list[data_utils.rand_int_id(0, 2)]
params = {self.name: volume[self.name]}
fetched_vol = self.client.list_volumes(params=params)
self.assertEqual(1, len(fetched_vol), str(fetched_vol))
self.assertEqual(fetched_vol[0][self.name],
volume[self.name])
@test.idempotent_id('2de3a6d4-12aa-403b-a8f2-fdeb42a89623')
def test_volume_list_details_by_name(self):
volume = self.volume_list[data_utils.rand_int_id(0, 2)]
params = {self.name: volume[self.name]}
fetched_vol = self.client.list_volumes(detail=True, params=params)
self.assertEqual(1, len(fetched_vol), str(fetched_vol))
self.assertEqual(fetched_vol[0][self.name],
volume[self.name])
@test.idempotent_id('39654e13-734c-4dab-95ce-7613bf8407ce')
def test_volumes_list_by_status(self):
params = {'status': 'available'}
fetched_list = self.client.list_volumes(params=params)
self._list_by_param_value_and_assert(params)
self.assertVolumesIn(fetched_list, self.volume_list,
fields=self.VOLUME_FIELDS)
@test.idempotent_id('2943f712-71ec-482a-bf49-d5ca06216b9f')
def test_volumes_list_details_by_status(self):
params = {'status': 'available'}
fetched_list = self.client.list_volumes(detail=True, params=params)
for volume in fetched_list:
self.assertEqual('available', volume['status'])
self.assertVolumesIn(fetched_list, self.volume_list)
@test.idempotent_id('c0cfa863-3020-40d7-b587-e35f597d5d87')
def test_volumes_list_by_availability_zone(self):
volume = self.volume_list[data_utils.rand_int_id(0, 2)]
zone = volume['availability_zone']
params = {'availability_zone': zone}
fetched_list = self.client.list_volumes(params=params)
self._list_by_param_value_and_assert(params)
self.assertVolumesIn(fetched_list, self.volume_list,
fields=self.VOLUME_FIELDS)
@test.idempotent_id('e1b80d13-94f0-4ba2-a40e-386af29f8db1')
def test_volumes_list_details_by_availability_zone(self):
volume = self.volume_list[data_utils.rand_int_id(0, 2)]
zone = volume['availability_zone']
params = {'availability_zone': zone}
fetched_list = self.client.list_volumes(detail=True, params=params)
for volume in fetched_list:
self.assertEqual(zone, volume['availability_zone'])
self.assertVolumesIn(fetched_list, self.volume_list)
@test.idempotent_id('b5ebea1b-0603-40a0-bb41-15fcd0a53214')
def test_volume_list_with_param_metadata(self):
# Test to list volumes when metadata param is given
params = {'metadata': self.metadata}
self._list_by_param_value_and_assert(params)
@test.idempotent_id('1ca92d3c-4a8e-4b43-93f5-e4c7fb3b291d')
def test_volume_list_with_detail_param_metadata(self):
# Test to list volumes details when metadata param is given
params = {'metadata': self.metadata}
self._list_by_param_value_and_assert(params, with_detail=True)
@test.idempotent_id('777c87c1-2fc4-4883-8b8e-5c0b951d1ec8')
def test_volume_list_param_display_name_and_status(self):
# Test to list volume when display name and status param is given
volume = self.volume_list[data_utils.rand_int_id(0, 2)]
params = {self.name: volume[self.name],
'status': 'available'}
self._list_by_param_value_and_assert(params)
@test.idempotent_id('856ab8ca-6009-4c37-b691-be1065528ad4')
def test_volume_list_with_detail_param_display_name_and_status(self):
# Test to list volume when name and status param is given
volume = self.volume_list[data_utils.rand_int_id(0, 2)]
params = {self.name: volume[self.name],
'status': 'available'}
self._list_by_param_value_and_assert(params, with_detail=True)
class VolumesV1ListTestJSON(VolumesV2ListTestJSON):
_api_version = 1
VOLUME_FIELDS = ('id', 'display_name')
| apache-2.0 | -2,185,445,639,934,998,000 | -6,390,878,171,257,482,000 | 41.469767 | 78 | 0.633775 | false |
adoosii/edx-platform | pavelib/acceptance_test.py | 12 | 1983 | """
Acceptance test tasks
"""
from paver.easy import task, cmdopts, needs
from pavelib.utils.test.suites import AcceptanceTestSuite
from optparse import make_option
try:
from pygments.console import colorize
except ImportError:
colorize = lambda color, text: text # pylint: disable=invalid-name
__test__ = False # do not collect
@task
@needs(
'pavelib.prereqs.install_prereqs',
'pavelib.utils.test.utils.clean_reports_dir',
)
@cmdopts([
("system=", "s", "System to act on"),
("default_store=", "m", "Default modulestore to use for course creation"),
("fasttest", "a", "Run without collectstatic"),
("extra_args=", "e", "adds as extra args to the test command"),
make_option("--verbose", action="store_const", const=2, dest="verbosity"),
make_option("-q", "--quiet", action="store_const", const=0, dest="verbosity"),
make_option("-v", "--verbosity", action="count", dest="verbosity"),
make_option("--pdb", action="store_true", help="Launches an interactive debugger upon error"),
])
def test_acceptance(options):
"""
Run the acceptance tests for the either lms or cms
"""
opts = {
'fasttest': getattr(options, 'fasttest', False),
'system': getattr(options, 'system', None),
'default_store': getattr(options, 'default_store', None),
'verbosity': getattr(options, 'verbosity', 3),
'extra_args': getattr(options, 'extra_args', ''),
'pdb': getattr(options, 'pdb', False),
}
if opts['system'] not in ['cms', 'lms']:
msg = colorize(
'red',
'No system specified, running tests for both cms and lms.'
)
print msg
if opts['default_store'] not in ['draft', 'split']:
msg = colorize(
'red',
'No modulestore specified, running tests for both draft and split.'
)
print msg
suite = AcceptanceTestSuite('{} acceptance'.format(opts['system']), **opts)
suite.run()
| agpl-3.0 | -4,344,827,877,160,944,000 | 3,333,200,961,691,912,000 | 33.189655 | 98 | 0.619264 | false |
khchine5/xl | lino_xl/lib/ledger/mixins.py | 1 | 10913 | # -*- coding: UTF-8 -*-
# Copyright 2008-2017 Luc Saffre
# License: BSD (see file COPYING for details)
"""Model mixins for `lino_xl.lib.ledger`.
.. autosummary::
"""
from __future__ import unicode_literals
from builtins import str
from django.db import models
from lino.api import dd, rt, _
from lino.mixins import Sequenced
from etgen.html import E
from .choicelists import VoucherTypes
from .roles import LedgerUser
# from etgen.html import E
# from lino.modlib.notify.utils import rich_text_to_elems
# FKMATCH = False
if dd.is_installed('ledger'):
project_model = dd.plugins.ledger.project_model
else:
project_model = None
class ProjectRelated(dd.Model):
class Meta:
abstract = True
project = dd.ForeignKey(
project_model,
blank=True, null=True,
related_name="%(app_label)s_%(class)s_set_by_project")
@classmethod
def get_registrable_fields(cls, site):
for f in super(ProjectRelated, cls).get_registrable_fields(site):
yield f
if project_model:
yield 'project'
class PartnerRelated(dd.Model):
class Meta:
abstract = True
partner = dd.ForeignKey(
'contacts.Partner',
related_name="%(app_label)s_%(class)s_set_by_partner",
blank=True, null=True)
payment_term = dd.ForeignKey(
'ledger.PaymentTerm',
related_name="%(app_label)s_%(class)s_set_by_payment_term",
blank=True, null=True)
def get_partner(self):
"""Overrides Voucher.get_partner"""
return self.partner
def get_print_language(self):
p = self.get_partner()
if p is not None:
return p.language
def get_recipient(self):
return self.partner
recipient = property(get_recipient)
def partner_changed(self, ar=None):
# does nothing but we need it so that subclasses like
# BankAccount can call super().partner_changed()
pass
@classmethod
def get_registrable_fields(cls, site):
for f in super(PartnerRelated, cls).get_registrable_fields(site):
yield f
yield 'partner'
yield 'payment_term'
def full_clean(self, *args, **kw):
self.fill_defaults()
super(PartnerRelated, self).full_clean(*args, **kw)
def fill_defaults(self):
if not self.payment_term and self.partner_id:
self.payment_term = self.partner.payment_term
if self.payment_term:
self.payment_term_changed()
def payment_term_changed(self, ar=None):
if self.payment_term:
self.due_date = self.payment_term.get_due_date(self.entry_date)
class Matching(dd.Model):
class Meta:
abstract = True
match = dd.CharField(
_("Match"), max_length=20, blank=True,
help_text=_("The movement to be matched."))
@classmethod
def get_match_choices(cls, journal, partner):
"""This is the general algorithm.
"""
matchable_accounts = rt.models.accounts.Account.objects.filter(
matchrule__journal=journal)
fkw = dict(account__in=matchable_accounts)
fkw.update(cleared=False)
if partner:
fkw.update(partner=partner)
qs = rt.models.ledger.Movement.objects.filter(**fkw)
qs = qs.order_by('value_date')
# qs = qs.distinct('match')
return qs.values_list('match', flat=True)
@dd.chooser(simple_values=True)
def match_choices(cls, journal, partner):
# todo: move this to implementing classes?
return cls.get_match_choices(journal, partner)
def get_match(self):
return self.match or self.get_default_match()
class VoucherItem(dd.Model):
allow_cascaded_delete = ['voucher']
class Meta:
abstract = True
# title = models.CharField(_("Description"), max_length=200, blank=True)
def get_row_permission(self, ar, state, ba):
"""Items of registered invoices may not be edited
"""
if not self.voucher.state.editable:
if not ba.action.readonly:
return False
return super(VoucherItem, self).get_row_permission(ar, state, ba)
def get_ana_account(self):
return None
class SequencedVoucherItem(Sequenced):
class Meta:
abstract = True
def get_siblings(self):
return self.voucher.items.all()
class AccountVoucherItem(VoucherItem, SequencedVoucherItem):
class Meta:
abstract = True
account = dd.ForeignKey(
'accounts.Account',
related_name="%(app_label)s_%(class)s_set_by_account")
def get_base_account(self, tt):
return self.account
@dd.chooser()
def account_choices(self, voucher):
if voucher and voucher.journal:
return voucher.journal.get_allowed_accounts()
return rt.models.accounts.Account.objects.none()
# def set_partner_invoice_account(sender, instance=None, **kwargs):
# if instance.account:
# return
# if not instance.voucher:
# return
# p = instance.voucher.partner
# if not p:
# return
# tt = instance.voucher.get_trade_type()
# instance.account = tt.get_partner_invoice_account(p)
# @dd.receiver(dd.post_analyze)
# def on_post_analyze(sender, **kw):
# for m in rt.models_by_base(AccountVoucherItem):
# dd.post_init.connect(set_partner_invoice_account, sender=m)
def JournalRef(**kw):
# ~ kw.update(blank=True,null=True) # Django Ticket #12708
kw.update(related_name="%(app_label)s_%(class)s_set_by_journal")
return dd.ForeignKey('ledger.Journal', **kw)
def VoucherNumber(*args, **kwargs):
return models.IntegerField(*args, **kwargs)
class PeriodRange(dd.Model):
class Meta:
abstract = True
start_period = dd.ForeignKey(
'ledger.AccountingPeriod',
blank=True, verbose_name=_("Start period"),
related_name="%(app_label)s_%(class)s_set_by_start_period")
end_period = dd.ForeignKey(
'ledger.AccountingPeriod',
blank=True, null=True,
verbose_name=_("End period"),
related_name="%(app_label)s_%(class)s_set_by_end_period")
def get_period_filter(self, fieldname, **kwargs):
return rt.models.ledger.AccountingPeriod.get_period_filter(
fieldname, self.start_period, self.end_period, **kwargs)
class PeriodRangeObservable(dd.Model):
class Meta:
abstract = True
observable_period_field = 'accounting_period'
@classmethod
def setup_parameters(cls, fields):
fields.update(
start_period=dd.ForeignKey(
'ledger.AccountingPeriod',
blank=True, null=True,
help_text=_("Start of observed period range"),
verbose_name=_("Period from")))
fields.update(
end_period=dd.ForeignKey(
'ledger.AccountingPeriod',
blank=True, null=True,
help_text=_(
"Optional end of observed period range. "
"Leave empty to consider only the Start period."),
verbose_name=_("Period until")))
super(PeriodRangeObservable, cls).setup_parameters(fields)
@classmethod
def get_request_queryset(cls, ar, **kwargs):
pv = ar.param_values
qs = super(PeriodRangeObservable, cls).get_request_queryset(ar, **kwargs)
flt = rt.models.ledger.AccountingPeriod.get_period_filter(
cls.observable_period_field, pv.start_period, pv.end_period)
return qs.filter(**flt)
@classmethod
def get_title_tags(cls, ar):
for t in super(PeriodRangeObservable, cls).get_title_tags(ar):
yield t
pv = ar.param_values
if pv.start_period is not None:
if pv.end_period is None:
yield str(pv.start_period)
else:
yield "{}..{}".format(pv.start_period, pv.end_period)
class ItemsByVoucher(dd.Table):
label = _("Content")
required_roles = dd.login_required(LedgerUser)
master_key = 'voucher'
order_by = ["seqno"]
auto_fit_column_widths = True
display_mode = 'html'
preview_limit = 0
class VouchersByPartnerBase(dd.VirtualTable):
"""Shows all ledger vouchers of a given partner.
This is a :class:`lino.core.tables.VirtualTable` with a customized
slave summary.
"""
label = _("Partner vouchers")
required_roles = dd.login_required(LedgerUser)
order_by = ["-entry_date", '-id']
master = 'contacts.Partner'
display_mode = 'summary'
_master_field_name = 'partner'
_voucher_base = PartnerRelated
@classmethod
def get_data_rows(self, ar):
obj = ar.master_instance
rows = []
if obj is not None:
flt = {self._master_field_name: obj}
for M in rt.models_by_base(self._voucher_base):
rows += list(M.objects.filter(**flt))
# def by_date(a, b):
# return cmp(b.entry_date, a.entry_date)
rows.sort(key= lambda i: i.entry_date)
return rows
@dd.displayfield(_("Voucher"))
def voucher(self, row, ar):
return ar.obj2html(row)
@dd.virtualfield('ledger.Movement.partner')
def partner(self, row, ar):
return row.partner
@dd.virtualfield('ledger.Voucher.entry_date')
def entry_date(self, row, ar):
return row.entry_date
@dd.virtualfield('ledger.Voucher.state')
def state(self, row, ar):
return row.state
@classmethod
def get_table_summary(self, obj, ar):
elems = []
sar = self.request(master_instance=obj)
# elems += ["Partner:", unicode(ar.master_instance)]
for voucher in sar:
vc = voucher.get_mti_leaf()
if vc and vc.state.name == "draft":
elems += [ar.obj2html(vc), " "]
vtypes = []
for vt in VoucherTypes.items():
if issubclass(vt.model, self._voucher_base):
vtypes.append(vt)
actions = []
def add_action(btn):
if btn is None:
return False
actions.append(btn)
return True
if not ar.get_user().user_type.readonly:
flt = {self._master_field_name: obj}
for vt in vtypes:
for jnl in vt.get_journals():
sar = vt.table_class.insert_action.request_from(
ar, master_instance=jnl,
known_values=flt)
btn = sar.ar2button(label=str(jnl), icon_name=None)
if len(actions):
actions.append(', ')
actions.append(btn)
elems += [E.br(), str(_("Create voucher in journal")), " "] + actions
return E.div(*elems)
| bsd-2-clause | 3,907,710,314,087,128,000 | -6,794,396,655,166,655,000 | 28.101333 | 81 | 0.598461 | false |
maartenq/ansible | lib/ansible/modules/network/nxos/nxos_vtp_domain.py | 70 | 5923 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_vtp_domain
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages VTP domain configuration.
description:
- Manages VTP domain configuration.
author:
- Gabriele Gerbino (@GGabriele)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- VTP feature must be active on the device to use this module.
- This module is used to manage only VTP domain names.
- VTP domain names are case-sensible.
- If it's never been configured before, VTP version is set to 1 by default.
Otherwise, it leaves the previous configured version untouched.
Use M(nxos_vtp_version) to change it.
- Use this in combination with M(nxos_vtp_password) and M(nxos_vtp_version)
to fully manage VTP operations.
options:
domain:
description:
- VTP domain name.
required: true
'''
EXAMPLES = '''
# ENSURE VTP DOMAIN IS CONFIGURED
- nxos_vtp_domain:
domain: ntc
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ pwd }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"domain": "ntc"}
existing:
description:
- k/v pairs of existing vtp domain
returned: always
type: dict
sample: {"domain": "testing", "version": "2", "vtp_password": "\"}
end_state:
description: k/v pairs of vtp domain after module execution
returned: always
type: dict
sample: {"domain": "ntc", "version": "2", "vtp_password": "\"}
updates:
description: command sent to the device
returned: always
type: list
sample: ["vtp domain ntc"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
from ansible.module_utils.network.nxos.nxos import load_config, run_commands
from ansible.module_utils.network.nxos.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
import re
def execute_show_command(command, module, command_type='cli_show'):
if 'status' not in command:
output = 'json'
else:
output = 'text'
cmds = [{
'command': command,
'output': output,
}]
body = run_commands(module, cmds)
return body
def flatten_list(command_lists):
flat_command_list = []
for command in command_lists:
if isinstance(command, list):
flat_command_list.extend(command)
else:
flat_command_list.append(command)
return flat_command_list
def get_vtp_config(module):
command = 'show vtp status'
body = execute_show_command(
command, module)[0]
vtp_parsed = {}
if body:
version_regex = r'.*VTP version running\s+:\s+(?P<version>\d).*'
domain_regex = r'.*VTP Domain Name\s+:\s+(?P<domain>\S+).*'
try:
match_version = re.match(version_regex, body, re.DOTALL)
version = match_version.groupdict()['version']
except AttributeError:
version = ''
try:
match_domain = re.match(domain_regex, body, re.DOTALL)
domain = match_domain.groupdict()['domain']
except AttributeError:
domain = ''
if domain and version:
vtp_parsed['domain'] = domain
vtp_parsed['version'] = version
vtp_parsed['vtp_password'] = get_vtp_password(module)
return vtp_parsed
def get_vtp_password(module):
command = 'show vtp password'
body = execute_show_command(command, module)[0]
try:
password = body['passwd']
if password:
return str(password)
else:
return ""
except TypeError:
return ""
def main():
argument_spec = dict(
domain=dict(type='str', required=True),
)
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
warnings = list()
check_args(module, warnings)
domain = module.params['domain']
existing = get_vtp_config(module)
end_state = existing
args = dict(domain=domain)
changed = False
proposed = dict((k, v) for k, v in args.items() if v is not None)
delta = dict(set(proposed.items()).difference(existing.items()))
commands = []
if delta:
commands.append(['vtp domain {0}'.format(domain)])
cmds = flatten_list(commands)
if cmds:
if module.check_mode:
module.exit_json(changed=True, commands=cmds)
else:
changed = True
load_config(module, cmds)
end_state = get_vtp_config(module)
if 'configure' in cmds:
cmds.pop(0)
results = {}
results['proposed'] = proposed
results['existing'] = existing
results['end_state'] = end_state
results['updates'] = cmds
results['changed'] = changed
results['warnings'] = warnings
module.exit_json(**results)
if __name__ == '__main__':
main()
| gpl-3.0 | 3,588,925,553,333,836,300 | 7,578,218,883,054,854,000 | 26.807512 | 81 | 0.626709 | false |
anandpdoshi/frappe | frappe/core/page/data_import_tool/importer.py | 1 | 8937 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, json
import frappe.permissions
import frappe.async
from frappe import _
from frappe.utils.csvutils import getlink
from frappe.utils.dateutils import parse_date
from frappe.utils import cint, cstr, flt, getdate, get_datetime
from frappe.core.page.data_import_tool.data_import_tool import get_data_keys
#@frappe.async.handler
@frappe.whitelist()
def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, overwrite=None,
ignore_links=False, pre_process=None, via_console=False):
"""upload data"""
frappe.flags.mute_emails = True
frappe.flags.in_import = True
# extra input params
params = json.loads(frappe.form_dict.get("params") or '{}')
if params.get("submit_after_import"):
submit_after_import = True
if params.get("ignore_encoding_errors"):
ignore_encoding_errors = True
from frappe.utils.csvutils import read_csv_content_from_uploaded_file
def get_data_keys_definition():
return get_data_keys()
def bad_template():
frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator))
def check_data_length():
max_rows = 5000
if not data:
frappe.throw(_("No data found"))
elif not via_console and len(data) > max_rows:
frappe.throw(_("Only allowed {0} rows in one import").format(max_rows))
def get_start_row():
for i, row in enumerate(rows):
if row and row[0]==get_data_keys_definition().data_separator:
return i+1
bad_template()
def get_header_row(key):
return get_header_row_and_idx(key)[0]
def get_header_row_and_idx(key):
for i, row in enumerate(header):
if row and row[0]==key:
return row, i
return [], -1
def filter_empty_columns(columns):
empty_cols = filter(lambda x: x in ("", None), columns)
if empty_cols:
if columns[-1*len(empty_cols):] == empty_cols:
# filter empty columns if they exist at the end
columns = columns[:-1*len(empty_cols)]
else:
frappe.msgprint(_("Please make sure that there are no empty columns in the file."),
raise_exception=1)
return columns
def make_column_map():
doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype)
if row_idx == -1: # old style
return
dt = None
for i, d in enumerate(doctype_row[1:]):
if d not in ("~", "-"):
if d: # value in doctype_row
if doctype_row[i]==dt:
# prev column is doctype (in case of parentfield)
doctype_parentfield[dt] = doctype_row[i+1]
else:
dt = d
doctypes.append(d)
column_idx_to_fieldname[dt] = {}
column_idx_to_fieldtype[dt] = {}
if dt:
column_idx_to_fieldname[dt][i+1] = rows[row_idx + 2][i+1]
column_idx_to_fieldtype[dt][i+1] = rows[row_idx + 4][i+1]
def get_doc(start_idx):
if doctypes:
doc = {}
for idx in xrange(start_idx, len(rows)):
if (not doc) or main_doc_empty(rows[idx]):
for dt in doctypes:
d = {}
for column_idx in column_idx_to_fieldname[dt]:
try:
fieldname = column_idx_to_fieldname[dt][column_idx]
fieldtype = column_idx_to_fieldtype[dt][column_idx]
d[fieldname] = rows[idx][column_idx]
if fieldtype in ("Int", "Check"):
d[fieldname] = cint(d[fieldname])
elif fieldtype in ("Float", "Currency", "Percent"):
d[fieldname] = flt(d[fieldname])
elif fieldtype == "Date":
d[fieldname] = getdate(parse_date(d[fieldname])) if d[fieldname] else None
elif fieldtype == "Datetime":
if d[fieldname]:
_date, _time = d[fieldname].split()
_date = parse_date(d[fieldname])
d[fieldname] = get_datetime(_date + " " + _time)
else:
d[fieldname] = None
except IndexError:
pass
# scrub quotes from name and modified
if d.get("name") and d["name"].startswith('"'):
d["name"] = d["name"][1:-1]
if sum([0 if not val else 1 for val in d.values()]):
d['doctype'] = dt
if dt == doctype:
doc.update(d)
else:
if not overwrite:
d['parent'] = doc["name"]
d['parenttype'] = doctype
d['parentfield'] = doctype_parentfield[dt]
doc.setdefault(d['parentfield'], []).append(d)
else:
break
return doc
else:
doc = frappe._dict(zip(columns, rows[start_idx][1:]))
doc['doctype'] = doctype
return doc
def main_doc_empty(row):
return not (row and ((len(row) > 1 and row[1]) or (len(row) > 2 and row[2])))
users = frappe.db.sql_list("select name from tabUser")
def prepare_for_insert(doc):
# don't block data import if user is not set
# migrating from another system
if not doc.owner in users:
doc.owner = frappe.session.user
if not doc.modified_by in users:
doc.modified_by = frappe.session.user
# header
if not rows:
rows = read_csv_content_from_uploaded_file(ignore_encoding_errors)
start_row = get_start_row()
header = rows[:start_row]
data = rows[start_row:]
doctype = get_header_row(get_data_keys_definition().main_table)[1]
columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:])
doctypes = []
doctype_parentfield = {}
column_idx_to_fieldname = {}
column_idx_to_fieldtype = {}
if submit_after_import and not cint(frappe.db.get_value("DocType",
doctype, "is_submittable")):
submit_after_import = False
parenttype = get_header_row(get_data_keys_definition().parent_table)
if len(parenttype) > 1:
parenttype = parenttype[1]
# check permissions
if not frappe.permissions.can_import(parenttype or doctype):
frappe.flags.mute_emails = False
return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}
# allow limit rows to be uploaded
check_data_length()
make_column_map()
if overwrite==None:
overwrite = params.get('overwrite')
# delete child rows (if parenttype)
parentfield = None
if parenttype:
parentfield = get_parent_field(doctype, parenttype)
if overwrite:
delete_child_rows(data, doctype)
ret = []
def log(msg):
if via_console:
print msg.encode('utf-8')
else:
ret.append(msg)
def as_link(doctype, name):
if via_console:
return "{0}: {1}".format(doctype, name)
else:
return getlink(doctype, name)
error = False
total = len(data)
for i, row in enumerate(data):
# bypass empty rows
if main_doc_empty(row):
continue
row_idx = i + start_row
doc = None
# publish task_update
frappe.publish_realtime("data_import_progress", {"progress": [i, total]},
user=frappe.session.user)
try:
doc = get_doc(row_idx)
if pre_process:
pre_process(doc)
if parentfield:
parent = frappe.get_doc(parenttype, doc["parent"])
doc = parent.append(parentfield, doc)
parent.save()
log('Inserted row for %s at #%s' % (as_link(parenttype,
doc.parent), unicode(doc.idx)))
else:
if overwrite and doc["name"] and frappe.db.exists(doctype, doc["name"]):
original = frappe.get_doc(doctype, doc["name"])
original_name = original.name
original.update(doc)
# preserve original name for case sensitivity
original.name = original_name
original.flags.ignore_links = ignore_links
original.save()
log('Updated row (#%d) %s' % (row_idx + 1, as_link(original.doctype, original.name)))
doc = original
else:
doc = frappe.get_doc(doc)
prepare_for_insert(doc)
doc.flags.ignore_links = ignore_links
doc.insert()
log('Inserted row (#%d) %s' % (row_idx + 1, as_link(doc.doctype, doc.name)))
if submit_after_import:
doc.submit()
log('Submitted row (#%d) %s' % (row_idx + 1, as_link(doc.doctype, doc.name)))
except Exception, e:
error = True
if doc:
frappe.errprint(doc if isinstance(doc, dict) else doc.as_dict())
err_msg = frappe.local.message_log and "\n\n".join(frappe.local.message_log) or cstr(e)
log('Error for row (#%d) %s : %s' % (row_idx + 1,
len(row)>1 and row[1] or "", err_msg))
frappe.errprint(frappe.get_traceback())
finally:
frappe.local.message_log = []
if error:
frappe.db.rollback()
else:
frappe.db.commit()
frappe.flags.mute_emails = False
frappe.flags.in_import = False
return {"messages": ret, "error": error}
def get_parent_field(doctype, parenttype):
parentfield = None
# get parentfield
if parenttype:
for d in frappe.get_meta(parenttype).get_table_fields():
if d.options==doctype:
parentfield = d.fieldname
break
if not parentfield:
frappe.msgprint(_("Did not find {0} for {0} ({1})").format("parentfield", parenttype, doctype))
raise Exception
return parentfield
def delete_child_rows(rows, doctype):
"""delete child rows for all parents"""
for p in list(set([r[1] for r in rows])):
if p:
frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), p)
| mit | -1,254,521,797,726,217,200 | 8,143,705,403,094,419,000 | 28.398026 | 110 | 0.651449 | false |
tedelhourani/ansible | lib/ansible/modules/network/netscaler/netscaler_gslb_vserver.py | 22 | 33857 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Citrix Systems
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
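# A minimal invocation sketch (hypothetical values). The connection options
# nsip/nitro_user/nitro_pass come from the shared netscaler documentation
# fragment; the remaining parameters are documented below:
#
# - netscaler_gslb_vserver:
#     nsip: 172.18.0.2
#     nitro_user: nsroot
#     nitro_pass: nsroot
#     state: present
#     name: gslb_http_vserver
#     servicetype: HTTP
#     dnsrecordtype: A
#     lbmethod: ROUNDROBIN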
DOCUMENTATION = '''
---
module: netscaler_gslb_vserver
short_description: Configure GSLB vserver entities in Netscaler.
description:
    - Configure GSLB vserver entities in Netscaler.
version_added: "2.4.0"
author: George Nikolopoulos (@giorgos-nikolopoulos)
options:
name:
description:
- >-
Name for the GSLB virtual server. Must begin with an ASCII alphanumeric or underscore C(_) character,
and must contain only ASCII alphanumeric, underscore C(_), hash C(#), period C(.), space, colon C(:), at C(@),
equals C(=), and hyphen C(-) characters. Can be changed after the virtual server is created.
- "Minimum length = 1"
servicetype:
choices:
- 'HTTP'
- 'FTP'
- 'TCP'
- 'UDP'
- 'SSL'
- 'SSL_BRIDGE'
- 'SSL_TCP'
- 'NNTP'
- 'ANY'
- 'SIP_UDP'
- 'SIP_TCP'
- 'SIP_SSL'
- 'RADIUS'
- 'RDP'
- 'RTSP'
- 'MYSQL'
- 'MSSQL'
- 'ORACLE'
description:
- "Protocol used by services bound to the virtual server."
            - >-
                Possible values = HTTP, FTP, TCP, UDP, SSL, SSL_BRIDGE, SSL_TCP, NNTP, ANY, SIP_UDP, SIP_TCP,
                SIP_SSL, RADIUS, RDP, RTSP, MYSQL, MSSQL, ORACLE
dnsrecordtype:
choices:
- 'A'
- 'AAAA'
- 'CNAME'
- 'NAPTR'
description:
- "DNS record type to associate with the GSLB virtual server's domain name."
- "Default value: A"
- "Possible values = A, AAAA, CNAME, NAPTR"
lbmethod:
choices:
- 'ROUNDROBIN'
- 'LEASTCONNECTION'
- 'LEASTRESPONSETIME'
- 'SOURCEIPHASH'
- 'LEASTBANDWIDTH'
- 'LEASTPACKETS'
- 'STATICPROXIMITY'
- 'RTT'
- 'CUSTOMLOAD'
description:
- "Load balancing method for the GSLB virtual server."
- "Default value: LEASTCONNECTION"
- >-
Possible values = ROUNDROBIN, LEASTCONNECTION, LEASTRESPONSETIME, SOURCEIPHASH, LEASTBANDWIDTH,
LEASTPACKETS, STATICPROXIMITY, RTT, CUSTOMLOAD
backuplbmethod:
choices:
- 'ROUNDROBIN'
- 'LEASTCONNECTION'
- 'LEASTRESPONSETIME'
- 'SOURCEIPHASH'
- 'LEASTBANDWIDTH'
- 'LEASTPACKETS'
- 'STATICPROXIMITY'
- 'RTT'
- 'CUSTOMLOAD'
description:
- >-
Backup load balancing method. Becomes operational if the primary load balancing method fails or
cannot be used. Valid only if the primary method is based on either round-trip time (RTT) or static
proximity.
netmask:
description:
- "IPv4 network mask for use in the SOURCEIPHASH load balancing method."
- "Minimum length = 1"
v6netmasklen:
description:
- >-
Number of bits to consider, in an IPv6 source IP address, for creating the hash that is required by
the C(SOURCEIPHASH) load balancing method.
- "Default value: C(128)"
- "Minimum value = C(1)"
- "Maximum value = C(128)"
tolerance:
description:
- >-
Site selection tolerance, in milliseconds, for implementing the RTT load balancing method. If a
site's RTT deviates from the lowest RTT by more than the specified tolerance, the site is not
considered when the NetScaler appliance makes a GSLB decision. The appliance implements the round
robin method of global server load balancing between sites whose RTT values are within the specified
tolerance. If the tolerance is 0 (zero), the appliance always sends clients the IP address of the
site with the lowest RTT.
- "Minimum value = C(0)"
- "Maximum value = C(100)"
persistencetype:
choices:
- 'SOURCEIP'
- 'NONE'
description:
- "Use source IP address based persistence for the virtual server."
- >-
After the load balancing method selects a service for the first packet, the IP address received in
response to the DNS query is used for subsequent requests from the same client.
persistenceid:
description:
- >-
The persistence ID for the GSLB virtual server. The ID is a positive integer that enables GSLB sites
to identify the GSLB virtual server, and is required if source IP address based or spill over based
persistence is enabled on the virtual server.
- "Minimum value = C(0)"
- "Maximum value = C(65535)"
persistmask:
description:
- >-
The optional IPv4 network mask applied to IPv4 addresses to establish source IP address based
persistence.
- "Minimum length = 1"
v6persistmasklen:
description:
- >-
Number of bits to consider in an IPv6 source IP address when creating source IP address based
persistence sessions.
- "Default value: C(128)"
- "Minimum value = C(1)"
- "Maximum value = C(128)"
timeout:
description:
- "Idle time, in minutes, after which a persistence entry is cleared."
- "Default value: C(2)"
- "Minimum value = C(2)"
- "Maximum value = C(1440)"
mir:
choices:
- 'enabled'
- 'disabled'
description:
- "Include multiple IP addresses in the DNS responses sent to clients."
disableprimaryondown:
choices:
- 'enabled'
- 'disabled'
description:
- >-
Continue to direct traffic to the backup chain even after the primary GSLB virtual server returns to
the UP state. Used when spillover is configured for the virtual server.
dynamicweight:
choices:
- 'SERVICECOUNT'
- 'SERVICEWEIGHT'
- 'DISABLED'
description:
- >-
Specify if the appliance should consider the service count, service weights, or ignore both when
                using weight-based load balancing methods. The state and the number of services bound to the
                virtual server help the appliance to select the service.
considereffectivestate:
choices:
- 'NONE'
- 'STATE_ONLY'
description:
- >-
If the primary state of all bound GSLB services is DOWN, consider the effective states of all the
GSLB services, obtained through the Metrics Exchange Protocol (MEP), when determining the state of
the GSLB virtual server. To consider the effective state, set the parameter to STATE_ONLY. To
disregard the effective state, set the parameter to NONE.
- >-
The effective state of a GSLB service is the ability of the corresponding virtual server to serve
traffic. The effective state of the load balancing virtual server, which is transferred to the GSLB
service, is UP even if only one virtual server in the backup chain of virtual servers is in the UP
state.
comment:
description:
- "Any comments that you might want to associate with the GSLB virtual server."
somethod:
choices:
- 'CONNECTION'
- 'DYNAMICCONNECTION'
- 'BANDWIDTH'
- 'HEALTH'
- 'NONE'
description:
- "Type of threshold that, when exceeded, triggers spillover. Available settings function as follows:"
- "* C(CONNECTION) - Spillover occurs when the number of client connections exceeds the threshold."
- >-
* C(DYNAMICCONNECTION) - Spillover occurs when the number of client connections at the GSLB virtual
server exceeds the sum of the maximum client (Max Clients) settings for bound GSLB services. Do not
specify a spillover threshold for this setting, because the threshold is implied by the Max Clients
settings of the bound GSLB services.
- >-
* C(BANDWIDTH) - Spillover occurs when the bandwidth consumed by the GSLB virtual server's incoming and
outgoing traffic exceeds the threshold.
- >-
* C(HEALTH) - Spillover occurs when the percentage of weights of the GSLB services that are UP drops
below the threshold. For example, if services gslbSvc1, gslbSvc2, and gslbSvc3 are bound to a virtual
server, with weights 1, 2, and 3, and the spillover threshold is 50%, spillover occurs if gslbSvc1
and gslbSvc3 or gslbSvc2 and gslbSvc3 transition to DOWN.
- "* C(NONE) - Spillover does not occur."
sopersistence:
choices:
- 'enabled'
- 'disabled'
description:
- >-
If spillover occurs, maintain source IP address based persistence for both primary and backup GSLB
virtual servers.
sopersistencetimeout:
description:
- "Timeout for spillover persistence, in minutes."
- "Default value: C(2)"
- "Minimum value = C(2)"
- "Maximum value = C(1440)"
sothreshold:
description:
- >-
Threshold at which spillover occurs. Specify an integer for the CONNECTION spillover method, a
bandwidth value in kilobits per second for the BANDWIDTH method (do not enter the units), or a
percentage for the HEALTH method (do not enter the percentage symbol).
- "Minimum value = C(1)"
- "Maximum value = C(4294967287)"
sobackupaction:
choices:
- 'DROP'
- 'ACCEPT'
- 'REDIRECT'
description:
- >-
Action to be performed if spillover is to take effect, but no backup chain to spillover is usable or
exists.
appflowlog:
choices:
- 'enabled'
- 'disabled'
description:
- "Enable logging appflow flow information."
domain_bindings:
description:
- >-
                List of bindings for domains for this gslb vserver.
suboptions:
cookietimeout:
description:
- Timeout, in minutes, for the GSLB site cookie.
domainname:
description:
- Domain name for which to change the time to live (TTL) and/or backup service IP address.
ttl:
description:
- Time to live (TTL) for the domain.
sitedomainttl:
description:
- >-
TTL, in seconds, for all internally created site domains (created when a site prefix is
configured on a GSLB service) that are associated with this virtual server.
- Minimum value = C(1)
service_bindings:
description:
- List of bindings for gslb services bound to this gslb virtual server.
suboptions:
servicename:
description:
- Name of the GSLB service for which to change the weight.
weight:
description:
- Weight to assign to the GSLB service.
disabled:
description:
- When set to C(yes) the GSLB Vserver state will be set to C(disabled).
- When set to C(no) the GSLB Vserver state will be set to C(enabled).
- >-
Note that due to limitations of the underlying NITRO API a C(disabled) state change alone
does not cause the module result to report a changed status.
type: bool
default: false
extends_documentation_fragment: netscaler
requirements:
- nitro python sdk
'''
EXAMPLES = '''
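# The snippet below is an illustrative sketch rather than a tested playbook:
# the NetScaler address, credentials, and binding values are assumptions.
- name: Set up a gslb vserver with one domain and one service binding
  delegate_to: localhost
  netscaler_gslb_vserver:
    nsip: 172.18.0.2
    nitro_user: nsroot
    nitro_pass: nsroot
    state: present

    name: gslb-vserver-1
    servicetype: HTTP
    dnsrecordtype: A
    lbmethod: ROUNDROBIN

    domain_bindings:
      - domainname: example.com
        ttl: 10

    service_bindings:
      - servicename: gslb-service-1
        weight: 50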
'''
RETURN = '''
'''
import copy
try:
from nssrc.com.citrix.netscaler.nitro.resource.config.gslb.gslbvserver import gslbvserver
from nssrc.com.citrix.netscaler.nitro.resource.config.gslb.gslbvserver_gslbservice_binding import gslbvserver_gslbservice_binding
from nssrc.com.citrix.netscaler.nitro.resource.config.gslb.gslbvserver_domain_binding import gslbvserver_domain_binding
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
PYTHON_SDK_IMPORTED = True
except ImportError as e:
PYTHON_SDK_IMPORTED = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netscaler import (
ConfigProxy,
get_nitro_client,
netscaler_common_arguments,
log,
loglines,
ensure_feature_is_enabled,
get_immutables_intersection,
complete_missing_attributes
)
gslbvserver_domain_binding_rw_attrs = [
'name',
'domainname',
'backupipflag',
'cookietimeout',
'backupip',
'ttl',
'sitedomainttl',
'cookie_domainflag',
]
gslbvserver_gslbservice_binding_rw_attrs = [
'name',
'servicename',
'weight',
]
def get_actual_domain_bindings(client, module):
log('get_actual_domain_bindings')
# Get actual domain bindings and index them by domainname
actual_domain_bindings = {}
if gslbvserver_domain_binding.count(client, name=module.params['name']) != 0:
# Get all domain bindings associated with the named gslb vserver
fetched_domain_bindings = gslbvserver_domain_binding.get(client, name=module.params['name'])
# index by domainname
for binding in fetched_domain_bindings:
complete_missing_attributes(binding, gslbvserver_domain_binding_rw_attrs, fill_value=None)
actual_domain_bindings[binding.domainname] = binding
return actual_domain_bindings
def get_configured_domain_bindings_proxys(client, module):
log('get_configured_domain_bindings_proxys')
configured_domain_proxys = {}
# Get configured domain bindings and index them by domainname
if module.params['domain_bindings'] is not None:
for configured_domain_binding in module.params['domain_bindings']:
binding_values = copy.deepcopy(configured_domain_binding)
binding_values['name'] = module.params['name']
gslbvserver_domain_binding_proxy = ConfigProxy(
actual=gslbvserver_domain_binding(),
client=client,
attribute_values_dict=binding_values,
readwrite_attrs=gslbvserver_domain_binding_rw_attrs,
readonly_attrs=[],
)
configured_domain_proxys[configured_domain_binding['domainname']] = gslbvserver_domain_binding_proxy
return configured_domain_proxys
def sync_domain_bindings(client, module):
log('sync_domain_bindings')
actual_domain_bindings = get_actual_domain_bindings(client, module)
configured_domain_proxys = get_configured_domain_bindings_proxys(client, module)
# Delete actual bindings not in configured bindings
for domainname, actual_domain_binding in actual_domain_bindings.items():
if domainname not in configured_domain_proxys.keys():
log('Deleting absent binding for domain %s' % domainname)
gslbvserver_domain_binding.delete(client, actual_domain_binding)
# Delete actual bindings that differ from configured
for proxy_key, binding_proxy in configured_domain_proxys.items():
if proxy_key in actual_domain_bindings:
actual_binding = actual_domain_bindings[proxy_key]
if not binding_proxy.has_equal_attributes(actual_binding):
log('Deleting differing binding for domain %s' % binding_proxy.domainname)
gslbvserver_domain_binding.delete(client, actual_binding)
                log('Adding new binding for domain %s' % binding_proxy.domainname)
binding_proxy.add()
# Add configured domains that are missing from actual
for proxy_key, binding_proxy in configured_domain_proxys.items():
if proxy_key not in actual_domain_bindings.keys():
log('Adding domain binding for domain %s' % binding_proxy.domainname)
binding_proxy.add()
def domain_bindings_identical(client, module):
log('domain_bindings_identical')
actual_domain_bindings = get_actual_domain_bindings(client, module)
configured_domain_proxys = get_configured_domain_bindings_proxys(client, module)
actual_keyset = set(actual_domain_bindings.keys())
configured_keyset = set(configured_domain_proxys.keys())
symmetric_difference = actual_keyset ^ configured_keyset
log('symmetric difference %s' % symmetric_difference)
if len(symmetric_difference) != 0:
return False
# Item for item equality test
for key, proxy in configured_domain_proxys.items():
diff = proxy.diff_object(actual_domain_bindings[key])
if 'backupipflag' in diff:
del diff['backupipflag']
if not len(diff) == 0:
return False
# Fallthrough to True result
return True
def get_actual_service_bindings(client, module):
log('get_actual_service_bindings')
    # Get actual service bindings and index them by servicename
actual_bindings = {}
if gslbvserver_gslbservice_binding.count(client, name=module.params['name']) != 0:
# Get all service bindings associated with the named gslb vserver
fetched_bindings = gslbvserver_gslbservice_binding.get(client, name=module.params['name'])
# index by servicename
for binding in fetched_bindings:
complete_missing_attributes(binding, gslbvserver_gslbservice_binding_rw_attrs, fill_value=None)
actual_bindings[binding.servicename] = binding
return actual_bindings
def get_configured_service_bindings(client, module):
    log('get_configured_service_bindings')
configured_proxys = {}
    # Get configured service bindings and index them by servicename
if module.params['service_bindings'] is not None:
for configured_binding in module.params['service_bindings']:
binding_values = copy.deepcopy(configured_binding)
binding_values['name'] = module.params['name']
gslbvserver_service_binding_proxy = ConfigProxy(
actual=gslbvserver_gslbservice_binding(),
client=client,
attribute_values_dict=binding_values,
readwrite_attrs=gslbvserver_gslbservice_binding_rw_attrs,
readonly_attrs=[],
)
configured_proxys[configured_binding['servicename']] = gslbvserver_service_binding_proxy
return configured_proxys
def sync_service_bindings(client, module):
actual = get_actual_service_bindings(client, module)
configured = get_configured_service_bindings(client, module)
# Delete extraneous
extraneous_service_bindings = list(set(actual.keys()) - set(configured.keys()))
for servicename in extraneous_service_bindings:
        log('Deleting extraneous binding for service %s' % servicename)
binding = actual[servicename]
binding.name = module.params['name']
gslbvserver_gslbservice_binding.delete(client, binding)
# Recreate different
common_service_bindings = list(set(actual.keys()) & set(configured.keys()))
for servicename in common_service_bindings:
proxy = configured[servicename]
binding = actual[servicename]
        if not proxy.has_equal_attributes(binding):
log('Recreating differing service binding %s' % servicename)
gslbvserver_gslbservice_binding.delete(client, binding)
proxy.add()
# Add missing
missing_service_bindings = list(set(configured.keys()) - set(actual.keys()))
for servicename in missing_service_bindings:
proxy = configured[servicename]
log('Adding missing service binding %s' % servicename)
proxy.add()
def service_bindings_identical(client, module):
actual_bindings = get_actual_service_bindings(client, module)
configured_proxys = get_configured_service_bindings(client, module)
actual_keyset = set(actual_bindings.keys())
configured_keyset = set(configured_proxys.keys())
symmetric_difference = actual_keyset ^ configured_keyset
if len(symmetric_difference) != 0:
return False
# Item for item equality test
for key, proxy in configured_proxys.items():
if key in actual_bindings.keys():
if not proxy.has_equal_attributes(actual_bindings[key]):
return False
# Fallthrough to True result
return True
def gslb_vserver_exists(client, module):
if gslbvserver.count_filtered(client, 'name:%s' % module.params['name']) > 0:
return True
else:
return False
def gslb_vserver_identical(client, module, gslb_vserver_proxy):
gslb_vserver_list = gslbvserver.get_filtered(client, 'name:%s' % module.params['name'])
diff_dict = gslb_vserver_proxy.diff_object(gslb_vserver_list[0])
if len(diff_dict) != 0:
return False
else:
return True
def all_identical(client, module, gslb_vserver_proxy):
return (
gslb_vserver_identical(client, module, gslb_vserver_proxy) and
domain_bindings_identical(client, module) and
service_bindings_identical(client, module)
)
def diff_list(client, module, gslb_vserver_proxy):
gslb_vserver_list = gslbvserver.get_filtered(client, 'name:%s' % module.params['name'])
return gslb_vserver_proxy.diff_object(gslb_vserver_list[0])
def do_state_change(client, module, gslb_vserver_proxy):
if module.params['disabled']:
        log('Disabling gslb vserver')
result = gslbvserver.disable(client, gslb_vserver_proxy.actual)
else:
        log('Enabling gslb vserver')
result = gslbvserver.enable(client, gslb_vserver_proxy.actual)
return result
def main():
module_specific_arguments = dict(
name=dict(type='str'),
servicetype=dict(
type='str',
choices=[
'HTTP',
'FTP',
'TCP',
'UDP',
'SSL',
'SSL_BRIDGE',
'SSL_TCP',
'NNTP',
'ANY',
'SIP_UDP',
'SIP_TCP',
'SIP_SSL',
'RADIUS',
'RDP',
'RTSP',
'MYSQL',
'MSSQL',
'ORACLE',
]
),
dnsrecordtype=dict(
type='str',
choices=[
'A',
'AAAA',
'CNAME',
'NAPTR',
]
),
lbmethod=dict(
type='str',
choices=[
'ROUNDROBIN',
'LEASTCONNECTION',
'LEASTRESPONSETIME',
'SOURCEIPHASH',
'LEASTBANDWIDTH',
'LEASTPACKETS',
'STATICPROXIMITY',
'RTT',
'CUSTOMLOAD',
]
),
backuplbmethod=dict(
type='str',
choices=[
'ROUNDROBIN',
'LEASTCONNECTION',
'LEASTRESPONSETIME',
'SOURCEIPHASH',
'LEASTBANDWIDTH',
'LEASTPACKETS',
'STATICPROXIMITY',
'RTT',
'CUSTOMLOAD',
]
),
netmask=dict(type='str'),
v6netmasklen=dict(type='float'),
tolerance=dict(type='float'),
persistencetype=dict(
type='str',
choices=[
'SOURCEIP',
'NONE',
]
),
persistenceid=dict(type='float'),
persistmask=dict(type='str'),
v6persistmasklen=dict(type='float'),
timeout=dict(type='float'),
mir=dict(
type='str',
choices=[
'enabled',
'disabled',
]
),
disableprimaryondown=dict(
type='str',
choices=[
'enabled',
'disabled',
]
),
dynamicweight=dict(
type='str',
choices=[
'SERVICECOUNT',
'SERVICEWEIGHT',
'DISABLED',
]
),
considereffectivestate=dict(
type='str',
choices=[
'NONE',
'STATE_ONLY',
]
),
comment=dict(type='str'),
somethod=dict(
type='str',
choices=[
'CONNECTION',
'DYNAMICCONNECTION',
'BANDWIDTH',
'HEALTH',
'NONE',
]
),
sopersistence=dict(
type='str',
choices=[
'enabled',
'disabled',
]
),
sopersistencetimeout=dict(type='float'),
sothreshold=dict(type='float'),
sobackupaction=dict(
type='str',
choices=[
'DROP',
'ACCEPT',
'REDIRECT',
]
),
appflowlog=dict(
type='str',
choices=[
'enabled',
'disabled',
]
),
domainname=dict(type='str'),
cookie_domain=dict(type='str'),
)
hand_inserted_arguments = dict(
domain_bindings=dict(type='list'),
service_bindings=dict(type='list'),
disabled=dict(
type='bool',
default=False,
),
)
argument_spec = dict()
argument_spec.update(netscaler_common_arguments)
argument_spec.update(module_specific_arguments)
argument_spec.update(hand_inserted_arguments)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
module_result = dict(
changed=False,
failed=False,
loglines=loglines,
)
# Fail the module if imports failed
if not PYTHON_SDK_IMPORTED:
module.fail_json(msg='Could not load nitro python sdk')
# Fallthrough to rest of execution
client = get_nitro_client(module)
try:
client.login()
except nitro_exception as e:
msg = "nitro exception during login. errorcode=%s, message=%s" % (str(e.errorcode), e.message)
module.fail_json(msg=msg)
except Exception as e:
if str(type(e)) == "<class 'requests.exceptions.ConnectionError'>":
module.fail_json(msg='Connection error %s' % str(e))
elif str(type(e)) == "<class 'requests.exceptions.SSLError'>":
module.fail_json(msg='SSL Error %s' % str(e))
else:
module.fail_json(msg='Unexpected error during login %s' % str(e))
readwrite_attrs = [
'name',
'servicetype',
'dnsrecordtype',
'lbmethod',
'backuplbmethod',
'netmask',
'v6netmasklen',
'tolerance',
'persistencetype',
'persistenceid',
'persistmask',
'v6persistmasklen',
'timeout',
'mir',
'disableprimaryondown',
'dynamicweight',
'considereffectivestate',
'comment',
'somethod',
'sopersistence',
'sopersistencetimeout',
'sothreshold',
'sobackupaction',
'appflowlog',
'cookie_domain',
]
readonly_attrs = [
'curstate',
'status',
'lbrrreason',
'iscname',
'sitepersistence',
'totalservices',
'activeservices',
'statechangetimesec',
'statechangetimemsec',
'tickssincelaststatechange',
'health',
'policyname',
'priority',
'gotopriorityexpression',
'type',
'vsvrbindsvcip',
'vsvrbindsvcport',
'__count',
]
immutable_attrs = [
'name',
'servicetype',
]
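    # NITRO expects these toggle values upper-cased; the module accepts
    # lower-case choices and transforms them just before talking to the API.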
transforms = {
'mir': [lambda v: v.upper()],
'disableprimaryondown': [lambda v: v.upper()],
'sopersistence': [lambda v: v.upper()],
'appflowlog': [lambda v: v.upper()],
}
# Instantiate config proxy
gslb_vserver_proxy = ConfigProxy(
actual=gslbvserver(),
client=client,
attribute_values_dict=module.params,
readwrite_attrs=readwrite_attrs,
readonly_attrs=readonly_attrs,
immutable_attrs=immutable_attrs,
transforms=transforms,
)
try:
ensure_feature_is_enabled(client, 'GSLB')
# Apply appropriate state
if module.params['state'] == 'present':
log('Applying state present')
if not gslb_vserver_exists(client, module):
log('Creating object')
if not module.check_mode:
gslb_vserver_proxy.add()
sync_domain_bindings(client, module)
sync_service_bindings(client, module)
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
elif not all_identical(client, module, gslb_vserver_proxy):
log('Entering update actions')
# Check if we try to change value of immutable attributes
if not gslb_vserver_identical(client, module, gslb_vserver_proxy):
log('Updating gslb vserver')
immutables_changed = get_immutables_intersection(gslb_vserver_proxy, diff_list(client, module, gslb_vserver_proxy).keys())
if immutables_changed != []:
module.fail_json(
msg='Cannot update immutable attributes %s' % (immutables_changed,),
diff=diff_list(client, module, gslb_vserver_proxy),
**module_result
)
if not module.check_mode:
gslb_vserver_proxy.update()
# Update domain bindings
if not domain_bindings_identical(client, module):
if not module.check_mode:
sync_domain_bindings(client, module)
# Update service bindings
if not service_bindings_identical(client, module):
if not module.check_mode:
sync_service_bindings(client, module)
module_result['changed'] = True
if not module.check_mode:
if module.params['save_config']:
client.save_config()
else:
module_result['changed'] = False
if not module.check_mode:
res = do_state_change(client, module, gslb_vserver_proxy)
if res.errorcode != 0:
msg = 'Error when setting disabled state. errorcode: %s message: %s' % (res.errorcode, res.message)
module.fail_json(msg=msg, **module_result)
# Sanity check for state
if not module.check_mode:
if not gslb_vserver_exists(client, module):
module.fail_json(msg='GSLB Vserver does not exist', **module_result)
if not gslb_vserver_identical(client, module, gslb_vserver_proxy):
module.fail_json(msg='GSLB Vserver differs from configured', diff=diff_list(client, module, gslb_vserver_proxy), **module_result)
if not domain_bindings_identical(client, module):
module.fail_json(msg='Domain bindings differ from configured', diff=diff_list(client, module, gslb_vserver_proxy), **module_result)
if not service_bindings_identical(client, module):
module.fail_json(msg='Service bindings differ from configured', diff=diff_list(client, module, gslb_vserver_proxy), **module_result)
elif module.params['state'] == 'absent':
if gslb_vserver_exists(client, module):
if not module.check_mode:
gslb_vserver_proxy.delete()
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
else:
module_result['changed'] = False
# Sanity check for state
if not module.check_mode:
if gslb_vserver_exists(client, module):
module.fail_json(msg='GSLB Vserver still exists', **module_result)
except nitro_exception as e:
msg = "nitro exception errorcode=%s, message=%s" % (str(e.errorcode), e.message)
module.fail_json(msg=msg, **module_result)
client.logout()
module.exit_json(**module_result)
if __name__ == "__main__":
main()
| gpl-3.0 | -3,965,707,992,212,138,500 | 8,346,702,635,440,560,000 | 34.489518 | 152 | 0.571787 | false |
ehashman/oh-mainline | vendor/packages/Django/django/utils/importlib.py | 124 | 1228 | # Taken from Python 2.7 with permission from/by the original author.
import sys
def _resolve_name(name, package, level):
"""Return the absolute name of the module to be imported."""
if not hasattr(package, 'rindex'):
raise ValueError("'package' not set to a string")
dot = len(package)
for x in range(level, 1, -1):
try:
dot = package.rindex('.', 0, dot)
except ValueError:
raise ValueError("attempted relative import beyond top-level "
"package")
return "%s.%s" % (package[:dot], name)
def import_module(name, package=None):
"""Import a module.
The 'package' argument is required when performing a relative import. It
specifies the package to use as the anchor point from which to resolve the
relative import to an absolute import.
"""
if name.startswith('.'):
if not package:
raise TypeError("relative imports require the 'package' argument")
level = 0
for character in name:
if character != '.':
break
level += 1
name = _resolve_name(name[level:], package, level)
__import__(name)
return sys.modules[name]
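# Minimal usage sketch (module names below are illustrative):
#
#   json_mod = import_module('json')               # absolute import
#   submodule = import_module('.models', 'myapp')  # relative, anchored at 'myapp'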
| agpl-3.0 | 3,124,244,247,401,352,700 | 8,796,936,003,163,117,000 | 33.111111 | 78 | 0.601792 | false |
philetus/geosolver | workbench/tree.py | 1 | 7545 | from includes import *
from parameters import Settings
class Tree:
def __init__(self, root):
self.settings = Settings()
self.orientation = self.settings.dvData.treeAlignment
self.maxDepth = 100
self.siblingSeperation = 5
self.subtreeSeperation = 5
self.levelSeperation = 40
self.maxLevelHeight = []
self.maxLevelWidth = []
self.previousLevelNode = []
self.root = None
self.topXAdjustment = 0
self.topYAdjustment = 0
self.rootOffset = QtCore.QPoint()
def firstWalk(self, tree, node, level):
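        # Post-order walk (first pass of a Walker-style layout): assign
        # preliminary x coordinates and subtree modifiers bottom-up.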
        leftSibling = None
node.position.setX(0.0)
node.position.setY(0.0)
node.leftNeighbour = None
node.rightNeighbour = None
tree.setLevelHeight(node, level)
tree.setLevelWidth(node, level)
tree.setNeighbours(node, level)
if (node.getChildrenCount() == 0) or (level == tree.maxDepth):
leftSibling = node.getLeftSibling()
if leftSibling != None:
node.prelim = leftSibling.prelim + tree.getNodeSize(leftSibling) + tree.siblingSeperation
else:
node.prelim = 0.0
else:
for chldNode in node.children:
self.firstWalk(tree, chldNode, level+1)
midPoint = node.getChildrenCenter(tree)
midPoint -= tree.getNodeSize(node)/2.0
leftSibling = node.getLeftSibling()
if leftSibling != None:
node.prelim = leftSibling.prelim + tree.getNodeSize(leftSibling) + tree.siblingSeperation
node.modifier = node.prelim - midPoint
self.apportion(tree, node, level)
else:
node.prelim = midPoint
def apportion(self, tree, node, level):
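        # Resolve overlaps with subtrees to the left: compare the paired
        # contours level by level and shift this subtree (and intermediate
        # siblings) right by a share of the total gap.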
k = tree.maxDepth - level
j = 1
if node.getChildrenCount() != 0:
firstChild = node.children[0]
firstChildLeftNeighbour = node.children[0].leftNeighbour
else:
firstChild = None
firstChildLeftNeighbour = None
while firstChild != None and firstChildLeftNeighbour != None and j <= k:
modifierSumRight = 0.0
modifierSumLeft = 0.0
rightAncestor = firstChild
leftAncestor = firstChildLeftNeighbour
for i in range(j):
rightAncestor = rightAncestor.parentNode
leftAncestor = leftAncestor.parentNode
modifierSumRight += rightAncestor.modifier
modifierSumLeft += leftAncestor.modifier
totalGap = (firstChildLeftNeighbour.prelim + modifierSumLeft + tree.getNodeSize(firstChildLeftNeighbour) + tree.subtreeSeperation) - (firstChild.prelim + modifierSumRight)
if totalGap > 0:
subtreeAux = node
numSubtrees = 0
while subtreeAux != None and subtreeAux != leftAncestor:
numSubtrees +=1
subtreeAux = subtreeAux.getLeftSibling()
if subtreeAux != None:
subtreeMoveAux = node
singleGap = totalGap / numSubtrees
while subtreeMoveAux != None and subtreeMoveAux != leftAncestor:
subtreeMoveAux.prelim += totalGap
subtreeMoveAux.modifier += totalGap
totalGap -= singleGap
subtreeMoveAux = subtreeMoveAux.getLeftSibling()
j += 1
if firstChild.getChildrenCount() == 0:
firstChild = tree.getLeftMost(node, 0, j)
else:
firstChild = firstChild.children[0]
if firstChild != None:
firstChildLeftNeighbour = firstChild.leftNeighbour
def secondWalk(self, tree, node, level, posX, posY):
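        # Pre-order walk (second pass): combine prelim values with the
        # accumulated modifiers into final positions, flipping axes for the
        # LEFT/RIGHT/BOTTOM orientations.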
if level <= tree.maxDepth:
xTmp = tree.rootOffset.x() + node.prelim + posX
yTmp = tree.rootOffset.y() + posY
maxSizeTmp = 0
nodeSizeTmp = 0
flag = False
if self.orientation == TreeOrientation.TOP or self.orientation == TreeOrientation.BOTTOM:
maxSizeTmp = tree.maxLevelHeight[level]
nodeSizeTmp = node.height
elif self.orientation == TreeOrientation.LEFT or self.orientation == TreeOrientation.RIGHT:
maxSizeTmp = tree.maxLevelWidth[level]
nodeSizeTmp = node.width
flag = True
node.position.setX(xTmp)
node.position.setY(yTmp)
if flag:
swapTmp = node.position.x()
node.position.setX(node.position.y())
node.position.setY(swapTmp)
if self.orientation == TreeOrientation.BOTTOM:
node.position.setY(-node.position.y() - nodeSizeTmp)
elif self.orientation == TreeOrientation.RIGHT:
node.position.setX(-node.position.x() - nodeSizeTmp)
if node.getChildrenCount() != 0:
self.secondWalk(tree, node.children[0], level+1, posX + node.modifier, posY + maxSizeTmp + tree.levelSeperation)
rightSibling = node.getRightSibling()
if rightSibling != None:
self.secondWalk(tree, rightSibling, level, posX, posY)
def positionTree(self):
self.maxLevelWidth = []
self.maxLevelHeight = []
self.previousLevelNode = []
self.firstWalk(self, self.root, 0)
self.rootOffset.setX( self.topXAdjustment + self.root.position.x())
self.rootOffset.setY( self.topYAdjustment + self.root.position.y())
self.secondWalk(self, self.root, 0, 0, 0)
def updateTree(self):
self.positionTree()
def setLevelHeight(self, node, level):
if len(self.maxLevelHeight) <= level:
for i in range(level-len(self.maxLevelHeight)+1):
self.maxLevelHeight += [None]
        if self.maxLevelHeight[level] < node.height:
self.maxLevelHeight[level] = node.height
def setLevelWidth(self, node, level):
if len(self.maxLevelWidth) <= level:
for i in range(level-len(self.maxLevelWidth)+1):
self.maxLevelWidth += [None]
        if self.maxLevelWidth[level] < node.width:
self.maxLevelWidth[level] = node.width
def setNeighbours(self, node, level):
if len(self.previousLevelNode) > level:
node.leftNeighbour = self.previousLevelNode[level]
else:
for i in range(level - len(self.previousLevelNode)+1):
self.previousLevelNode += [None]
if node.leftNeighbour != None:
node.leftNeighbour.rightNeighbour = node
self.previousLevelNode[level] = node
def getLeftMost(self, node, level, maxLevel):
if level >= maxLevel:
return node
if node.getChildrenCount() == 0:
return None
for chldNode in node.children:
leftMostDescendant = self.getLeftMost(chldNode, level+1, maxLevel)
if leftMostDescendant != None:
return leftMostDescendant
def getNodeSize(self, node):
if self.orientation == TreeOrientation.TOP or self.orientation == TreeOrientation.BOTTOM:
return node.width
elif self.orientation == TreeOrientation.LEFT or self.orientation == TreeOrientation.RIGHT:
return node.height
def clear(self, node):
node.clear()
for childNode in node.children:
self.clear(childNode)
def __str__(self):
pass
def __str_recursive_(self):
pass
class Node:
def __init__(self, parentNode):
self.prelim = 0
self.position = QtCore.QPointF()
self.modifier = 0.0
self.width = 50.0
self.height = 40.0
self.isCollapsed = False
self.canCollapse = True
self.parentNode = parentNode
self.leftNeighbour = None
self.rightNeighbour = None
self.children = []
self.variables = []
def collapse(self):
pass
def expand(self):
pass
def getLeftSibling(self):
if self.leftNeighbour != None and self.leftNeighbour.parentNode == self.parentNode:
return self.leftNeighbour
else:
return None
def getRightSibling(self):
if self.rightNeighbour != None and self.rightNeighbour.parentNode == self.parentNode:
return self.rightNeighbour
else:
return None
def getChildrenCenter(self, tree):
if len(self.children) > 0:
return self.children[0].prelim + ((self.children[-1].prelim - self.children[0].prelim) + tree.getNodeSize(self.children[-1]))/2.0
else:
return 0.0
def getChildrenCount(self):
if self.isCollapsed:
return 0
else:
return len(self.children)
def clear(self):
self.position.setX(0.0)
self.position.setY(0.0)
self.prelim = 0.0
self.modifier = 0.0
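# Illustrative layout sketch (assumes the application's Qt/Settings
# environment is importable; the wiring below is hypothetical):
#
#   root = Node(None)
#   a, b = Node(root), Node(root)
#   root.children = [a, b]
#   tree = Tree(root)
#   tree.updateTree()              # runs firstWalk + secondWalk
#   print(a.position, b.position)  # final layout coordinates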
| gpl-3.0 | -998,450,176,498,137,500 | -2,512,628,911,925,475,000 | 28.826087 | 176 | 0.705765 | false |
myang321/django | django/contrib/gis/gdal/prototypes/geom.py | 450 | 4735 | from ctypes import POINTER, c_char_p, c_double, c_int, c_void_p
from django.contrib.gis.gdal.envelope import OGREnvelope
from django.contrib.gis.gdal.libgdal import lgdal
from django.contrib.gis.gdal.prototypes.errcheck import check_envelope
from django.contrib.gis.gdal.prototypes.generation import (
const_string_output, double_output, geom_output, int_output, srs_output,
string_output, void_output,
)
# ### Generation routines specific to this module ###
def env_func(f, argtypes):
"For getting OGREnvelopes."
f.argtypes = argtypes
f.restype = None
f.errcheck = check_envelope
return f
def pnt_func(f):
"For accessing point information."
return double_output(f, [c_void_p, c_int])
def topology_func(f):
f.argtypes = [c_void_p, c_void_p]
f.restype = c_int
    f.errcheck = bool
return f
# ### OGR_G ctypes function prototypes ###
# GeoJSON routines.
from_json = geom_output(lgdal.OGR_G_CreateGeometryFromJson, [c_char_p])
to_json = string_output(lgdal.OGR_G_ExportToJson, [c_void_p], str_result=True, decoding='ascii')
to_kml = string_output(lgdal.OGR_G_ExportToKML, [c_void_p, c_char_p], str_result=True, decoding='ascii')
# GetX, GetY, GetZ all return doubles.
getx = pnt_func(lgdal.OGR_G_GetX)
gety = pnt_func(lgdal.OGR_G_GetY)
getz = pnt_func(lgdal.OGR_G_GetZ)
# Geometry creation routines.
from_wkb = geom_output(lgdal.OGR_G_CreateFromWkb, [c_char_p, c_void_p, POINTER(c_void_p), c_int], offset=-2)
from_wkt = geom_output(lgdal.OGR_G_CreateFromWkt, [POINTER(c_char_p), c_void_p, POINTER(c_void_p)], offset=-1)
create_geom = geom_output(lgdal.OGR_G_CreateGeometry, [c_int])
clone_geom = geom_output(lgdal.OGR_G_Clone, [c_void_p])
get_geom_ref = geom_output(lgdal.OGR_G_GetGeometryRef, [c_void_p, c_int])
get_boundary = geom_output(lgdal.OGR_G_GetBoundary, [c_void_p])
geom_convex_hull = geom_output(lgdal.OGR_G_ConvexHull, [c_void_p])
geom_diff = geom_output(lgdal.OGR_G_Difference, [c_void_p, c_void_p])
geom_intersection = geom_output(lgdal.OGR_G_Intersection, [c_void_p, c_void_p])
geom_sym_diff = geom_output(lgdal.OGR_G_SymmetricDifference, [c_void_p, c_void_p])
geom_union = geom_output(lgdal.OGR_G_Union, [c_void_p, c_void_p])
# Geometry modification routines.
add_geom = void_output(lgdal.OGR_G_AddGeometry, [c_void_p, c_void_p])
import_wkt = void_output(lgdal.OGR_G_ImportFromWkt, [c_void_p, POINTER(c_char_p)])
# Destroys a geometry
destroy_geom = void_output(lgdal.OGR_G_DestroyGeometry, [c_void_p], errcheck=False)
# Geometry export routines.
to_wkb = void_output(lgdal.OGR_G_ExportToWkb, None, errcheck=True) # special handling for WKB.
to_wkt = string_output(lgdal.OGR_G_ExportToWkt, [c_void_p, POINTER(c_char_p)], decoding='ascii')
to_gml = string_output(lgdal.OGR_G_ExportToGML, [c_void_p], str_result=True, decoding='ascii')
get_wkbsize = int_output(lgdal.OGR_G_WkbSize, [c_void_p])
# Geometry spatial-reference related routines.
assign_srs = void_output(lgdal.OGR_G_AssignSpatialReference, [c_void_p, c_void_p], errcheck=False)
get_geom_srs = srs_output(lgdal.OGR_G_GetSpatialReference, [c_void_p])
# Geometry properties
get_area = double_output(lgdal.OGR_G_GetArea, [c_void_p])
get_centroid = void_output(lgdal.OGR_G_Centroid, [c_void_p, c_void_p])
get_dims = int_output(lgdal.OGR_G_GetDimension, [c_void_p])
get_coord_dim = int_output(lgdal.OGR_G_GetCoordinateDimension, [c_void_p])
set_coord_dim = void_output(lgdal.OGR_G_SetCoordinateDimension, [c_void_p, c_int], errcheck=False)
get_geom_count = int_output(lgdal.OGR_G_GetGeometryCount, [c_void_p])
get_geom_name = const_string_output(lgdal.OGR_G_GetGeometryName, [c_void_p], decoding='ascii')
get_geom_type = int_output(lgdal.OGR_G_GetGeometryType, [c_void_p])
get_point_count = int_output(lgdal.OGR_G_GetPointCount, [c_void_p])
get_point = void_output(lgdal.OGR_G_GetPoint,
[c_void_p, c_int, POINTER(c_double), POINTER(c_double), POINTER(c_double)], errcheck=False
)
geom_close_rings = void_output(lgdal.OGR_G_CloseRings, [c_void_p], errcheck=False)
# Topology routines.
ogr_contains = topology_func(lgdal.OGR_G_Contains)
ogr_crosses = topology_func(lgdal.OGR_G_Crosses)
ogr_disjoint = topology_func(lgdal.OGR_G_Disjoint)
ogr_equals = topology_func(lgdal.OGR_G_Equals)
ogr_intersects = topology_func(lgdal.OGR_G_Intersects)
ogr_overlaps = topology_func(lgdal.OGR_G_Overlaps)
ogr_touches = topology_func(lgdal.OGR_G_Touches)
ogr_within = topology_func(lgdal.OGR_G_Within)
# Transformation routines.
geom_transform = void_output(lgdal.OGR_G_Transform, [c_void_p, c_void_p])
geom_transform_to = void_output(lgdal.OGR_G_TransformTo, [c_void_p, c_void_p])
# For retrieving the envelope of the geometry.
get_envelope = env_func(lgdal.OGR_G_GetEnvelope, [c_void_p, POINTER(OGREnvelope)])
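# Illustrative sketch: these prototypes are normally consumed through
# django.contrib.gis.gdal.OGRGeometry rather than called directly
# (the WKT literals below are made up):
#
#   from django.contrib.gis.gdal import OGRGeometry
#   poly = OGRGeometry('POLYGON((0 0, 0 1, 1 1, 0 0))')
#   pnt = OGRGeometry('POINT(0.2 0.6)')
#   poly.contains(pnt)  # dispatches to ogr_contains on the underlying handles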
| bsd-3-clause | 7,324,165,802,888,296,000 | -6,430,488,318,697,943,000 | 44.095238 | 110 | 0.729884 | false |
ronfung/incubator-airflow | airflow/utils/dates.py | 28 | 8416 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from datetime import datetime, date, timedelta
from dateutil.relativedelta import relativedelta # for doctest
import six
from croniter import croniter
cron_presets = {
'@hourly': '0 * * * *',
'@daily': '0 0 * * *',
'@weekly': '0 0 * * 0',
'@monthly': '0 0 1 * *',
'@yearly': '0 0 1 1 *',
}
def date_range(
start_date,
end_date=None,
num=None,
delta=None):
"""
Get a set of dates as a list based on a start, end and delta, delta
can be something that can be added to ``datetime.datetime``
or a cron expression as a ``str``
:param start_date: anchor date to start the series from
:type start_date: datetime.datetime
:param end_date: right boundary for the date range
:type end_date: datetime.datetime
    :param num: alternatively to end_date, you can specify the number of
        entries you want in the range. This number can be negative,
output will always be sorted regardless
:type num: int
>>> date_range(datetime(2016, 1, 1), datetime(2016, 1, 3), delta=timedelta(1))
[datetime.datetime(2016, 1, 1, 0, 0), datetime.datetime(2016, 1, 2, 0, 0), datetime.datetime(2016, 1, 3, 0, 0)]
>>> date_range(datetime(2016, 1, 1), datetime(2016, 1, 3), delta='0 0 * * *')
[datetime.datetime(2016, 1, 1, 0, 0), datetime.datetime(2016, 1, 2, 0, 0), datetime.datetime(2016, 1, 3, 0, 0)]
>>> date_range(datetime(2016, 1, 1), datetime(2016, 3, 3), delta="0 0 0 * *")
[datetime.datetime(2016, 1, 1, 0, 0), datetime.datetime(2016, 2, 1, 0, 0), datetime.datetime(2016, 3, 1, 0, 0)]
"""
if not delta:
return []
if end_date and start_date > end_date:
raise Exception("Wait. start_date needs to be before end_date")
if end_date and num:
raise Exception("Wait. Either specify end_date OR num")
if not end_date and not num:
end_date = datetime.now()
delta_iscron = False
if isinstance(delta, six.string_types):
delta_iscron = True
cron = croniter(delta, start_date)
elif isinstance(delta, timedelta):
delta = abs(delta)
l = []
if end_date:
while start_date <= end_date:
l.append(start_date)
if delta_iscron:
start_date = cron.get_next(datetime)
else:
start_date += delta
else:
for i in range(abs(num)):
l.append(start_date)
if delta_iscron:
if num > 0:
start_date = cron.get_next(datetime)
else:
start_date = cron.get_prev(datetime)
else:
if num > 0:
start_date += delta
else:
start_date -= delta
return sorted(l)
def round_time(dt, delta, start_date=datetime.min):
"""
Returns the datetime of the form start_date + i * delta
which is closest to dt for any non-negative integer i.
Note that delta may be a datetime.timedelta or a dateutil.relativedelta
>>> round_time(datetime(2015, 1, 1, 6), timedelta(days=1))
datetime.datetime(2015, 1, 1, 0, 0)
>>> round_time(datetime(2015, 1, 2), relativedelta(months=1))
datetime.datetime(2015, 1, 1, 0, 0)
>>> round_time(datetime(2015, 9, 16, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
datetime.datetime(2015, 9, 16, 0, 0)
>>> round_time(datetime(2015, 9, 15, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
datetime.datetime(2015, 9, 15, 0, 0)
>>> round_time(datetime(2015, 9, 14, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
datetime.datetime(2015, 9, 14, 0, 0)
>>> round_time(datetime(2015, 9, 13, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
datetime.datetime(2015, 9, 14, 0, 0)
"""
if isinstance(delta, six.string_types):
# It's cron based, so it's easy
cron = croniter(delta, start_date)
prev = cron.get_prev(datetime)
if prev == start_date:
return start_date
else:
return prev
# Ignore the microseconds of dt
dt -= timedelta(microseconds=dt.microsecond)
# We are looking for a datetime in the form start_date + i * delta
# which is as close as possible to dt. Since delta could be a relative
# delta we don't know it's exact length in seconds so we cannot rely on
# division to find i. Instead we employ a binary search algorithm, first
    # finding an upper and lower limit and then dissecting the interval until
# we have found the closest match.
# We first search an upper limit for i for which start_date + upper * delta
# exceeds dt.
upper = 1
while start_date + upper*delta < dt:
# To speed up finding an upper limit we grow this exponentially by a
# factor of 2
upper *= 2
# Since upper is the first value for which start_date + upper * delta
    # exceeds dt, upper // 2 is below dt and therefore forms a lower limit
# for the i we are looking for
lower = upper // 2
# We now continue to intersect the interval between
# start_date + lower * delta and start_date + upper * delta
# until we find the closest value
while True:
# Invariant: start + lower * delta < dt <= start + upper * delta
# If start_date + (lower + 1)*delta exceeds dt, then either lower or
# lower+1 has to be the solution we are searching for
if start_date + (lower + 1)*delta >= dt:
# Check if start_date + (lower + 1)*delta or
# start_date + lower*delta is closer to dt and return the solution
if (
(start_date + (lower + 1) * delta) - dt <=
dt - (start_date + lower * delta)):
return start_date + (lower + 1)*delta
else:
return start_date + lower * delta
# We intersect the interval and either replace the lower or upper
# limit with the candidate
candidate = lower + (upper - lower) // 2
if start_date + candidate*delta >= dt:
upper = candidate
else:
lower = candidate
# in the special case when start_date > dt the search for upper will
# immediately stop for upper == 1 which results in lower = upper // 2 = 0
# and this function returns start_date.
def infer_time_unit(time_seconds_arr):
"""
Determine the most appropriate time unit for an array of time durations
specified in seconds.
e.g. 5400 seconds => 'minutes', 36000 seconds => 'hours'
"""
if len(time_seconds_arr) == 0:
return 'hours'
max_time_seconds = max(time_seconds_arr)
if max_time_seconds <= 60*2:
return 'seconds'
elif max_time_seconds <= 60*60*2:
return 'minutes'
elif max_time_seconds <= 24*60*60*2:
return 'hours'
else:
return 'days'
def scale_time_units(time_seconds_arr, unit):
"""
Convert an array of time durations in seconds to the specified time unit.
"""
if unit == 'minutes':
return list(map(lambda x: x*1.0/60, time_seconds_arr))
elif unit == 'hours':
return list(map(lambda x: x*1.0/(60*60), time_seconds_arr))
elif unit == 'days':
return list(map(lambda x: x*1.0/(24*60*60), time_seconds_arr))
return time_seconds_arr
def days_ago(n, hour=0, minute=0, second=0, microsecond=0):
"""
Get a datetime object representing `n` days ago. By default the time is
set to midnight.
"""
today = datetime.today().replace(
hour=hour,
minute=minute,
second=second,
microsecond=microsecond)
return today - timedelta(days=n)
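# Minimal sketch tying the helpers together (the durations are illustrative):
#
#   durations = [30, 120, 5400]           # seconds
#   unit = infer_time_unit(durations)     # -> 'minutes'
#   scaled = scale_time_units(durations, unit)
#   anchor = days_ago(2, hour=6)          # 06:00, two days back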
| apache-2.0 | -8,611,879,256,583,201,000 | 9,011,098,546,499,075,000 | 36.07489 | 115 | 0.609316 | false |
ReganBell/QReview | networkx/algorithms/connectivity/cuts.py | 41 | 22905 | # -*- coding: utf-8 -*-
"""
Flow based cut algorithms
"""
import itertools
import networkx as nx
# Define the default maximum flow function to use in all flow based
# cut algorithms.
from networkx.algorithms.flow import edmonds_karp, shortest_augmenting_path
from networkx.algorithms.flow import build_residual_network
default_flow_func = edmonds_karp
from .utils import (build_auxiliary_node_connectivity,
build_auxiliary_edge_connectivity)
__author__ = '\n'.join(['Jordi Torrents <[email protected]>'])
__all__ = ['minimum_st_node_cut',
'minimum_node_cut',
'minimum_st_edge_cut',
'minimum_edge_cut']
def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None,
residual=None):
"""Returns the edges of the cut-set of a minimum (s, t)-cut.
This function returns the set of edges of minimum cardinality that,
if removed, would destroy all paths among source and target in G.
Edge weights are not considered
Parameters
----------
G : NetworkX graph
Edges of the graph are expected to have an attribute called
'capacity'. If this attribute is not present, the edge is
considered to have infinite capacity.
s : node
Source node for the flow.
t : node
Sink node for the flow.
auxiliary : NetworkX DiGraph
Auxiliary digraph to compute flow based node connectivity. It has
to have a graph attribute called mapping with a dictionary mapping
node names in G and in the auxiliary digraph. If provided
it will be reused instead of recreated. Default value: None.
flow_func : function
A function for computing the maximum flow among a pair of nodes.
The function has to accept at least three parameters: a Digraph,
a source node, and a target node. And return a residual network
that follows NetworkX conventions (see :meth:`maximum_flow` for
details). If flow_func is None, the default maximum flow function
(:meth:`edmonds_karp`) is used. See :meth:`node_connectivity` for
details. The choice of the default function may change from version
to version and should not be relied on. Default value: None.
residual : NetworkX DiGraph
Residual network to compute maximum flow. If provided it will be
reused instead of recreated. Default value: None.
Returns
-------
cutset : set
Set of edges that, if removed from the graph, will disconnect it.
See also
--------
:meth:`minimum_cut`
:meth:`minimum_node_cut`
:meth:`minimum_edge_cut`
:meth:`stoer_wagner`
:meth:`node_connectivity`
:meth:`edge_connectivity`
:meth:`maximum_flow`
:meth:`edmonds_karp`
:meth:`preflow_push`
:meth:`shortest_augmenting_path`
Examples
--------
This function is not imported in the base NetworkX namespace, so you
have to explicitly import it from the connectivity package:
>>> from networkx.algorithms.connectivity import minimum_st_edge_cut
We use in this example the platonic icosahedral graph, which has edge
connectivity 5.
>>> G = nx.icosahedral_graph()
>>> len(minimum_st_edge_cut(G, 0, 6))
5
If you need to compute local edge cuts on several pairs of
nodes in the same graph, it is recommended that you reuse the
data structures that NetworkX uses in the computation: the
auxiliary digraph for edge connectivity, and the residual
network for the underlying maximum flow computation.
Example of how to compute local edge cuts among all pairs of
nodes of the platonic icosahedral graph reusing the data
structures.
>>> import itertools
>>> # You also have to explicitly import the function for
>>> # building the auxiliary digraph from the connectivity package
>>> from networkx.algorithms.connectivity import (
... build_auxiliary_edge_connectivity)
>>> H = build_auxiliary_edge_connectivity(G)
>>> # And the function for building the residual network from the
>>> # flow package
>>> from networkx.algorithms.flow import build_residual_network
>>> # Note that the auxiliary digraph has an edge attribute named capacity
>>> R = build_residual_network(H, 'capacity')
>>> result = dict.fromkeys(G, dict())
>>> # Reuse the auxiliary digraph and the residual network by passing them
>>> # as parameters
>>> for u, v in itertools.combinations(G, 2):
... k = len(minimum_st_edge_cut(G, u, v, auxiliary=H, residual=R))
... result[u][v] = k
>>> all(result[u][v] == 5 for u, v in itertools.combinations(G, 2))
True
You can also use alternative flow algorithms for computing edge
cuts. For instance, in dense networks the algorithm
:meth:`shortest_augmenting_path` will usually perform better than
the default :meth:`edmonds_karp` which is faster for sparse
networks with highly skewed degree distributions. Alternative flow
functions have to be explicitly imported from the flow package.
>>> from networkx.algorithms.flow import shortest_augmenting_path
>>> len(minimum_st_edge_cut(G, 0, 6, flow_func=shortest_augmenting_path))
5
"""
if flow_func is None:
flow_func = default_flow_func
if auxiliary is None:
H = build_auxiliary_edge_connectivity(G)
else:
H = auxiliary
kwargs = dict(capacity='capacity', flow_func=flow_func, residual=residual)
cut_value, partition = nx.minimum_cut(H, s, t, **kwargs)
reachable, non_reachable = partition
# Any edge in the original graph linking the two sets in the
# partition is part of the edge cutset
cutset = set()
for u, nbrs in ((n, G[n]) for n in reachable):
cutset.update((u, v) for v in nbrs if v in non_reachable)
return cutset
def minimum_st_node_cut(G, s, t, flow_func=None, auxiliary=None, residual=None):
r"""Returns a set of nodes of minimum cardinality that disconnect source
from target in G.
This function returns the set of nodes of minimum cardinality that,
if removed, would destroy all paths among source and target in G.
Parameters
----------
G : NetworkX graph
s : node
Source node.
t : node
Target node.
flow_func : function
A function for computing the maximum flow among a pair of nodes.
The function has to accept at least three parameters: a Digraph,
a source node, and a target node. And return a residual network
that follows NetworkX conventions (see :meth:`maximum_flow` for
details). If flow_func is None, the default maximum flow function
(:meth:`edmonds_karp`) is used. See below for details. The choice
of the default function may change from version to version and
should not be relied on. Default value: None.
auxiliary : NetworkX DiGraph
Auxiliary digraph to compute flow based node connectivity. It has
to have a graph attribute called mapping with a dictionary mapping
node names in G and in the auxiliary digraph. If provided
it will be reused instead of recreated. Default value: None.
residual : NetworkX DiGraph
Residual network to compute maximum flow. If provided it will be
reused instead of recreated. Default value: None.
Returns
-------
cutset : set
Set of nodes that, if removed, would destroy all paths between
source and target in G.
Examples
--------
This function is not imported in the base NetworkX namespace, so you
have to explicitly import it from the connectivity package:
>>> from networkx.algorithms.connectivity import minimum_st_node_cut
We use in this example the platonic icosahedral graph, which has node
connectivity 5.
>>> G = nx.icosahedral_graph()
>>> len(minimum_st_node_cut(G, 0, 6))
5
If you need to compute local st cuts between several pairs of
nodes in the same graph, it is recommended that you reuse the
data structures that NetworkX uses in the computation: the
auxiliary digraph for node connectivity and node cuts, and the
residual network for the underlying maximum flow computation.
Example of how to compute local st node cuts reusing the data
structures:
>>> # You also have to explicitly import the function for
>>> # building the auxiliary digraph from the connectivity package
>>> from networkx.algorithms.connectivity import (
... build_auxiliary_node_connectivity)
>>> H = build_auxiliary_node_connectivity(G)
>>> # And the function for building the residual network from the
>>> # flow package
>>> from networkx.algorithms.flow import build_residual_network
>>> # Note that the auxiliary digraph has an edge attribute named capacity
>>> R = build_residual_network(H, 'capacity')
>>> # Reuse the auxiliary digraph and the residual network by passing them
>>> # as parameters
>>> len(minimum_st_node_cut(G, 0, 6, auxiliary=H, residual=R))
5
You can also use alternative flow algorithms for computing minimum st
node cuts. For instance, in dense networks the algorithm
:meth:`shortest_augmenting_path` will usually perform better than
the default :meth:`edmonds_karp` which is faster for sparse
networks with highly skewed degree distributions. Alternative flow
functions have to be explicitly imported from the flow package.
>>> from networkx.algorithms.flow import shortest_augmenting_path
>>> len(minimum_st_node_cut(G, 0, 6, flow_func=shortest_augmenting_path))
5
Notes
-----
This is a flow based implementation of minimum node cut. The algorithm
is based in solving a number of maximum flow computations to determine
the capacity of the minimum cut on an auxiliary directed network that
corresponds to the minimum node cut of G. It handles both directed
and undirected graphs. This implementation is based on algorithm 11
in [1]_.
See also
--------
:meth:`minimum_node_cut`
:meth:`minimum_edge_cut`
:meth:`stoer_wagner`
:meth:`node_connectivity`
:meth:`edge_connectivity`
:meth:`maximum_flow`
:meth:`edmonds_karp`
:meth:`preflow_push`
:meth:`shortest_augmenting_path`
References
----------
.. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
"""
if auxiliary is None:
H = build_auxiliary_node_connectivity(G)
else:
H = auxiliary
mapping = H.graph.get('mapping', None)
if mapping is None:
raise nx.NetworkXError('Invalid auxiliary digraph.')
kwargs = dict(flow_func=flow_func, residual=residual, auxiliary=H)
# The edge cut in the auxiliary digraph corresponds to the node cut in the
# original graph.
edge_cut = minimum_st_edge_cut(H, '%sB' % mapping[s], '%sA' % mapping[t],
**kwargs)
# Each node in the original graph maps to two nodes of the auxiliary graph
node_cut = set(H.node[node]['id'] for edge in edge_cut for node in edge)
return node_cut - set([s, t])
def minimum_node_cut(G, s=None, t=None, flow_func=None):
r"""Returns a set of nodes of minimum cardinality that disconnects G.
If source and target nodes are provided, this function returns the
set of nodes of minimum cardinality that, if removed, would destroy
all paths among source and target in G. If not, it returns a set
of nodes of minimum cardinality that disconnects G.
Parameters
----------
G : NetworkX graph
s : node
Source node. Optional. Default value: None.
t : node
Target node. Optional. Default value: None.
flow_func : function
A function for computing the maximum flow among a pair of nodes.
The function has to accept at least three parameters: a Digraph,
a source node, and a target node. And return a residual network
that follows NetworkX conventions (see :meth:`maximum_flow` for
details). If flow_func is None, the default maximum flow function
(:meth:`edmonds_karp`) is used. See below for details. The
choice of the default function may change from version
to version and should not be relied on. Default value: None.
Returns
-------
cutset : set
Set of nodes that, if removed, would disconnect G. If source
        and target nodes are provided, the set contains the nodes that,
        if removed, would destroy all paths between source and target.
Examples
--------
>>> # Platonic icosahedral graph has node connectivity 5
>>> G = nx.icosahedral_graph()
>>> node_cut = nx.minimum_node_cut(G)
>>> len(node_cut)
5
You can use alternative flow algorithms for the underlying maximum
flow computation. In dense networks the algorithm
:meth:`shortest_augmenting_path` will usually perform better
than the default :meth:`edmonds_karp`, which is faster for
sparse networks with highly skewed degree distributions. Alternative
flow functions have to be explicitly imported from the flow package.
>>> from networkx.algorithms.flow import shortest_augmenting_path
>>> node_cut == nx.minimum_node_cut(G, flow_func=shortest_augmenting_path)
True
If you specify a pair of nodes (source and target) as parameters,
this function returns a local st node cut.
>>> len(nx.minimum_node_cut(G, 3, 7))
5
If you need to perform several local st cuts among different
pairs of nodes on the same graph, it is recommended that you reuse
the data structures used in the maximum flow computations. See
:meth:`minimum_st_node_cut` for details.
Notes
-----
This is a flow based implementation of minimum node cut. The algorithm
is based in solving a number of maximum flow computations to determine
the capacity of the minimum cut on an auxiliary directed network that
corresponds to the minimum node cut of G. It handles both directed
and undirected graphs. This implementation is based on algorithm 11
in [1]_.
See also
--------
:meth:`minimum_st_node_cut`
:meth:`minimum_cut`
:meth:`minimum_edge_cut`
:meth:`stoer_wagner`
:meth:`node_connectivity`
:meth:`edge_connectivity`
:meth:`maximum_flow`
:meth:`edmonds_karp`
:meth:`preflow_push`
:meth:`shortest_augmenting_path`
References
----------
.. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
"""
if (s is not None and t is None) or (s is None and t is not None):
raise nx.NetworkXError('Both source and target must be specified.')
# Local minimum node cut.
if s is not None and t is not None:
if s not in G:
raise nx.NetworkXError('node %s not in graph' % s)
if t not in G:
raise nx.NetworkXError('node %s not in graph' % t)
return minimum_st_node_cut(G, s, t, flow_func=flow_func)
# Global minimum node cut.
# Analogous to algorithm 11 for global node connectivity in [1].
if G.is_directed():
if not nx.is_weakly_connected(G):
raise nx.NetworkXError('Input graph is not connected')
iter_func = itertools.permutations
def neighbors(v):
return itertools.chain.from_iterable([G.predecessors_iter(v),
G.successors_iter(v)])
else:
if not nx.is_connected(G):
raise nx.NetworkXError('Input graph is not connected')
iter_func = itertools.combinations
neighbors = G.neighbors_iter
# Reuse the auxiliary digraph and the residual network.
H = build_auxiliary_node_connectivity(G)
R = build_residual_network(H, 'capacity')
kwargs = dict(flow_func=flow_func, auxiliary=H, residual=R)
# Choose a node with minimum degree.
v = min(G, key=G.degree)
# Initial node cutset is all neighbors of the node with minimum degree.
min_cut = set(G[v])
# Compute st node cuts between v and all its non-neighbor nodes in G.
for w in set(G) - set(neighbors(v)) - set([v]):
this_cut = minimum_st_node_cut(G, v, w, **kwargs)
if len(min_cut) >= len(this_cut):
min_cut = this_cut
# Also for non-adjacent pairs of neighbors of v.
for x, y in iter_func(neighbors(v), 2):
if y in G[x]:
continue
this_cut = minimum_st_node_cut(G, x, y, **kwargs)
if len(min_cut) >= len(this_cut):
min_cut = this_cut
return min_cut
def minimum_edge_cut(G, s=None, t=None, flow_func=None):
r"""Returns a set of edges of minimum cardinality that disconnects G.
If source and target nodes are provided, this function returns the
set of edges of minimum cardinality that, if removed, would break
all paths between source and target in G. If not, it returns a set of
edges of minimum cardinality that disconnects G.
Parameters
----------
G : NetworkX graph
s : node
Source node. Optional. Default value: None.
t : node
Target node. Optional. Default value: None.
flow_func : function
A function for computing the maximum flow between a pair of nodes.
The function has to accept at least three parameters: a Digraph,
a source node, and a target node, and return a residual network
that follows NetworkX conventions (see :meth:`maximum_flow` for
details). If flow_func is None, the default maximum flow function
(:meth:`edmonds_karp`) is used. See below for details. The
choice of the default function may change from version
to version and should not be relied on. Default value: None.
Returns
-------
cutset : set
Set of edges that, if removed, would disconnect G. If source
and target nodes are provided, the set contains the edges that,
if removed, would destroy all paths between source and target.
Examples
--------
>>> # Platonic icosahedral graph has edge connectivity 5
>>> G = nx.icosahedral_graph()
>>> len(nx.minimum_edge_cut(G))
5
You can use alternative flow algorithms for the underlying
maximum flow computation. In dense networks the algorithm
:meth:`shortest_augmenting_path` will usually perform better
than the default :meth:`edmonds_karp`, which is faster for
sparse networks with highly skewed degree distributions.
Alternative flow functions have to be explicitly imported
from the flow package.
>>> from networkx.algorithms.flow import shortest_augmenting_path
>>> len(nx.minimum_edge_cut(G, flow_func=shortest_augmenting_path))
5
If you specify a pair of nodes (source and target) as parameters,
this function returns the value of local edge connectivity.
>>> nx.edge_connectivity(G, 3, 7)
5
If you need to perform several local computations among different
pairs of nodes on the same graph, it is recommended that you reuse
the data structures used in the maximum flow computations. See
:meth:`local_edge_connectivity` for details.
Notes
-----
This is a flow-based implementation of minimum edge cut. For
undirected graphs the algorithm works by finding a 'small' dominating
set of nodes of G (see algorithm 7 in [1]_) and computing the maximum
flow between an arbitrary node in the dominating set and the rest of
nodes in it. This is an implementation of algorithm 6 in [1]_. For
directed graphs, the algorithm makes n calls to the max flow function.
It is an implementation of algorithm 8 in [1]_.
See also
--------
:meth:`minimum_st_edge_cut`
:meth:`minimum_node_cut`
:meth:`stoer_wagner`
:meth:`node_connectivity`
:meth:`edge_connectivity`
:meth:`maximum_flow`
:meth:`edmonds_karp`
:meth:`preflow_push`
:meth:`shortest_augmenting_path`
References
----------
.. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
"""
if (s is not None and t is None) or (s is None and t is not None):
raise nx.NetworkXError('Both source and target must be specified.')
# reuse auxiliary digraph and residual network
H = build_auxiliary_edge_connectivity(G)
R = build_residual_network(H, 'capacity')
kwargs = dict(flow_func=flow_func, residual=R, auxiliary=H)
# Local minimum edge cut if s and t are not None
if s is not None and t is not None:
if s not in G:
raise nx.NetworkXError('node %s not in graph' % s)
if t not in G:
raise nx.NetworkXError('node %s not in graph' % t)
return minimum_st_edge_cut(H, s, t, **kwargs)
# Global minimum edge cut
# Analogous to the algorithm for global edge connectivity
if G.is_directed():
# Based on algorithm 8 in [1]
if not nx.is_weakly_connected(G):
raise nx.NetworkXError('Input graph is not connected')
# Initial cutset is all edges of a node with minimum degree
node = min(G, key=G.degree)
min_cut = G.edges(node)
nodes = G.nodes()
n = len(nodes)
for i in range(n):
try:
this_cut = minimum_st_edge_cut(H, nodes[i], nodes[i+1], **kwargs)
if len(this_cut) <= len(min_cut):
min_cut = this_cut
except IndexError: # Last node!
this_cut = minimum_st_edge_cut(H, nodes[i], nodes[0], **kwargs)
if len(this_cut) <= len(min_cut):
min_cut = this_cut
return min_cut
else: # undirected
# Based on algorithm 6 in [1]
if not nx.is_connected(G):
raise nx.NetworkXError('Input graph is not connected')
# Initial cutset is all edges of a node with minimum degree
node = min(G, key=G.degree)
min_cut = G.edges(node)
# A dominating set is \lambda-covering
# We need a dominating set with at least two nodes
for node in G:
D = nx.dominating_set(G, start_with=node)
v = D.pop()
if D:
break
else:
# in complete graphs the dominating set will always be of one node
# thus we return min_cut, which now contains the edges of a node
# with minimum degree
return min_cut
for w in D:
this_cut = minimum_st_edge_cut(H, v, w, **kwargs)
if len(this_cut) <= len(min_cut):
min_cut = this_cut
return min_cut
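if __name__ == '__main__':
    # A minimal usage sketch, assuming a NetworkX version matching this
    # module (pre-2.0 APIs).  It reproduces the docstring examples on the
    # icosahedral graph, then reuses the auxiliary digraph and residual
    # network, as the docstrings above recommend, for a local st node cut.
    G = nx.icosahedral_graph()
    print(len(minimum_node_cut(G)))     # 5
    print(len(minimum_edge_cut(G)))     # 5
    H = build_auxiliary_node_connectivity(G)
    R = build_residual_network(H, 'capacity')
    reuse = dict(auxiliary=H, residual=R)
    print(len(minimum_st_node_cut(G, 3, 7, **reuse)))   # 5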
| bsd-3-clause | 3,106,514,625,052,716,500 | 5,982,772,776,788,496,000 | 37.048173 | 81 | 0.659812 | false |
Srisai85/scipy | scipy/linalg/lapack.py | 46 | 5636 | """
Low-level LAPACK functions
==========================
This module contains low-level functions from the LAPACK library.
.. versionadded:: 0.12.0
.. warning::
These functions do little to no error checking.
It is possible to cause crashes by mis-using them,
so prefer using the higher-level routines in `scipy.linalg`.
Finding functions
=================
.. autosummary::
get_lapack_funcs
All functions
=============
.. autosummary::
:toctree: generated/
sgbsv
dgbsv
cgbsv
zgbsv
sgbtrf
dgbtrf
cgbtrf
zgbtrf
sgbtrs
dgbtrs
cgbtrs
zgbtrs
sgebal
dgebal
cgebal
zgebal
sgees
dgees
cgees
zgees
sgeev
dgeev
cgeev
zgeev
sgeev_lwork
dgeev_lwork
cgeev_lwork
zgeev_lwork
sgegv
dgegv
cgegv
zgegv
sgehrd
dgehrd
cgehrd
zgehrd
sgehrd_lwork
dgehrd_lwork
cgehrd_lwork
zgehrd_lwork
sgelss
dgelss
cgelss
zgelss
sgelss_lwork
dgelss_lwork
cgelss_lwork
zgelss_lwork
sgelsd
dgelsd
cgelsd
zgelsd
sgelsd_lwork
dgelsd_lwork
cgelsd_lwork
zgelsd_lwork
sgelsy
dgelsy
cgelsy
zgelsy
sgelsy_lwork
dgelsy_lwork
cgelsy_lwork
zgelsy_lwork
sgeqp3
dgeqp3
cgeqp3
zgeqp3
sgeqrf
dgeqrf
cgeqrf
zgeqrf
sgerqf
dgerqf
cgerqf
zgerqf
sgesdd
dgesdd
cgesdd
zgesdd
sgesdd_lwork
dgesdd_lwork
cgesdd_lwork
zgesdd_lwork
sgesv
dgesv
cgesv
zgesv
sgetrf
dgetrf
cgetrf
zgetrf
sgetri
dgetri
cgetri
zgetri
sgetri_lwork
dgetri_lwork
cgetri_lwork
zgetri_lwork
sgetrs
dgetrs
cgetrs
zgetrs
sgges
dgges
cgges
zgges
sggev
dggev
cggev
zggev
chbevd
zhbevd
chbevx
zhbevx
cheev
zheev
cheevd
zheevd
cheevr
zheevr
chegv
zhegv
chegvd
zhegvd
chegvx
zhegvx
slarf
dlarf
clarf
zlarf
slarfg
dlarfg
clarfg
zlarfg
slartg
dlartg
clartg
zlartg
dlasd4
slasd4
slaswp
dlaswp
claswp
zlaswp
slauum
dlauum
clauum
zlauum
spbsv
dpbsv
cpbsv
zpbsv
spbtrf
dpbtrf
cpbtrf
zpbtrf
spbtrs
dpbtrs
cpbtrs
zpbtrs
sposv
dposv
cposv
zposv
spotrf
dpotrf
cpotrf
zpotrf
spotri
dpotri
cpotri
zpotri
spotrs
dpotrs
cpotrs
zpotrs
crot
zrot
strsyl
dtrsyl
ctrsyl
ztrsyl
strtri
dtrtri
ctrtri
ztrtri
strtrs
dtrtrs
ctrtrs
ztrtrs
cunghr
zunghr
cungqr
zungqr
cungrq
zungrq
cunmqr
zunmqr
sgtsv
dgtsv
cgtsv
zgtsv
sptsv
dptsv
cptsv
zptsv
slamch
dlamch
sorghr
dorghr
sorgqr
dorgqr
sorgrq
dorgrq
sormqr
dormqr
ssbev
dsbev
ssbevd
dsbevd
ssbevx
dsbevx
ssyev
dsyev
ssyevd
dsyevd
ssyevr
dsyevr
ssygv
dsygv
ssygvd
dsygvd
ssygvx
dsygvx
slange
dlange
clange
zlange
"""
#
# Author: Pearu Peterson, March 2002
#
from __future__ import division, print_function, absolute_import
__all__ = ['get_lapack_funcs']
from .blas import _get_funcs
# Backward compatibility:
from .blas import find_best_blas_type as find_best_lapack_type
from scipy.linalg import _flapack
try:
from scipy.linalg import _clapack
except ImportError:
_clapack = None
# Backward compatibility
from scipy._lib._util import DeprecatedImport as _DeprecatedImport
clapack = _DeprecatedImport("scipy.linalg.blas.clapack", "scipy.linalg.lapack")
flapack = _DeprecatedImport("scipy.linalg.blas.flapack", "scipy.linalg.lapack")
# Expose all functions (only flapack --- clapack is an implementation detail)
empty_module = None
from scipy.linalg._flapack import *
del empty_module
# some convenience alias for complex functions
_lapack_alias = {
'corghr': 'cunghr', 'zorghr': 'zunghr',
'corghr_lwork': 'cunghr_lwork', 'zorghr_lwork': 'zunghr_lwork',
'corgqr': 'cungqr', 'zorgqr': 'zungqr',
'cormqr': 'cunmqr', 'zormqr': 'zunmqr',
'corgrq': 'cungrq', 'zorgrq': 'zungrq',
}
def get_lapack_funcs(names, arrays=(), dtype=None):
"""Return available LAPACK function objects from names.
Arrays are used to determine the optimal prefix of LAPACK routines.
Parameters
----------
names : str or sequence of str
Name(s) of LAPACK functions without type prefix.
arrays : sequence of ndarrays, optional
Arrays can be given to determine optimal prefix of LAPACK
routines. If not given, double-precision routines will be
used, otherwise the most generic type in arrays will be used.
dtype : str or dtype, optional
Data-type specifier. Not used if `arrays` is non-empty.
Returns
-------
funcs : list
List containing the found function(s).
Notes
-----
This routine automatically chooses between Fortran/C
interfaces. Fortran code is used whenever possible for arrays with
column-major order. In all other cases, C code is preferred.
In LAPACK, the naming convention is that all functions start with a
type prefix, which depends on the type of the principal
matrix. These can be one of {'s', 'd', 'c', 'z'} for the numpy
types {float32, float64, complex64, complex128} respectevely, and
are stored in attribute `typecode` of the returned functions.
"""
return _get_funcs(names, arrays, dtype,
"LAPACK", _flapack, _clapack,
"flapack", "clapack", _lapack_alias)
| bsd-3-clause | -325,591,641,331,396,700 | -1,230,575,754,148,016,600 | 12.387173 | 79 | 0.633783 | false |
maohongyuan/kbengine | kbe/src/lib/python/Lib/encodings/iso8859_4.py | 272 | 13376 | """ Python Character Mapping Codec iso8859_4 generated from 'MAPPINGS/ISO8859/8859-4.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-4',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\x80' # 0x80 -> <control>
'\x81' # 0x81 -> <control>
'\x82' # 0x82 -> <control>
'\x83' # 0x83 -> <control>
'\x84' # 0x84 -> <control>
'\x85' # 0x85 -> <control>
'\x86' # 0x86 -> <control>
'\x87' # 0x87 -> <control>
'\x88' # 0x88 -> <control>
'\x89' # 0x89 -> <control>
'\x8a' # 0x8A -> <control>
'\x8b' # 0x8B -> <control>
'\x8c' # 0x8C -> <control>
'\x8d' # 0x8D -> <control>
'\x8e' # 0x8E -> <control>
'\x8f' # 0x8F -> <control>
'\x90' # 0x90 -> <control>
'\x91' # 0x91 -> <control>
'\x92' # 0x92 -> <control>
'\x93' # 0x93 -> <control>
'\x94' # 0x94 -> <control>
'\x95' # 0x95 -> <control>
'\x96' # 0x96 -> <control>
'\x97' # 0x97 -> <control>
'\x98' # 0x98 -> <control>
'\x99' # 0x99 -> <control>
'\x9a' # 0x9A -> <control>
'\x9b' # 0x9B -> <control>
'\x9c' # 0x9C -> <control>
'\x9d' # 0x9D -> <control>
'\x9e' # 0x9E -> <control>
'\x9f' # 0x9F -> <control>
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\u0104' # 0xA1 -> LATIN CAPITAL LETTER A WITH OGONEK
'\u0138' # 0xA2 -> LATIN SMALL LETTER KRA
'\u0156' # 0xA3 -> LATIN CAPITAL LETTER R WITH CEDILLA
'\xa4' # 0xA4 -> CURRENCY SIGN
'\u0128' # 0xA5 -> LATIN CAPITAL LETTER I WITH TILDE
'\u013b' # 0xA6 -> LATIN CAPITAL LETTER L WITH CEDILLA
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\u0160' # 0xA9 -> LATIN CAPITAL LETTER S WITH CARON
'\u0112' # 0xAA -> LATIN CAPITAL LETTER E WITH MACRON
'\u0122' # 0xAB -> LATIN CAPITAL LETTER G WITH CEDILLA
'\u0166' # 0xAC -> LATIN CAPITAL LETTER T WITH STROKE
'\xad' # 0xAD -> SOFT HYPHEN
'\u017d' # 0xAE -> LATIN CAPITAL LETTER Z WITH CARON
'\xaf' # 0xAF -> MACRON
'\xb0' # 0xB0 -> DEGREE SIGN
'\u0105' # 0xB1 -> LATIN SMALL LETTER A WITH OGONEK
'\u02db' # 0xB2 -> OGONEK
'\u0157' # 0xB3 -> LATIN SMALL LETTER R WITH CEDILLA
'\xb4' # 0xB4 -> ACUTE ACCENT
'\u0129' # 0xB5 -> LATIN SMALL LETTER I WITH TILDE
'\u013c' # 0xB6 -> LATIN SMALL LETTER L WITH CEDILLA
'\u02c7' # 0xB7 -> CARON
'\xb8' # 0xB8 -> CEDILLA
'\u0161' # 0xB9 -> LATIN SMALL LETTER S WITH CARON
'\u0113' # 0xBA -> LATIN SMALL LETTER E WITH MACRON
'\u0123' # 0xBB -> LATIN SMALL LETTER G WITH CEDILLA
'\u0167' # 0xBC -> LATIN SMALL LETTER T WITH STROKE
'\u014a' # 0xBD -> LATIN CAPITAL LETTER ENG
'\u017e' # 0xBE -> LATIN SMALL LETTER Z WITH CARON
'\u014b' # 0xBF -> LATIN SMALL LETTER ENG
'\u0100' # 0xC0 -> LATIN CAPITAL LETTER A WITH MACRON
'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
'\u012e' # 0xC7 -> LATIN CAPITAL LETTER I WITH OGONEK
'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
'\u0118' # 0xCA -> LATIN CAPITAL LETTER E WITH OGONEK
'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\u0116' # 0xCC -> LATIN CAPITAL LETTER E WITH DOT ABOVE
'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\u012a' # 0xCF -> LATIN CAPITAL LETTER I WITH MACRON
'\u0110' # 0xD0 -> LATIN CAPITAL LETTER D WITH STROKE
'\u0145' # 0xD1 -> LATIN CAPITAL LETTER N WITH CEDILLA
'\u014c' # 0xD2 -> LATIN CAPITAL LETTER O WITH MACRON
'\u0136' # 0xD3 -> LATIN CAPITAL LETTER K WITH CEDILLA
'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd7' # 0xD7 -> MULTIPLICATION SIGN
'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
'\u0172' # 0xD9 -> LATIN CAPITAL LETTER U WITH OGONEK
'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\u0168' # 0xDD -> LATIN CAPITAL LETTER U WITH TILDE
'\u016a' # 0xDE -> LATIN CAPITAL LETTER U WITH MACRON
'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
'\u0101' # 0xE0 -> LATIN SMALL LETTER A WITH MACRON
'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
'\u012f' # 0xE7 -> LATIN SMALL LETTER I WITH OGONEK
'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
'\u0119' # 0xEA -> LATIN SMALL LETTER E WITH OGONEK
'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
'\u0117' # 0xEC -> LATIN SMALL LETTER E WITH DOT ABOVE
'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\u012b' # 0xEF -> LATIN SMALL LETTER I WITH MACRON
'\u0111' # 0xF0 -> LATIN SMALL LETTER D WITH STROKE
'\u0146' # 0xF1 -> LATIN SMALL LETTER N WITH CEDILLA
'\u014d' # 0xF2 -> LATIN SMALL LETTER O WITH MACRON
'\u0137' # 0xF3 -> LATIN SMALL LETTER K WITH CEDILLA
'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf7' # 0xF7 -> DIVISION SIGN
'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
'\u0173' # 0xF9 -> LATIN SMALL LETTER U WITH OGONEK
'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
'\u0169' # 0xFD -> LATIN SMALL LETTER U WITH TILDE
'\u016b' # 0xFE -> LATIN SMALL LETTER U WITH MACRON
'\u02d9' # 0xFF -> DOT ABOVE
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
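if __name__ == '__main__':
    # A minimal round-trip sketch driven by the tables above (no codec
    # registration required): per the decoding table, 0xA1 maps to U+0104
    # LATIN CAPITAL LETTER A WITH OGONEK and 0xB9 maps to U+0161 LATIN
    # SMALL LETTER S WITH CARON.
    text = '\u0104\u0161'
    raw = codecs.charmap_encode(text, 'strict', encoding_table)[0]
    assert raw == b'\xa1\xb9'
    assert codecs.charmap_decode(raw, 'strict', decoding_table)[0] == text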
| lgpl-3.0 | 5,315,155,932,837,388,000 | -3,346,769,244,691,067,400 | 42.570033 | 107 | 0.527288 | false |
Belxjander/Kirito | Python-2.3.3-Amiga/Tools/pynche/ChipViewer.py | 1 | 5028 | """Chip viewer and widget.
In the lower left corner of the main Pynche window, you will see two
ChipWidgets, one for the selected color and one for the nearest color. The
selected color is the actual RGB value expressed as an X11 #COLOR name. The
nearest color is the named color from the X11 database that is closest to the
selected color in 3D space. There may be other colors equally close, but the
nearest one is the first one found.
Clicking on the nearest color chip selects that named color.
The ChipViewer class includes the entire lower left quadrant; i.e. both the
selected and nearest ChipWidgets.
"""
from types import StringType
from Tkinter import *
import ColorDB
class ChipWidget:
_WIDTH = 150
_HEIGHT = 80
def __init__(self,
master = None,
width = _WIDTH,
height = _HEIGHT,
text = 'Color',
initialcolor = 'blue',
presscmd = None,
releasecmd = None):
# create the text label
self.__label = Label(master, text=text)
self.__label.grid(row=0, column=0)
# create the color chip, implemented as a frame
self.__chip = Frame(master, relief=RAISED, borderwidth=2,
width=width,
height=height,
background=initialcolor)
self.__chip.grid(row=1, column=0)
# create the color name
self.__namevar = StringVar()
self.__namevar.set(initialcolor)
self.__name = Entry(master, textvariable=self.__namevar,
relief=FLAT, justify=CENTER, state=DISABLED,
font=self.__label['font'])
self.__name.grid(row=2, column=0)
# create the message area
self.__msgvar = StringVar()
self.__name = Entry(master, textvariable=self.__msgvar,
relief=FLAT, justify=CENTER, state=DISABLED,
font=self.__label['font'])
self.__name.grid(row=3, column=0)
# set bindings
if presscmd:
self.__chip.bind('<ButtonPress-1>', presscmd)
if releasecmd:
self.__chip.bind('<ButtonRelease-1>', releasecmd)
def set_color(self, color):
self.__chip.config(background=color)
def get_color(self):
return self.__chip['background']
def set_name(self, colorname):
self.__namevar.set(colorname)
def set_message(self, message):
self.__msgvar.set(message)
def press(self):
self.__chip.configure(relief=SUNKEN)
def release(self):
self.__chip.configure(relief=RAISED)
class ChipViewer:
def __init__(self, switchboard, master=None):
self.__sb = switchboard
self.__frame = Frame(master, relief=RAISED, borderwidth=1)
self.__frame.grid(row=3, column=0, ipadx=5, sticky='NSEW')
# create the chip that will display the currently selected color
# exactly
self.__sframe = Frame(self.__frame)
self.__sframe.grid(row=0, column=0)
self.__selected = ChipWidget(self.__sframe, text='Selected')
# create the chip that will display the nearest real X11 color
# database color name
self.__nframe = Frame(self.__frame)
self.__nframe.grid(row=0, column=1)
self.__nearest = ChipWidget(self.__nframe, text='Nearest',
presscmd = self.__buttonpress,
releasecmd = self.__buttonrelease)
def update_yourself(self, red, green, blue):
# Selected always shows the #rrggbb name of the color, nearest always
# shows the name of the nearest color in the database. BAW: should
# an exact match be indicated in some way?
#
# Always use the #rrggbb style to actually set the color, since we may
# not be using X color names (e.g. "web-safe" names)
colordb = self.__sb.colordb()
rgbtuple = (red, green, blue)
rrggbb = ColorDB.triplet_to_rrggbb(rgbtuple)
# find the nearest
nearest = colordb.nearest(red, green, blue)
nearest_tuple = colordb.find_byname(nearest)
nearest_rrggbb = ColorDB.triplet_to_rrggbb(nearest_tuple)
self.__selected.set_color(rrggbb)
self.__nearest.set_color(nearest_rrggbb)
# set the name and messages areas
self.__selected.set_name(rrggbb)
if rrggbb == nearest_rrggbb:
self.__selected.set_message(nearest)
else:
self.__selected.set_message('')
self.__nearest.set_name(nearest_rrggbb)
self.__nearest.set_message(nearest)
def __buttonpress(self, event=None):
self.__nearest.press()
def __buttonrelease(self, event=None):
self.__nearest.release()
rrggbb = self.__nearest.get_color()
red, green, blue = ColorDB.rrggbb_to_triplet(rrggbb)
self.__sb.update_views(red, green, blue)
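if __name__ == '__main__':
    # A minimal standalone sketch.  ChipViewer needs a full Pynche
    # switchboard, so only ChipWidget is exercised here; the color value
    # is an arbitrary example.
    root = Tk()
    demo = ChipWidget(root, text='Demo', initialcolor='#336699')
    demo.set_name('#336699')
    demo.set_message('standalone ChipWidget demo')
    root.mainloop()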
| gpl-3.0 | 8,242,805,875,189,429,000 | -6,490,895,186,254,791,000 | 36.804511 | 79 | 0.594073 | false |
wkoathp/glance | glance/search/plugins/metadefs.py | 6 | 9241 | # Copyright 2015 Intel Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import six
import glance.db
from glance.db.sqlalchemy import models_metadef as models
from glance.search.plugins import base
from glance.search.plugins import metadefs_notification_handler
class MetadefIndex(base.IndexBase):
def __init__(self):
super(MetadefIndex, self).__init__()
self.db_api = glance.db.get_api()
def get_index_name(self):
return 'glance'
def get_document_type(self):
return 'metadef'
def get_mapping(self):
property_mapping = {
'dynamic': True,
'type': 'nested',
'properties': {
'property': {'type': 'string', 'index': 'not_analyzed'},
'type': {'type': 'string'},
'title': {'type': 'string'},
'description': {'type': 'string'},
}
}
mapping = {
'_id': {
'path': 'namespace',
},
'properties': {
'display_name': {'type': 'string'},
'description': {'type': 'string'},
'namespace': {'type': 'string', 'index': 'not_analyzed'},
'owner': {'type': 'string', 'index': 'not_analyzed'},
'visibility': {'type': 'string', 'index': 'not_analyzed'},
'resource_types': {
'type': 'nested',
'properties': {
'name': {'type': 'string'},
'prefix': {'type': 'string'},
'properties_target': {'type': 'string'},
},
},
'objects': {
'type': 'nested',
'properties': {
'id': {'type': 'string', 'index': 'not_analyzed'},
'name': {'type': 'string'},
'description': {'type': 'string'},
'properties': property_mapping,
}
},
'properties': property_mapping,
'tags': {
'type': 'nested',
'properties': {
'name': {'type': 'string'},
}
}
},
}
return mapping
def get_rbac_filter(self, request_context):
# TODO(krykowski): Define base get_rbac_filter in IndexBase class
# which will provide some common subset of query pieces.
# Something like:
# def get_common_context_pieces(self, request_context):
# return [{'term': {'owner': request_context.owner,
# 'type': {'value': self.get_document_type()}}]
return [
{
"and": [
{
'or': [
{
'term': {
'owner': request_context.owner
}
},
{
'term': {
'visibility': 'public'
}
}
]
},
{
'type': {
'value': self.get_document_type()
}
}
]
}
]
def get_objects(self):
session = self.db_api.get_session()
namespaces = session.query(models.MetadefNamespace).all()
resource_types = session.query(models.MetadefResourceType).all()
resource_types_map = {r.id: r.name for r in resource_types}
for namespace in namespaces:
namespace.resource_types = self.get_namespace_resource_types(
namespace.id, resource_types_map)
namespace.objects = self.get_namespace_objects(namespace.id)
namespace.properties = self.get_namespace_properties(namespace.id)
namespace.tags = self.get_namespace_tags(namespace.id)
return namespaces
def get_namespace_resource_types(self, namespace_id, resource_types):
session = self.db_api.get_session()
namespace_resource_types = session.query(
models.MetadefNamespaceResourceType
).filter_by(namespace_id=namespace_id)
resource_associations = [{
'prefix': r.prefix,
'properties_target': r.properties_target,
'name': resource_types[r.resource_type_id],
} for r in namespace_resource_types]
return resource_associations
def get_namespace_properties(self, namespace_id):
session = self.db_api.get_session()
properties = session.query(
models.MetadefProperty
).filter_by(namespace_id=namespace_id)
return list(properties)
def get_namespace_objects(self, namespace_id):
session = self.db_api.get_session()
namespace_objects = session.query(
models.MetadefObject
).filter_by(namespace_id=namespace_id)
return list(namespace_objects)
def get_namespace_tags(self, namespace_id):
session = self.db_api.get_session()
namespace_tags = session.query(
models.MetadefTag
).filter_by(namespace_id=namespace_id)
return list(namespace_tags)
def serialize(self, obj):
object_docs = [self.serialize_object(ns_obj) for ns_obj in obj.objects]
property_docs = [self.serialize_property(prop.name, prop.json_schema)
for prop in obj.properties]
resource_type_docs = [self.serialize_namespace_resource_type(rt)
for rt in obj.resource_types]
tag_docs = [self.serialize_tag(tag) for tag in obj.tags]
namespace_doc = self.serialize_namespace(obj)
namespace_doc.update({
'objects': object_docs,
'properties': property_docs,
'resource_types': resource_type_docs,
'tags': tag_docs,
})
return namespace_doc
def serialize_namespace(self, namespace):
return {
'namespace': namespace.namespace,
'display_name': namespace.display_name,
'description': namespace.description,
'visibility': namespace.visibility,
'protected': namespace.protected,
'owner': namespace.owner,
}
def serialize_object(self, obj):
obj_properties = obj.json_schema
property_docs = []
for name, schema in six.iteritems(obj_properties):
property_doc = self.serialize_property(name, schema)
property_docs.append(property_doc)
document = {
'name': obj.name,
'description': obj.description,
'properties': property_docs,
}
return document
def serialize_property(self, name, schema):
document = copy.deepcopy(schema)
document['property'] = name
if 'default' in document:
document['default'] = str(document['default'])
if 'enum' in document:
document['enum'] = map(str, document['enum'])
return document
def serialize_namespace_resource_type(self, ns_resource_type):
return {
'name': ns_resource_type['name'],
'prefix': ns_resource_type['prefix'],
'properties_target': ns_resource_type['properties_target']
}
def serialize_tag(self, tag):
return {
'name': tag.name
}
def get_notification_handler(self):
return metadefs_notification_handler.MetadefHandler(
self.engine,
self.get_index_name(),
self.get_document_type()
)
def get_notification_supported_events(self):
return [
"metadef_namespace.create",
"metadef_namespace.update",
"metadef_namespace.delete",
"metadef_object.create",
"metadef_object.update",
"metadef_object.delete",
"metadef_property.create",
"metadef_property.update",
"metadef_property.delete",
"metadef_tag.create",
"metadef_tag.update",
"metadef_tag.delete",
"metadef_resource_type.create",
"metadef_resource_type.delete",
"metadef_namespace.delete_properties",
"metadef_namespace.delete_objects",
"metadef_namespace.delete_tags"
]
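if __name__ == '__main__':
    # A minimal sketch of the pure serializer.  MetadefIndex.__init__
    # touches the glance DB API, so the method is invoked through
    # __func__ without constructing an index; the property name and schema
    # below are illustrative, not taken from a real namespace.
    doc = MetadefIndex.serialize_property.__func__(
        None, 'hw_cpu_cores', {'type': 'integer', 'default': 2})
    print(doc)  # {'type': 'integer', 'default': '2', 'property': 'hw_cpu_cores'}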
| apache-2.0 | 7,604,879,849,975,487,000 | 1,782,061,639,612,566,300 | 34.679537 | 79 | 0.517801 | false |
dropbox/dropbox-sdk-dotnet | generator/csproj.py | 1 | 15683 | from __future__ import unicode_literals
from StringIO import StringIO
COMPILE_INCLUDES = [
"Stone\\Decoder.cs",
"Stone\\Empty.cs",
"Stone\\Encoder.cs",
"Stone\\IEncoder.cs",
"Stone\\IDecoder.cs",
"Stone\\IJsonReader.cs",
"Stone\\IJsonWriter.cs",
"Stone\\ITransport.cs",
"Stone\\JsonReader.cs",
"Stone\\JsonWriter.cs",
"ApiException.cs",
"StructuredException.cs",
"Stone\\Util.cs",
"DropboxCertHelper.cs",
"DropboxClient.cs",
"DropboxClientBase.cs",
"DropboxAppClient.cs",
"DropboxTeamClient.cs",
"DropboxClientConfig.cs",
"DropboxException.cs",
"DropboxOauth2Helper.cs",
"DropboxRequestHandler.cs",
"AppProperties\\AssemblyInfo.cs",
]
NONE_INCLUDES = [
"packages.config",
]
PORTABLE40_NONE_INCLUDES = [
"app.config",
"packages.Dropbox.Api.Portable40.config",
]
PORTABLE_NONE_INCLUDES = [
"packages.Dropbox.Api.Portable.config",
]
DOC_NONE_INCLUDES = [
"packages.config",
]
CSPROJ_START_BLOCK = r"""<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Library</OutputType>
<AppDesignerFolder>AppProperties</AppDesignerFolder>
<RootNamespace>Dropbox.Api</RootNamespace>
<AssemblyName>Dropbox.Api</AssemblyName>
<TargetFrameworks>net45;netstandard2.0</TargetFrameworks>
<BaseIntermediateOutputPath>obj\</BaseIntermediateOutputPath>
<EnableDefaultCompileItems>false</EnableDefaultCompileItems>
<GenerateAssemblyInfo>false</GenerateAssemblyInfo>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
</PropertyGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'net45' ">
<Reference Include="System" />
<Reference Include="System.Core" />
<Reference Include="System.Xml.Linq" />
<Reference Include="System.Data.DataSetExtensions" />
<Reference Include="System.Net.Http" />
<Reference Include="Microsoft.CSharp" />
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
<Reference Include="Newtonsoft.Json">
<HintPath>..\packages\Newtonsoft.Json.7.0.1\lib\net45\Newtonsoft.Json.dll</HintPath>
</Reference>
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'net45' ">
<None Include="packages.config" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
<None Remove="app.config" />
<None Remove="Dropbox.Api.nuspec" />
<None Remove="dropbox_api_key.snk" />
<None Remove="packages.config" />
<None Remove="packages.Dropbox.Api.Portable.config" />
<None Remove="packages.Dropbox.Api.Portable40.config" />
<None Remove="Settings.StyleCop" />
<None Remove="stone_summaries.xml" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
<PackageReference Include="Newtonsoft.Json" Version="10.0.3" />
</ItemGroup>
"""
CSPROJ_END_BLOCK = r"""
</Project>
"""
PORTABLE40_CSPROJ_START_BLOCK = r"""<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="12.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProjectGuid>{D7B167CE-3AF8-478E-82F2-684D38F1DF98}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>AppProperties</AppDesignerFolder>
<RootNamespace>Dropbox.Api</RootNamespace>
<AssemblyName>Dropbox.Api</AssemblyName>
<TargetFrameworkVersion>v4.0</TargetFrameworkVersion>
<TargetFrameworkProfile>Profile344</TargetFrameworkProfile>
<FileAlignment>512</FileAlignment>
<ProjectTypeGuids>{786C830F-07A1-408B-BD7F-6EE04809D6DB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}</ProjectTypeGuids>
<MinimumVisualStudioVersion>10.0</MinimumVisualStudioVersion>
<BaseIntermediateOutputPath>portable40obj\</BaseIntermediateOutputPath>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\portable40</OutputPath>
<DefineConstants>DEBUG;TRACE;PORTABLE40</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<DocumentationFile>bin\Debug\portable40\Dropbox.Api.XML</DocumentationFile>
<RunCodeAnalysis>true</RunCodeAnalysis>
<NoWarn>419</NoWarn>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\portable40</OutputPath>
<DefineConstants>TRACE;PORTABLE40</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<DocumentationFile>bin\Release\portable40\Dropbox.Api.XML</DocumentationFile>
<NoWarn>419</NoWarn>
</PropertyGroup>
<ItemGroup>
<Reference Include="Microsoft.Threading.Tasks">
<HintPath>..\packages\Microsoft.Bcl.Async.1.0.168\lib\portable-net40+sl4+win8+wp71+wpa81\Microsoft.Threading.Tasks.dll</HintPath>
</Reference>
<Reference Include="Microsoft.Threading.Tasks.Extensions">
<HintPath>..\packages\Microsoft.Bcl.Async.1.0.168\lib\portable-net40+sl4+win8+wp71+wpa81\Microsoft.Threading.Tasks.Extensions.dll</HintPath>
</Reference>
<Reference Include="System.IO">
<HintPath>..\packages\Microsoft.Bcl.1.1.10\lib\portable-net40+sl5+win8+wp8+wpa81\System.IO.dll</HintPath>
</Reference>
<Reference Include="System.Net.Http">
<HintPath>..\packages\Microsoft.Net.Http.2.2.29\lib\portable-net40+sl4+win8+wp71+wpa81\System.Net.Http.dll</HintPath>
</Reference>
<Reference Include="System.Net.Http.Extensions">
<HintPath>..\packages\Microsoft.Net.Http.2.2.29\lib\portable-net40+sl4+win8+wp71+wpa81\System.Net.Http.Extensions.dll</HintPath>
</Reference>
<Reference Include="System.Net.Http.Primitives">
<HintPath>..\packages\Microsoft.Net.Http.2.2.29\lib\portable-net40+sl4+win8+wp71+wpa81\System.Net.Http.Primitives.dll</HintPath>
</Reference>
<Reference Include="System.Runtime">
<HintPath>..\packages\Microsoft.Bcl.1.1.10\lib\portable-net40+sl5+win8+wp8+wpa81\System.Runtime.dll</HintPath>
</Reference>
<Reference Include="System.Threading.Tasks">
<HintPath>..\packages\Microsoft.Bcl.1.1.10\lib\portable-net40+sl5+win8+wp8+wpa81\System.Threading.Tasks.dll</HintPath>
</Reference>
<Reference Include="Newtonsoft.Json">
<HintPath>..\packages\Newtonsoft.Json.7.0.1\lib\portable-net40+sl5+wp80+win8+wpa81\Newtonsoft.Json.dll</HintPath>
</Reference>
</ItemGroup>
"""
PORTABLE40_CSPROJ_END_BLOCK = r""" <Import Project="$(MSBuildExtensionsPath32)\Microsoft\Portable\$(TargetFrameworkVersion)\Microsoft.Portable.CSharp.targets" />
<Import Project="..\packages\Microsoft.Bcl.Build.1.0.14\tools\Microsoft.Bcl.Build.targets" Condition="Exists('..\packages\Microsoft.Bcl.Build.1.0.14\tools\Microsoft.Bcl.Build.targets')" />
<Target Name="EnsureBclBuildImported" BeforeTargets="BeforeBuild" Condition="'$(BclBuildImported)' == ''">
<Error Condition="!Exists('..\packages\Microsoft.Bcl.Build.1.0.14\tools\Microsoft.Bcl.Build.targets')" Text="This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=317567." HelpKeyword="BCLBUILD2001" />
<Error Condition="Exists('..\packages\Microsoft.Bcl.Build.1.0.14\tools\Microsoft.Bcl.Build.targets')" Text="The build restored NuGet packages. Build the project again to include these packages in the build. For more information, see http://go.microsoft.com/fwlink/?LinkID=317568." HelpKeyword="BCLBUILD2002" />
</Target>
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>
"""
PORTABLE_CSPROJ_START_BLOCK = r"""<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="12.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProjectGuid>{786C830F-07A1-408B-BD7F-6EE04809D6DB}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>AppProperties</AppDesignerFolder>
<RootNamespace>Dropbox.Api</RootNamespace>
<AssemblyName>Dropbox.Api</AssemblyName>
<TargetFrameworkVersion>v4.5</TargetFrameworkVersion>
<TargetFrameworkProfile>Profile111</TargetFrameworkProfile>
<FileAlignment>512</FileAlignment>
<ProjectTypeGuids>{786C830F-07A1-408B-BD7F-6EE04809D6DB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}</ProjectTypeGuids>
<MinimumVisualStudioVersion>11.0</MinimumVisualStudioVersion>
<BaseIntermediateOutputPath>portableobj\</BaseIntermediateOutputPath>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\portable</OutputPath>
<DefineConstants>DEBUG;TRACE;PORTABLE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<DocumentationFile>bin\Debug\portable\Dropbox.Api.XML</DocumentationFile>
<RunCodeAnalysis>true</RunCodeAnalysis>
<NoWarn>419</NoWarn>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\portable</OutputPath>
<DefineConstants>TRACE;PORTABLE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<DocumentationFile>bin\Release\portable\Dropbox.Api.XML</DocumentationFile>
<NoWarn>419</NoWarn>
</PropertyGroup>
<ItemGroup>
<Reference Include="Newtonsoft.Json">
<HintPath>..\packages\Newtonsoft.Json.7.0.1\lib\portable-net45+wp80+win8+wpa81+dnxcore50\Newtonsoft.Json.dll</HintPath>
</Reference>
</ItemGroup>
"""
PORTABLE_CSPROJ_END_BLOCK = r""" <Import Project="$(MSBuildExtensionsPath32)\Microsoft\Portable\$(TargetFrameworkVersion)\Microsoft.Portable.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>
"""
DOC_CSPROJ_START_BLOCK = r"""<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="12.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProjectGuid>{0E57A534-F4CA-402B-88F4-0B43E55264BA}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>AppProperties</AppDesignerFolder>
<RootNamespace>Dropbox.Api</RootNamespace>
<AssemblyName>Dropbox.Api</AssemblyName>
<TargetFrameworkVersion>v4.5</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
<SolutionDir Condition="$(SolutionDir) == '' Or $(SolutionDir) == '*Undefined*'">..\</SolutionDir>
<RestorePackages>true</RestorePackages>
<BaseIntermediateOutputPath>docobj\</BaseIntermediateOutputPath>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>docbin\Debug\</OutputPath>
<DefineConstants>TRACE;DEBUG;DOC</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<DocumentationFile>docbin\Debug\Dropbox.Api.XML</DocumentationFile>
<NoWarn>419</NoWarn>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>docbin\Release\</OutputPath>
<DefineConstants>TRACE;DOC</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<DocumentationFile>docbin\Release\Dropbox.Api.XML</DocumentationFile>
<NoWarn>419</NoWarn>
</PropertyGroup>
<ItemGroup>
<Reference Include="System" />
<Reference Include="System.Core" />
<Reference Include="System.Net.Http" />
<Reference Include="System.Xml.Linq" />
<Reference Include="System.Data.DataSetExtensions" />
<Reference Include="Microsoft.CSharp" />
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
<Reference Include="Newtonsoft.Json">
<HintPath>..\packages\Newtonsoft.Json.7.0.1\lib\net45\Newtonsoft.Json.dll</HintPath>
</Reference>
</ItemGroup>
"""
DOC_CSPROJ_END_BLOCK = r""" <ItemGroup>
<None Include="stone_summaries.xml" />
<None Include="Generated\namespace_summaries.xml" />
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>
"""
def _include_items(buf, item_type, paths):
buf.write(' <ItemGroup>\n')
for path in paths:
file_path = path.replace('/', '\\')
buf.write(' <{0} Include="{1}" />\n'.format(item_type, file_path))
buf.write(' </ItemGroup>\n')
def make_csproj_file(files, mode):
mode = mode.lower()
if mode == 'doc':
start = DOC_CSPROJ_START_BLOCK
end = DOC_CSPROJ_END_BLOCK
none_includes = DOC_NONE_INCLUDES
elif mode == 'portable40':
start = PORTABLE40_CSPROJ_START_BLOCK
end = PORTABLE40_CSPROJ_END_BLOCK
none_includes = PORTABLE40_NONE_INCLUDES
elif mode == 'portable':
start = PORTABLE_CSPROJ_START_BLOCK
end = PORTABLE_CSPROJ_END_BLOCK
none_includes = PORTABLE_NONE_INCLUDES
else:
start = CSPROJ_START_BLOCK
end = CSPROJ_END_BLOCK
none_includes = NONE_INCLUDES
buf = StringIO()
buf.write(start)
_include_items(buf, 'Compile', COMPILE_INCLUDES)
_include_items(buf, 'Compile', sorted(files, key=lambda x: x.replace('\\', '/')))
_include_items(buf, 'None', none_includes)
buf.write(end)
return buf.getvalue()
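if __name__ == '__main__':
    # A minimal usage sketch: emit the default (.NET 4.5 + netstandard2.0)
    # project file for two hypothetical generated sources.  Any mode other
    # than 'doc', 'portable' or 'portable40' selects the default template.
    generated = ['Generated\\Files\\FileMetadata.cs',
                 'Generated\\Users\\FullAccount.cs']
    print(make_csproj_file(generated, mode='net45'))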
| mit | 2,980,729,591,722,110,000 | 5,698,491,117,041,438,000 | 43.427762 | 340 | 0.715106 | false |
npuichigo/ttsflow | third_party/tensorflow/tensorflow/contrib/slim/python/slim/data/tfexample_decoder_test.py | 34 | 30604 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for slim.data.tfexample_decoder."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.slim.python.slim.data import tfexample_decoder
from tensorflow.core.example import example_pb2
from tensorflow.core.example import feature_pb2
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import image_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.platform import test
class TFExampleDecoderTest(test.TestCase):
def _EncodedFloatFeature(self, ndarray):
return feature_pb2.Feature(float_list=feature_pb2.FloatList(
value=ndarray.flatten().tolist()))
def _EncodedInt64Feature(self, ndarray):
return feature_pb2.Feature(int64_list=feature_pb2.Int64List(
value=ndarray.flatten().tolist()))
def _EncodedBytesFeature(self, tf_encoded):
with self.test_session():
encoded = tf_encoded.eval()
def BytesList(value):
return feature_pb2.BytesList(value=[value])
return feature_pb2.Feature(bytes_list=BytesList(encoded))
def _BytesFeature(self, ndarray):
values = ndarray.flatten().tolist()
for i in range(len(values)):
values[i] = values[i].encode('utf-8')
return feature_pb2.Feature(bytes_list=feature_pb2.BytesList(value=values))
def _StringFeature(self, value):
value = value.encode('utf-8')
return feature_pb2.Feature(bytes_list=feature_pb2.BytesList(value=[value]))
def _Encoder(self, image, image_format):
assert image_format in ['jpeg', 'JPEG', 'png', 'PNG', 'raw', 'RAW']
if image_format in ['jpeg', 'JPEG']:
tf_image = constant_op.constant(image, dtype=dtypes.uint8)
return image_ops.encode_jpeg(tf_image)
if image_format in ['png', 'PNG']:
tf_image = constant_op.constant(image, dtype=dtypes.uint8)
return image_ops.encode_png(tf_image)
if image_format in ['raw', 'RAW']:
return constant_op.constant(image.tostring(), dtype=dtypes.string)
def GenerateImage(self, image_format, image_shape):
"""Generates an image and an example containing the encoded image.
Args:
image_format: the encoding format of the image.
image_shape: the shape of the image to generate.
Returns:
image: the generated image.
example: a TF-example with a feature key 'image/encoded' set to the
serialized image and a feature key 'image/format' set to the image
encoding format ['jpeg', 'JPEG', 'png', 'PNG', 'raw'].
"""
num_pixels = image_shape[0] * image_shape[1] * image_shape[2]
image = np.linspace(
0, num_pixels - 1, num=num_pixels).reshape(image_shape).astype(np.uint8)
tf_encoded = self._Encoder(image, image_format)
example = example_pb2.Example(features=feature_pb2.Features(feature={
'image/encoded': self._EncodedBytesFeature(tf_encoded),
'image/format': self._StringFeature(image_format)
}))
return image, example.SerializeToString()
def DecodeExample(self, serialized_example, item_handler, image_format):
"""Decodes the given serialized example with the specified item handler.
Args:
serialized_example: a serialized TF example string.
item_handler: the item handler used to decode the image.
image_format: the image format being decoded.
Returns:
the decoded image found in the serialized Example.
"""
serialized_example = array_ops.reshape(serialized_example, shape=[])
decoder = tfexample_decoder.TFExampleDecoder(
keys_to_features={
'image/encoded':
parsing_ops.FixedLenFeature(
(), dtypes.string, default_value=''),
'image/format':
parsing_ops.FixedLenFeature(
(), dtypes.string, default_value=image_format),
},
items_to_handlers={'image': item_handler})
[tf_image] = decoder.decode(serialized_example, ['image'])
return tf_image
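  # Outside this test harness the decoder is assembled directly; this
  # commented sketch mirrors DecodeExample above (names are illustrative):
  #
  #   keys_to_features = {
  #       'image/encoded': parsing_ops.FixedLenFeature(
  #           (), dtypes.string, default_value=''),
  #       'image/format': parsing_ops.FixedLenFeature(
  #           (), dtypes.string, default_value='jpeg'),
  #   }
  #   items_to_handlers = {'image': tfexample_decoder.Image()}
  #   decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
  #                                                items_to_handlers)
  #   [image] = decoder.decode(serialized_example, ['image'])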
def RunDecodeExample(self, serialized_example, item_handler, image_format):
tf_image = self.DecodeExample(serialized_example, item_handler,
image_format)
with self.test_session():
decoded_image = tf_image.eval()
# We need to recast them here to avoid some issues with uint8.
return decoded_image.astype(np.float32)
def testDecodeExampleWithJpegEncoding(self):
image_shape = (2, 3, 3)
image, serialized_example = self.GenerateImage(
image_format='jpeg', image_shape=image_shape)
decoded_image = self.RunDecodeExample(
serialized_example, tfexample_decoder.Image(), image_format='jpeg')
# Need to use a tolerance of 1 because of noise in the jpeg encode/decode
self.assertAllClose(image, decoded_image, atol=1.001)
def testDecodeExampleWithJPEGEncoding(self):
test_image_channels = [1, 3]
for channels in test_image_channels:
image_shape = (2, 3, channels)
image, serialized_example = self.GenerateImage(
image_format='JPEG', image_shape=image_shape)
decoded_image = self.RunDecodeExample(
serialized_example,
tfexample_decoder.Image(channels=channels),
image_format='JPEG')
# Need to use a tolerance of 1 because of noise in the jpeg encode/decode
self.assertAllClose(image, decoded_image, atol=1.001)
def testDecodeExampleWithNoShapeInfo(self):
test_image_channels = [1, 3]
for channels in test_image_channels:
image_shape = (2, 3, channels)
_, serialized_example = self.GenerateImage(
image_format='jpeg', image_shape=image_shape)
tf_decoded_image = self.DecodeExample(
serialized_example,
tfexample_decoder.Image(
shape=None, channels=channels),
image_format='jpeg')
self.assertEqual(tf_decoded_image.get_shape().ndims, 3)
def testDecodeExampleWithPngEncoding(self):
test_image_channels = [1, 3, 4]
for channels in test_image_channels:
image_shape = (2, 3, channels)
image, serialized_example = self.GenerateImage(
image_format='png', image_shape=image_shape)
decoded_image = self.RunDecodeExample(
serialized_example,
tfexample_decoder.Image(channels=channels),
image_format='png')
self.assertAllClose(image, decoded_image, atol=0)
def testDecodeExampleWithPNGEncoding(self):
test_image_channels = [1, 3, 4]
for channels in test_image_channels:
image_shape = (2, 3, channels)
image, serialized_example = self.GenerateImage(
image_format='PNG', image_shape=image_shape)
decoded_image = self.RunDecodeExample(
serialized_example,
tfexample_decoder.Image(channels=channels),
image_format='PNG')
self.assertAllClose(image, decoded_image, atol=0)
def testDecodeExampleWithRawEncoding(self):
image_shape = (2, 3, 3)
image, serialized_example = self.GenerateImage(
image_format='raw', image_shape=image_shape)
decoded_image = self.RunDecodeExample(
serialized_example,
tfexample_decoder.Image(shape=image_shape),
image_format='raw')
self.assertAllClose(image, decoded_image, atol=0)
def testDecodeExampleWithRAWEncoding(self):
image_shape = (2, 3, 3)
image, serialized_example = self.GenerateImage(
image_format='RAW', image_shape=image_shape)
decoded_image = self.RunDecodeExample(
serialized_example,
tfexample_decoder.Image(shape=image_shape),
image_format='RAW')
self.assertAllClose(image, decoded_image, atol=0)
def testDecodeExampleWithJpegEncodingAt16BitCausesError(self):
image_shape = (2, 3, 3)
unused_image, serialized_example = self.GenerateImage(
image_format='jpeg', image_shape=image_shape)
with self.assertRaises(TypeError):
unused_decoded_image = self.RunDecodeExample(
serialized_example,
tfexample_decoder.Image(dtype=dtypes.uint16),
image_format='jpeg')
def testDecodeExampleWithStringTensor(self):
tensor_shape = (2, 3, 1)
np_array = np.array([[['ab'], ['cd'], ['ef']],
[['ghi'], ['jkl'], ['mnop']]])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'labels': self._BytesFeature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'labels':
parsing_ops.FixedLenFeature(
tensor_shape,
dtypes.string,
default_value=constant_op.constant(
'', shape=tensor_shape, dtype=dtypes.string))
}
items_to_handlers = {'labels': tfexample_decoder.Tensor('labels'),}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
labels = labels.astype(np_array.dtype)
self.assertTrue(np.array_equal(np_array, labels))
def testDecodeExampleWithFloatTensor(self):
np_array = np.random.rand(2, 3, 1).astype('f')
example = example_pb2.Example(features=feature_pb2.Features(feature={
'array': self._EncodedFloatFeature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'array': parsing_ops.FixedLenFeature(np_array.shape, dtypes.float32)
}
items_to_handlers = {'array': tfexample_decoder.Tensor('array'),}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_array] = decoder.decode(serialized_example, ['array'])
self.assertAllEqual(tf_array.eval(), np_array)
def testDecodeExampleWithInt64Tensor(self):
np_array = np.random.randint(1, 10, size=(2, 3, 1))
example = example_pb2.Example(features=feature_pb2.Features(feature={
'array': self._EncodedInt64Feature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'array': parsing_ops.FixedLenFeature(np_array.shape, dtypes.int64)
}
items_to_handlers = {'array': tfexample_decoder.Tensor('array'),}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_array] = decoder.decode(serialized_example, ['array'])
self.assertAllEqual(tf_array.eval(), np_array)
def testDecodeExampleWithVarLenTensor(self):
np_array = np.array([[[1], [2], [3]], [[4], [5], [6]]])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'labels': self._EncodedInt64Feature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'labels': parsing_ops.VarLenFeature(dtype=dtypes.int64),
}
items_to_handlers = {'labels': tfexample_decoder.Tensor('labels'),}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
self.assertAllEqual(labels, np_array.flatten())
def testDecodeExampleWithFixLenTensorWithShape(self):
np_array = np.array([[1, 2, 3], [4, 5, 6]])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'labels': self._EncodedInt64Feature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'labels':
parsing_ops.FixedLenFeature(
np_array.shape, dtype=dtypes.int64),
}
items_to_handlers = {
'labels': tfexample_decoder.Tensor(
'labels', shape=np_array.shape),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
self.assertAllEqual(labels, np_array)
def testDecodeExampleWithVarLenTensorToDense(self):
np_array = np.array([[1, 2, 3], [4, 5, 6]])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'labels': self._EncodedInt64Feature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'labels': parsing_ops.VarLenFeature(dtype=dtypes.int64),
}
items_to_handlers = {
'labels': tfexample_decoder.Tensor(
'labels', shape=np_array.shape),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
self.assertAllEqual(labels, np_array)
def testDecodeExampleShapeKeyTensor(self):
np_image = np.random.rand(2, 3, 1).astype('f')
np_labels = np.array([[[1], [2], [3]], [[4], [5], [6]]])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'image': self._EncodedFloatFeature(np_image),
'image/shape': self._EncodedInt64Feature(np.array(np_image.shape)),
'labels': self._EncodedInt64Feature(np_labels),
'labels/shape': self._EncodedInt64Feature(np.array(np_labels.shape)),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'image': parsing_ops.VarLenFeature(dtype=dtypes.float32),
'image/shape': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'labels': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'labels/shape': parsing_ops.VarLenFeature(dtype=dtypes.int64),
}
items_to_handlers = {
'image':
tfexample_decoder.Tensor(
'image', shape_keys='image/shape'),
'labels':
tfexample_decoder.Tensor(
'labels', shape_keys='labels/shape'),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_image, tf_labels] = decoder.decode(serialized_example,
['image', 'labels'])
self.assertAllEqual(tf_image.eval(), np_image)
self.assertAllEqual(tf_labels.eval(), np_labels)
def testDecodeExampleMultiShapeKeyTensor(self):
np_image = np.random.rand(2, 3, 1).astype('f')
np_labels = np.array([[[1], [2], [3]], [[4], [5], [6]]])
height, width, depth = np_labels.shape
example = example_pb2.Example(features=feature_pb2.Features(feature={
'image': self._EncodedFloatFeature(np_image),
'image/shape': self._EncodedInt64Feature(np.array(np_image.shape)),
'labels': self._EncodedInt64Feature(np_labels),
'labels/height': self._EncodedInt64Feature(np.array([height])),
'labels/width': self._EncodedInt64Feature(np.array([width])),
'labels/depth': self._EncodedInt64Feature(np.array([depth])),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'image': parsing_ops.VarLenFeature(dtype=dtypes.float32),
'image/shape': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'labels': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'labels/height': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'labels/width': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'labels/depth': parsing_ops.VarLenFeature(dtype=dtypes.int64),
}
items_to_handlers = {
'image':
tfexample_decoder.Tensor(
'image', shape_keys='image/shape'),
'labels':
tfexample_decoder.Tensor(
'labels',
shape_keys=['labels/height', 'labels/width', 'labels/depth']),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_image, tf_labels] = decoder.decode(serialized_example,
['image', 'labels'])
self.assertAllEqual(tf_image.eval(), np_image)
self.assertAllEqual(tf_labels.eval(), np_labels)
def testDecodeExampleWithSparseTensor(self):
np_indices = np.array([[1], [2], [5]])
np_values = np.array([0.1, 0.2, 0.6]).astype('f')
example = example_pb2.Example(features=feature_pb2.Features(feature={
'indices': self._EncodedInt64Feature(np_indices),
'values': self._EncodedFloatFeature(np_values),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'indices': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'values': parsing_ops.VarLenFeature(dtype=dtypes.float32),
}
items_to_handlers = {'labels': tfexample_decoder.SparseTensor(),}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
self.assertAllEqual(labels.indices, np_indices)
self.assertAllEqual(labels.values, np_values)
self.assertAllEqual(labels.dense_shape, np_values.shape)
def testDecodeExampleWithSparseTensorWithKeyShape(self):
np_indices = np.array([[1], [2], [5]])
np_values = np.array([0.1, 0.2, 0.6]).astype('f')
np_shape = np.array([6])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'indices': self._EncodedInt64Feature(np_indices),
'values': self._EncodedFloatFeature(np_values),
'shape': self._EncodedInt64Feature(np_shape),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'indices': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'values': parsing_ops.VarLenFeature(dtype=dtypes.float32),
'shape': parsing_ops.VarLenFeature(dtype=dtypes.int64),
}
items_to_handlers = {
'labels': tfexample_decoder.SparseTensor(shape_key='shape'),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
self.assertAllEqual(labels.indices, np_indices)
self.assertAllEqual(labels.values, np_values)
self.assertAllEqual(labels.dense_shape, np_shape)
def testDecodeExampleWithSparseTensorWithGivenShape(self):
np_indices = np.array([[1], [2], [5]])
np_values = np.array([0.1, 0.2, 0.6]).astype('f')
np_shape = np.array([6])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'indices': self._EncodedInt64Feature(np_indices),
'values': self._EncodedFloatFeature(np_values),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'indices': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'values': parsing_ops.VarLenFeature(dtype=dtypes.float32),
}
items_to_handlers = {
'labels': tfexample_decoder.SparseTensor(shape=np_shape),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
self.assertAllEqual(labels.indices, np_indices)
self.assertAllEqual(labels.values, np_values)
self.assertAllEqual(labels.dense_shape, np_shape)
def testDecodeExampleWithSparseTensorToDense(self):
np_indices = np.array([1, 2, 5])
np_values = np.array([0.1, 0.2, 0.6]).astype('f')
np_shape = np.array([6])
np_dense = np.array([0.0, 0.1, 0.2, 0.0, 0.0, 0.6]).astype('f')
example = example_pb2.Example(features=feature_pb2.Features(feature={
'indices': self._EncodedInt64Feature(np_indices),
'values': self._EncodedFloatFeature(np_values),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'indices': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'values': parsing_ops.VarLenFeature(dtype=dtypes.float32),
}
items_to_handlers = {
'labels':
tfexample_decoder.SparseTensor(
shape=np_shape, densify=True),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
self.assertAllClose(labels, np_dense)
def testDecodeExampleWithTensor(self):
tensor_shape = (2, 3, 1)
np_array = np.random.rand(2, 3, 1)
example = example_pb2.Example(features=feature_pb2.Features(feature={
'image/depth_map': self._EncodedFloatFeature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'image/depth_map':
parsing_ops.FixedLenFeature(
tensor_shape,
dtypes.float32,
default_value=array_ops.zeros(tensor_shape))
}
items_to_handlers = {'depth': tfexample_decoder.Tensor('image/depth_map')}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_depth] = decoder.decode(serialized_example, ['depth'])
depth = tf_depth.eval()
self.assertAllClose(np_array, depth)
def testDecodeExampleWithItemHandlerCallback(self):
np.random.seed(0)
tensor_shape = (2, 3, 1)
np_array = np.random.rand(2, 3, 1)
example = example_pb2.Example(features=feature_pb2.Features(feature={
'image/depth_map': self._EncodedFloatFeature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'image/depth_map':
parsing_ops.FixedLenFeature(
tensor_shape,
dtypes.float32,
default_value=array_ops.zeros(tensor_shape))
}
def HandleDepth(keys_to_tensors):
depth = list(keys_to_tensors.values())[0]
depth += 1
return depth
items_to_handlers = {
'depth':
tfexample_decoder.ItemHandlerCallback('image/depth_map',
HandleDepth)
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_depth] = decoder.decode(serialized_example, ['depth'])
depth = tf_depth.eval()
self.assertAllClose(np_array, depth - 1)
def testDecodeImageWithItemHandlerCallback(self):
image_shape = (2, 3, 3)
for image_encoding in ['jpeg', 'png']:
image, serialized_example = self.GenerateImage(
image_format=image_encoding, image_shape=image_shape)
with self.test_session():
def ConditionalDecoding(keys_to_tensors):
"""See base class."""
image_buffer = keys_to_tensors['image/encoded']
image_format = keys_to_tensors['image/format']
def DecodePng():
return image_ops.decode_png(image_buffer, 3)
def DecodeJpg():
return image_ops.decode_jpeg(image_buffer, 3)
image = control_flow_ops.case(
{
math_ops.equal(image_format, 'png'): DecodePng,
},
default=DecodeJpg,
exclusive=True)
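# (added note) control_flow_ops.case evaluates DecodePng only when
# image/format equals 'png'; any other format falls through to DecodeJpg.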
image = array_ops.reshape(image, image_shape)
return image
keys_to_features = {
'image/encoded':
parsing_ops.FixedLenFeature(
(), dtypes.string, default_value=''),
'image/format':
parsing_ops.FixedLenFeature(
(), dtypes.string, default_value='jpeg')
}
items_to_handlers = {
'image':
tfexample_decoder.ItemHandlerCallback(
['image/encoded', 'image/format'], ConditionalDecoding)
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_image] = decoder.decode(serialized_example, ['image'])
decoded_image = tf_image.eval()
if image_encoding == 'jpeg':
# Cast to float32 so the tolerance comparison below is well-behaved on CI (Jenkins):
image = image.astype(np.float32)
decoded_image = decoded_image.astype(np.float32)
self.assertAllClose(image, decoded_image, rtol=.5, atol=1.001)
else:
self.assertAllClose(image, decoded_image, atol=0)
def testDecodeExampleWithBoundingBox(self):
num_bboxes = 10
np_ymin = np.random.rand(num_bboxes, 1)
np_xmin = np.random.rand(num_bboxes, 1)
np_ymax = np.random.rand(num_bboxes, 1)
np_xmax = np.random.rand(num_bboxes, 1)
np_bboxes = np.hstack([np_ymin, np_xmin, np_ymax, np_xmax])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'image/object/bbox/ymin': self._EncodedFloatFeature(np_ymin),
'image/object/bbox/xmin': self._EncodedFloatFeature(np_xmin),
'image/object/bbox/ymax': self._EncodedFloatFeature(np_ymax),
'image/object/bbox/xmax': self._EncodedFloatFeature(np_xmax),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'image/object/bbox/ymin': parsing_ops.VarLenFeature(dtypes.float32),
'image/object/bbox/xmin': parsing_ops.VarLenFeature(dtypes.float32),
'image/object/bbox/ymax': parsing_ops.VarLenFeature(dtypes.float32),
'image/object/bbox/xmax': parsing_ops.VarLenFeature(dtypes.float32),
}
items_to_handlers = {
'object/bbox':
tfexample_decoder.BoundingBox(['ymin', 'xmin', 'ymax', 'xmax'],
'image/object/bbox/'),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_bboxes] = decoder.decode(serialized_example, ['object/bbox'])
bboxes = tf_bboxes.eval()
self.assertAllClose(np_bboxes, bboxes)
def testDecodeExampleWithRepeatedImages(self):
image_shape = (2, 3, 3)
image_format = 'png'
image, _ = self.GenerateImage(
image_format=image_format, image_shape=image_shape)
tf_encoded = self._Encoder(image, image_format)
with self.test_session():
tf_string = tf_encoded.eval()
example = example_pb2.Example(features=feature_pb2.Features(feature={
'image/encoded': feature_pb2.Feature(bytes_list=feature_pb2.BytesList(
value=[tf_string, tf_string])),
'image/format': self._StringFeature(image_format),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
decoder = tfexample_decoder.TFExampleDecoder(
keys_to_features={
'image/encoded':
parsing_ops.FixedLenFeature(
(2,), dtypes.string),
'image/format':
parsing_ops.FixedLenFeature(
(), dtypes.string, default_value=image_format),
},
items_to_handlers={'image': tfexample_decoder.Image(repeated=True)})
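# (added note) With repeated=True the handler decodes every entry of the
# (2,)-shaped string feature and stacks the results on a new leading axis,
# hence the (2, 2, 3, 3) output shape asserted below.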
[tf_image] = decoder.decode(serialized_example, ['image'])
output_image = tf_image.eval()
self.assertEqual(output_image.shape, (2, 2, 3, 3))
self.assertAllEqual(np.squeeze(output_image[0, :, :, :]), image)
self.assertAllEqual(np.squeeze(output_image[1, :, :, :]), image)
if __name__ == '__main__':
test.main()
| apache-2.0 | 7,030,351,110,286,354,000 | -6,002,545,431,406,689,000 | 38.745455 | 80 | 0.627108 | false |
ianawilson/kafka-python | kafka/common.py | 15 | 6023 | import inspect
import sys
from collections import namedtuple
###############
# Structs #
###############
# https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-MetadataAPI
MetadataRequest = namedtuple("MetadataRequest",
["topics"])
MetadataResponse = namedtuple("MetadataResponse",
["brokers", "topics"])
# https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-ProduceAPI
ProduceRequest = namedtuple("ProduceRequest",
["topic", "partition", "messages"])
ProduceResponse = namedtuple("ProduceResponse",
["topic", "partition", "error", "offset"])
# https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-FetchAPI
FetchRequest = namedtuple("FetchRequest",
["topic", "partition", "offset", "max_bytes"])
FetchResponse = namedtuple("FetchResponse",
["topic", "partition", "error", "highwaterMark", "messages"])
# https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-OffsetAPI
OffsetRequest = namedtuple("OffsetRequest",
["topic", "partition", "time", "max_offsets"])
OffsetResponse = namedtuple("OffsetResponse",
["topic", "partition", "error", "offsets"])
# https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-OffsetCommit/FetchAPI
OffsetCommitRequest = namedtuple("OffsetCommitRequest",
["topic", "partition", "offset", "metadata"])
OffsetCommitResponse = namedtuple("OffsetCommitResponse",
["topic", "partition", "error"])
OffsetFetchRequest = namedtuple("OffsetFetchRequest",
["topic", "partition"])
OffsetFetchResponse = namedtuple("OffsetFetchResponse",
["topic", "partition", "offset", "metadata", "error"])
# Other useful structs
BrokerMetadata = namedtuple("BrokerMetadata",
["nodeId", "host", "port"])
TopicMetadata = namedtuple("TopicMetadata",
["topic", "error", "partitions"])
PartitionMetadata = namedtuple("PartitionMetadata",
["topic", "partition", "leader", "replicas", "isr", "error"])
OffsetAndMessage = namedtuple("OffsetAndMessage",
["offset", "message"])
Message = namedtuple("Message",
["magic", "attributes", "key", "value"])
TopicAndPartition = namedtuple("TopicAndPartition",
["topic", "partition"])
KafkaMessage = namedtuple("KafkaMessage",
["topic", "partition", "offset", "key", "value"])
# Define retry policy for async producer
# Limit value: int >= 0, 0 means no retries
RetryOptions = namedtuple("RetryOptions",
["limit", "backoff_ms", "retry_on_timeouts"])
#################
# Exceptions #
#################
class KafkaError(RuntimeError):
pass
class BrokerResponseError(KafkaError):
pass
class UnknownError(BrokerResponseError):
errno = -1
message = 'UNKNOWN'
class OffsetOutOfRangeError(BrokerResponseError):
errno = 1
message = 'OFFSET_OUT_OF_RANGE'
class InvalidMessageError(BrokerResponseError):
errno = 2
message = 'INVALID_MESSAGE'
class UnknownTopicOrPartitionError(BrokerResponseError):
errno = 3
message = 'UNKNOWN_TOPIC_OR_PARTITION'
class InvalidFetchRequestError(BrokerResponseError):
errno = 4
message = 'INVALID_FETCH_SIZE'
class LeaderNotAvailableError(BrokerResponseError):
errno = 5
message = 'LEADER_NOT_AVAILABLE'
class NotLeaderForPartitionError(BrokerResponseError):
errno = 6
message = 'NOT_LEADER_FOR_PARTITION'
class RequestTimedOutError(BrokerResponseError):
errno = 7
message = 'REQUEST_TIMED_OUT'
class BrokerNotAvailableError(BrokerResponseError):
errno = 8
message = 'BROKER_NOT_AVAILABLE'
class ReplicaNotAvailableError(BrokerResponseError):
errno = 9
message = 'REPLICA_NOT_AVAILABLE'
class MessageSizeTooLargeError(BrokerResponseError):
errno = 10
message = 'MESSAGE_SIZE_TOO_LARGE'
class StaleControllerEpochError(BrokerResponseError):
errno = 11
message = 'STALE_CONTROLLER_EPOCH'
class OffsetMetadataTooLargeError(BrokerResponseError):
errno = 12
message = 'OFFSET_METADATA_TOO_LARGE'
class StaleLeaderEpochCodeError(BrokerResponseError):
errno = 13
message = 'STALE_LEADER_EPOCH_CODE'
class KafkaUnavailableError(KafkaError):
pass
class KafkaTimeoutError(KafkaError):
pass
class FailedPayloadsError(KafkaError):
def __init__(self, payload, *args):
super(FailedPayloadsError, self).__init__(*args)
self.payload = payload
class ConnectionError(KafkaError):
pass
class BufferUnderflowError(KafkaError):
pass
class ChecksumError(KafkaError):
pass
class ConsumerFetchSizeTooSmall(KafkaError):
pass
class ConsumerNoMoreData(KafkaError):
pass
class ConsumerTimeout(KafkaError):
pass
class ProtocolError(KafkaError):
pass
class UnsupportedCodecError(KafkaError):
pass
class KafkaConfigurationError(KafkaError):
pass
class AsyncProducerQueueFull(KafkaError):
def __init__(self, failed_msgs, *args):
super(AsyncProducerQueueFull, self).__init__(*args)
self.failed_msgs = failed_msgs
def _iter_broker_errors():
for name, obj in inspect.getmembers(sys.modules[__name__]):
if inspect.isclass(obj) and issubclass(obj, BrokerResponseError) and obj != BrokerResponseError:
yield obj
kafka_errors = dict([(x.errno, x) for x in _iter_broker_errors()])
def check_error(response):
if isinstance(response, Exception):
raise response
if response.error:
error_class = kafka_errors.get(response.error, UnknownError)
raise error_class(response)
RETRY_BACKOFF_ERROR_TYPES = (
KafkaUnavailableError, LeaderNotAvailableError,
ConnectionError, FailedPayloadsError
)
RETRY_REFRESH_ERROR_TYPES = (
NotLeaderForPartitionError, UnknownTopicOrPartitionError,
LeaderNotAvailableError, ConnectionError
)
RETRY_ERROR_TYPES = RETRY_BACKOFF_ERROR_TYPES + RETRY_REFRESH_ERROR_TYPES
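# (added, illustrative sketch -- not part of the original module) how
# check_error maps a broker error code to its exception class; the response
# below is fabricated purely for demonstration.
if __name__ == "__main__":
    fake_response = FetchResponse(topic="demo", partition=0, error=3,
                                  highwaterMark=0, messages=[])
    try:
        check_error(fake_response)
    except UnknownTopicOrPartitionError as e:
        print(e)  # error code 3 resolves to UnknownTopicOrPartitionError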
| apache-2.0 | -4,749,086,253,311,294,000 | 3,614,663,671,503,488,500 | 23.28629 | 128 | 0.717915 | false |
PaulWay/insights-core | insights/parsers/tests/test_rabbitmq_log.py | 1 | 1163 | from insights.parsers.rabbitmq_log import RabbitMQStartupLog
from insights.parsers.rabbitmq_log import RabbitMQStartupErrLog
from insights.tests import context_wrap
STARTUP_LOG = """
starting file handle cache server ...done
starting worker pool ...done
starting database ...done
starting empty DB check ...done
starting exchange recovery ...done
starting queue supervisor and queue recovery ...BOOT ERROR: FAILED
"""
STARTUP_ERR_LOG = """
Error: {node_start_failed,normal}
Crash dump was written to: erl_crash.dump
Kernel pid terminated (application_controller) ({application_start_failure,kernel,{shutdown,{kernel,start,[normal,[]]}}})
"""
def test_rabbitmq_startup_log():
log = RabbitMQStartupLog(context_wrap(STARTUP_LOG))
assert len(log.get('done')) == 5
def test_rabbitmq_start_err_log():
log = RabbitMQStartupErrLog(context_wrap(STARTUP_ERR_LOG))
assert len(log.get('Error')) == 1
| apache-2.0 | -1,745,031,962,683,016,700 | 5,118,034,647,240,928,000 | 37.766667 | 121 | 0.588134 | false |
SCP-028/UGA | protein_pka/mcce/mcce.py | 1 | 17127 | #!python3
"""
Predict protein pKa based on the MCCE method.
http://pka.engr.ccny.cuny.edu/
Requires MCCE 3.0 to work: https://anaconda.org/SalahSalah/mcce/files
"""
import asyncio
import glob
import gzip
import locale
import logging
import math
import os
import re
import shutil
import subprocess
import sys
import time
from multiprocessing import Pool
from urllib.request import urlopen
import aioftp
import pandas as pd
import uvloop
# Sapelo Locale is broken, quick fix
locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
# Set working directory
ROOTPATH = os.path.dirname(os.path.realpath(sys.argv[0]))
os.chdir(ROOTPATH)
# Log settings
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
handler = logging.FileHandler(f"./pKa_calculation_{__file__}.log")
handler.setLevel(logging.INFO)
formatter = logging.Formatter(
"%(asctime)s\t%(levelname)s\t"
"[%(filename)s:%(lineno)s -%(funcName)12s()]\t%(message)s"
)
handler.setFormatter(formatter)
logger.addHandler(handler)
class pdb:
def __init__(self):
self.all_ids = []
self.download_ids = [] # Download -> Unzip -> Preprocess -> Calculate
self.unzip_ids = [] # Unzip -> Preprocess -> Calculate
self.preprocess_ids = [] # Preprocess -> Calculate
self.ready_ids = [] # Calculate
self.finished_ids = [] # Successfully calculated IDs
self.error_ids = [] # Error in download, unzip, or calculation
# IDs this script will work on (messy queue implementation)
self.working_ids = []
def load_id(self):
"""
First try to get existing pKa values,
then get the list of PDB files to download.
"""
for folder in ["./pdb", "./annotation", "./results"]:
try:
os.makedirs(folder)
except OSError:
pass
self.finished_ids = [id[-8:-4] for id in glob.glob("./results/*.pka")]
logger.debug(f"{len(self.finished_ids)} finished files.")
# Create file even at first run so that the results folder doesn't get deleted
with open("./results/finished_ids.list", "a") as f:
f.write("\n".join(self.finished_ids))
self.ready_ids = list(set(
[id[-12:-8].upper() for id in glob.glob("./pdb/*/*.pdb.bak")]) - set(self.finished_ids))
logger.debug(f"{len(self.ready_ids)} files ready to be calculated.")
self.preprocess_ids = list(set([id[-8:-4].upper() for id in glob.glob(
"./pdb/*/*.pdb") if "out" not in id]) - set(self.finished_ids) - set(self.ready_ids))
logger.debug(
f"{len(self.preprocess_ids)} files ready to be preprocessed.")
self.unzip_ids = [id[-11:-7].upper() for id in glob.glob("./*.ent.gz")]
logger.debug(f"{len(self.unzip_ids)} files ready to be unzipped.")
if not os.path.exists("./annotation/uniprot_id_mapping.dat"):
with urlopen("ftp://ftp.uniprot.org/pub/databases/uniprot/current_release/knowledgebase/idmapping/by_organism/HUMAN_9606_idmapping.dat.gz") as remotefile:
logger.debug(
"Saving UniProt ID mapping data since it doesn't exist...")
with open("./annotation/uniprot_id_mapping.dat.gz", "wb") as f:
f.write(remotefile.read())
with gzip.open(
"./annotation/uniprot_id_mapping.dat.gz", "rb") as inFile, open(
"./annotation/uniprot_id_mapping.dat", "wb") as outFile:
shutil.copyfileobj(inFile, outFile)
os.remove("./annotation/uniprot_id_mapping.dat.gz")
else:
logger.debug("UniProt ID mapping data exists.")
logger.debug("Reading all possible PDB IDs...")
annot = pd.read_csv("./annotation/uniprot_id_mapping.dat",
sep="\t", header=None,
names=["uniprot", "id", "value"])
self.all_ids = annot.loc[annot.id == "PDB", "value"].tolist()
self.download_ids = list(set(self.all_ids) - set(self.unzip_ids) - set(
self.preprocess_ids) - set(self.ready_ids) - set(self.finished_ids))
logger.info(
f"{len(self.download_ids)} PDB files need to be downloaded.")
def get_link(self, ids):
""" Get PDB file links from:
ftp://ftp.wwpdb.org/pub/pdb/data/structures/divided/pdb/ ,
and create folders to store the files.
Parameters
----------
ids: list
The PDB IDs to download.
Returns
-------
Links to download.
"""
if isinstance(ids, list):
ids = [id[:4].lower() for id in ids] # pdb file IDs
pdb_names = [f"{id}.ent.gz" for id in ids] # pdb filenames
# subdirectory of the pdb files
pdbDirs = [id[1:3].lower() for id in ids]
remoteaddr = [
f"ftp://ftp.wwpdb.org/pub/pdb/data/structures/divided/pdb/{pdbDir}/pdb{pdb_name}" for pdbDir, pdb_name in zip(pdbDirs, pdb_names)]
else:
raise TypeError(f"{id} is not a string or list.")
return remoteaddr
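# (added, illustrative) get_link(["1ABC"]) returns
# ["ftp://ftp.wwpdb.org/pub/pdb/data/structures/divided/pdb/ab/pdb1abc.ent.gz"]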
def make_dirs(self, ids):
"""Make sure the download directory exists."""
for id in ids:
try:
os.makedirs(os.path.join(ROOTPATH, "pdb", id.upper()))
except OSError:
pass
async def download_worker(self, session, url):
"""Download the given url to working directory."""
url = url[len("ftp://ftp.wwpdb.org"):]
logger.debug(f"Downloading {url}")
try:
await session.download(url)
self.unzip_ids.append(url[-11:-7].upper())
except Exception as e:
self.error_ids.append(url[-11:-7].upper())
logger.warning(f"Error when downloading {url}: {e}")
async def download_session(self, sem, work_queue):
""" Get urls from the queue and pass to worker.
Parameters
----------
sem: asyncio.Semaphore object
work_queue: asyncio.Queue object
"""
while not work_queue.empty():
url = await work_queue.get()
logger.debug(f"Got url from queue: {url}")
async with sem:
async with aioftp.ClientSession("ftp.wwpdb.org") as session:
await self.download_worker(session, url)
def download_queue(self, urls):
""" Create a queue to download all the given urls.
Parameters
----------
urls: list
A list of urls to download.
Returns
-------
Downloaded "*.ent.gz" files in working directory.
"""
logger.debug(f"{len(urls)} urls to download.")
loop = uvloop.new_event_loop()
asyncio.set_event_loop(loop)
q = asyncio.Queue()
sem = asyncio.Semaphore(10)
[q.put_nowait(url) for url in urls]
tasks = [asyncio.ensure_future(self.download_session(sem, q))
for _ in range(len(urls))]
loop.run_until_complete(asyncio.gather(*tasks))
# Zero-sleep to allow underlying connections to close
loop.run_until_complete(asyncio.sleep(0))
loop.close()
def check_mcce(self):
"""Check if MCCE 3.0 exists."""
if not os.path.exists(os.path.join(ROOTPATH, "mcce3.0")):
if not os.path.exists(os.path.join(ROOTPATH, "mcce3.0.tar.bz2")):
logger.debug("MCCE isn't downloaded yet. Retrieving...")
with urlopen("https://anaconda.org/SalahSalah/mcce/3.0/download/linux-64/mcce-3.0-0.tar.bz2") as remotefile:
with open("./mcce-3.0-0.tar.bz2", 'wb') as f:
f.write(remotefile.read())
subprocess.run(["tar", "-xjf", "mcce-3.0-0.tar.bz2"])
shutil.move("./info/recipe/mcce3.0", "./mcce3.0")
shutil.rmtree(os.path.join(ROOTPATH, "info"), ignore_errors=True)
shutil.rmtree(os.path.join(ROOTPATH, "bin"), ignore_errors=True)
else:
logger.info("MCCE 3.0 exists, proceeding to calculation...")
def unzip(self, id):
"""Unzip downloaded *.ent.gz file."""
try:
saved_pdb = os.path.join(ROOTPATH, "pdb", id, f"{id}.pdb")
with gzip.open(f"pdb{id.lower()}.ent.gz", "rb") as inFile, open(saved_pdb, "wb") as outFile:
shutil.copyfileobj(inFile, outFile)
os.remove(f"pdb{id.lower()}.ent.gz")
self.preprocess_ids.append(id)
except Exception as e:
self.error_ids.append(id)
logger.warning(f"Unzip of {id} unsuccessful: {e}")
def preprocess(self, id, backup=True):
"""
This program will:
1) strip lines other than ATOM and HETATM records
2) keep the first model of an NMR structure
3) delete H and D atoms
4) MSE to MET residue
5) keep only one atom alternate position
6) keep defined chains, if chain ID(s) are given in command
7) remove some cofactors and salt ions
Parameters
----------
id: str
The PDB ID to find the file.
backup: bool, optional
Whether to backup the original file or not. Default is True,
and save to "original.bak".
Returns
-------
Nothing, modify the file in place.
"""
removable_res = [
" ZN", "PCA", "XYP", " NA", " CL", " CA", " MG", " MN", "HOH"
]
model_start = False
newlines = []
ID = id.upper()
filepath = os.path.join(ROOTPATH, "pdb", ID, f"{ID}.pdb")
if backup:
shutil.copy2(filepath, f"{filepath}.bak")
with open(filepath) as f:
for line in f:
if line[:5] == "MODEL":
model_start = True
if model_start and line[:6] == "ENDMDL":
break
if line[:6] != "ATOM " and line[:6] != "HETATM":
continue # discard non ATOM records
if line[13] == "H" or line[12] == "H":
continue
if line[16] == "A":
line = f"{line[:16]} {line[17:]}"
elif line[16] != " ":
continue # delete this line, alternative position is not A or empty
if line[:6] == "HETATM" and line[17:20] == "MSE":
if line[12:15] == "SE ":
line = f"ATOM {line[6:12]} SD{line[15:17]}MET{line[20:]}"
else:
line = f"ATOM {line[6:17]}MET{line[20:]}"
res = line[17:20]
if res in removable_res:
continue
newlines.append(line.rstrip())
with open(filepath, "w") as f:
f.write("\n".join(newlines))
logger.debug(f"{ID} preprocessing complete.")
def set_params(self, id, quickrun=True):
"""
Set the parameters for MCCE.
Parameters
----------
id: str
The PDB ID of the file.
quickrun: bool, optional
Use "run.prm.quick" or "run.prm.default".
Returns
-------
run.prm: a file describing the parameters that points to the PDB file.
"""
pkgpath = os.path.join(ROOTPATH, "mcce3.0")
ID = id.upper()
filepath = os.path.join(ROOTPATH, "pdb", ID)
newlines = []
if quickrun:
shutil.copy2(
os.path.join(pkgpath, "run.prm.quick"),
os.path.join(filepath, "run.prm")
)
else:
shutil.copy2(
os.path.join(pkgpath, "run.prm.default"),
os.path.join(filepath, "run.prm")
)
with open(os.path.join(filepath, "run.prm")) as f:
for line in f:
line = line.rstrip()
if line.endswith("(INPDB)"):
line = re.sub(r"^[^\s]+", fr"{id}.pdb", line)
if line.endswith(("(DO_PREMCCE)", "(DO_ROTAMERS)",
"(DO_ENERGY)", "(DO_MONTE)")):
line = re.sub(r"^f", r"t", line)
if line.endswith("(EPSILON_PROT)"):
line = re.sub(r"^[\d\.]+", r"8.0", line)
if line.startswith("/home/mcce/mcce3.0"):
line = re.sub(r"^/.*3\.0", pkgpath,
line)
newlines.append(line)
with open(os.path.join(filepath, "run.prm"), "w") as f:
f.write("\n".join(newlines))
self.ready_ids.append(ID)
logger.debug(f"Parameters set for {ID}.")
def split_ready_ids(self, num):
""" A naive queue implementation for multiple scripts.
Parameters
----------
num: int
Which part of the IDs to work on.
Returns
-------
A list of the actual IDs to work on, and save the lists of IDs for
other scripts to work with if this is the first instance.
"""
if os.path.isfile(os.path.join(ROOTPATH, "results", "working_ids.list")):
with open(os.path.join(ROOTPATH, "results", f"working_ids.list{num}"), "r") as f:
self.working_ids = [line.strip() for line in f]
else:
n = math.ceil(len(self.ready_ids) / 10)
self.working_ids = [self.ready_ids[i:i + n]
for i in range(0, len(self.ready_ids), n)]
metafile = []
for i, ids in enumerate(self.working_ids):
metafile.append(os.path.join(
ROOTPATH, "results", f"working_ids.list{i}"))
with open(os.path.join(ROOTPATH, "results", f"working_ids.list{i}"), "w") as f:
f.write("\n".join(ids))
logger.debug(
f"Saved {len(ids)} IDs to file working_ids.list{i} .")
with open(os.path.join(ROOTPATH, "results", "working_ids.list"), "w") as f:
f.write("\n".join(metafile))
self.working_ids = self.working_ids[num]
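# (added, illustrative) With ten cooperating script instances, instance 0 runs
# split_ready_ids(0) first and writes working_ids.list0..list9; every later
# instance n simply reads its own working_ids.list<n> back.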
def calc_pka(self, id, clean=True):
""" Calculate protein pKa values using MCCE.
https://sites.google.com/site/mccewiki/home
Parameters
----------
id: str
The PDB ID of the protein calculated.
clean: bool, optional
Only keep the PDB file, run log and pKa output.
Returns
-------
A set of files in a subdirectory named after the ID.
See user manual for detail.
"""
id = id.upper()
os.chdir(os.path.realpath(os.path.join(ROOTPATH, "pdb", id)))
logger.info(f"{id} calculation started.")
start = time.time()
with open(f"{id}.run.log", "w") as f:
subprocess.run(f"{ROOTPATH}/mcce3.0/mcce", stdout=f)
with open(f"{id}.run.log", "rb") as f:
last = f.readlines()[-1].decode().lstrip()
if last.startswith(("Fatal", "FATAL", "WARNING", "STOP")):
self.error_ids.append(id)
logger.warning(
f"{id} calculation aborted after {time.time() - start}s, due to {last}")
else:
self.finished_ids.append(id)
logger.info(
f"{id} calculation finished, used {time.time() - start}s.")
shutil.move("pK.out", os.path.join(
ROOTPATH, "results", f"{id}.pka"))
if clean:
del_list = [i for i in os.listdir() if i not in (
"pK.out", f"{id}.run.log", f"{id}.pdb.bak")]
[os.remove(item) for item in del_list]
if __name__ == "__main__":
x = pdb()
x.load_id()
urls = x.get_link(x.download_ids)
x.make_dirs(x.all_ids)
x.download_queue(urls)
x.check_mcce()
for id in x.unzip_ids:
x.unzip(id)
for id in x.preprocess_ids:
try:
x.preprocess(id)
x.set_params(id)
except Exception as e:
x.error_ids.append(id)
logger.warning(f"Preprocess of {id}: {e}")
# subprocess.run(["find", ".", "-type", "d", "-empty", "-delete"])
x.split_ready_ids(0) # 0 - 9, run 0 first to generate other lists
with Pool(os.cpu_count()) as p:
p.map(x.calc_pka, x.working_ids)
with open("./results/finished_ids.list", "a") as f:
f.write("\n".join(x.working_ids))
with open("./results/error_ids.list", "a") as f:
f.write("\n".join(x.error_ids))
| apache-2.0 | 8,614,330,125,067,833,000 | -8,161,648,011,604,345,000 | 38.11007 | 166 | 0.517896 | false |
Einsteinish/PyTune3 | utils/bootstrap_story_hash.py | 1 | 1046 | import time
import pymongo
from django.conf import settings
from apps.rss_feeds.models import MStory, Feed
db = settings.MONGODB
batch = 0
start = 0
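# (added note) `batch` lets a rerun skip the first batch*100000 feed ids;
# `start` seeds xrange() once at loop setup, so reassigning it as a timer
# inside the loop does not affect the iteration.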
for f in xrange(start, Feed.objects.latest('pk').pk):
if f < batch*100000: continue
start = time.time()
try:
cp1 = time.time() - start
# if feed.active_premium_subscribers < 1: continue
stories = MStory.objects.filter(story_feed_id=f, story_hash__exists=False)\
.only('id', 'story_feed_id', 'story_guid')\
.read_preference(pymongo.ReadPreference.SECONDARY)
cp2 = time.time() - start
count = 0
for story in stories:
count += 1
db.pytune.stories.update({"_id": story.id}, {"$set": {
"story_hash": story.feed_guid_hash
}})
cp3 = time.time() - start
print "%s: %3s stories (%s/%s/%s)" % (f, count, round(cp1, 2), round(cp2, 2), round(cp3, 2))
except Exception, e:
print " ***> (%s) %s" % (f, e)
| mit | 3,038,098,533,888,395,000 | -6,328,642,908,157,138,000 | 35.068966 | 100 | 0.547801 | false |
alvin319/CarnotKE | jyhton/lib-python/2.7/wsgiref/headers.py | 229 | 5879 | """Manage HTTP Response Headers
Much of this module is red-handedly pilfered from email.message in the stdlib,
so portions are Copyright (C) 2001,2002 Python Software Foundation, and were
written by Barry Warsaw.
"""
from types import ListType, TupleType
# Regular expression that matches `special' characters in parameters, the
# existence of which force quoting of the parameter value.
import re
tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]')
def _formatparam(param, value=None, quote=1):
"""Convenience function to format and return a key=value pair.
This will quote the value if needed or if quote is true.
"""
if value is not None and len(value) > 0:
if quote or tspecials.search(value):
value = value.replace('\\', '\\\\').replace('"', r'\"')
return '%s="%s"' % (param, value)
else:
return '%s=%s' % (param, value)
else:
return param
class Headers:
"""Manage a collection of HTTP response headers"""
def __init__(self,headers):
if type(headers) is not ListType:
raise TypeError("Headers must be a list of name/value tuples")
self._headers = headers
def __len__(self):
"""Return the total number of headers, including duplicates."""
return len(self._headers)
def __setitem__(self, name, val):
"""Set the value of a header."""
del self[name]
self._headers.append((name, val))
def __delitem__(self,name):
"""Delete all occurrences of a header, if present.
Does *not* raise an exception if the header is missing.
"""
name = name.lower()
self._headers[:] = [kv for kv in self._headers if kv[0].lower() != name]
def __getitem__(self,name):
"""Get the first header value for 'name'
Return None if the header is missing instead of raising an exception.
Note that if the header appeared multiple times, exactly which
occurrence gets returned is undefined. Use get_all() to get all
the values matching a header field name.
"""
return self.get(name)
def has_key(self, name):
"""Return true if the message contains the header."""
return self.get(name) is not None
__contains__ = has_key
def get_all(self, name):
"""Return a list of all the values for the named field.
These will be sorted in the order they appeared in the original header
list or were added to this instance, and may contain duplicates. Any
fields deleted and re-inserted are always appended to the header list.
If no fields exist with the given name, returns an empty list.
"""
name = name.lower()
return [kv[1] for kv in self._headers if kv[0].lower()==name]
def get(self,name,default=None):
"""Get the first header value for 'name', or return 'default'"""
name = name.lower()
for k,v in self._headers:
if k.lower()==name:
return v
return default
def keys(self):
"""Return a list of all the header field names.
These will be sorted in the order they appeared in the original header
list, or were added to this instance, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return [k for k, v in self._headers]
def values(self):
"""Return a list of all header values.
These will be sorted in the order they appeared in the original header
list, or were added to this instance, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return [v for k, v in self._headers]
def items(self):
"""Get all the header fields and values.
These will be sorted in the order they were in the original header
list, or were added to this instance, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return self._headers[:]
def __repr__(self):
return "Headers(%r)" % self._headers
def __str__(self):
"""str() returns the formatted headers, complete with end line,
suitable for direct HTTP transmission."""
return '\r\n'.join(["%s: %s" % kv for kv in self._headers]+['',''])
def setdefault(self,name,value):
"""Return first matching header value for 'name', or 'value'
If there is no header named 'name', add a new header with name 'name'
and value 'value'."""
result = self.get(name)
if result is None:
self._headers.append((name,value))
return value
else:
return result
def add_header(self, _name, _value, **_params):
"""Extended header setting.
_name is the header field to add. keyword arguments can be used to set
additional parameters for the header field, with underscores converted
to dashes. Normally the parameter will be added as key="value" unless
value is None, in which case only the key will be added.
Example:
h.add_header('content-disposition', 'attachment', filename='bud.gif')
Note that unlike the corresponding 'email.message' method, this does
*not* handle '(charset, language, value)' tuples: all values must be
strings or None.
"""
parts = []
if _value is not None:
parts.append(_value)
for k, v in _params.items():
if v is None:
parts.append(k.replace('_', '-'))
else:
parts.append(_formatparam(k.replace('_', '-'), v))
self._headers.append((_name, "; ".join(parts)))
| apache-2.0 | -6,419,877,958,325,767,000 | -5,213,730,288,540,281,000 | 33.786982 | 80 | 0.607756 | false |
yannickcr/Sick-Beard | lib/subliminal/utils.py | 167 | 2027 | # -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <[email protected]>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
import re
__all__ = ['get_keywords', 'split_keyword', 'to_unicode']
def get_keywords(guess):
"""Retrieve keywords from guessed informations
:param guess: guessed informations
:type guess: :class:`guessit.guess.Guess`
:return: lower case alphanumeric keywords
:rtype: set
"""
keywords = set()
for k in ['releaseGroup', 'screenSize', 'videoCodec', 'format']:
if k in guess:
keywords = keywords | split_keyword(guess[k].lower())
return keywords
def split_keyword(keyword):
"""Split a keyword in multiple ones on any non-alphanumeric character
:param string keyword: keyword
:return: keywords
:rtype: set
"""
split = set(re.findall(r'\w+', keyword))
return split
def to_unicode(data):
"""Convert a basestring to unicode
:param basestring data: data to decode
:return: data as unicode
:rtype: unicode
"""
if not isinstance(data, basestring):
raise ValueError('Basestring expected')
if isinstance(data, unicode):
return data
for encoding in ('utf-8', 'latin-1'):
try:
return unicode(data, encoding)
except UnicodeDecodeError:
pass
return unicode(data, 'utf-8', 'replace')
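# (added, illustrative examples)
# split_keyword('x264-2hd') -> set(['x264', '2hd'])
# to_unicode('caf\xc3\xa9') -> u'caf\xe9' (decoded as UTF-8)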
| gpl-3.0 | -8,730,876,814,719,412,000 | 7,003,378,357,283,948,000 | 28.376812 | 77 | 0.678342 | false |
pvizeli/hassio | hassio/core.py | 1 | 5205 | """Main file for HassIO."""
import asyncio
import logging
import aiohttp
import docker
from .addons import AddonManager
from .api import RestAPI
from .host_control import HostControl
from .const import (
SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT,
RUN_CLEANUP_API_SESSIONS, STARTUP_AFTER, STARTUP_BEFORE,
STARTUP_INITIALIZE)
from .scheduler import Scheduler
from .dock.homeassistant import DockerHomeAssistant
from .dock.supervisor import DockerSupervisor
from .tasks import (
hassio_update, homeassistant_watchdog, homeassistant_setup,
api_sessions_cleanup)
from .tools import get_local_ip, fetch_timezone
_LOGGER = logging.getLogger(__name__)
class HassIO(object):
"""Main object of hassio."""
def __init__(self, loop, config):
"""Initialize hassio object."""
self.exit_code = 0
self.loop = loop
self.config = config
self.websession = aiohttp.ClientSession(loop=loop)
self.scheduler = Scheduler(loop)
self.api = RestAPI(config, loop)
self.dock = docker.DockerClient(
base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')
# init basic docker container
self.supervisor = DockerSupervisor(config, loop, self.dock, self.stop)
self.homeassistant = DockerHomeAssistant(config, loop, self.dock)
# init HostControl
self.host_control = HostControl(loop)
# init addon system
self.addons = AddonManager(config, loop, self.dock)
async def setup(self):
"""Setup HassIO orchestration."""
# supervisor
if not await self.supervisor.attach():
_LOGGER.fatal("Can't attach to supervisor docker container!")
await self.supervisor.cleanup()
# set running arch
self.config.arch = self.supervisor.arch
# set api endpoint
self.config.api_endpoint = await get_local_ip(self.loop)
# update timezone
if self.config.timezone == 'UTC':
self.config.timezone = await fetch_timezone(self.websession)
# hostcontrol
await self.host_control.load()
# schedule update info tasks
self.scheduler.register_task(
self.host_control.load, RUN_UPDATE_INFO_TASKS)
# rest api views
self.api.register_host(self.host_control)
self.api.register_network(self.host_control)
self.api.register_supervisor(
self.supervisor, self.addons, self.host_control, self.websession)
self.api.register_homeassistant(self.homeassistant)
self.api.register_addons(self.addons)
self.api.register_security()
self.api.register_panel()
# schedule api session cleanup
self.scheduler.register_task(
api_sessions_cleanup(self.config), RUN_CLEANUP_API_SESSIONS,
now=True)
# first start of supervisor?
if not await self.homeassistant.exists():
_LOGGER.info("No HomeAssistant docker found.")
await homeassistant_setup(
self.config, self.loop, self.homeassistant, self.websession)
else:
await self.homeassistant.attach()
# Load addons
await self.addons.prepare()
# schedule addon update task
self.scheduler.register_task(
self.addons.reload, RUN_RELOAD_ADDONS_TASKS, now=True)
# schedule self update task
self.scheduler.register_task(
hassio_update(self.config, self.supervisor, self.websession),
RUN_UPDATE_SUPERVISOR_TASKS)
# start addon mark as initialize
await self.addons.auto_boot(STARTUP_INITIALIZE)
async def start(self):
"""Start HassIO orchestration."""
# on release channel, try update itself
# on beta channel, only read new versions
await asyncio.wait(
[hassio_update(self.config, self.supervisor, self.websession)()],
loop=self.loop
)
# start api
await self.api.start()
_LOGGER.info("Start hassio api on %s", self.config.api_endpoint)
try:
# HomeAssistant is already running / supervisor have only reboot
if await self.homeassistant.is_running():
_LOGGER.info("HassIO reboot detected")
return
# start addon mark as before
await self.addons.auto_boot(STARTUP_BEFORE)
# run HomeAssistant
await self.homeassistant.run()
# start addon mark as after
await self.addons.auto_boot(STARTUP_AFTER)
finally:
# schedule homeassistant watchdog
self.scheduler.register_task(
homeassistant_watchdog(self.loop, self.homeassistant),
RUN_WATCHDOG_HOMEASSISTANT)
async def stop(self, exit_code=0):
"""Stop a running orchestration."""
# don't process scheduler anymore
self.scheduler.stop()
# process stop tasks
self.websession.close()
await self.api.stop()
self.exit_code = exit_code
self.loop.stop()
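# (added, illustrative sketch; the real entry point lives elsewhere in hassio,
# and `config` is assumed to be a CoreConfig-like object):
#
# loop = asyncio.get_event_loop()
# hassio = HassIO(loop, config)
# loop.run_until_complete(hassio.setup())
# loop.call_soon_threadsafe(loop.create_task, hassio.start())
# loop.run_forever()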
| bsd-3-clause | 1,547,565,644,912,629,800 | 4,714,229,101,739,759,000 | 32.152866 | 78 | 0.634006 | false |
yencarnacion/jaikuengine | .google_appengine/lib/yaml-3.10/yaml/reader.py | 424 | 6746 | # This module contains abstractions for the input stream. You don't have to
# look further; there is no pretty code here.
#
# We define two classes here.
#
# Mark(source, line, column)
# It's just a record and its only use is producing nice error messages.
# Parser does not use it for any other purposes.
#
# Reader(source, data)
# Reader determines the encoding of `data` and converts it to unicode.
# Reader provides the following methods and attributes:
# reader.peek(length=1) - return the next `length` characters
# reader.forward(length=1) - move the current position forward by `length` characters.
# reader.index - the number of the current character.
# reader.line, reader.column - the line and the column of the current character.
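# A short usage sketch (added for illustration; not part of the original
# module):
#
#     reader = Reader(u"hello")
#     reader.peek()      # u'h' -- does not advance
#     reader.forward(2)  # now positioned at u'l'
#     reader.get_mark()  # Mark with line/column for error messages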
__all__ = ['Reader', 'ReaderError']
from error import YAMLError, Mark
import codecs, re
class ReaderError(YAMLError):
def __init__(self, name, position, character, encoding, reason):
self.name = name
self.character = character
self.position = position
self.encoding = encoding
self.reason = reason
def __str__(self):
if isinstance(self.character, str):
return "'%s' codec can't decode byte #x%02x: %s\n" \
" in \"%s\", position %d" \
% (self.encoding, ord(self.character), self.reason,
self.name, self.position)
else:
return "unacceptable character #x%04x: %s\n" \
" in \"%s\", position %d" \
% (self.character, self.reason,
self.name, self.position)
class Reader(object):
# Reader:
# - determines the data encoding and converts it to unicode,
# - checks if characters are in allowed range,
# - adds '\0' to the end.
# Reader accepts
# - a `str` object,
# - a `unicode` object,
# - a file-like object with its `read` method returning `str`,
# - a file-like object with its `read` method returning `unicode`.
# Yeah, it's ugly and slow.
def __init__(self, stream):
self.name = None
self.stream = None
self.stream_pointer = 0
self.eof = True
self.buffer = u''
self.pointer = 0
self.raw_buffer = None
self.raw_decode = None
self.encoding = None
self.index = 0
self.line = 0
self.column = 0
if isinstance(stream, unicode):
self.name = "<unicode string>"
self.check_printable(stream)
self.buffer = stream+u'\0'
elif isinstance(stream, str):
self.name = "<string>"
self.raw_buffer = stream
self.determine_encoding()
else:
self.stream = stream
self.name = getattr(stream, 'name', "<file>")
self.eof = False
self.raw_buffer = ''
self.determine_encoding()
def peek(self, index=0):
try:
return self.buffer[self.pointer+index]
except IndexError:
self.update(index+1)
return self.buffer[self.pointer+index]
def prefix(self, length=1):
if self.pointer+length >= len(self.buffer):
self.update(length)
return self.buffer[self.pointer:self.pointer+length]
def forward(self, length=1):
if self.pointer+length+1 >= len(self.buffer):
self.update(length+1)
while length:
ch = self.buffer[self.pointer]
self.pointer += 1
self.index += 1
if ch in u'\n\x85\u2028\u2029' \
or (ch == u'\r' and self.buffer[self.pointer] != u'\n'):
self.line += 1
self.column = 0
elif ch != u'\uFEFF':
self.column += 1
length -= 1
def get_mark(self):
if self.stream is None:
return Mark(self.name, self.index, self.line, self.column,
self.buffer, self.pointer)
else:
return Mark(self.name, self.index, self.line, self.column,
None, None)
def determine_encoding(self):
while not self.eof and len(self.raw_buffer) < 2:
self.update_raw()
if not isinstance(self.raw_buffer, unicode):
if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
self.raw_decode = codecs.utf_16_le_decode
self.encoding = 'utf-16-le'
elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
self.raw_decode = codecs.utf_16_be_decode
self.encoding = 'utf-16-be'
else:
self.raw_decode = codecs.utf_8_decode
self.encoding = 'utf-8'
self.update(1)
NON_PRINTABLE = re.compile(u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]')
def check_printable(self, data):
match = self.NON_PRINTABLE.search(data)
if match:
character = match.group()
position = self.index+(len(self.buffer)-self.pointer)+match.start()
raise ReaderError(self.name, position, ord(character),
'unicode', "special characters are not allowed")
def update(self, length):
if self.raw_buffer is None:
return
self.buffer = self.buffer[self.pointer:]
self.pointer = 0
while len(self.buffer) < length:
if not self.eof:
self.update_raw()
if self.raw_decode is not None:
try:
data, converted = self.raw_decode(self.raw_buffer,
'strict', self.eof)
except UnicodeDecodeError, exc:
character = exc.object[exc.start]
if self.stream is not None:
position = self.stream_pointer-len(self.raw_buffer)+exc.start
else:
position = exc.start
raise ReaderError(self.name, position, character,
exc.encoding, exc.reason)
else:
data = self.raw_buffer
converted = len(data)
self.check_printable(data)
self.buffer += data
self.raw_buffer = self.raw_buffer[converted:]
if self.eof:
self.buffer += u'\0'
self.raw_buffer = None
break
def update_raw(self, size=1024):
data = self.stream.read(size)
if data:
self.raw_buffer += data
self.stream_pointer += len(data)
else:
self.eof = True
#try:
# import psyco
# psyco.bind(Reader)
#except ImportError:
# pass
| apache-2.0 | 5,234,043,475,274,878,000 | -8,156,035,085,946,491,000 | 34.505263 | 87 | 0.544767 | false |
vmg/hg-stable | hgext/keyword.py | 92 | 27955 | # keyword.py - $Keyword$ expansion for Mercurial
#
# Copyright 2007-2012 Christian Ebert <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
#
# $Id$
#
# Keyword expansion hack against the grain of a Distributed SCM
#
# There are many good reasons why this is not needed in a distributed
# SCM, still it may be useful in very small projects based on single
# files (like LaTeX packages), that are mostly addressed to an
# audience not running a version control system.
#
# For in-depth discussion refer to
# <http://mercurial.selenic.com/wiki/KeywordPlan>.
#
# Keyword expansion is based on Mercurial's changeset template mappings.
#
# Binary files are not touched.
#
# Files to act upon/ignore are specified in the [keyword] section.
# Customized keyword template mappings in the [keywordmaps] section.
#
# Run "hg help keyword" and "hg kwdemo" to get info on configuration.
'''expand keywords in tracked files
This extension expands RCS/CVS-like or self-customized $Keywords$ in
tracked text files selected by your configuration.
Keywords are only expanded in local repositories and not stored in the
change history. The mechanism can be regarded as a convenience for the
current user or for archive distribution.
Keywords expand to the changeset data pertaining to the latest change
relative to the working directory parent of each file.
Configuration is done in the [keyword], [keywordset] and [keywordmaps]
sections of hgrc files.
Example::
[keyword]
# expand keywords in every python file except those matching "x*"
**.py =
x* = ignore
[keywordset]
# prefer svn- over cvs-like default keywordmaps
svn = True
.. note::
The more specific you are in your filename patterns the less you
lose speed in huge repositories.
For [keywordmaps] template mapping and expansion demonstration and
control run :hg:`kwdemo`. See :hg:`help templates` for a list of
available templates and filters.
Three additional date template filters are provided:
:``utcdate``: "2006/09/18 15:13:13"
:``svnutcdate``: "2006-09-18 15:13:13Z"
:``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
The default template mappings (view with :hg:`kwdemo -d`) can be
replaced with customized keywords and templates. Again, run
:hg:`kwdemo` to control the results of your configuration changes.
Before changing/disabling active keywords, you must run :hg:`kwshrink`
to avoid storing expanded keywords in the change history.
To force expansion after enabling it, or a configuration change, run
:hg:`kwexpand`.
Expansions spanning more than one line and incremental expansions,
like CVS' $Log$, are not supported. A keyword template map "Log =
{desc}" expands to the first line of the changeset description.
'''
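# Illustrative expansion (a sketch, not part of the original module): with
# the cvs-like default keywordset, a tracked file containing the literal
# text "$Id$" is checked out as something like
#   $Id: example.py,v 1a2b3c4d5e6f 2009/08/18 11:00:13 alice $
# while the stored history keeps the unexpanded "$Id$".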
from mercurial import commands, context, cmdutil, dispatch, filelog, extensions
from mercurial import localrepo, match, patch, templatefilters, templater, util
from mercurial import scmutil
from mercurial.hgweb import webcommands
from mercurial.i18n import _
import os, re, shutil, tempfile
commands.optionalrepo += ' kwdemo'
commands.inferrepo += ' kwexpand kwfiles kwshrink'
cmdtable = {}
command = cmdutil.command(cmdtable)
testedwith = 'internal'
# hg commands that do not act on keywords
nokwcommands = ('add addremove annotate bundle export grep incoming init log'
' outgoing push tip verify convert email glog')
# hg commands that trigger expansion only when writing to working dir,
# not when reading filelog, and unexpand when reading from working dir
restricted = 'merge kwexpand kwshrink record qrecord resolve transplant'
# names of extensions using dorecord
recordextensions = 'record'
colortable = {
'kwfiles.enabled': 'green bold',
'kwfiles.deleted': 'cyan bold underline',
'kwfiles.enabledunknown': 'green',
'kwfiles.ignored': 'bold',
'kwfiles.ignoredunknown': 'none'
}
# date like in cvs' $Date
def utcdate(text):
''':utcdate: Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
'''
return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S')
# date like in svn's $Date
def svnisodate(text):
''':svnisodate: Date. Returns a date in this format: "2009-08-18 13:00:13
+0200 (Tue, 18 Aug 2009)".
'''
return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
# date like in svn's $Id
def svnutcdate(text):
''':svnutcdate: Date. Returns a UTC-date in this format: "2009-08-18
11:00:13Z".
'''
return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
templatefilters.filters.update({'utcdate': utcdate,
'svnisodate': svnisodate,
'svnutcdate': svnutcdate})
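# Illustrative template usage (a sketch): once registered above, these
# filters can be applied in keyword template maps, e.g. the default
# 'Date': '{date|utcdate}' mapping renders as '$Date: 2009/08/18 11:00:13 $'.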
# make keyword tools accessible
kwtools = {'templater': None, 'hgcmd': ''}
def _defaultkwmaps(ui):
'''Returns default keywordmaps according to keywordset configuration.'''
templates = {
'Revision': '{node|short}',
'Author': '{author|user}',
}
kwsets = ({
'Date': '{date|utcdate}',
'RCSfile': '{file|basename},v',
'RCSFile': '{file|basename},v', # kept for backwards compatibility
# with hg-keyword
'Source': '{root}/{file},v',
'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
}, {
'Date': '{date|svnisodate}',
'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
'LastChangedRevision': '{node|short}',
'LastChangedBy': '{author|user}',
'LastChangedDate': '{date|svnisodate}',
})
templates.update(kwsets[ui.configbool('keywordset', 'svn')])
return templates
def _shrinktext(text, subfunc):
'''Helper for keyword expansion removal in text.
Depending on subfunc also returns number of substitutions.'''
return subfunc(r'$\1$', text)
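# For example, with rekwexp.sub as subfunc this turns an expanded
# '$Id: example.py,v 1a2b3c 2009/08/18 11:00:13 alice $' back into '$Id$'
# (with .subn it also reports how many substitutions were made).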
def _preselect(wstatus, changed):
'''Retrieves modified and added files from a working directory state
and returns the subset of each contained in given changed files
retrieved from a change context.'''
modified, added = wstatus[:2]
modified = [f for f in modified if f in changed]
added = [f for f in added if f in changed]
return modified, added
class kwtemplater(object):
'''
Sets up keyword templates, corresponding keyword regex, and
provides keyword substitution functions.
'''
def __init__(self, ui, repo, inc, exc):
self.ui = ui
self.repo = repo
self.match = match.match(repo.root, '', [], inc, exc)
self.restrict = kwtools['hgcmd'] in restricted.split()
self.postcommit = False
kwmaps = self.ui.configitems('keywordmaps')
if kwmaps: # override default templates
self.templates = dict((k, templater.parsestring(v, False))
for k, v in kwmaps)
else:
self.templates = _defaultkwmaps(self.ui)
@util.propertycache
def escape(self):
'''Returns bar-separated and escaped keywords.'''
return '|'.join(map(re.escape, self.templates.keys()))
@util.propertycache
def rekw(self):
'''Returns regex for unexpanded keywords.'''
return re.compile(r'\$(%s)\$' % self.escape)
@util.propertycache
def rekwexp(self):
'''Returns regex for expanded keywords.'''
return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)
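    # Illustrative forms (a sketch, assuming 'Id' is a configured keyword):
    # rekw matches the unexpanded '$Id$', while rekwexp matches the expanded
    # '$Id: <anything up to the closing dollar> $'.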
def substitute(self, data, path, ctx, subfunc):
'''Replaces keywords in data with expanded template.'''
def kwsub(mobj):
kw = mobj.group(1)
ct = cmdutil.changeset_templater(self.ui, self.repo,
False, None, '', False)
ct.use_template(self.templates[kw])
self.ui.pushbuffer()
ct.show(ctx, root=self.repo.root, file=path)
ekw = templatefilters.firstline(self.ui.popbuffer())
return '$%s: %s $' % (kw, ekw)
return subfunc(kwsub, data)
def linkctx(self, path, fileid):
'''Similar to filelog.linkrev, but returns a changectx.'''
return self.repo.filectx(path, fileid=fileid).changectx()
def expand(self, path, node, data):
'''Returns data with keywords expanded.'''
if not self.restrict and self.match(path) and not util.binary(data):
ctx = self.linkctx(path, node)
return self.substitute(data, path, ctx, self.rekw.sub)
return data
def iskwfile(self, cand, ctx):
'''Returns subset of candidates which are configured for keyword
expansion but are not symbolic links.'''
return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]
def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
'''Overwrites selected files expanding/shrinking keywords.'''
if self.restrict or lookup or self.postcommit: # exclude kw_copy
candidates = self.iskwfile(candidates, ctx)
if not candidates:
return
kwcmd = self.restrict and lookup # kwexpand/kwshrink
if self.restrict or expand and lookup:
mf = ctx.manifest()
if self.restrict or rekw:
re_kw = self.rekw
else:
re_kw = self.rekwexp
if expand:
msg = _('overwriting %s expanding keywords\n')
else:
msg = _('overwriting %s shrinking keywords\n')
for f in candidates:
if self.restrict:
data = self.repo.file(f).read(mf[f])
else:
data = self.repo.wread(f)
if util.binary(data):
continue
if expand:
if lookup:
ctx = self.linkctx(f, mf[f])
data, found = self.substitute(data, f, ctx, re_kw.subn)
elif self.restrict:
found = re_kw.search(data)
else:
data, found = _shrinktext(data, re_kw.subn)
if found:
self.ui.note(msg % f)
fp = self.repo.wopener(f, "wb", atomictemp=True)
fp.write(data)
fp.close()
if kwcmd:
self.repo.dirstate.normal(f)
elif self.postcommit:
self.repo.dirstate.normallookup(f)
def shrink(self, fname, text):
'''Returns text with all keyword substitutions removed.'''
if self.match(fname) and not util.binary(text):
return _shrinktext(text, self.rekwexp.sub)
return text
def shrinklines(self, fname, lines):
'''Returns lines with keyword substitutions removed.'''
if self.match(fname):
text = ''.join(lines)
if not util.binary(text):
return _shrinktext(text, self.rekwexp.sub).splitlines(True)
return lines
def wread(self, fname, data):
'''If in restricted mode returns data read from wdir with
keyword substitutions removed.'''
if self.restrict:
return self.shrink(fname, data)
return data
class kwfilelog(filelog.filelog):
'''
Subclass of filelog to hook into its read, add, cmp methods.
Keywords are "stored" unexpanded, and processed on reading.
'''
def __init__(self, opener, kwt, path):
super(kwfilelog, self).__init__(opener, path)
self.kwt = kwt
self.path = path
def read(self, node):
'''Expands keywords when reading filelog.'''
data = super(kwfilelog, self).read(node)
if self.renamed(node):
return data
return self.kwt.expand(self.path, node, data)
def add(self, text, meta, tr, link, p1=None, p2=None):
'''Removes keyword substitutions when adding to filelog.'''
text = self.kwt.shrink(self.path, text)
return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
def cmp(self, node, text):
'''Removes keyword substitutions for comparison.'''
text = self.kwt.shrink(self.path, text)
return super(kwfilelog, self).cmp(node, text)
def _status(ui, repo, wctx, kwt, *pats, **opts):
'''Bails out if [keyword] configuration is not active.
Returns status of working directory.'''
if kwt:
return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
unknown=opts.get('unknown') or opts.get('all'))
if ui.configitems('keyword'):
raise util.Abort(_('[keyword] patterns cannot match'))
raise util.Abort(_('no [keyword] patterns configured'))
def _kwfwrite(ui, repo, expand, *pats, **opts):
'''Selects files and passes them to kwtemplater.overwrite.'''
wctx = repo[None]
if len(wctx.parents()) > 1:
raise util.Abort(_('outstanding uncommitted merge'))
kwt = kwtools['templater']
wlock = repo.wlock()
try:
status = _status(ui, repo, wctx, kwt, *pats, **opts)
modified, added, removed, deleted, unknown, ignored, clean = status
if modified or added or removed or deleted:
raise util.Abort(_('outstanding uncommitted changes'))
kwt.overwrite(wctx, clean, True, expand)
finally:
wlock.release()
@command('kwdemo',
[('d', 'default', None, _('show default keyword template maps')),
('f', 'rcfile', '',
_('read maps from rcfile'), _('FILE'))],
_('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'))
def demo(ui, repo, *args, **opts):
'''print [keywordmaps] configuration and an expansion example
Show current, custom, or default keyword template maps and their
expansions.
Extend the current configuration by specifying maps as arguments
and using -f/--rcfile to source an external hgrc file.
Use -d/--default to disable current configuration.
See :hg:`help templates` for information on templates and filters.
'''
def demoitems(section, items):
ui.write('[%s]\n' % section)
for k, v in sorted(items):
ui.write('%s = %s\n' % (k, v))
fn = 'demo.txt'
tmpdir = tempfile.mkdtemp('', 'kwdemo.')
ui.note(_('creating temporary repository at %s\n') % tmpdir)
repo = localrepo.localrepository(repo.baseui, tmpdir, True)
ui.setconfig('keyword', fn, '')
svn = ui.configbool('keywordset', 'svn')
# explicitly set keywordset for demo output
ui.setconfig('keywordset', 'svn', svn)
uikwmaps = ui.configitems('keywordmaps')
if args or opts.get('rcfile'):
ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
if uikwmaps:
ui.status(_('\textending current template maps\n'))
if opts.get('default') or not uikwmaps:
if svn:
ui.status(_('\toverriding default svn keywordset\n'))
else:
ui.status(_('\toverriding default cvs keywordset\n'))
if opts.get('rcfile'):
ui.readconfig(opts.get('rcfile'))
if args:
# simulate hgrc parsing
rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
fp = repo.opener('hgrc', 'w')
fp.writelines(rcmaps)
fp.close()
ui.readconfig(repo.join('hgrc'))
kwmaps = dict(ui.configitems('keywordmaps'))
elif opts.get('default'):
if svn:
ui.status(_('\n\tconfiguration using default svn keywordset\n'))
else:
ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
kwmaps = _defaultkwmaps(ui)
if uikwmaps:
ui.status(_('\tdisabling current template maps\n'))
for k, v in kwmaps.iteritems():
ui.setconfig('keywordmaps', k, v)
else:
ui.status(_('\n\tconfiguration using current keyword template maps\n'))
if uikwmaps:
kwmaps = dict(uikwmaps)
else:
kwmaps = _defaultkwmaps(ui)
uisetup(ui)
reposetup(ui, repo)
ui.write('[extensions]\nkeyword =\n')
demoitems('keyword', ui.configitems('keyword'))
demoitems('keywordset', ui.configitems('keywordset'))
demoitems('keywordmaps', kwmaps.iteritems())
keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
repo.wopener.write(fn, keywords)
repo[None].add([fn])
ui.note(_('\nkeywords written to %s:\n') % fn)
ui.note(keywords)
repo.dirstate.setbranch('demobranch')
for name, cmd in ui.configitems('hooks'):
if name.split('.', 1)[0].find('commit') > -1:
repo.ui.setconfig('hooks', name, '')
msg = _('hg keyword configuration and expansion example')
ui.note("hg ci -m '%s'\n" % msg) # check-code-ignore
repo.commit(text=msg)
ui.status(_('\n\tkeywords expanded\n'))
ui.write(repo.wread(fn))
shutil.rmtree(tmpdir, ignore_errors=True)
@command('kwexpand', commands.walkopts, _('hg kwexpand [OPTION]... [FILE]...'))
def expand(ui, repo, *pats, **opts):
'''expand keywords in the working directory
Run after (re)enabling keyword expansion.
kwexpand refuses to run if given files contain local changes.
'''
# 3rd argument sets expansion to True
_kwfwrite(ui, repo, True, *pats, **opts)
@command('kwfiles',
[('A', 'all', None, _('show keyword status flags of all files')),
('i', 'ignore', None, _('show files excluded from expansion')),
('u', 'unknown', None, _('only show unknown (not tracked) files')),
] + commands.walkopts,
_('hg kwfiles [OPTION]... [FILE]...'))
def files(ui, repo, *pats, **opts):
'''show files configured for keyword expansion
List which files in the working directory are matched by the
[keyword] configuration patterns.
Useful to prevent inadvertent keyword expansion and to speed up
execution by including only files that are actual candidates for
expansion.
See :hg:`help keyword` on how to construct patterns both for
inclusion and exclusion of files.
With -A/--all and -v/--verbose the codes used to show the status
of files are::
K = keyword expansion candidate
k = keyword expansion candidate (not tracked)
I = ignored
i = ignored (not tracked)
'''
kwt = kwtools['templater']
wctx = repo[None]
status = _status(ui, repo, wctx, kwt, *pats, **opts)
cwd = pats and repo.getcwd() or ''
modified, added, removed, deleted, unknown, ignored, clean = status
files = []
if not opts.get('unknown') or opts.get('all'):
files = sorted(modified + added + clean)
kwfiles = kwt.iskwfile(files, wctx)
kwdeleted = kwt.iskwfile(deleted, wctx)
kwunknown = kwt.iskwfile(unknown, wctx)
if not opts.get('ignore') or opts.get('all'):
showfiles = kwfiles, kwdeleted, kwunknown
else:
showfiles = [], [], []
if opts.get('all') or opts.get('ignore'):
showfiles += ([f for f in files if f not in kwfiles],
[f for f in unknown if f not in kwunknown])
kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
kwstates = zip(kwlabels, 'K!kIi', showfiles)
fm = ui.formatter('kwfiles', opts)
fmt = '%.0s%s\n'
if opts.get('all') or ui.verbose:
fmt = '%s %s\n'
for kwstate, char, filenames in kwstates:
label = 'kwfiles.' + kwstate
for f in filenames:
fm.startitem()
fm.write('kwstatus path', fmt, char,
repo.pathto(f, cwd), label=label)
fm.end()
@command('kwshrink', commands.walkopts, _('hg kwshrink [OPTION]... [FILE]...'))
def shrink(ui, repo, *pats, **opts):
'''revert expanded keywords in the working directory
Must be run before changing/disabling active keywords.
kwshrink refuses to run if given files contain local changes.
'''
# 3rd argument sets expansion to False
_kwfwrite(ui, repo, False, *pats, **opts)
def uisetup(ui):
''' Monkeypatches dispatch._parse to retrieve user command.'''
def kwdispatch_parse(orig, ui, args):
'''Monkeypatch dispatch._parse to obtain running hg command.'''
cmd, func, args, options, cmdoptions = orig(ui, args)
kwtools['hgcmd'] = cmd
return cmd, func, args, options, cmdoptions
extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
def reposetup(ui, repo):
'''Sets up repo as kwrepo for keyword substitution.
Overrides file method to return kwfilelog instead of filelog
if file matches user configuration.
Wraps commit to overwrite configured files with updated
keyword substitutions.
Monkeypatches patch and webcommands.'''
try:
if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
or '.hg' in util.splitpath(repo.root)
or repo._url.startswith('bundle:')):
return
except AttributeError:
pass
inc, exc = [], ['.hg*']
for pat, opt in ui.configitems('keyword'):
if opt != 'ignore':
inc.append(pat)
else:
exc.append(pat)
if not inc:
return
kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
class kwrepo(repo.__class__):
def file(self, f):
if f[0] == '/':
f = f[1:]
return kwfilelog(self.sopener, kwt, f)
def wread(self, filename):
data = super(kwrepo, self).wread(filename)
return kwt.wread(filename, data)
def commit(self, *args, **opts):
# use custom commitctx for user commands
# other extensions can still wrap repo.commitctx directly
self.commitctx = self.kwcommitctx
try:
return super(kwrepo, self).commit(*args, **opts)
finally:
del self.commitctx
def kwcommitctx(self, ctx, error=False):
n = super(kwrepo, self).commitctx(ctx, error)
# no lock needed, only called from repo.commit() which already locks
if not kwt.postcommit:
restrict = kwt.restrict
kwt.restrict = True
kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
False, True)
kwt.restrict = restrict
return n
def rollback(self, dryrun=False, force=False):
wlock = self.wlock()
try:
if not dryrun:
changed = self['.'].files()
ret = super(kwrepo, self).rollback(dryrun, force)
if not dryrun:
ctx = self['.']
modified, added = _preselect(self[None].status(), changed)
kwt.overwrite(ctx, modified, True, True)
kwt.overwrite(ctx, added, True, False)
return ret
finally:
wlock.release()
# monkeypatches
def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
'''Monkeypatch/wrap patch.patchfile.__init__ to avoid
rejects or conflicts due to expanded keywords in working dir.'''
orig(self, ui, gp, backend, store, eolmode)
# shrink keywords read from working dir
self.lines = kwt.shrinklines(self.fname, self.lines)
def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None,
opts=None, prefix=''):
'''Monkeypatch patch.diff to avoid expansion.'''
kwt.restrict = True
return orig(repo, node1, node2, match, changes, opts, prefix)
def kwweb_skip(orig, web, req, tmpl):
'''Wraps webcommands.x turning off keyword expansion.'''
kwt.match = util.never
return orig(web, req, tmpl)
def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
'''Wraps cmdutil.amend expanding keywords after amend.'''
wlock = repo.wlock()
try:
kwt.postcommit = True
newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
if newid != old.node():
ctx = repo[newid]
kwt.restrict = True
kwt.overwrite(ctx, ctx.files(), False, True)
kwt.restrict = False
return newid
finally:
wlock.release()
def kw_copy(orig, ui, repo, pats, opts, rename=False):
'''Wraps cmdutil.copy so that copy/rename destinations do not
contain expanded keywords.
Note that the source of a regular file destination may also be a
symlink:
hg cp sym x -> x is symlink
cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
For the latter we have to follow the symlink to find out whether its
target is configured for expansion and we therefore must unexpand the
keywords in the destination.'''
wlock = repo.wlock()
try:
orig(ui, repo, pats, opts, rename)
if opts.get('dry_run'):
return
wctx = repo[None]
cwd = repo.getcwd()
def haskwsource(dest):
'''Returns true if dest is a regular file and configured for
expansion or a symlink which points to a file configured for
expansion. '''
source = repo.dirstate.copied(dest)
if 'l' in wctx.flags(source):
source = scmutil.canonpath(repo.root, cwd,
os.path.realpath(source))
return kwt.match(source)
candidates = [f for f in repo.dirstate.copies() if
'l' not in wctx.flags(f) and haskwsource(f)]
kwt.overwrite(wctx, candidates, False, False)
finally:
wlock.release()
def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
'''Wraps record.dorecord expanding keywords after recording.'''
wlock = repo.wlock()
try:
# record returns 0 even when nothing has changed
# therefore compare nodes before and after
kwt.postcommit = True
ctx = repo['.']
wstatus = repo[None].status()
ret = orig(ui, repo, commitfunc, *pats, **opts)
recctx = repo['.']
if ctx != recctx:
modified, added = _preselect(wstatus, recctx.files())
kwt.restrict = False
kwt.overwrite(recctx, modified, False, True)
kwt.overwrite(recctx, added, False, True, True)
kwt.restrict = True
return ret
finally:
wlock.release()
def kwfilectx_cmp(orig, self, fctx):
# keyword affects data size, comparing wdir and filelog size does
# not make sense
if (fctx._filerev is None and
(self._repo._encodefilterpats or
kwt.match(fctx.path()) and 'l' not in fctx.flags() or
self.size() - 4 == fctx.size()) or
self.size() == fctx.size()):
return self._filelog.cmp(self._filenode, fctx.data())
return True
extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
extensions.wrapfunction(patch, 'diff', kw_diff)
extensions.wrapfunction(cmdutil, 'amend', kw_amend)
extensions.wrapfunction(cmdutil, 'copy', kw_copy)
for c in 'annotate changeset rev filediff diff'.split():
extensions.wrapfunction(webcommands, c, kwweb_skip)
for name in recordextensions.split():
try:
record = extensions.find(name)
extensions.wrapfunction(record, 'dorecord', kw_dorecord)
except KeyError:
pass
repo.__class__ = kwrepo
| gpl-2.0 | 5,743,919,206,904,441,000 | -1,338,837,277,646,571,300 | 37.242134 | 80 | 0.606797 | false |
grimmjow8/ansible | lib/ansible/modules/cloud/amazon/rds_subnet_group.py | 25 | 5369 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: rds_subnet_group
version_added: "1.5"
short_description: manage RDS database subnet groups
description:
- Creates, modifies, and deletes RDS database subnet groups. This module has a dependency on python-boto >= 2.5.
options:
state:
description:
- Specifies whether the subnet should be present or absent.
required: true
default: present
aliases: []
choices: [ 'present' , 'absent' ]
name:
description:
- Database subnet group identifier.
required: true
default: null
aliases: []
description:
description:
- Database subnet group description. Only set when a new group is added.
required: false
default: null
aliases: []
subnets:
description:
- List of subnet IDs that make up the database subnet group.
required: false
default: null
aliases: []
author: "Scott Anderson (@tastychutney)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Add or change a subnet group
- rds_subnet_group:
state: present
name: norwegian-blue
description: My Fancy Ex Parrot Subnet Group
subnets:
- subnet-aaaaaaaa
- subnet-bbbbbbbb
# Remove a subnet group
- rds_subnet_group:
state: absent
name: norwegian-blue
'''
try:
import boto.rds
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state = dict(required=True, choices=['present', 'absent']),
name = dict(required=True),
description = dict(required=False),
subnets = dict(required=False, type='list'),
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
state = module.params.get('state')
group_name = module.params.get('name').lower()
group_description = module.params.get('description')
    group_subnets = module.params.get('subnets') or []  # a list: it is sorted and compared below
if state == 'present':
for required in ['name', 'description', 'subnets']:
if not module.params.get(required):
module.fail_json(msg = str("Parameter %s required for state='present'" % required))
else:
for not_allowed in ['description', 'subnets']:
if module.params.get(not_allowed):
module.fail_json(msg = str("Parameter %s not allowed for state='absent'" % not_allowed))
# Retrieve any AWS settings from the environment.
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)
if not region:
module.fail_json(msg = str("Either region or AWS_REGION or EC2_REGION environment variable or boto config aws_region or ec2_region must be set."))
try:
conn = connect_to_aws(boto.rds, region, **aws_connect_kwargs)
except boto.exception.BotoServerError as e:
        module.fail_json(msg=e.error_message)
try:
changed = False
exists = False
try:
matching_groups = conn.get_all_db_subnet_groups(group_name, max_records=100)
exists = len(matching_groups) > 0
except BotoServerError as e:
if e.error_code != 'DBSubnetGroupNotFoundFault':
            module.fail_json(msg=e.error_message)
if state == 'absent':
if exists:
conn.delete_db_subnet_group(group_name)
changed = True
else:
if not exists:
new_group = conn.create_db_subnet_group(group_name, desc=group_description, subnet_ids=group_subnets)
changed = True
else:
# Sort the subnet groups before we compare them
matching_groups[0].subnet_ids.sort()
group_subnets.sort()
if ( (matching_groups[0].name != group_name) or (matching_groups[0].description != group_description) or (matching_groups[0].subnet_ids != group_subnets) ):
changed_group = conn.modify_db_subnet_group(group_name, description=group_description, subnet_ids=group_subnets)
changed = True
except BotoServerError as e:
        module.fail_json(msg=e.error_message)
module.exit_json(changed=changed)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
| gpl-3.0 | 7,809,084,387,823,312,000 | -5,784,793,727,199,888,000 | 32.767296 | 172 | 0.633079 | false |
Drahflow/lymph | lymph/cli/testing.py | 10 | 5373 | import collections
from contextlib import contextmanager
import sys
import os
import tempfile
import textwrap
from kazoo.client import KazooClient
from kazoo.handlers.gevent import SequentialGeventHandler
from pkg_resources import load_entry_point
from six import StringIO, integer_types
import yaml
from lymph.discovery.zookeeper import ZookeeperServiceRegistry
from lymph.events.null import NullEventSystem
from lymph.testing import LymphIntegrationTestCase
@contextmanager
def capture_output():
real_stdout = sys.stdout
real_stderr = sys.stderr
sys.stdout = StringIO()
sys.stderr = StringIO()
try:
yield sys.stdout, sys.stderr
finally:
sys.stdout = real_stdout
sys.stderr = real_stderr
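# Illustrative usage (a minimal sketch):
#   with capture_output() as (stdout, stderr):
#       print('hello')
#   assert 'hello' in stdout.getvalue()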
class CliWrapper(object):
Result = collections.namedtuple('CMDLineResult', 'returncode stdout stderr')
def __init__(self, config):
handle, self.config_file_name = tempfile.mkstemp()
with open(self.config_file_name, 'w') as f:
f.write(yaml.dump(config))
self.entry_point = load_entry_point('lymph', 'console_scripts', 'lymph')
def tear_down(self):
os.remove(self.config_file_name)
def __call__(self, cmd, config=True):
with capture_output() as (stdout, stderr):
if config:
cmd = cmd + ['--config=%s' % self.config_file_name]
try:
returncode = self.entry_point(cmd)
except SystemExit as ex:
# Docopt tries to exit on its own unfortunately
returncode = (ex.args[0] or 0) if ex.args else 0
if not isinstance(returncode, integer_types):
                    # According to the sys.exit docs, any object besides
                    # an integer or None results in an exit code of 1.
returncode = 1
return self.Result(
returncode or 0, stdout.getvalue(), stderr.getvalue())
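# Illustrative usage (hypothetical config dict and command):
#   cli = CliWrapper({'container': {}})
#   result = cli(['list'])   # appends --config=<temp file> automatically
#   assert result.returncode == 0
#   cli.tear_down()          # removes the temporary config file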
class CliTestMixin(object):
cli_config = {}
_help_output = None
def setUp(self):
self.__clis = []
super(CliTestMixin, self).setUp()
@property
def cli(self):
cli = CliWrapper(self.cli_config)
self.__clis.append(cli)
return cli
def tearDown(self):
for cli in self.__clis:
cli.tear_down()
super(CliTestMixin, self).tearDown()
def assert_lines_equal(self, cmd, lines, config=True):
expected_lines = set(line for line in textwrap.dedent(lines).splitlines() if line.strip())
result = self.cli(cmd, config=config)
self.assertEqual(result.returncode, 0)
self.assertEqual(set(result.stdout.splitlines()), expected_lines)
def assert_first_line_equals(self, cmd, line, config=True):
result = self.cli(cmd, config=config)
self.assertEqual(result.returncode, 0)
self.assertEqual(result.stdout.splitlines()[0].strip(), line)
def assert_command_appears_in_command_list(self):
result = self.cli(['list'])
self.assertEqual(result.returncode, 0)
self.assertIn(self.command_name, result.stdout)
def assert_help_contains_usage_information(self):
output = self._get_help_output()
self.assertIn('Usage', output)
self.assertIn(self.command_name, output)
def assert_help_contains_parameter(self, parameter, default=None):
self.assert_help_contains(parameter)
if default is not None:
self.assert_help_contains(default)
def assert_help_contains(self, text):
self.assertIn(text, self._get_help_output())
def _get_help_output(self):
if self._help_output is None:
result = self.cli([self.command_name, '--help'])
self._help_output = result.stdout
return self._help_output
class CliIntegrationTestCase(CliTestMixin, LymphIntegrationTestCase):
use_zookeeper = True
def setUp(self):
super(CliIntegrationTestCase, self).setUp()
client = KazooClient(
hosts=self.hosts,
handler=SequentialGeventHandler(),
)
self.registry = ZookeeperServiceRegistry(client)
self.events = NullEventSystem()
self.cli_config = {
"container": {
"registry": {
"class": "lymph.discovery.zookeeper:ZookeeperServiceRegistry",
"zkclient": 'dep:kazoo',
},
"events": {
"class": "lymph.events.null:NullEventSystem",
},
},
"dependencies": {
"kazoo": {
"class": "kazoo.client:KazooClient",
"hosts": self.hosts,
}
}
}
class CommandFactory(object):
"""
    Encapsulates the knowledge of how to create a command instance.
    Intended use is to support smaller unit tests which just need an
    instance of a command class to try out some method.
    It only supports passing parameters as keyword arguments into
    the command constructor.
"""
def __init__(self, command_class):
self.command_class = command_class
def __call__(self, **kwargs):
kwargs.setdefault('args', {})
kwargs.setdefault('config', {})
kwargs.setdefault('terminal', None)
return self.command_class(**kwargs)
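# Illustrative usage (hypothetical ListCommand class):
#   make_command = CommandFactory(ListCommand)
#   command = make_command(args={'--all': True})
# The factory fills in empty defaults for args, config and terminal.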
| apache-2.0 | 6,175,847,199,653,237,000 | -9,008,966,939,256,328,000 | 30.982143 | 98 | 0.612879 | false |
mzhaom/grpc | src/python/src/grpc/framework/face/future_invocation_asynchronous_event_service_test.py | 12 | 1982 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""One of the tests of the Face layer of RPC Framework."""
import unittest
from grpc.framework.face import _test_case
from grpc.framework.face.testing import future_invocation_asynchronous_event_service_test_case as test_case
class FutureInvocationAsynchronousEventServiceTest(
_test_case.FaceTestCase,
test_case.FutureInvocationAsynchronousEventServiceTestCase,
unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | -6,812,446,757,317,581,000 | 8,872,108,696,022,912,000 | 42.086957 | 107 | 0.779516 | false |
GeoCat/QGIS | python/plugins/processing/gui/menus.py | 5 | 11535 | import os
from qgis.PyQt.QtCore import QCoreApplication
from qgis.PyQt.QtWidgets import QAction, QMenu
from qgis.PyQt.QtGui import QIcon
from qgis.PyQt.QtWidgets import QApplication
from processing.core.ProcessingConfig import ProcessingConfig, Setting
from processing.gui.MessageDialog import MessageDialog
from processing.gui.AlgorithmDialog import AlgorithmDialog
from qgis.utils import iface
from qgis.core import QgsApplication
from processing.gui.MessageBarProgress import MessageBarProgress
from processing.gui.AlgorithmExecutor import execute
from processing.gui.Postprocessing import handleAlgorithmResults
from processing.core.Processing import Processing
from processing.tools import dataobjects
algorithmsToolbar = None
menusSettingsGroup = 'Menus'
defaultMenuEntries = {}
vectorMenu = QApplication.translate('MainWindow', 'Vect&or')
analysisToolsMenu = vectorMenu + "/" + Processing.tr('&Analysis Tools')
defaultMenuEntries.update({'qgis:distancematrix': analysisToolsMenu,
'qgis:sumlinelengths': analysisToolsMenu,
'qgis:pointsinpolygon': analysisToolsMenu,
'qgis:countpointsinpolygon': analysisToolsMenu,
'qgis:listuniquevalues': analysisToolsMenu,
'qgis:basicstatisticsforfields': analysisToolsMenu,
'qgis:nearestneighbouranalysis': analysisToolsMenu,
'qgis:meancoordinates': analysisToolsMenu,
'qgis:lineintersections': analysisToolsMenu})
researchToolsMenu = vectorMenu + "/" + Processing.tr('&Research Tools')
defaultMenuEntries.update({'qgis:randomselection': researchToolsMenu,
'qgis:randomselectionwithinsubsets': researchToolsMenu,
'qgis:randompointsinextent': researchToolsMenu,
'qgis:randompointsinlayerbounds': researchToolsMenu,
'qgis:randompointsinsidepolygonsfixed': researchToolsMenu,
'qgis:randompointsinsidepolygonsvariable': researchToolsMenu,
'qgis:regularpoints': researchToolsMenu,
'qgis:vectorgrid': researchToolsMenu,
'qgis:selectbylocation': researchToolsMenu,
'qgis:polygonfromlayerextent': researchToolsMenu})
geoprocessingToolsMenu = vectorMenu + "/" + Processing.tr('&Geoprocessing Tools')
defaultMenuEntries.update({'qgis:convexhull': geoprocessingToolsMenu,
'qgis:fixeddistancebuffer': geoprocessingToolsMenu,
'qgis:variabledistancebuffer': geoprocessingToolsMenu,
'qgis:intersection': geoprocessingToolsMenu,
'qgis:union': geoprocessingToolsMenu,
'qgis:symmetricaldifference': geoprocessingToolsMenu,
'native:clip': geoprocessingToolsMenu,
'qgis:difference': geoprocessingToolsMenu,
'qgis:dissolve': geoprocessingToolsMenu,
'qgis:eliminateselectedpolygons': geoprocessingToolsMenu})
geometryToolsMenu = vectorMenu + "/" + Processing.tr('G&eometry Tools')
defaultMenuEntries.update({'qgis:checkvalidity': geometryToolsMenu,
'qgis:exportaddgeometrycolumns': geometryToolsMenu,
'qgis:centroids': geometryToolsMenu,
'qgis:delaunaytriangulation': geometryToolsMenu,
'qgis:voronoipolygons': geometryToolsMenu,
'qgis:simplifygeometries': geometryToolsMenu,
'qgis:densifygeometries': geometryToolsMenu,
'qgis:multiparttosingleparts': geometryToolsMenu,
'qgis:singlepartstomultipart': geometryToolsMenu,
'qgis:polygonstolines': geometryToolsMenu,
'qgis:linestopolygons': geometryToolsMenu,
'qgis:extractnodes': geometryToolsMenu})
managementToolsMenu = vectorMenu + "/" + Processing.tr('&Data Management Tools')
defaultMenuEntries.update({'qgis:definecurrentprojection': managementToolsMenu,
'qgis:joinattributesbylocation': managementToolsMenu,
'qgis:splitvectorlayer': managementToolsMenu,
'qgis:mergevectorlayers': managementToolsMenu,
'qgis:createspatialindex': managementToolsMenu})
rasterMenu = Processing.tr('&Raster')
projectionsMenu = rasterMenu + "/" + Processing.tr('Projections')
defaultMenuEntries.update({'gdal:warpreproject': projectionsMenu,
'gdal:assignprojection': projectionsMenu,
'gdal:extractprojection': projectionsMenu})
conversionMenu = rasterMenu + "/" + Processing.tr('Conversion')
defaultMenuEntries.update({'gdal:rasterize': conversionMenu,
'gdal:rasterize_over': conversionMenu,
'gdal:polygonize': conversionMenu,
'gdal:translate': conversionMenu,
'gdal:rgbtopct': conversionMenu,
'gdal:pcttorgb': conversionMenu})
extractionMenu = rasterMenu + "/" + Processing.tr('Extraction')
defaultMenuEntries.update({'gdal:contour': extractionMenu,
'gdal:cliprasterbyextent': extractionMenu,
'gdal:cliprasterbymasklayer': extractionMenu})
analysisMenu = rasterMenu + "/" + Processing.tr('Analysis')
defaultMenuEntries.update({'gdal:sieve': analysisMenu,
'gdal:nearblack': analysisMenu,
'gdal:fillnodata': analysisMenu,
'gdal:proximity': analysisMenu,
'gdal:griddatametrics': analysisMenu,
'gdal:gridaverage': analysisMenu,
'gdal:gridinvdist': analysisMenu,
'gdal:gridnearestneighbor': analysisMenu,
'gdal:aspect': analysisMenu,
'gdal:hillshade': analysisMenu,
'gdal:roughness': analysisMenu,
'gdal:slope': analysisMenu,
'gdal:tpi': analysisMenu,
'gdal:tri': analysisMenu})
miscMenu = rasterMenu + "/" + Processing.tr('Miscellaneous')
defaultMenuEntries.update({'gdal:buildvirtualraster': miscMenu,
'gdal:merge': miscMenu,
'gdal:rasterinfo': miscMenu,
'gdal:overviews': miscMenu,
'gdal:tileindex': miscMenu})
def initializeMenus():
for provider in QgsApplication.processingRegistry().providers():
for alg in provider.algorithms():
d = defaultMenuEntries.get(alg.id(), "")
setting = Setting(menusSettingsGroup, "MENU_" + alg.id(),
"Menu path", d)
ProcessingConfig.addSetting(setting)
setting = Setting(menusSettingsGroup, "BUTTON_" + alg.id(),
"Add button", False)
ProcessingConfig.addSetting(setting)
setting = Setting(menusSettingsGroup, "ICON_" + alg.id(),
"Icon", "", valuetype=Setting.FILE)
ProcessingConfig.addSetting(setting)
ProcessingConfig.readSettings()
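# Illustrative outcome (a sketch): after initializeMenus() each algorithm has
# three settings keyed by its id, e.g. for 'qgis:dissolve':
#   MENU_qgis:dissolve   -> "Vect&or/&Geoprocessing Tools"  (menu path)
#   BUTTON_qgis:dissolve -> False                           (toolbar button)
#   ICON_qgis:dissolve   -> ""                              (custom icon file)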
def updateMenus():
removeMenus()
QCoreApplication.processEvents()
createMenus()
def createMenus():
for alg in QgsApplication.processingRegistry().algorithms():
menuPath = ProcessingConfig.getSetting("MENU_" + alg.id())
addButton = ProcessingConfig.getSetting("BUTTON_" + alg.id())
icon = ProcessingConfig.getSetting("ICON_" + alg.id())
if icon and os.path.exists(icon):
icon = QIcon(icon)
else:
icon = None
if menuPath:
paths = menuPath.split("/")
addAlgorithmEntry(alg, paths[0], paths[-1], addButton=addButton, icon=icon)
def removeMenus():
for alg in QgsApplication.processingRegistry().algorithms():
menuPath = ProcessingConfig.getSetting("MENU_" + alg.id())
if menuPath:
paths = menuPath.split("/")
removeAlgorithmEntry(alg, paths[0], paths[-1])
def addAlgorithmEntry(alg, menuName, submenuName, actionText=None, icon=None, addButton=False):
action = QAction(icon or alg.icon(), actionText or alg.displayName(), iface.mainWindow())
action.triggered.connect(lambda: _executeAlgorithm(alg))
action.setObjectName("mProcessingUserMenu_%s" % alg.id())
if menuName:
menu = getMenu(menuName, iface.mainWindow().menuBar())
submenu = getMenu(submenuName, menu)
submenu.addAction(action)
if addButton:
global algorithmsToolbar
if algorithmsToolbar is None:
algorithmsToolbar = iface.addToolBar('ProcessingAlgorithms')
algorithmsToolbar.addAction(action)
def removeAlgorithmEntry(alg, menuName, submenuName, actionText=None, delButton=True):
if menuName:
menu = getMenu(menuName, iface.mainWindow().menuBar())
subMenu = getMenu(submenuName, menu)
action = findAction(subMenu.actions(), alg, actionText)
if action is not None:
subMenu.removeAction(action)
if len(subMenu.actions()) == 0:
subMenu.deleteLater()
if delButton:
global algorithmsToolbar
if algorithmsToolbar is not None:
action = findAction(algorithmsToolbar.actions(), alg, actionText)
if action is not None:
algorithmsToolbar.removeAction(action)
def _executeAlgorithm(alg):
ok, message = alg.canExecute()
if not ok:
dlg = MessageDialog()
dlg.setTitle(Processing.tr('Missing dependency'))
dlg.setMessage(
Processing.tr('<h3>Missing dependency. This algorithm cannot '
'be run :-( </h3>\n{0}').format(message))
dlg.exec_()
return
if (alg.countVisibleParameters()) > 0:
dlg = alg.createCustomParametersWidget(None)
if not dlg:
dlg = AlgorithmDialog(alg)
canvas = iface.mapCanvas()
prevMapTool = canvas.mapTool()
dlg.show()
dlg.exec_()
# have to manually delete the dialog - otherwise it's owned by the
# iface mainWindow and never deleted
del dlg
if canvas.mapTool() != prevMapTool:
try:
canvas.mapTool().reset()
except:
pass
canvas.setMapTool(prevMapTool)
else:
feedback = MessageBarProgress()
context = dataobjects.createContext(feedback)
parameters = {}
ret, results = execute(alg, parameters, context, feedback)
handleAlgorithmResults(alg, context, feedback)
feedback.close()
def getMenu(name, parent):
menus = [c for c in parent.children() if isinstance(c, QMenu) and c.title() == name]
if menus:
return menus[0]
else:
return parent.addMenu(name)
def findAction(actions, alg, actionText=None):
for action in actions:
if action.text() in [actionText, alg.displayName(), alg.name()]:
return action
return None
| gpl-2.0 | -7,814,197,067,319,944,000 | 8,969,968,411,533,485,000 | 46.27459 | 95 | 0.609103 | false |
steventimberman/masterDebater | env/lib/python2.7/site-packages/django/db/models/sql/constants.py | 633 | 1039 | """
Constants specific to the SQL storage portion of the ORM.
"""
import re
# Valid query types (a set is used for speedy lookups). These are (currently)
# considered SQL-specific; other storage systems may choose to use different
# lookup types.
QUERY_TERMS = {
'exact', 'iexact', 'contains', 'icontains', 'gt', 'gte', 'lt', 'lte', 'in',
'startswith', 'istartswith', 'endswith', 'iendswith', 'range', 'year',
'month', 'day', 'week_day', 'hour', 'minute', 'second', 'isnull', 'search',
'regex', 'iregex',
}
# Size of each "chunk" for get_iterator calls.
# Larger values are slightly faster at the expense of more storage space.
GET_ITERATOR_CHUNK_SIZE = 100
# Constants for sql.* internal use.
# How many results to expect from a cursor.execute call
MULTI = 'multi'
SINGLE = 'single'
CURSOR = 'cursor'
NO_RESULTS = 'no results'
ORDER_PATTERN = re.compile(r'\?|[-+]?[.\w]+$')
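# Illustrative matches (a sketch): ORDER_PATTERN accepts '?', 'name',
# '-created', '+pk' and 'author.name'; used with .match(), a string such as
# 'name; DROP TABLE' is rejected.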
ORDER_DIR = {
'ASC': ('ASC', 'DESC'),
'DESC': ('DESC', 'ASC'),
}
# SQL join types.
INNER = 'INNER JOIN'
LOUTER = 'LEFT OUTER JOIN'
| mit | 614,216,303,464,806,500 | -6,998,760,165,360,925,000 | 27.081081 | 79 | 0.649663 | false |
thnee/ansible | test/units/modules/network/fortios/test_fortios_system_switch_interface.py | 21 | 9741 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_system_switch_interface
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_system_switch_interface.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
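# The tests below share one arrange/act/assert shape: mock the handler's
# set/delete method, call fortios_system() with input data, then check that
# underscored keys were translated to their dashed API names and that the
# returned (is_error, changed, response) triple matches the mocked outcome.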
def test_system_switch_interface_creation(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_switch_interface': {
'intra_switch_policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span_dest_port': 'test_value_6',
'span_direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_switch_interface.fortios_system(input_data, fos_instance)
expected_data = {
'intra-switch-policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span-dest-port': 'test_value_6',
'span-direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
}
set_method_mock.assert_called_with('system', 'switch-interface', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_system_switch_interface_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_switch_interface': {
'intra_switch_policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span_dest_port': 'test_value_6',
'span_direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_switch_interface.fortios_system(input_data, fos_instance)
expected_data = {
'intra-switch-policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span-dest-port': 'test_value_6',
'span-direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
}
set_method_mock.assert_called_with('system', 'switch-interface', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_system_switch_interface_removal(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'system_switch_interface': {
'intra_switch_policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span_dest_port': 'test_value_6',
'span_direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_switch_interface.fortios_system(input_data, fos_instance)
delete_method_mock.assert_called_with('system', 'switch-interface', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_system_switch_interface_deletion_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'system_switch_interface': {
'intra_switch_policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span_dest_port': 'test_value_6',
'span_direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_switch_interface.fortios_system(input_data, fos_instance)
delete_method_mock.assert_called_with('system', 'switch-interface', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_system_switch_interface_idempotent(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_switch_interface': {
'intra_switch_policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span_dest_port': 'test_value_6',
'span_direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_switch_interface.fortios_system(input_data, fos_instance)
expected_data = {
'intra-switch-policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span-dest-port': 'test_value_6',
'span-direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
}
set_method_mock.assert_called_with('system', 'switch-interface', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_system_switch_interface_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_switch_interface': {
'random_attribute_not_valid': 'tag',
'intra_switch_policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span_dest_port': 'test_value_6',
'span_direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_switch_interface.fortios_system(input_data, fos_instance)
expected_data = {
'intra-switch-policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span-dest-port': 'test_value_6',
'span-direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
}
set_method_mock.assert_called_with('system', 'switch-interface', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
| gpl-3.0 | 3,325,014,977,285,714,400 | 7,267,093,203,634,085,000 | 36.610039 | 142 | 0.619649 | false |
2014c2g1/c2g1 | exts/w2/static/Brython2.0.0-20140209-164925/Lib/unittest/test/test_suite.py | 791 | 12066 | import unittest
import sys
from .support import LoggingResult, TestEquality
### Support code for Test_TestSuite
################################################################
class Test(object):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def test_3(self): pass
def runTest(self): pass
def _mk_TestSuite(*names):
return unittest.TestSuite(Test.Foo(n) for n in names)
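# For example, _mk_TestSuite('test_1', 'test_2') yields a TestSuite holding
# Test.Foo('test_1') and Test.Foo('test_2').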
################################################################
class Test_TestSuite(unittest.TestCase, TestEquality):
### Set up attributes needed by inherited tests
################################################################
# Used by TestEquality.test_eq
eq_pairs = [(unittest.TestSuite(), unittest.TestSuite())
,(unittest.TestSuite(), unittest.TestSuite([]))
,(_mk_TestSuite('test_1'), _mk_TestSuite('test_1'))]
# Used by TestEquality.test_ne
ne_pairs = [(unittest.TestSuite(), _mk_TestSuite('test_1'))
,(unittest.TestSuite([]), _mk_TestSuite('test_1'))
,(_mk_TestSuite('test_1', 'test_2'), _mk_TestSuite('test_1', 'test_3'))
,(_mk_TestSuite('test_1'), _mk_TestSuite('test_2'))]
################################################################
### /Set up attributes needed by inherited tests
### Tests for TestSuite.__init__
################################################################
# "class TestSuite([tests])"
#
# The tests iterable should be optional
def test_init__tests_optional(self):
suite = unittest.TestSuite()
self.assertEqual(suite.countTestCases(), 0)
# "class TestSuite([tests])"
# ...
# "If tests is given, it must be an iterable of individual test cases
# or other test suites that will be used to build the suite initially"
#
# TestSuite should deal with empty tests iterables by allowing the
# creation of an empty suite
def test_init__empty_tests(self):
suite = unittest.TestSuite([])
self.assertEqual(suite.countTestCases(), 0)
# "class TestSuite([tests])"
# ...
# "If tests is given, it must be an iterable of individual test cases
# or other test suites that will be used to build the suite initially"
#
# TestSuite should allow any iterable to provide tests
def test_init__tests_from_any_iterable(self):
def tests():
yield unittest.FunctionTestCase(lambda: None)
yield unittest.FunctionTestCase(lambda: None)
suite_1 = unittest.TestSuite(tests())
self.assertEqual(suite_1.countTestCases(), 2)
suite_2 = unittest.TestSuite(suite_1)
self.assertEqual(suite_2.countTestCases(), 2)
suite_3 = unittest.TestSuite(set(suite_1))
self.assertEqual(suite_3.countTestCases(), 2)
# "class TestSuite([tests])"
# ...
# "If tests is given, it must be an iterable of individual test cases
# or other test suites that will be used to build the suite initially"
#
# Does TestSuite() also allow other TestSuite() instances to be present
# in the tests iterable?
def test_init__TestSuite_instances_in_tests(self):
def tests():
ftc = unittest.FunctionTestCase(lambda: None)
yield unittest.TestSuite([ftc])
yield unittest.FunctionTestCase(lambda: None)
suite = unittest.TestSuite(tests())
self.assertEqual(suite.countTestCases(), 2)
################################################################
### /Tests for TestSuite.__init__
# Container types should support the iter protocol
def test_iter(self):
test1 = unittest.FunctionTestCase(lambda: None)
test2 = unittest.FunctionTestCase(lambda: None)
suite = unittest.TestSuite((test1, test2))
self.assertEqual(list(suite), [test1, test2])
# "Return the number of tests represented by the this test object.
# ...this method is also implemented by the TestSuite class, which can
# return larger [greater than 1] values"
#
# Presumably an empty TestSuite returns 0?
def test_countTestCases_zero_simple(self):
suite = unittest.TestSuite()
self.assertEqual(suite.countTestCases(), 0)
# "Return the number of tests represented by the this test object.
# ...this method is also implemented by the TestSuite class, which can
# return larger [greater than 1] values"
#
# Presumably an empty TestSuite (even if it contains other empty
# TestSuite instances) returns 0?
def test_countTestCases_zero_nested(self):
class Test1(unittest.TestCase):
def test(self):
pass
suite = unittest.TestSuite([unittest.TestSuite()])
self.assertEqual(suite.countTestCases(), 0)
# "Return the number of tests represented by the this test object.
# ...this method is also implemented by the TestSuite class, which can
# return larger [greater than 1] values"
def test_countTestCases_simple(self):
test1 = unittest.FunctionTestCase(lambda: None)
test2 = unittest.FunctionTestCase(lambda: None)
suite = unittest.TestSuite((test1, test2))
self.assertEqual(suite.countTestCases(), 2)
# "Return the number of tests represented by the this test object.
# ...this method is also implemented by the TestSuite class, which can
# return larger [greater than 1] values"
#
# Make sure this holds for nested TestSuite instances, too
def test_countTestCases_nested(self):
class Test1(unittest.TestCase):
def test1(self): pass
def test2(self): pass
test2 = unittest.FunctionTestCase(lambda: None)
test3 = unittest.FunctionTestCase(lambda: None)
child = unittest.TestSuite((Test1('test2'), test2))
parent = unittest.TestSuite((test3, child, Test1('test1')))
self.assertEqual(parent.countTestCases(), 4)
# "Run the tests associated with this suite, collecting the result into
# the test result object passed as result."
#
# And if there are no tests? What then?
def test_run__empty_suite(self):
events = []
result = LoggingResult(events)
suite = unittest.TestSuite()
suite.run(result)
self.assertEqual(events, [])
# "Note that unlike TestCase.run(), TestSuite.run() requires the
# "result object to be passed in."
def test_run__requires_result(self):
suite = unittest.TestSuite()
try:
suite.run()
except TypeError:
pass
else:
self.fail("Failed to raise TypeError")
# "Run the tests associated with this suite, collecting the result into
# the test result object passed as result."
def test_run(self):
events = []
result = LoggingResult(events)
class LoggingCase(unittest.TestCase):
def run(self, result):
events.append('run %s' % self._testMethodName)
def test1(self): pass
def test2(self): pass
tests = [LoggingCase('test1'), LoggingCase('test2')]
unittest.TestSuite(tests).run(result)
self.assertEqual(events, ['run test1', 'run test2'])
# "Add a TestCase ... to the suite"
def test_addTest__TestCase(self):
class Foo(unittest.TestCase):
def test(self): pass
test = Foo('test')
suite = unittest.TestSuite()
suite.addTest(test)
self.assertEqual(suite.countTestCases(), 1)
self.assertEqual(list(suite), [test])
# "Add a ... TestSuite to the suite"
def test_addTest__TestSuite(self):
class Foo(unittest.TestCase):
def test(self): pass
suite_2 = unittest.TestSuite([Foo('test')])
suite = unittest.TestSuite()
suite.addTest(suite_2)
self.assertEqual(suite.countTestCases(), 1)
self.assertEqual(list(suite), [suite_2])
# "Add all the tests from an iterable of TestCase and TestSuite
# instances to this test suite."
#
# "This is equivalent to iterating over tests, calling addTest() for
# each element"
def test_addTests(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
test_1 = Foo('test_1')
test_2 = Foo('test_2')
inner_suite = unittest.TestSuite([test_2])
def gen():
yield test_1
yield test_2
yield inner_suite
suite_1 = unittest.TestSuite()
suite_1.addTests(gen())
self.assertEqual(list(suite_1), list(gen()))
# "This is equivalent to iterating over tests, calling addTest() for
# each element"
suite_2 = unittest.TestSuite()
for t in gen():
suite_2.addTest(t)
self.assertEqual(suite_1, suite_2)
# "Add all the tests from an iterable of TestCase and TestSuite
# instances to this test suite."
#
# What happens if it doesn't get an iterable?
def test_addTest__noniterable(self):
suite = unittest.TestSuite()
try:
suite.addTests(5)
except TypeError:
pass
else:
self.fail("Failed to raise TypeError")
def test_addTest__noncallable(self):
suite = unittest.TestSuite()
self.assertRaises(TypeError, suite.addTest, 5)
def test_addTest__casesuiteclass(self):
suite = unittest.TestSuite()
self.assertRaises(TypeError, suite.addTest, Test_TestSuite)
self.assertRaises(TypeError, suite.addTest, unittest.TestSuite)
def test_addTests__string(self):
suite = unittest.TestSuite()
self.assertRaises(TypeError, suite.addTests, "foo")
def test_function_in_suite(self):
def f(_):
pass
suite = unittest.TestSuite()
suite.addTest(f)
# when the bug is fixed this line will not crash
suite.run(unittest.TestResult())
def test_basetestsuite(self):
class Test(unittest.TestCase):
wasSetUp = False
wasTornDown = False
@classmethod
def setUpClass(cls):
cls.wasSetUp = True
@classmethod
def tearDownClass(cls):
cls.wasTornDown = True
def testPass(self):
pass
def testFail(self):
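                # 'fail' is an undefined name, so this test method
                # deliberately raises NameError (counted as an error below)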
fail
class Module(object):
wasSetUp = False
wasTornDown = False
@staticmethod
def setUpModule():
Module.wasSetUp = True
@staticmethod
def tearDownModule():
Module.wasTornDown = True
Test.__module__ = 'Module'
sys.modules['Module'] = Module
self.addCleanup(sys.modules.pop, 'Module')
suite = unittest.BaseTestSuite()
suite.addTests([Test('testPass'), Test('testFail')])
self.assertEqual(suite.countTestCases(), 2)
result = unittest.TestResult()
suite.run(result)
self.assertFalse(Module.wasSetUp)
self.assertFalse(Module.wasTornDown)
self.assertFalse(Test.wasSetUp)
self.assertFalse(Test.wasTornDown)
self.assertEqual(len(result.errors), 1)
self.assertEqual(len(result.failures), 0)
self.assertEqual(result.testsRun, 2)
def test_overriding_call(self):
class MySuite(unittest.TestSuite):
called = False
def __call__(self, *args, **kw):
self.called = True
unittest.TestSuite.__call__(self, *args, **kw)
suite = MySuite()
result = unittest.TestResult()
wrapper = unittest.TestSuite()
wrapper.addTest(suite)
wrapper(result)
self.assertTrue(suite.called)
# reusing results should be permitted even if abominable
self.assertFalse(result._testRunEntered)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | -5,053,992,351,136,401,000 | 1,999,081,088,161,156,400 | 31.788043 | 86 | 0.594563 | false |
WasatchPhotonics/Foreman | ControlPower.py | 2 | 7442 | # Based on PowerControl from the Griddle, including writing syslog events for
# tracking the status of the phidget
import unittest, logging, serial, sys, time
#Phidget specific imports
from Phidgets.PhidgetException import PhidgetException
from Phidgets.Devices.InterfaceKit import InterfaceKit
ZERO_RELAY = 0
ONE_RELAY = 1
TWO_RELAY = 2
THREE_RELAY = 3
import logging
import logging.handlers
from WasatchLog import PrintLogHandler
log = logging.getLogger('MyLogger')
log.setLevel(logging.DEBUG)
handler = logging.handlers.SysLogHandler(address = '/dev/log')
log.addHandler(handler)
print_handler = PrintLogHandler()
log.addHandler(print_handler)
#log.debug('this is debug')
#log.critical('this is critical')
class Test(unittest.TestCase):
def test_01_open_phidget(self):
powercont = PowerControl()
self.assertTrue( powercont.open_phidget() )
self.assertTrue( powercont.close_phidget() )
def test_02_motor(self):
powercont = PowerControl()
self.assertTrue( powercont.open_phidget() )
self.assertTrue( powercont.motor_on() )
time.sleep(2)
self.assertTrue( powercont.motor_off() )
self.assertTrue( powercont.close_phidget() )
def test_03_cycle_zero(self):
pc = PowerControl()
self.assertTrue(pc.open_phidget())
self.assertTrue(pc.zero_on())
time.sleep(3)
self.assertTrue(pc.zero_off())
def test_04_cycle_one(self):
pc = PowerControl()
self.assertTrue(pc.open_phidget())
self.assertTrue(pc.one_on())
time.sleep(3)
self.assertTrue(pc.one_off())
def test_05_toggle_zero(self):
pc = PowerControl()
self.assertTrue(pc.toggle_line(ZERO_RELAY))
def test_05_zero_off(self):
log.info("Force zero off")
pc = PowerControl()
self.assertTrue(pc.open_phidget())
self.assertTrue(pc.zero_off())
self.assertTrue(pc.close_phidget() )
def test_05_zero_on(self):
log.info("Force zero on")
pc = PowerControl()
self.assertTrue(pc.open_phidget())
self.assertTrue(pc.zero_on())
self.assertTrue(pc.close_phidget() )
def test_06_toggle_one(self):
pc = PowerControl()
self.assertTrue(pc.toggle_line(ONE_RELAY))
def test_07_toggle_two(self):
pc = PowerControl()
self.assertTrue(pc.toggle_line(TWO_RELAY))
def test_08_toggle_three(self):
pc = PowerControl()
self.assertTrue(pc.toggle_line(THREE_RELAY))
class PowerControl(object):
''' PowerControl class wraps language around the 1014_2 -
PhidgetInterfaceKit 0/0/4 4 relay device. '''
def __init__(self):
#log.info("Start of power control object")
pass
def open_phidget(self):
''' Based on the InterfaceKit-simple.py example from Phidgets, create an
relay object, attach the handlers, open it and wait for the attachment.
This function's primarily purpose is to replace the prints with log
statements. '''
try:
self.interface = InterfaceKit()
except RuntimeError as e:
log.critical("Phidget runtime exception: %s" % e.details)
return 0
try:
self.interface.setOnAttachHandler( self.interfaceAttached )
self.interface.setOnDetachHandler( self.interfaceDetached )
self.interface.setOnErrorhandler( self.interfaceError )
except PhidgetException as e:
log.critical("Phidget Exception %i: %s" % (e.code, e.details))
return 0
try:
#print "Force open relay serial: 290968"
self.interface.openPhidget()
except PhidgetException as e:
log.critical("Phidget Exception %i: %s" % (e.code, e.details))
return 0
#log.info("Waiting for attach....")
try:
self.interface.waitForAttach(100)
except PhidgetException as e:
log.critical("Phidget Exception %i: %s" % (e.code, e.details))
try:
self.interface.closePhidget()
except PhidgetException as e:
log.critical("Close Exc. %i: %s" % (e.code, e.details))
return 0
return 1
#Event Handler Callback Functions
def interfaceAttached(self, e):
attached = e.device
#log.info("interface %i Attached!" % (attached.getSerialNum()))
def interfaceDetached(self, e):
detached = e.device
log.info("interface %i Detached!" % (detached.getSerialNum()))
def interfaceError(self, e):
try:
source = e.device
log.critical("Interface %i: Phidget Error %i: %s" % \
(source.getSerialNum(), e.eCode, e.description))
except PhidgetException as e:
log.critical("Phidget Exception %i: %s" % (e.code, e.details))
def close_phidget(self):
try:
self.interface.closePhidget()
except PhidgetException as e:
log.critical("Phidget Exception %i: %s" % (e.code, e.details))
return 0
return 1
def change_relay(self, relay=0, status=0):
''' Toggle the status of the phidget relay line to low(0) or high(1)'''
try:
self.interface.setOutputState(relay, status)
#self.emit_line_change(relay, status)
except Exception as e:
log.critical("Problem setting relay on %s" % e)
return 0
return 1
''' Convenience functions '''
def zero_on(self):
#log.info("Zero relay on")
return self.change_relay(relay=ZERO_RELAY, status=1)
def zero_off(self):
return self.change_relay(relay=ZERO_RELAY, status=0)
def one_on(self):
#log.info("one relay on")
return self.change_relay(relay=ONE_RELAY, status=1)
def one_off(self):
return self.change_relay(relay=ONE_RELAY, status=0)
def two_on(self):
#log.info("two relay on")
return self.change_relay(relay=TWO_RELAY, status=1)
def two_off(self):
return self.change_relay(relay=TWO_RELAY, status=0)
def three_on(self):
#log.info("two relay on")
return self.change_relay(relay=THREE_RELAY, status=1)
def three_off(self):
return self.change_relay(relay=THREE_RELAY, status=0)
def toggle_line(self, line=0):
''' Read the internal state of the specified line, then set the opposite
state for a toggle function'''
if not self.open_phidget():
log.critical("Problem opening phidget")
return 0
try:
curr_state = self.interface.getOutputState(line)
except Exception as e:
log.critical("Problem getting relay on %s" % e)
self.close_phidget()
return 0
if not self.change_relay(line, not curr_state):
log.critical("Problem changing relay")
return 0
if not self.close_phidget():
            log.critical("Problem closing phidget")
return 0
return 1
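# A minimal usage sketch (assumes a PhidgetInterfaceKit 0/0/4 relay board is
# attached; the relay chosen and the sleep time are arbitrary examples):
#
#   pc = PowerControl()
#   if pc.open_phidget():
#       pc.zero_on()
#       time.sleep(1)
#       pc.zero_off()
#       pc.close_phidget()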
if __name__ == '__main__':
unittest.main()
| mit | 4,162,022,749,113,708,000 | 360,427,153,829,559,000 | 29.138075 | 80 | 0.589089 | false |
dh4nav/lammps | tools/i-pi/ipi/inputs/cell.py | 41 | 2337 | """Deals with creating the cell class.
Copyright (C) 2013, Joshua More and Michele Ceriotti
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Generates an cell class from a cell vector.
Classes:
InputCell: Deals with creating the Cell object from a file, and
writing the checkpoints.
"""
import numpy as np
from copy import copy
from ipi.engine.cell import *
from ipi.utils.inputvalue import *
from ipi.utils.units import UnitMap
from ipi.utils.messages import verbosity, warning
__all__ = [ 'InputCell' ]
class InputCell(InputArray):
"""Cell input class.
Handles generating the appropriate cell class from the xml input file,
and generating the xml checkpoint tags and data from an instance of the
object.
"""
attribs = copy(InputArray.attribs)
default_help = "Deals with the cell parameters. Takes as array which can be used to initialize the cell vector matrix."
default_label = "CELL"
def __init__(self, help=None, dimension=None, units=None, default=None, dtype=None):
"""Initializes InputCell.
Just calls the parent initialization function with appropriate arguments.
"""
super(InputCell,self).__init__(dtype=float, dimension="length", default=default, help=help)
def store(self, cell):
"""Takes a Cell instance and stores of minimal representation of it.
Args:
cell: A cell object.
"""
super(InputCell,self).store(cell.h)
self.shape.store((3,3))
def fetch(self):
"""Creates a cell object.
Returns:
A cell object of the appropriate type and with the appropriate
properties given the attributes of the InputCell object.
"""
h = super(InputCell,self).fetch()
h.shape = (3,3)
return Cell(h=h)
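# A minimal round-trip sketch (the 3x3 identity cell matrix is a made-up
# example; numpy is already imported above as np):
#
#   ic = InputCell()
#   ic.store(Cell(h=np.identity(3)))
#   cell = ic.fetch()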
| gpl-2.0 | -1,832,401,402,431,921,000 | 6,391,319,151,082,912,000 | 29.350649 | 122 | 0.713308 | false |
sencha/chromium-spacewalk | tools/cr/cr/commands/prepare.py | 59 | 1781 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module for the prepare command."""
import cr
class PrepareCommand(cr.Command):
"""The implementation of the prepare command.
The prepare command is used to perform the steps needed to get an output
directory ready to use. These should not be the kind of things that need to
happen every time you build something, but the rarer things that you re-do
only when you get or add new source files, or change your build options.
  This delegates all its behavior to implementations of PrepareOut. These will
(mostly) be in the cr.actions package.
"""
def __init__(self):
super(PrepareCommand, self).__init__()
self.help = 'Prepares an output directory'
self.description = ("""
This does any preparation needed for the output directory, such as
running gyp.
""")
def Run(self):
self.Prepare()
@classmethod
def UpdateContext(cls):
for preparation in PrepareOut.Plugins():
preparation.UpdateContext()
@classmethod
def Prepare(cls):
cls.UpdateContext()
for preparation in PrepareOut.Plugins():
preparation.Prepare()
class PrepareOut(cr.Plugin, cr.Plugin.Type):
"""Base class for output directory preparation plugins.
See PrepareCommand for details.
"""
def UpdateContext(self):
"""Update the context if needed.
This is also used by commands that want the environment setup correctly, but
are not going to call Prepare directly (such as sync)."""
def Prepare(self):
"""All PrepareOut plugins must override this method to do their work."""
raise NotImplementedError('Must be overridden.')
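# A minimal (hypothetical) plugin sketch; a real implementation would invoke
# gyp or another generator against the output directory:
#
#   class GypPrepareOut(PrepareOut):
#
#     def Prepare(self):
#       # regenerate the build files here, e.g. by running gyp
#       pass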
| bsd-3-clause | 5,852,599,065,599,610,000 | -1,215,776,596,984,984,800 | 29.186441 | 80 | 0.712521 | false |
xerxes2/gpodder | src/gpodder/sync.py | 1 | 41915 | # -*- coding: utf-8 -*-
#
# gPodder - A media aggregator and podcast client
# Copyright (c) 2005-2011 Thomas Perl and the gPodder Team
#
# gPodder is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# gPodder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# sync.py -- Device synchronization
# Thomas Perl <[email protected]> 2007-12-06
# based on libipodsync.py (2006-04-05 Thomas Perl)
import gpodder
from gpodder import util
from gpodder import services
from gpodder import libconverter
from gpodder.liblogger import log
import time
import calendar
_ = gpodder.gettext
gpod_available = True
try:
import gpod
except:
gpod_available = False
log('(gpodder.sync) Could not find gpod')
pymtp_available = True
try:
import gpodder.gpopymtp as pymtp
except:
pymtp_available = False
log('(gpodder.sync) Could not load gpopymtp (libmtp not installed?).')
try:
import eyeD3
except:
log( '(gpodder.sync) Could not find eyeD3')
try:
import Image
except:
log('(gpodder.sync) Could not find Python Imaging Library (PIL)')
# Register our dependencies for the synchronization module
services.dependency_manager.depend_on(_('iPod synchronization'), _('Support synchronization of podcasts to Apple iPod devices via libgpod.'), ['gpod', 'gst'], [])
services.dependency_manager.depend_on(_('iPod OGG converter'), _('Convert OGG podcasts to MP3 files on synchronization to iPods using oggdec and LAME.'), [], ['oggdec', 'lame'])
services.dependency_manager.depend_on(_('iPod video podcasts'), _('Detect video lengths via MPlayer, to synchronize video podcasts to iPods.'), [], ['mplayer'])
services.dependency_manager.depend_on(_('Rockbox cover art support'), _('Copy podcast cover art to filesystem-based MP3 players running Rockbox.org firmware. Needs Python Imaging.'), ['Image'], [])
import os
import os.path
import glob
import time
if pymtp_available:
class MTP(pymtp.MTP):
sep = os.path.sep
def __init__(self):
pymtp.MTP.__init__(self)
self.folders = {}
def connect(self):
pymtp.MTP.connect(self)
self.folders = self.unfold(self.mtp.LIBMTP_Get_Folder_List(self.device))
def get_folder_list(self):
return self.folders
def unfold(self, folder, path=''):
result = {}
while folder:
folder = folder.contents
name = self.sep.join([path, folder.name]).lstrip(self.sep)
result[name] = folder.folder_id
if folder.child:
result.update(self.unfold(folder.child, name))
folder = folder.sibling
return result
def mkdir(self, path):
folder_id = 0
prefix = []
parts = path.split(self.sep)
while parts:
prefix.append(parts[0])
tmpath = self.sep.join(prefix)
if self.folders.has_key(tmpath):
folder_id = self.folders[tmpath]
else:
folder_id = self.create_folder(parts[0], parent=folder_id)
# log('Creating subfolder %s in %s (id=%u)' % (parts[0], self.sep.join(prefix), folder_id))
tmpath = self.sep.join(prefix + [parts[0]])
self.folders[tmpath] = folder_id
# log(">>> %s = %s" % (tmpath, folder_id))
del parts[0]
# log('MTP.mkdir: %s = %u' % (path, folder_id))
return folder_id
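# A minimal usage sketch for the MTP wrapper above (assumes pymtp/libmtp are
# installed and a device is attached; the folder path is a made-up example):
#
#   mtp = MTP()
#   mtp.connect()
#   folder_id = mtp.mkdir('Music/Podcasts/Example Show')
#   mtp.disconnect()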
def open_device(config):
device_type = config.device_type
if device_type == 'ipod':
return iPodDevice(config)
elif device_type == 'filesystem':
return MP3PlayerDevice(config)
elif device_type == 'mtp':
return MTPDevice(config)
else:
return None
def get_track_length(filename):
if util.find_command('mplayer') is not None:
try:
mplayer_output = os.popen('mplayer -msglevel all=-1 -identify -vo null -ao null -frames 0 "%s" 2>/dev/null' % filename).read()
return int(float(mplayer_output[mplayer_output.index('ID_LENGTH'):].splitlines()[0][10:])*1000)
except:
pass
else:
log('Please install MPlayer for track length detection.')
try:
eyed3_info = eyeD3.Mp3AudioFile(filename)
return int(eyed3_info.getPlayTime()*1000)
except:
pass
return int(60*60*1000*3) # Default is three hours (to be on the safe side)
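# Example (illustrative): get_track_length('/tmp/episode.mp3') returns the
# track length in milliseconds, probing MPlayer first and falling back to
# eyeD3; the filename here is hypothetical.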
class SyncTrack(object):
"""
This represents a track that is on a device. You need
to specify at least the following keyword arguments,
because these will be used to display the track in the
GUI. All other keyword arguments are optional and can
be used to reference internal objects, etc... See the
iPod synchronization code for examples.
Keyword arguments needed:
playcount (How often has the track been played?)
podcast (Which podcast is this track from? Or: Folder name)
released (The release date of the episode)
If any of these fields is unknown, it should not be
passed to the function (the values will default to None
for all required fields).
"""
def __init__(self, title, length, modified, **kwargs):
self.title = title
self.length = length
self.filesize = util.format_filesize(length)
self.modified = modified
# Set some (possible) keyword arguments to default values
self.playcount = None
self.podcast = None
self.released = None
# Convert keyword arguments to object attributes
self.__dict__.update(kwargs)
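# A minimal construction sketch (all values are made up):
#
#   track = SyncTrack('Episode 1', 1048576, '2011-01-01',
#                     podcast='Example Podcast', playcount=0)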
class Device(services.ObservableService):
def __init__(self, config):
self._config = config
self.cancelled = False
self.allowed_types = ['audio', 'video']
self.errors = []
self.tracks_list = []
signals = ['progress', 'sub-progress', 'status', 'done', 'post-done']
services.ObservableService.__init__(self, signals)
def open(self):
pass
def cancel(self):
self.cancelled = True
self.notify('status', _('Cancelled by user'))
def close(self):
self.notify('status', _('Writing data to disk'))
if self._config.sync_disks_after_transfer and not gpodder.win32:
successful_sync = (os.system('sync') == 0)
else:
log('Not syncing disks. Unmount your device before unplugging.', sender=self)
successful_sync = True
self.notify('done')
self.notify('post-done', self, successful_sync)
return True
def add_tracks(self, tracklist=[], force_played=False):
for track in list(tracklist):
# Filter tracks that are not meant to be synchronized
does_not_exist = not track.was_downloaded(and_exists=True)
exclude_played = track.is_played and not force_played and \
self._config.only_sync_not_played
wrong_type = track.file_type() not in self.allowed_types
if does_not_exist or exclude_played or wrong_type:
log('Excluding %s from sync', track.title, sender=self)
tracklist.remove(track)
for id, track in enumerate(sorted(tracklist, key=lambda e: e.pubDate)):
if self.cancelled:
return False
self.notify('progress', id+1, len(tracklist))
added = self.add_track(track)
if self._config.on_sync_mark_played:
log('Marking as played on transfer: %s', track.url, sender=self)
track.mark(is_played=True)
if added and self._config.on_sync_delete and not track.is_locked:
log('Removing episode after transfer: %s', track.url, sender=self)
track.delete_from_disk()
return True
def convert_track(self, episode):
filename = episode.local_filename(create=False)
# The file has to exist, if we ought to transfer it, and therefore,
# local_filename(create=False) must never return None as filename
assert filename is not None
(fn, extension) = os.path.splitext(filename)
if libconverter.converters.has_converter(extension):
if self._config.disable_pre_sync_conversion:
log('Pre-sync conversion is not enabled, set disable_pre_sync_conversion to "False" to enable')
return filename
log('Converting: %s', filename, sender=self)
callback_status = lambda percentage: self.notify('sub-progress', int(percentage))
local_filename = libconverter.converters.convert(filename, callback=callback_status)
if local_filename is None:
log('Cannot convert %s', filename, sender=self)
return filename
return str(local_filename)
return filename
def remove_tracks(self, tracklist=[]):
for id, track in enumerate(tracklist):
if self.cancelled:
return False
self.notify('progress', id, len(tracklist))
self.remove_track(track)
return True
def get_all_tracks(self):
pass
def add_track(self, track):
pass
def remove_track(self, track):
pass
def get_free_space(self):
pass
def episode_on_device(self, episode):
return self._track_on_device(episode.title)
def _track_on_device(self, track_name):
for t in self.tracks_list:
title = t.title
if track_name == title:
return t
return None
class iPodDevice(Device):
def __init__(self, config):
Device.__init__(self, config)
self.mountpoint = str(self._config.ipod_mount)
self.itdb = None
self.podcast_playlist = None
def get_free_space(self):
# Reserve 10 MiB for iTunesDB writing (to be on the safe side)
RESERVED_FOR_ITDB = 1024*1024*10
return util.get_free_disk_space(self.mountpoint) - RESERVED_FOR_ITDB
def open(self):
Device.open(self)
if not gpod_available or not os.path.isdir(self.mountpoint):
return False
self.notify('status', _('Opening iPod database'))
self.itdb = gpod.itdb_parse(self.mountpoint, None)
if self.itdb is None:
return False
self.itdb.mountpoint = self.mountpoint
self.podcasts_playlist = gpod.itdb_playlist_podcasts(self.itdb)
self.master_playlist = gpod.itdb_playlist_mpl(self.itdb)
if self.podcasts_playlist:
self.notify('status', _('iPod opened'))
# build the initial tracks_list
self.tracks_list = self.get_all_tracks()
return True
else:
return False
def close(self):
if self.itdb is not None:
self.notify('status', _('Saving iPod database'))
gpod.itdb_write(self.itdb, None)
self.itdb = None
if self._config.ipod_write_gtkpod_extended:
self.notify('status', _('Writing extended gtkpod database'))
ext_filename = os.path.join(self.mountpoint, 'iPod_Control', 'iTunes', 'iTunesDB.ext')
idb_filename = os.path.join(self.mountpoint, 'iPod_Control', 'iTunes', 'iTunesDB')
if os.path.exists(ext_filename) and os.path.exists(idb_filename):
try:
db = gpod.ipod.Database(self.mountpoint)
gpod.gtkpod.parse(ext_filename, db, idb_filename)
gpod.gtkpod.write(ext_filename, db, idb_filename)
db.close()
except:
log('Error when writing iTunesDB.ext', sender=self, traceback=True)
else:
log('I could not find %s or %s. Will not update extended gtkpod DB.', ext_filename, idb_filename, sender=self)
else:
                log('Not writing extended gtkpod DB. Set "ipod_write_gtkpod_extended" to True if I should write it.', sender=self)
Device.close(self)
return True
def update_played_or_delete(self, channel, episodes, delete_from_db):
"""
Check whether episodes on ipod are played and update as played
and delete if required.
"""
for episode in episodes:
track = self.episode_on_device(episode)
if track:
gtrack = track.libgpodtrack
if gtrack.playcount > 0:
if delete_from_db and not gtrack.rating:
log('Deleting episode from db %s', gtrack.title, sender=self)
channel.delete_episode(episode)
else:
log('Marking episode as played %s', gtrack.title, sender=self)
episode.mark(is_played=True)
def purge(self):
for track in gpod.sw_get_playlist_tracks(self.podcasts_playlist):
if gpod.itdb_filename_on_ipod(track) is None:
log('Episode has no file: %s', track.title, sender=self)
# self.remove_track_gpod(track)
elif track.playcount > 0 and not track.rating:
log('Purging episode: %s', track.title, sender=self)
self.remove_track_gpod(track)
def get_all_tracks(self):
tracks = []
for track in gpod.sw_get_playlist_tracks(self.podcasts_playlist):
filename = gpod.itdb_filename_on_ipod(track)
if filename is None:
# This can happen if the episode is deleted on the device
log('Episode has no file: %s', track.title, sender=self)
self.remove_track_gpod(track)
continue
length = util.calculate_size(filename)
timestamp = util.file_modification_timestamp(filename)
modified = util.format_date(timestamp)
try:
released = gpod.itdb_time_mac_to_host(track.time_released)
released = util.format_date(released)
except ValueError, ve:
# timestamp out of range for platform time_t (bug 418)
log('Cannot convert track time: %s', ve, sender=self)
released = 0
t = SyncTrack(track.title, length, modified, modified_sort=timestamp, libgpodtrack=track, playcount=track.playcount, released=released, podcast=track.artist)
tracks.append(t)
return tracks
def remove_track(self, track):
self.notify('status', _('Removing %s') % track.title)
self.remove_track_gpod(track.libgpodtrack)
def remove_track_gpod(self, track):
filename = gpod.itdb_filename_on_ipod(track)
try:
gpod.itdb_playlist_remove_track(self.podcasts_playlist, track)
except:
log('Track %s not in playlist', track.title, sender=self)
gpod.itdb_track_unlink(track)
util.delete_file(filename)
def add_track(self, episode):
self.notify('status', _('Adding %s') % episode.title)
for track in gpod.sw_get_playlist_tracks(self.podcasts_playlist):
if episode.url == track.podcasturl:
if track.playcount > 0:
episode.mark(is_played=True)
# Mark as played on iPod if played locally (and set podcast flags)
self.set_podcast_flags(track, episode)
return True
original_filename = episode.local_filename(create=False)
# The file has to exist, if we ought to transfer it, and therefore,
# local_filename(create=False) must never return None as filename
assert original_filename is not None
local_filename = original_filename
if util.calculate_size(original_filename) > self.get_free_space():
log('Not enough space on %s, sync aborted...', self.mountpoint, sender = self)
d = {'episode': episode.title, 'mountpoint': self.mountpoint}
message =_('Error copying %(episode)s: Not enough free space on %(mountpoint)s')
self.errors.append(message % d)
self.cancelled = True
return False
local_filename = self.convert_track(episode)
(fn, extension) = os.path.splitext(local_filename)
if extension.lower().endswith('ogg'):
log('Cannot copy .ogg files to iPod.', sender=self)
return False
track = gpod.itdb_track_new()
# Add release time to track if pubDate has a valid value
if episode.pubDate > 0:
try:
# libgpod>= 0.5.x uses a new timestamp format
track.time_released = gpod.itdb_time_host_to_mac(int(episode.pubDate))
except:
# old (pre-0.5.x) libgpod versions expect mactime, so
# we're going to manually build a good mactime timestamp here :)
#
# + 2082844800 for unixtime => mactime (1970 => 1904)
track.time_released = int(episode.pubDate + 2082844800)
track.title = str(episode.title)
track.album = str(episode.channel.title)
track.artist = str(episode.channel.title)
track.description = str(util.remove_html_tags(episode.description))
track.podcasturl = str(episode.url)
track.podcastrss = str(episode.channel.url)
track.tracklen = get_track_length(local_filename)
track.size = os.path.getsize(local_filename)
if episode.file_type() == 'audio':
track.filetype = 'mp3'
track.mediatype = 0x00000004
elif episode.file_type() == 'video':
track.filetype = 'm4v'
track.mediatype = 0x00000006
self.set_podcast_flags(track, episode)
self.set_cover_art(track, local_filename)
gpod.itdb_track_add(self.itdb, track, -1)
gpod.itdb_playlist_add_track(self.master_playlist, track, -1)
gpod.itdb_playlist_add_track(self.podcasts_playlist, track, -1)
copied = gpod.itdb_cp_track_to_ipod(track, str(local_filename), None)
if copied and gpodder.user_hooks is not None:
gpodder.user_hooks.on_file_copied_to_ipod(self, local_filename)
# If the file has been converted, delete the temporary file here
if local_filename != original_filename:
util.delete_file(local_filename)
return True
def set_podcast_flags(self, track, episode):
try:
# Set blue bullet for unplayed tracks on 5G iPods
if episode.is_played:
track.mark_unplayed = 0x01
if track.playcount == 0:
track.playcount = 1
else:
if track.playcount > 0 or track.bookmark_time > 0:
#track is partially played so no blue bullet
track.mark_unplayed = 0x01
else:
#totally unplayed
track.mark_unplayed = 0x02
# Set several flags for to podcast values
track.remember_playback_position = 0x01
track.flag1 = 0x02
track.flag2 = 0x01
track.flag3 = 0x01
track.flag4 = 0x01
except:
log('Seems like your python-gpod is out-of-date.', sender=self)
def set_cover_art(self, track, local_filename):
try:
tag = eyeD3.Tag()
if tag.link(local_filename):
if 'APIC' in tag.frames and len(tag.frames['APIC']) > 0:
apic = tag.frames['APIC'][0]
extension = 'jpg'
if apic.mimeType == 'image/png':
extension = 'png'
                    cover_filename = '%s.cover.%s' % (local_filename, extension)
cover_file = open(cover_filename, 'w')
cover_file.write(apic.imageData)
cover_file.close()
gpod.itdb_track_set_thumbnails(track, cover_filename)
return True
except:
log('Error getting cover using eyeD3', sender=self)
try:
cover_filename = os.path.join(os.path.dirname(local_filename), 'folder.jpg')
if os.path.isfile(cover_filename):
gpod.itdb_track_set_thumbnails(track, cover_filename)
return True
except:
log('Error getting cover using channel cover', sender=self)
return False
class MP3PlayerDevice(Device):
# if different players use other filenames besides
# .scrobbler.log, add them to this list
scrobbler_log_filenames = ['.scrobbler.log']
def __init__(self, config):
Device.__init__(self, config)
self.destination = self._config.mp3_player_folder
self.buffer_size = 1024*1024 # 1 MiB
self.scrobbler_log = []
def get_free_space(self):
return util.get_free_disk_space(self.destination)
def open(self):
Device.open(self)
self.notify('status', _('Opening MP3 player'))
if util.directory_is_writable(self.destination):
self.notify('status', _('MP3 player opened'))
# build the initial tracks_list
self.tracks_list = self.get_all_tracks()
if self._config.mp3_player_use_scrobbler_log:
mp3_player_mount_point = util.find_mount_point(self.destination)
                # If a mount point cannot be found look inside self.destination for scrobbler_log_filenames
# this prevents us from os.walk()'ing the entire / filesystem
if mp3_player_mount_point == '/':
mp3_player_mount_point = self.destination
log_location = self.find_scrobbler_log(mp3_player_mount_point)
if log_location is not None and self.load_audioscrobbler_log(log_location):
log('Using Audioscrobbler log data to mark tracks as played', sender=self)
return True
else:
return False
def add_track(self, episode):
self.notify('status', _('Adding %s') % episode.title.decode('utf-8', 'ignore'))
if self._config.fssync_channel_subfolders:
# Add channel title as subfolder
folder = episode.channel.title
# Clean up the folder name for use on limited devices
folder = util.sanitize_filename(folder, self._config.mp3_player_max_filename_length)
folder = os.path.join(self.destination, folder)
else:
folder = self.destination
folder = util.sanitize_encoding(folder)
from_file = util.sanitize_encoding(self.convert_track(episode))
filename_base = util.sanitize_filename(episode.sync_filename(self._config.custom_sync_name_enabled, self._config.custom_sync_name), self._config.mp3_player_max_filename_length)
to_file = filename_base + os.path.splitext(from_file)[1].lower()
# dirty workaround: on bad (empty) episode titles,
# we simply use the from_file basename
# (please, podcast authors, FIX YOUR RSS FEEDS!)
if os.path.splitext(to_file)[0] == '':
to_file = os.path.basename(from_file)
to_file = util.sanitize_encoding(os.path.join(folder, to_file))
if not os.path.exists(folder):
try:
os.makedirs(folder)
except:
log('Cannot create folder on MP3 player: %s', folder, sender=self)
return False
if self._config.mp3_player_use_scrobbler_log and not episode.is_played:
# FIXME: This misses some things when channel.title<>album tag which is what
# the scrobbling entity will be using.
if [episode.channel.title, episode.title] in self.scrobbler_log:
log('Marking "%s" from "%s" as played', episode.title, episode.channel.title, sender=self)
episode.mark(is_played=True)
if self._config.rockbox_copy_coverart and not os.path.exists(os.path.join(folder, 'cover.bmp')):
log('Creating Rockbox album art for "%s"', episode.channel.title, sender=self)
self.copy_player_cover_art(folder, from_file, \
'cover.bmp', 'BMP', self._config.rockbox_coverart_size)
if self._config.custom_player_copy_coverart \
and not os.path.exists(os.path.join(folder, \
self._config.custom_player_coverart_name)):
log('Creating custom player album art for "%s"',
episode.channel.title, sender=self)
self.copy_player_cover_art(folder, from_file, \
self._config.custom_player_coverart_name, \
self._config.custom_player_coverart_format, \
self._config.custom_player_coverart_size)
if not os.path.exists(to_file):
log('Copying %s => %s', os.path.basename(from_file), to_file.decode(util.encoding), sender=self)
copied = self.copy_file_progress(from_file, to_file)
if copied and gpodder.user_hooks is not None:
gpodder.user_hooks.on_file_copied_to_filesystem(self, from_file, to_file)
return copied
return True
def copy_file_progress(self, from_file, to_file):
try:
out_file = open(to_file, 'wb')
except IOError, ioerror:
d = {'filename': ioerror.filename, 'message': ioerror.strerror}
self.errors.append(_('Error opening %(filename)s: %(message)s') % d)
self.cancel()
return False
try:
in_file = open(from_file, 'rb')
except IOError, ioerror:
d = {'filename': ioerror.filename, 'message': ioerror.strerror}
self.errors.append(_('Error opening %(filename)s: %(message)s') % d)
self.cancel()
return False
in_file.seek(0, 2)
bytes = in_file.tell()
in_file.seek(0)
bytes_read = 0
s = in_file.read(self.buffer_size)
while s:
bytes_read += len(s)
try:
out_file.write(s)
except IOError, ioerror:
self.errors.append(ioerror.strerror)
try:
out_file.close()
except:
pass
try:
log('Trying to remove partially copied file: %s' % to_file, sender=self)
                    os.unlink(to_file)
log('Yeah! Unlinked %s at least..' % to_file, sender=self)
except:
log('Error while trying to unlink %s. OH MY!' % to_file, sender=self)
self.cancel()
return False
self.notify('sub-progress', int(min(100, 100*float(bytes_read)/float(bytes))))
s = in_file.read(self.buffer_size)
out_file.close()
in_file.close()
return True
def get_all_tracks(self):
tracks = []
if self._config.fssync_channel_subfolders:
files = glob.glob(os.path.join(self.destination, '*', '*'))
else:
files = glob.glob(os.path.join(self.destination, '*'))
for filename in files:
(title, extension) = os.path.splitext(os.path.basename(filename))
length = util.calculate_size(filename)
timestamp = util.file_modification_timestamp(filename)
modified = util.format_date(timestamp)
if self._config.fssync_channel_subfolders:
podcast_name = os.path.basename(os.path.dirname(filename))
else:
podcast_name = None
t = SyncTrack(title, length, modified, modified_sort=timestamp, filename=filename, podcast=podcast_name)
tracks.append(t)
return tracks
def episode_on_device(self, episode):
e = util.sanitize_filename(episode.sync_filename(self._config.custom_sync_name_enabled, self._config.custom_sync_name), self._config.mp3_player_max_filename_length)
return self._track_on_device(e)
def remove_track(self, track):
self.notify('status', _('Removing %s') % track.title)
util.delete_file(track.filename)
directory = os.path.dirname(track.filename)
if self.directory_is_empty(directory) and self._config.fssync_channel_subfolders:
try:
os.rmdir(directory)
except:
log('Cannot remove %s', directory, sender=self)
def directory_is_empty(self, directory):
files = glob.glob(os.path.join(directory, '*'))
dotfiles = glob.glob(os.path.join(directory, '.*'))
return len(files+dotfiles) == 0
def find_scrobbler_log(self, mount_point):
""" find an audioscrobbler log file from log_filenames in the mount_point dir """
for dirpath, dirnames, filenames in os.walk(mount_point):
for log_file in self.scrobbler_log_filenames:
filename = os.path.join(dirpath, log_file)
if os.path.isfile(filename):
return filename
# No scrobbler log on that device
return None
def copy_player_cover_art(self, destination, local_filename, \
cover_dst_name, cover_dst_format, \
cover_dst_size):
"""
Try to copy the channel cover to the podcast folder on the MP3
player. This makes the player, e.g. Rockbox (rockbox.org), display the
cover art in its interface.
You need the Python Imaging Library (PIL) installed to be able to
convert the cover file to a Bitmap file, which Rockbox needs.
"""
try:
cover_loc = os.path.join(os.path.dirname(local_filename), 'folder.jpg')
cover_dst = os.path.join(destination, cover_dst_name)
if os.path.isfile(cover_loc):
log('Creating cover art file on player', sender=self)
log('Cover art size is %s', cover_dst_size, sender=self)
size = (cover_dst_size, cover_dst_size)
try:
cover = Image.open(cover_loc)
cover.thumbnail(size)
cover.save(cover_dst, cover_dst_format)
except IOError:
log('Cannot create %s (PIL?)', cover_dst, traceback=True, sender=self)
return True
else:
log('No cover available to set as player cover', sender=self)
return True
except:
log('Error getting cover using channel cover', sender=self)
return False
def load_audioscrobbler_log(self, log_file):
""" Retrive track title and artist info for all the entries
in an audioscrobbler portable player format logfile
http://www.audioscrobbler.net/wiki/Portable_Player_Logging """
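        # A sample (hypothetical) tab-separated entry as described by the spec:
        #   Example Artist<TAB>Example Show<TAB>Episode 1<TAB>1<TAB>1800<TAB>L<TAB>1300000000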
try:
log('Opening "%s" as AudioScrobbler log.', log_file, sender=self)
f = open(log_file, 'r')
entries = f.readlines()
f.close()
except IOError, ioerror:
log('Error: "%s" cannot be read.', log_file, sender=self)
return False
try:
# Scrobble Log Format: http://www.audioscrobbler.net/wiki/Portable_Player_Logging
# Notably some fields are optional so will appear as \t\t.
# Conforming scrobblers should strip any \t's from the actual fields.
for entry in entries:
entry = entry.split('\t')
if len(entry)>=5:
artist, album, track, pos, length, rating = entry[:6]
# L means at least 50% of the track was listened to (S means < 50%)
if 'L' in rating:
# Whatever is writing the logs will only have the taginfo in the
# file to work from. Mostly album~=channel name
if len(track):
self.scrobbler_log.append([album, track])
else:
log('Skipping logging of %s (missing track)', album)
else:
log('Skipping scrobbler entry: %d elements %s', len(entry), entry)
except:
log('Error while parsing "%s".', log_file, sender=self)
return True
class MTPDevice(Device):
def __init__(self, config):
Device.__init__(self, config)
self.__model_name = None
try:
self.__MTPDevice = MTP()
except NameError, e:
# pymtp not available / not installed (see bug 924)
log('pymtp not found: %s', str(e), sender=self)
self.__MTPDevice = None
def __callback(self, sent, total):
if self.cancelled:
return -1
percentage = round(float(sent)/float(total)*100)
text = ('%i%%' % percentage)
self.notify('progress', sent, total, text)
def __date_to_mtp(self, date):
"""
        this function formats the given date and time as a string
        according to the MTP specification: YYYYMMDDThhmmss.s
        return
            the string representation of the given date
"""
if not date:
return ""
try:
d = time.gmtime(date)
return time.strftime("%Y%m%d-%H%M%S.0Z", d)
except Exception, exc:
            log('ERROR: An error has happened while trying to convert date to an mtp string (%s)', exc, sender=self)
return None
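    # Example (illustrative): __date_to_mtp(1199145600) returns
    # '20080101-000000.0Z' for 2008-01-01 00:00:00 UTC.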
def __mtp_to_date(self, mtp):
"""
        this parses the MTP string representation of a date
        according to the specification (YYYYMMDDThhmmss.s) into
        a unix timestamp
"""
if not mtp:
return None
try:
mtp = mtp.replace(" ", "0") # replace blank with 0 to fix some invalid string
            d = time.strptime(mtp[:8] + mtp[9:15], "%Y%m%d%H%M%S")
_date = calendar.timegm(d)
if len(mtp)==20:
# TIME ZONE SHIFTING: the string contains a hour/min shift relative to a time zone
try:
shift_direction=mtp[15]
hour_shift = int(mtp[16:18])
minute_shift = int(mtp[18:20])
shift_in_sec = hour_shift * 3600 + minute_shift * 60
if shift_direction == "+":
_date += shift_in_sec
elif shift_direction == "-":
_date -= shift_in_sec
else:
raise ValueError("Expected + or -")
except Exception, exc:
log('WARNING: ignoring invalid time zone information for %s (%s)', mtp, exc, sender=self)
return max( 0, _date )
except Exception, exc:
log('WARNING: the mtp date "%s" can not be parsed against mtp specification (%s)', mtp, exc, sender=self)
return None
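    # Example (illustrative): __mtp_to_date('20080101T000000.0') returns the
    # unix timestamp 1199145600; a trailing zone shift such as '+0100' is
    # applied when the string is 20 characters long.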
def get_name(self):
"""
        this function tries to find a nice name for the device.
        First, it tries to find a friendly (user-assigned) name
        (this name can be set by other applications and is stored on the device).
        If no friendly name was assigned, it tries to get the model name (given by the vendor).
        If no name is found at all, a generic one is returned.
        Once found, the name is cached internally to prevent reading the device again
return
the name of the device
"""
if self.__model_name:
return self.__model_name
if self.__MTPDevice is None:
return _('MTP device')
self.__model_name = self.__MTPDevice.get_devicename() # actually libmtp.Get_Friendlyname
if not self.__model_name or self.__model_name == "?????":
self.__model_name = self.__MTPDevice.get_modelname()
if not self.__model_name:
self.__model_name = _('MTP device')
return self.__model_name
def open(self):
Device.open(self)
log("opening the MTP device", sender=self)
self.notify('status', _('Opening the MTP device'), )
try:
self.__MTPDevice.connect()
# build the initial tracks_list
self.tracks_list = self.get_all_tracks()
except Exception, exc:
log('unable to find an MTP device (%s)', exc, sender=self, traceback=True)
return False
self.notify('status', _('%s opened') % self.get_name())
return True
def close(self):
log("closing %s", self.get_name(), sender=self)
self.notify('status', _('Closing %s') % self.get_name())
try:
self.__MTPDevice.disconnect()
except Exception, exc:
log('unable to close %s (%s)', self.get_name(), exc, sender=self)
return False
self.notify('status', _('%s closed') % self.get_name())
Device.close(self)
return True
def add_track(self, episode):
self.notify('status', _('Adding %s...') % episode.title)
filename = str(self.convert_track(episode))
log("sending %s (%s).", filename, episode.title, sender=self)
try:
# verify free space
needed = util.calculate_size(filename)
free = self.get_free_space()
if needed > free:
log('Not enough space on device %s: %s available, but need at least %s', self.get_name(), util.format_filesize(free), util.format_filesize(needed), sender=self)
self.cancelled = True
return False
# fill metadata
metadata = pymtp.LIBMTP_Track()
metadata.title = str(episode.title)
metadata.artist = str(episode.channel.title)
metadata.album = str(episode.channel.title)
metadata.genre = "podcast"
metadata.date = self.__date_to_mtp(episode.pubDate)
metadata.duration = get_track_length(str(filename))
folder_name = ''
if episode.mimetype.startswith('audio/') and self._config.mtp_audio_folder:
folder_name = self._config.mtp_audio_folder
if episode.mimetype.startswith('video/') and self._config.mtp_video_folder:
folder_name = self._config.mtp_video_folder
if episode.mimetype.startswith('image/') and self._config.mtp_image_folder:
folder_name = self._config.mtp_image_folder
if folder_name != '' and self._config.mtp_podcast_folders:
folder_name += os.path.sep + str(episode.channel.title)
# log('Target MTP folder: %s' % folder_name)
if folder_name == '':
folder_id = 0
else:
folder_id = self.__MTPDevice.mkdir(folder_name)
# send the file
to_file = util.sanitize_filename(metadata.title) + episode.extension()
self.__MTPDevice.send_track_from_file(filename, to_file,
metadata, folder_id, callback=self.__callback)
if gpodder.user_hooks is not None:
gpodder.user_hooks.on_file_copied_to_mtp(self, filename, to_file)
except:
log('unable to add episode %s', episode.title, sender=self, traceback=True)
return False
return True
def remove_track(self, sync_track):
self.notify('status', _('Removing %s') % sync_track.mtptrack.title)
log("removing %s", sync_track.mtptrack.title, sender=self)
try:
self.__MTPDevice.delete_object(sync_track.mtptrack.item_id)
except Exception, exc:
            log('unable to remove file %s (%s)', sync_track.mtptrack.filename, exc, sender=self)
log('%s removed', sync_track.mtptrack.title , sender=self)
def get_all_tracks(self):
try:
listing = self.__MTPDevice.get_tracklisting(callback=self.__callback)
except Exception, exc:
log('unable to get file listing %s (%s)', exc, sender=self)
tracks = []
for track in listing:
title = track.title
if not title or title=="": title=track.filename
if len(title) > 50: title = title[0:49] + '...'
artist = track.artist
if artist and len(artist) > 50: artist = artist[0:49] + '...'
length = track.filesize
age_in_days = 0
date = self.__mtp_to_date(track.date)
if not date:
modified = track.date # not a valid mtp date. Display what mtp gave anyway
modified_sort = -1 # no idea how to sort invalid date
else:
modified = util.format_date(date)
modified_sort = date
t = SyncTrack(title, length, modified, modified_sort=modified_sort, mtptrack=track, podcast=artist)
tracks.append(t)
return tracks
def get_free_space(self):
if self.__MTPDevice is not None:
return self.__MTPDevice.get_freespace()
else:
return 0
| gpl-3.0 | 6,744,702,642,180,576,000 | -6,942,153,260,805,017,000 | 38.805318 | 197 | 0.578122 | false |
rebost/django | tests/modeltests/signals/tests.py | 40 | 5237 | from __future__ import absolute_import
from django.db.models import signals
from django.dispatch import receiver
from django.test import TestCase
from .models import Person, Car
# #8285: signals can be any callable
class PostDeleteHandler(object):
def __init__(self, data):
self.data = data
def __call__(self, signal, sender, instance, **kwargs):
self.data.append(
(instance, instance.id is None)
)
class MyReceiver(object):
def __init__(self, param):
self.param = param
self._run = False
def __call__(self, signal, sender, **kwargs):
self._run = True
signal.disconnect(receiver=self, sender=sender)
class SignalTests(TestCase):
def test_basic(self):
# Save up the number of connected signals so that we can check at the
# end that all the signals we register get properly unregistered (#9989)
pre_signals = (
len(signals.pre_save.receivers),
len(signals.post_save.receivers),
len(signals.pre_delete.receivers),
len(signals.post_delete.receivers),
)
data = []
def pre_save_test(signal, sender, instance, **kwargs):
data.append(
(instance, kwargs.get("raw", False))
)
signals.pre_save.connect(pre_save_test)
def post_save_test(signal, sender, instance, **kwargs):
data.append(
(instance, kwargs.get("created"), kwargs.get("raw", False))
)
signals.post_save.connect(post_save_test)
def pre_delete_test(signal, sender, instance, **kwargs):
data.append(
(instance, instance.id is None)
)
signals.pre_delete.connect(pre_delete_test)
post_delete_test = PostDeleteHandler(data)
signals.post_delete.connect(post_delete_test)
# throw a decorator syntax receiver into the mix
@receiver(signals.pre_save)
def pre_save_decorator_test(signal, sender, instance, **kwargs):
data.append(instance)
@receiver(signals.pre_save, sender=Car)
def pre_save_decorator_sender_test(signal, sender, instance, **kwargs):
data.append(instance)
p1 = Person(first_name="John", last_name="Smith")
self.assertEqual(data, [])
p1.save()
self.assertEqual(data, [
(p1, False),
p1,
(p1, True, False),
])
data[:] = []
p1.first_name = "Tom"
p1.save()
self.assertEqual(data, [
(p1, False),
p1,
(p1, False, False),
])
data[:] = []
# Car signal (sender defined)
c1 = Car(make="Volkswagon", model="Passat")
c1.save()
self.assertEqual(data, [
(c1, False),
c1,
c1,
(c1, True, False),
])
data[:] = []
# Calling an internal method purely so that we can trigger a "raw" save.
p1.save_base(raw=True)
self.assertEqual(data, [
(p1, True),
p1,
(p1, False, True),
])
data[:] = []
p1.delete()
self.assertEqual(data, [
(p1, False),
(p1, False),
])
data[:] = []
p2 = Person(first_name="James", last_name="Jones")
p2.id = 99999
p2.save()
self.assertEqual(data, [
(p2, False),
p2,
(p2, True, False),
])
data[:] = []
p2.id = 99998
p2.save()
self.assertEqual(data, [
(p2, False),
p2,
(p2, True, False),
])
data[:] = []
p2.delete()
self.assertEqual(data, [
(p2, False),
(p2, False)
])
self.assertQuerysetEqual(
Person.objects.all(), [
"James Jones",
],
unicode
)
signals.post_delete.disconnect(post_delete_test)
signals.pre_delete.disconnect(pre_delete_test)
signals.post_save.disconnect(post_save_test)
signals.pre_save.disconnect(pre_save_test)
signals.pre_save.disconnect(pre_save_decorator_test)
signals.pre_save.disconnect(pre_save_decorator_sender_test, sender=Car)
# Check that all our signals got disconnected properly.
post_signals = (
len(signals.pre_save.receivers),
len(signals.post_save.receivers),
len(signals.pre_delete.receivers),
len(signals.post_delete.receivers),
)
self.assertEqual(pre_signals, post_signals)
def test_disconnect_in_dispatch(self):
"""
Test that signals that disconnect when being called don't mess future
dispatching.
"""
a, b = MyReceiver(1), MyReceiver(2)
signals.post_save.connect(sender=Person, receiver=a)
signals.post_save.connect(sender=Person, receiver=b)
p = Person.objects.create(first_name='John', last_name='Smith')
self.assertTrue(a._run)
self.assertTrue(b._run)
self.assertEqual(signals.post_save.receivers, [])
| bsd-3-clause | -8,520,155,545,400,347,000 | 8,402,422,153,662,074,000 | 28.421348 | 80 | 0.54134 | false |
abhijithch/MozDef | alerts/cloudtrail.py | 12 | 1882 | #!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
#
# Contributors:
# Anthony Verez [email protected]
from lib.alerttask import AlertTask
class AlertCloudtrail(AlertTask):
def main(self):
# look for events in last 160 hours
date_timedelta = dict(hours=160)
# Configure filters by importing a kibana dashboard
self.filtersFromKibanaDash('cloudtrail_dashboard.json', date_timedelta)
# Search events
self.searchEventsSimple()
self.walkEvents()
# Set alert properties
def onEvent(self, event):
category = 'AWSCloudtrail'
tags = ['cloudtrail','aws']
severity = 'INFO'
summary = ('{0} called {1} from {2}'.format(event['_source']['userIdentity']['userName'], event['_source']['eventName'], event['_source']['sourceIPAddress']))
if event['_source']['eventName'] == 'RunInstances':
for i in event['_source']['responseElements']['instancesSet']['items']:
if 'privateDnsName' in i.keys():
summary += (' running {0} '.format(i['privateDnsName']))
elif 'instanceId' in i.keys():
summary += (' running {0} '.format(i['instanceId']))
else:
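                    # flattenDict is assumed to be provided elsewhere in
                    # MozDef's shared utilities (it is not imported here)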
summary += (' running {0} '.format(flattenDict(i)))
if event['_source']['eventName'] == 'StartInstances':
for i in event['_source']['requestParameters']['instancesSet']['items']:
summary += (' starting {0} '.format(i['instanceId']))
# Create the alert object based on these properties
return self.createAlertDict(summary, category, tags, [event], severity) | mpl-2.0 | 6,495,591,570,359,561,000 | -2,869,591,399,193,107,000 | 41.795455 | 166 | 0.610521 | false |
kyrias/cjdns | node_build/dependencies/libuv/build/gyp/test/mac/gyptest-xcode-env-order.py | 119 | 3284 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that dependent Xcode settings are processed correctly.
"""
import TestGyp
import TestMac
import subprocess
import sys
if sys.platform == 'darwin':
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
CHDIR = 'xcode-env-order'
INFO_PLIST_PATH = 'Test.app/Contents/Info.plist'
test.run_gyp('test.gyp', chdir=CHDIR)
test.build('test.gyp', test.ALL, chdir=CHDIR)
# Env vars in 'copies' filenames.
test.built_file_must_exist('Test-copy-brace/main.c', chdir=CHDIR)
test.built_file_must_exist('Test-copy-paren/main.c', chdir=CHDIR)
test.built_file_must_exist('Test-copy-bare/main.c', chdir=CHDIR)
# Env vars in 'actions' filenames and inline actions
test.built_file_must_exist('action-copy-brace.txt', chdir=CHDIR)
test.built_file_must_exist('action-copy-paren.txt', chdir=CHDIR)
test.built_file_must_exist('action-copy-bare.txt', chdir=CHDIR)
# Env vars in 'rules' filenames and inline actions
test.built_file_must_exist('rule-copy-brace.txt', chdir=CHDIR)
test.built_file_must_exist('rule-copy-paren.txt', chdir=CHDIR)
# TODO: see comment in test.gyp for this file.
#test.built_file_must_exist('rule-copy-bare.txt', chdir=CHDIR)
# Env vars in Info.plist.
info_plist = test.built_file_path(INFO_PLIST_PATH, chdir=CHDIR)
test.must_exist(info_plist)
test.must_contain(info_plist, '''\
\t<key>BraceProcessedKey1</key>
\t<string>D:/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>BraceProcessedKey2</key>
\t<string>/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>BraceProcessedKey3</key>
\t<string>com.apple.product-type.application:D:/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>ParenProcessedKey1</key>
\t<string>D:/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>ParenProcessedKey2</key>
\t<string>/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>ParenProcessedKey3</key>
\t<string>com.apple.product-type.application:D:/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>BareProcessedKey1</key>
\t<string>D:/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>BareProcessedKey2</key>
\t<string>/Source/Project/Test</string>''')
# NOTE: For bare variables, $PRODUCT_TYPE is not replaced! It _is_ replaced
# if it's not right at the start of the string (e.g. ':$PRODUCT_TYPE'), so
# this looks like an Xcode bug. This bug isn't emulated (yet?), so check this
# only for Xcode.
if test.format == 'xcode' and TestMac.Xcode.Version() < '0500':
test.must_contain(info_plist, '''\
\t<key>BareProcessedKey3</key>
\t<string>$PRODUCT_TYPE:D:/Source/Project/Test</string>''')
else:
# The bug has been fixed by Xcode version 5.0.0.
test.must_contain(info_plist, '''\
\t<key>BareProcessedKey3</key>
\t<string>com.apple.product-type.application:D:/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>MixedProcessedKey</key>
\t<string>/Source/Project:Test:mh_execute</string>''')
test.pass_test()
| gpl-3.0 | 6,816,348,993,673,824,000 | -5,188,131,478,102,203,000 | 35.488889 | 80 | 0.708587 | false |
bdero/edx-platform | cms/djangoapps/contentstore/features/component_settings_editor_helpers.py | 18 | 9372 | # disable missing docstring
# pylint: disable=C0111
from lettuce import world
from nose.tools import assert_equal, assert_in # pylint: disable=E0611
from terrain.steps import reload_the_page
from common import type_in_codemirror
from selenium.webdriver.common.keys import Keys
@world.absorb
def create_component_instance(step, category, component_type=None, is_advanced=False, advanced_component=None):
"""
Create a new component in a Unit.
Parameters
----------
category: component type (discussion, html, problem, video, advanced)
component_type: for components with multiple templates, the link text in the menu
is_advanced: for problems, is the desired component under the advanced menu?
advanced_component: for advanced components, the related value of policy key 'advanced_modules'
"""
assert_in(category, ['advanced', 'problem', 'html', 'video', 'discussion'])
component_button_css = 'span.large-{}-icon'.format(category.lower())
if category == 'problem':
module_css = 'div.xmodule_CapaModule'
elif category == 'advanced':
module_css = 'div.xmodule_{}Module'.format(advanced_component.title())
else:
module_css = 'div.xmodule_{}Module'.format(category.title())
# Count how many of that module is on the page. Later we will
# assert that one more was added.
# We need to use world.browser.find_by_css instead of world.css_find
# because it's ok if there are currently zero of them.
module_count_before = len(world.browser.find_by_css(module_css))
# Disable the jquery animation for the transition to the menus.
world.disable_jquery_animations()
world.css_click(component_button_css)
if category in ('problem', 'html', 'advanced'):
world.wait_for_invisible(component_button_css)
click_component_from_menu(category, component_type, is_advanced)
expected_count = module_count_before + 1
world.wait_for(
lambda _: len(world.css_find(module_css)) == expected_count,
timeout=20
)
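# A hedged usage sketch (not part of the original suite). The strings below are
# illustrative: component_type is the menu link text described in
# click_component_from_menu, e.g. "Blank Common Problem".
#
#   world.create_component_instance(step, 'problem',
#                                   component_type='Blank Common Problem')
#   world.create_component_instance(step, 'advanced',
#                                   advanced_component='annotatable')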
@world.absorb
def click_new_component_button(step, component_button_css):
step.given('I have clicked the new unit button')
world.css_click(component_button_css)
def _click_advanced():
css = 'ul.problem-type-tabs a[href="#tab2"]'
world.css_click(css)
# Wait for the advanced tab items to be displayed
tab2_css = 'div.ui-tabs-panel#tab2'
world.wait_for_visible(tab2_css)
def _find_matching_link(category, component_type):
"""
Find the link with the specified text. There should be one and only one.
"""
# The tab shows links for the given category
links = world.css_find('div.new-component-{} a'.format(category))
# Find the link whose text matches what you're looking for
matched_links = [link for link in links if link.text == component_type]
# There should be one and only one
assert_equal(len(matched_links), 1)
return matched_links[0]
def click_component_from_menu(category, component_type, is_advanced):
"""
Creates a component for a category with more
than one template, i.e. HTML and Problem.
For some problem types, it is necessary to click to
the Advanced tab.
The component_type is the link text, e.g. "Blank Common Problem"
"""
if is_advanced:
# Sometimes this click does not work if you go too fast.
world.retry_on_exception(_click_advanced,
ignored_exceptions=AssertionError)
# Retry this in case the list is empty because you tried too fast.
link = world.retry_on_exception(
lambda: _find_matching_link(category, component_type),
ignored_exceptions=AssertionError
)
# Wait for the link to be clickable. If you go too fast it is not.
world.retry_on_exception(lambda: link.click())
@world.absorb
def edit_component_and_select_settings():
world.edit_component()
world.ensure_settings_visible()
@world.absorb
def ensure_settings_visible():
# Select the 'settings' tab if there is one (it isn't displayed if it is the only option)
settings_button = world.browser.find_by_css('.settings-button')
if len(settings_button) > 0:
world.css_click('.settings-button')
@world.absorb
def edit_component(index=0):
# Verify that the "loading" indication has been hidden.
world.wait_for_loading()
# Verify that the "edit" button is present.
world.wait_for(lambda _driver: world.css_visible('a.edit-button'))
world.css_click('a.edit-button', index)
world.wait_for_ajax_complete()
@world.absorb
def select_editor_tab(tab_name):
editor_tabs = world.browser.find_by_css('.editor-tabs a')
expected_tab_text = tab_name.strip().upper()
matching_tabs = [tab for tab in editor_tabs if tab.text.upper() == expected_tab_text]
assert len(matching_tabs) == 1
tab = matching_tabs[0]
tab.click()
world.wait_for_ajax_complete()
def enter_xml_in_advanced_problem(step, text):
"""
    Edits an advanced problem (assumes it is the only one on the page),
types the provided XML, and saves the component.
"""
world.edit_component()
type_in_codemirror(0, text)
world.save_component()
@world.absorb
def verify_setting_entry(setting, display_name, value, explicitly_set):
"""
Verify the capa module fields are set as expected in the
Advanced Settings editor.
Parameters
----------
setting: the WebDriverElement object found in the browser
display_name: the string expected as the label
    value: the expected field value
explicitly_set: True if the value is expected to have been explicitly set
for the problem, rather than derived from the defaults. This is verified
by the existence of a "Clear" button next to the field value.
"""
assert_equal(display_name, setting.find_by_css('.setting-label')[0].html.strip())
# Check if the web object is a list type
# If so, we use a slightly different mechanism for determining its value
if setting.has_class('metadata-list-enum') or setting.has_class('metadata-dict') or setting.has_class('metadata-video-translations'):
list_value = ', '.join(ele.value for ele in setting.find_by_css('.list-settings-item'))
assert_equal(value, list_value)
elif setting.has_class('metadata-videolist-enum'):
list_value = ', '.join(ele.find_by_css('input')[0].value for ele in setting.find_by_css('.videolist-settings-item'))
assert_equal(value, list_value)
else:
assert_equal(value, setting.find_by_css('.setting-input')[0].value)
# VideoList doesn't have clear button
if not setting.has_class('metadata-videolist-enum'):
settingClearButton = setting.find_by_css('.setting-clear')[0]
assert_equal(explicitly_set, settingClearButton.has_class('active'))
assert_equal(not explicitly_set, settingClearButton.has_class('inactive'))
@world.absorb
def verify_all_setting_entries(expected_entries):
settings = world.browser.find_by_css('.wrapper-comp-setting')
assert_equal(len(expected_entries), len(settings))
for (counter, setting) in enumerate(settings):
world.verify_setting_entry(
setting, expected_entries[counter][0],
expected_entries[counter][1], expected_entries[counter][2]
)
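# A minimal sketch of the expected_entries shape assumed by
# verify_all_setting_entries: each entry is a (display_name, value,
# explicitly_set) triple. The labels here are illustrative placeholders:
#
#   world.verify_all_setting_entries([
#       ('Display Name', 'Blank Common Problem', True),
#       ('Maximum Attempts', '', False),
#   ])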
@world.absorb
def save_component():
world.css_click("a.action-save")
world.wait_for_ajax_complete()
@world.absorb
def save_component_and_reopen(step):
save_component()
    # We have a known issue that modifications are still shown within the edit window after cancel
    # (though they are not persisted). Refresh the browser to make sure the changes WERE persisted after Save.
reload_the_page(step)
edit_component_and_select_settings()
@world.absorb
def cancel_component(step):
world.css_click("a.action-cancel")
    # We have a known issue that modifications are still shown within the edit window after cancel
    # (though they are not persisted). Refresh the browser to make sure the changes were not persisted.
reload_the_page(step)
@world.absorb
def revert_setting_entry(label):
get_setting_entry(label).find_by_css('.setting-clear')[0].click()
@world.absorb
def get_setting_entry(label):
def get_setting():
settings = world.css_find('.wrapper-comp-setting')
for setting in settings:
if setting.find_by_css('.setting-label')[0].value == label:
return setting
return None
return world.retry_on_exception(get_setting)
@world.absorb
def get_setting_entry_index(label):
def get_index():
settings = world.css_find('.metadata_edit .wrapper-comp-setting')
for index, setting in enumerate(settings):
if setting.find_by_css('.setting-label')[0].value == label:
return index
return None
return world.retry_on_exception(get_index)
@world.absorb
def set_field_value(index, value):
"""
Set the field to the specified value.
Note: we cannot use css_fill here because the value is not set
until after you move away from that field.
Instead we will find the element, set its value, then hit the Tab key
to get to the next field.
"""
elem = world.css_find('.metadata_edit div.wrapper-comp-setting input.setting-input')[index]
elem.value = value
elem.type(Keys.TAB)
| agpl-3.0 | 4,122,302,865,999,682,000 | 931,497,554,298,061,800 | 34.770992 | 137 | 0.689074 | false |
arrabito/DIRAC | DataManagementSystem/scripts/dirac-dms-set-replica-status.py | 9 | 2642 | #!/usr/bin/env python
########################################################################
# $HeadURL$
########################################################################
"""
Set the status of the replicas of given files at the provided SE
"""
__RCSID__ = "$Id$"
from DIRAC.Core.Base import Script
Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
'\nUsage:',
' %s [option|cfgfile] ... <LFN|File> SE Status' % Script.scriptName,
'Arguments:',
' LFN: LFN',
' File: File name containing a list of affected LFNs',
' SE: Name of Storage Element',
' Status: New Status for the replica' ] ) )
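# Example invocation (a sketch; the SE name and status below are placeholders):
#   dirac-dms-set-replica-status lfns.txt SOME-STORAGE-SE Trash
# where lfns.txt holds one LFN per line, or a single LFN may be given directly.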
Script.parseCommandLine( ignoreErrors = False )
import DIRAC
from DIRAC import gConfig, gLogger
from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
import os
args = Script.getPositionalArgs()
if len( args ) != 3:
Script.showHelp()
inputFileName = args[0]
storageElement = args[1]
status = args[2]
if os.path.exists( inputFileName ):
inputFile = open( inputFileName, 'r' )
string = inputFile.read()
inputFile.close()
lfns = sorted( string.splitlines() )
else:
lfns = [inputFileName]
fc = FileCatalog()
replicaDict = {}
res = fc.getReplicas( lfns, allStatus = True )
if not res['OK']:
gLogger.error( "Failed to get catalog replicas.", res['Message'] )
DIRAC.exit( -1 )
lfnDict = {}
for lfn, error in res['Value']['Failed'].items():
gLogger.error( "Failed to get replicas for file.", "%s:%s" % ( lfn, error ) )
for lfn, replicas in res['Value']['Successful'].items():
if not storageElement in replicas.keys():
gLogger.error( "LFN not registered at provided storage element." , "%s %s" % ( lfn, storageElement ) )
else:
lfnDict[lfn] = {'SE':storageElement, 'PFN':replicas[storageElement], 'Status':status}
if not lfnDict:
gLogger.error( "No files found at the supplied storage element." )
DIRAC.exit( 2 )
res = fc.setReplicaStatus( lfnDict )
if not res['OK']:
gLogger.error( "Failed to set catalog replica status.", res['Message'] )
DIRAC.exit( -1 )
for lfn, error in res['Value']['Failed'].items():
gLogger.error( "Failed to set replica status for file.", "%s:%s" % ( lfn, error ) )
gLogger.notice( "Successfully updated the status of %d files at %s." % ( len( res['Value']['Successful'].keys() ), storageElement ) )
DIRAC.exit( 0 )
| gpl-3.0 | -6,584,943,398,649,638,000 | 8,632,608,517,155,347,000 | 36.742857 | 133 | 0.55299 | false |
PaddlePaddle/Paddle | python/paddle/fluid/tests/unittests/test_rpn_target_assign_op.py | 2 | 24786 | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid import Program, program_guard
from op_test import OpTest
from test_anchor_generator_op import anchor_generator_in_python
from test_generate_proposal_labels_op import _generate_groundtruth
from test_generate_proposal_labels_op import _bbox_overlaps, _box_to_delta
def rpn_target_assign(anchor_by_gt_overlap,
rpn_batch_size_per_im,
rpn_positive_overlap,
rpn_negative_overlap,
rpn_fg_fraction,
use_random=True):
anchor_to_gt_argmax = anchor_by_gt_overlap.argmax(axis=1)
anchor_to_gt_max = anchor_by_gt_overlap[np.arange(
anchor_by_gt_overlap.shape[0]), anchor_to_gt_argmax]
gt_to_anchor_argmax = anchor_by_gt_overlap.argmax(axis=0)
gt_to_anchor_max = anchor_by_gt_overlap[gt_to_anchor_argmax, np.arange(
anchor_by_gt_overlap.shape[1])]
anchors_with_max_overlap = np.where(
anchor_by_gt_overlap == gt_to_anchor_max)[0]
labels = np.ones((anchor_by_gt_overlap.shape[0], ), dtype=np.int32) * -1
labels[anchors_with_max_overlap] = 1
labels[anchor_to_gt_max >= rpn_positive_overlap] = 1
num_fg = int(rpn_fg_fraction * rpn_batch_size_per_im)
fg_inds = np.where(labels == 1)[0]
if len(fg_inds) > num_fg and use_random:
disable_inds = np.random.choice(
fg_inds, size=(len(fg_inds) - num_fg), replace=False)
else:
disable_inds = fg_inds[num_fg:]
labels[disable_inds] = -1
fg_inds = np.where(labels == 1)[0]
bbox_inside_weight = np.zeros((len(fg_inds), 4), dtype=np.float32)
num_bg = rpn_batch_size_per_im - np.sum(labels == 1)
bg_inds = np.where(anchor_to_gt_max < rpn_negative_overlap)[0]
if len(bg_inds) > num_bg and use_random:
enable_inds = bg_inds[np.random.randint(len(bg_inds), size=num_bg)]
else:
enable_inds = bg_inds[:num_bg]
fg_fake_inds = np.array([], np.int32)
fg_value = np.array([fg_inds[0]], np.int32)
fake_num = 0
for bg_id in enable_inds:
if bg_id in fg_inds:
fake_num += 1
fg_fake_inds = np.hstack([fg_fake_inds, fg_value])
labels[enable_inds] = 0
bbox_inside_weight[fake_num:, :] = 1
fg_inds = np.where(labels == 1)[0]
bg_inds = np.where(labels == 0)[0]
loc_index = np.hstack([fg_fake_inds, fg_inds])
score_index = np.hstack([fg_inds, bg_inds])
labels = labels[score_index]
assert not np.any(labels == -1), "Wrong labels with -1"
gt_inds = anchor_to_gt_argmax[loc_index]
return loc_index, score_index, labels, gt_inds, bbox_inside_weight
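# A small deterministic sketch of the sampling above (values are illustrative;
# with use_random=False the selection is reproducible):
#
#   iou = np.array([[0.80, 0.40],   # anchor 0: best match for both gts -> fg
#                   [0.20, 0.20],   # max overlap < 0.3 -> background pool
#                   [0.10, 0.05]])  # max overlap < 0.3 -> background pool
#   loc, score, labels, gt_inds, bw = rpn_target_assign(
#       iou, rpn_batch_size_per_im=256, rpn_positive_overlap=0.7,
#       rpn_negative_overlap=0.3, rpn_fg_fraction=0.5, use_random=False)
#   # labels is 1 for sampled foreground and 0 for sampled background anchors,
#   # aligned with score_index; gt_inds maps each loc_index entry to its gt box.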
def get_anchor(n, c, h, w):
input_feat = np.random.random((n, c, h, w)).astype('float32')
anchors, _ = anchor_generator_in_python(
input_feat=input_feat,
anchor_sizes=[32., 64.],
aspect_ratios=[0.5, 1.0],
variances=[1.0, 1.0, 1.0, 1.0],
stride=[16.0, 16.0],
offset=0.5)
return anchors
def rpn_target_assign_in_python(all_anchors,
gt_boxes,
is_crowd,
im_info,
lod,
rpn_straddle_thresh,
rpn_batch_size_per_im,
rpn_positive_overlap,
rpn_negative_overlap,
rpn_fg_fraction,
use_random=True):
anchor_num = all_anchors.shape[0]
batch_size = len(lod) - 1
for i in range(batch_size):
im_height = im_info[i][0]
im_width = im_info[i][1]
im_scale = im_info[i][2]
if rpn_straddle_thresh >= 0:
# Only keep anchors inside the image by a margin of straddle_thresh
inds_inside = np.where(
(all_anchors[:, 0] >= -rpn_straddle_thresh) &
(all_anchors[:, 1] >= -rpn_straddle_thresh) & (
all_anchors[:, 2] < im_width + rpn_straddle_thresh) & (
all_anchors[:, 3] < im_height + rpn_straddle_thresh))[0]
# keep only inside anchors
inside_anchors = all_anchors[inds_inside, :]
else:
inds_inside = np.arange(all_anchors.shape[0])
inside_anchors = all_anchors
b, e = lod[i], lod[i + 1]
gt_boxes_slice = gt_boxes[b:e, :] * im_scale
is_crowd_slice = is_crowd[b:e]
not_crowd_inds = np.where(is_crowd_slice == 0)[0]
gt_boxes_slice = gt_boxes_slice[not_crowd_inds]
iou = _bbox_overlaps(inside_anchors, gt_boxes_slice)
loc_inds, score_inds, labels, gt_inds, bbox_inside_weight = \
rpn_target_assign(iou, rpn_batch_size_per_im,
rpn_positive_overlap,
rpn_negative_overlap,
rpn_fg_fraction,
use_random)
# unmap to all anchor
loc_inds = inds_inside[loc_inds]
score_inds = inds_inside[score_inds]
sampled_gt = gt_boxes_slice[gt_inds]
sampled_anchor = all_anchors[loc_inds]
box_deltas = _box_to_delta(sampled_anchor, sampled_gt, [1., 1., 1., 1.])
if i == 0:
loc_indexes = loc_inds
score_indexes = score_inds
tgt_labels = labels
tgt_bboxes = box_deltas
bbox_inside_weights = bbox_inside_weight
else:
loc_indexes = np.concatenate(
[loc_indexes, loc_inds + i * anchor_num])
score_indexes = np.concatenate(
[score_indexes, score_inds + i * anchor_num])
tgt_labels = np.concatenate([tgt_labels, labels])
tgt_bboxes = np.vstack([tgt_bboxes, box_deltas])
bbox_inside_weights = np.vstack([bbox_inside_weights, \
bbox_inside_weight])
return loc_indexes, score_indexes, tgt_bboxes, tgt_labels, bbox_inside_weights
def retinanet_target_assign(anchor_by_gt_overlap, gt_labels, positive_overlap,
negative_overlap):
anchor_to_gt_argmax = anchor_by_gt_overlap.argmax(axis=1)
anchor_to_gt_max = anchor_by_gt_overlap[np.arange(
anchor_by_gt_overlap.shape[0]), anchor_to_gt_argmax]
gt_to_anchor_argmax = anchor_by_gt_overlap.argmax(axis=0)
gt_to_anchor_max = anchor_by_gt_overlap[gt_to_anchor_argmax, np.arange(
anchor_by_gt_overlap.shape[1])]
anchors_with_max_overlap = np.where(
anchor_by_gt_overlap == gt_to_anchor_max)[0]
labels = np.ones((anchor_by_gt_overlap.shape[0], ), dtype=np.int32) * -1
labels[anchors_with_max_overlap] = 1
labels[anchor_to_gt_max >= positive_overlap] = 1
fg_inds = np.where(labels == 1)[0]
bbox_inside_weight = np.zeros((len(fg_inds), 4), dtype=np.float32)
bg_inds = np.where(anchor_to_gt_max < negative_overlap)[0]
enable_inds = bg_inds
fg_fake_inds = np.array([], np.int32)
fg_value = np.array([fg_inds[0]], np.int32)
fake_num = 0
for bg_id in enable_inds:
if bg_id in fg_inds:
fake_num += 1
fg_fake_inds = np.hstack([fg_fake_inds, fg_value])
labels[enable_inds] = 0
bbox_inside_weight[fake_num:, :] = 1
fg_inds = np.where(labels == 1)[0]
bg_inds = np.where(labels == 0)[0]
loc_index = np.hstack([fg_fake_inds, fg_inds])
score_index = np.hstack([fg_inds, bg_inds])
score_index_tmp = np.hstack([fg_inds])
labels = labels[score_index]
gt_inds = anchor_to_gt_argmax[loc_index]
label_inds = anchor_to_gt_argmax[score_index_tmp]
labels[0:len(fg_inds)] = np.squeeze(gt_labels[label_inds])
fg_num = len(fg_fake_inds) + len(fg_inds) + 1
assert not np.any(labels == -1), "Wrong labels with -1"
return loc_index, score_index, labels, gt_inds, bbox_inside_weight, fg_num
def retinanet_target_assign_in_python(all_anchors, gt_boxes, gt_labels,
is_crowd, im_info, lod, positive_overlap,
negative_overlap):
anchor_num = all_anchors.shape[0]
batch_size = len(lod) - 1
for i in range(batch_size):
im_scale = im_info[i][2]
inds_inside = np.arange(all_anchors.shape[0])
inside_anchors = all_anchors
b, e = lod[i], lod[i + 1]
gt_boxes_slice = gt_boxes[b:e, :] * im_scale
gt_labels_slice = gt_labels[b:e, :]
is_crowd_slice = is_crowd[b:e]
not_crowd_inds = np.where(is_crowd_slice == 0)[0]
gt_boxes_slice = gt_boxes_slice[not_crowd_inds]
gt_labels_slice = gt_labels_slice[not_crowd_inds]
iou = _bbox_overlaps(inside_anchors, gt_boxes_slice)
loc_inds, score_inds, labels, gt_inds, bbox_inside_weight, fg_num = \
retinanet_target_assign(iou, gt_labels_slice,
positive_overlap, negative_overlap)
# unmap to all anchor
loc_inds = inds_inside[loc_inds]
score_inds = inds_inside[score_inds]
sampled_gt = gt_boxes_slice[gt_inds]
sampled_anchor = all_anchors[loc_inds]
box_deltas = _box_to_delta(sampled_anchor, sampled_gt, [1., 1., 1., 1.])
if i == 0:
loc_indexes = loc_inds
score_indexes = score_inds
tgt_labels = labels
tgt_bboxes = box_deltas
bbox_inside_weights = bbox_inside_weight
fg_nums = [[fg_num]]
else:
loc_indexes = np.concatenate(
[loc_indexes, loc_inds + i * anchor_num])
score_indexes = np.concatenate(
[score_indexes, score_inds + i * anchor_num])
tgt_labels = np.concatenate([tgt_labels, labels])
tgt_bboxes = np.vstack([tgt_bboxes, box_deltas])
bbox_inside_weights = np.vstack([bbox_inside_weights, \
bbox_inside_weight])
fg_nums = np.concatenate([fg_nums, [[fg_num]]])
return loc_indexes, score_indexes, tgt_bboxes, tgt_labels, bbox_inside_weights, fg_nums
class TestRpnTargetAssignOp(OpTest):
def setUp(self):
n, c, h, w = 2, 4, 14, 14
all_anchors = get_anchor(n, c, h, w)
gt_num = 10
all_anchors = all_anchors.reshape(-1, 4)
anchor_num = all_anchors.shape[0]
images_shape = [[64, 64], [64, 64]]
#images_shape = [[64, 64]]
groundtruth, lod = _generate_groundtruth(images_shape, 3, 4)
lod = [0, 4, 8]
#lod = [0, 4]
im_info = np.ones((len(images_shape), 3)).astype(np.float32)
for i in range(len(images_shape)):
im_info[i, 0] = images_shape[i][0]
im_info[i, 1] = images_shape[i][1]
im_info[i, 2] = 0.8 #scale
gt_boxes = np.vstack([v['boxes'] for v in groundtruth])
is_crowd = np.hstack([v['is_crowd'] for v in groundtruth])
all_anchors = all_anchors.astype('float32')
gt_boxes = gt_boxes.astype('float32')
rpn_straddle_thresh = 0.0
rpn_batch_size_per_im = 256
rpn_positive_overlap = 0.7
rpn_negative_overlap = 0.3
rpn_fg_fraction = 0.5
use_random = False
loc_index, score_index, tgt_bbox, labels, bbox_inside_weights = \
rpn_target_assign_in_python(all_anchors, gt_boxes, is_crowd,
im_info, lod, rpn_straddle_thresh,
rpn_batch_size_per_im, rpn_positive_overlap,
rpn_negative_overlap,
rpn_fg_fraction, use_random)
labels = labels[:, np.newaxis]
self.op_type = "rpn_target_assign"
self.inputs = {
'Anchor': all_anchors,
'GtBoxes': (gt_boxes, [[4, 4]]),
'IsCrowd': (is_crowd, [[4, 4]]),
'ImInfo': (im_info, [[1, 1]])
}
self.attrs = {
'rpn_batch_size_per_im': rpn_batch_size_per_im,
'rpn_straddle_thresh': rpn_straddle_thresh,
'rpn_positive_overlap': rpn_positive_overlap,
'rpn_negative_overlap': rpn_negative_overlap,
'rpn_fg_fraction': rpn_fg_fraction,
'use_random': use_random
}
self.outputs = {
'LocationIndex': loc_index.astype('int32'),
'ScoreIndex': score_index.astype('int32'),
'TargetBBox': tgt_bbox.astype('float32'),
'TargetLabel': labels.astype('int32'),
'BBoxInsideWeight': bbox_inside_weights.astype('float32')
}
def test_check_output(self):
self.check_output()
class TestRetinanetTargetAssignOp(OpTest):
def setUp(self):
n, c, h, w = 2, 4, 14, 14
all_anchors = get_anchor(n, c, h, w)
gt_num = 10
all_anchors = all_anchors.reshape(-1, 4)
anchor_num = all_anchors.shape[0]
images_shape = [[64, 64], [64, 64]]
groundtruth, lod = _generate_groundtruth(images_shape, 3, 4)
lod = [0, 4, 8]
im_info = np.ones((len(images_shape), 3)).astype(np.float32)
for i in range(len(images_shape)):
im_info[i, 0] = images_shape[i][0]
im_info[i, 1] = images_shape[i][1]
im_info[i, 2] = 0.8 #scale
gt_boxes = np.vstack([v['boxes'] for v in groundtruth])
is_crowd = np.hstack([v['is_crowd'] for v in groundtruth])
gt_labels = np.vstack([
v['gt_classes'].reshape(len(v['gt_classes']), 1)
for v in groundtruth
])
gt_labels = gt_labels.reshape(len(gt_labels), 1)
all_anchors = all_anchors.astype('float32')
gt_boxes = gt_boxes.astype('float32')
gt_labels = gt_labels.astype('int32')
positive_overlap = 0.5
negative_overlap = 0.4
loc_index, score_index, tgt_bbox, labels, bbox_inside_weights, fg_num = \
retinanet_target_assign_in_python(all_anchors, gt_boxes, gt_labels, is_crowd,
im_info, lod, positive_overlap, negative_overlap)
labels = labels[:, np.newaxis]
self.op_type = "retinanet_target_assign"
self.inputs = {
'Anchor': all_anchors,
'GtBoxes': (gt_boxes, [[4, 4]]),
'GtLabels': (gt_labels, [[4, 4]]),
'IsCrowd': (is_crowd, [[4, 4]]),
'ImInfo': (im_info, [[1, 1]])
}
self.attrs = {
'positive_overlap': positive_overlap,
'negative_overlap': negative_overlap
}
self.outputs = {
'LocationIndex': loc_index.astype('int32'),
'ScoreIndex': score_index.astype('int32'),
'TargetBBox': tgt_bbox.astype('float32'),
'TargetLabel': labels.astype('int32'),
'BBoxInsideWeight': bbox_inside_weights.astype('float32'),
'ForegroundNumber': fg_num.astype('int32')
}
def test_check_output(self):
self.check_output()
class TestRetinanetTargetAssignOpError(unittest.TestCase):
def test_errors(self):
with program_guard(Program(), Program()):
bbox_pred1 = fluid.data(
name='bbox_pred1', shape=[1, 100, 4], dtype='float32')
cls_logits1 = fluid.data(
name='cls_logits1', shape=[1, 100, 10], dtype='float32')
anchor_box1 = fluid.data(
name='anchor_box1', shape=[100, 4], dtype='float32')
anchor_var1 = fluid.data(
name='anchor_var1', shape=[100, 4], dtype='float32')
gt_boxes1 = fluid.data(
name='gt_boxes1', shape=[10, 4], dtype='float32')
gt_labels1 = fluid.data(
name='gt_labels1', shape=[10, 1], dtype='int32')
is_crowd1 = fluid.data(name='is_crowd1', shape=[1], dtype='float32')
im_info1 = fluid.data(
name='im_info1', shape=[1, 3], dtype='float32')
            # The `bbox_pred` must be Variable and the data type of `bbox_pred`
            # Tensor must be one of float32 and float64.
def test_bbox_pred_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign([1], cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_bbox_pred_type)
def test_bbox_pred_tensor_dtype():
bbox_pred2 = fluid.data(
                name='bbox_pred2', shape=[1, 100, 4], dtype='int32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred2, cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_bbox_pred_tensor_dtype)
            # The `cls_logits` must be Variable and the data type of `cls_logits`
            # Tensor must be one of float32 and float64.
def test_cls_logits_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, 2, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_cls_logits_type)
def test_cls_logits_tensor_dtype():
cls_logits2 = fluid.data(
name='cls_logits2', shape=[1, 100, 10], dtype='int32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits2, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_cls_logits_tensor_dtype)
            # The `anchor_box` must be Variable and the data type of `anchor_box`
            # Tensor must be one of float32 and float64.
def test_anchor_box_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, [5],
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_anchor_box_type)
def test_anchor_box_tensor_dtype():
anchor_box2 = fluid.data(
name='anchor_box2', shape=[100, 4], dtype='int32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box2,
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_anchor_box_tensor_dtype)
            # The `anchor_var` must be Variable and the data type of `anchor_var`
            # Tensor must be one of float32 and float64.
def test_anchor_var_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
5, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_anchor_var_type)
def test_anchor_var_tensor_dtype():
anchor_var2 = fluid.data(
name='anchor_var2', shape=[100, 4], dtype='int32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var2, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_anchor_var_tensor_dtype)
            # The `gt_boxes` must be Variable and the data type of `gt_boxes`
            # Tensor must be one of float32 and float64.
def test_gt_boxes_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, [4], gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_gt_boxes_type)
def test_gt_boxes_tensor_dtype():
gt_boxes2 = fluid.data(
name='gt_boxes2', shape=[10, 4], dtype='int32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, gt_boxes2, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_gt_boxes_tensor_dtype)
            # The `gt_label` must be Variable and the data type of `gt_label`
            # Tensor must be int32.
def test_gt_label_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, 9, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_gt_label_type)
def test_gt_label_tensor_dtype():
gt_labels2 = fluid.data(
name='label2', shape=[10, 1], dtype='float32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, gt_labels2, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_gt_label_tensor_dtype)
            # The `is_crowd` must be Variable and the data type of `is_crowd`
            # Tensor must be int32.
def test_is_crowd_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, [10], im_info1, 10)
self.assertRaises(TypeError, test_is_crowd_type)
def test_is_crowd_tensor_dtype():
is_crowd2 = fluid.data(
name='is_crowd2', shape=[10, 1], dtype='float32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, is_crowd2, im_info1, 10)
self.assertRaises(TypeError, test_is_crowd_tensor_dtype)
# The `im_info` must be Variable and the data type of `im_info` Tensor
# must be one of float32 and float64.
def test_im_info_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, 1, 10)
self.assertRaises(TypeError, test_im_info_type)
def test_im_info_tensor_dtype():
im_info2 = fluid.data(
name='im_info2', shape=[1, 3], dtype='int32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, im_info2, 10)
self.assertRaises(TypeError, test_im_info_tensor_dtype)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -3,875,588,891,133,964,300 | 5,857,876,421,516,491,000 | 43.024867 | 94 | 0.562092 | false |
pusnik/pyexchange | pyexchange/connection.py | 1 | 4274 | """
(c) 2013 LinkedIn Corp. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
"""
import requests
from requests_ntlm import HttpNtlmAuth
from requests.auth import HTTPBasicAuth
import logging
from .exceptions import FailedExchangeException
log = logging.getLogger('pyexchange')
class ExchangeBaseConnection(object):
""" Base class for Exchange connections."""
def send(self, body, headers=None, retries=2, timeout=30):
raise NotImplementedError
class ExchangeNTLMAuthConnection(ExchangeBaseConnection):
""" Connection to Exchange that uses NTLM authentication """
def __init__(self, url, username, password, verify_certificate=True, **kwargs):
self.url = url
self.username = username
self.password = password
self.verify_certificate = verify_certificate
self.handler = None
self.session = None
self.password_manager = None
def build_password_manager(self):
if self.password_manager:
return self.password_manager
log.debug(u'Constructing NTLM auth password manager')
self.password_manager = HttpNtlmAuth(self.username, self.password)
return self.password_manager
def build_session(self):
if self.session:
return self.session
log.debug(u'Constructing NTLM auth opener')
self.password_manager = self.build_password_manager()
self.session = requests.Session()
self.session.auth = self.password_manager
return self.session
def send(self, body, headers=None, retries=2, timeout=30):
if not self.session:
self.session = self.build_session()
try:
response = self.session.post(self.url, data=body, headers=headers, verify = self.verify_certificate)
response.raise_for_status()
except requests.exceptions.RequestException as err:
log.debug(getattr(err.response, 'content', 'No response.'))
raise FailedExchangeException(u'Unable to connect to Exchange with NTLM: %s' % err)
log.info(u'Got response: {code}'.format(code=response.status_code))
log.debug(u'Got response headers: {headers}'.format(headers=response.headers))
log.debug(u'Got body: {body}'.format(body=response.text))
return response.content
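# A hedged usage sketch (the EWS URL, domain, and credentials are placeholders;
# the SOAP body would normally be produced by the pyexchange service layer):
#
#   connection = ExchangeNTLMAuthConnection(
#       url='https://mail.example.com/EWS/Exchange.asmx',
#       username='DOMAIN\\someuser',
#       password='secret')
#   xml_bytes = connection.send(
#       soap_body, headers={'Content-Type': 'text/xml; charset=utf-8'})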
class ExchangeBasicAuthConnection(ExchangeBaseConnection):
""" Connection to Exchange, Office365 that uses Basic authentication """
def __init__(self, url, username, password, verify_certificate=True, **kwargs):
self.url = url
self.username = username
self.password = password
self.verify_certificate = verify_certificate
self.handler = None
self.session = None
self.password_manager = None
def build_password_manager(self):
if self.password_manager:
return self.password_manager
log.debug(u'Constructing basic auth password manager')
self.password_manager = HTTPBasicAuth(self.username, self.password)
return self.password_manager
def build_session(self):
if self.session:
return self.session
log.debug(u'Constructing opener with Basic auth')
self.password_manager = self.build_password_manager()
self.session = requests.Session()
self.session.auth = self.password_manager
return self.session
def send(self, body, headers=None, retries=2, timeout=30):
if not self.session:
self.session = self.build_session()
try:
response = self.session.post(self.url, data=body, headers=headers, verify = self.verify_certificate)
response.raise_for_status()
except requests.exceptions.RequestException as err:
            log.debug(getattr(err.response, 'content', 'No response.'))
raise FailedExchangeException(u'Unable to connect to Exchange with Basic auth: %s' % err)
log.info(u'Got response: {code}'.format(code=response.status_code))
log.debug(u'Got response headers: {headers}'.format(headers=response.headers))
log.debug(u'Got body: {body}'.format(body=response.text))
return response.content
| apache-2.0 | 2,137,834,657,569,709,300 | -152,502,206,094,491,460 | 32.390625 | 212 | 0.726954 | false |
denny820909/builder | lib/python2.7/site-packages/Twisted-12.2.0-py2.7-linux-x86_64.egg/twisted/trial/test/test_test_visitor.py | 90 | 2282 | from twisted.trial import unittest
from twisted.trial.runner import TestSuite, suiteVisit
pyunit = __import__('unittest')
class MockVisitor(object):
def __init__(self):
self.calls = []
def __call__(self, testCase):
self.calls.append(testCase)
class TestTestVisitor(unittest.TestCase):
def setUp(self):
self.visitor = MockVisitor()
def test_visitCase(self):
"""
Test that C{visit} works for a single test case.
"""
testCase = TestTestVisitor('test_visitCase')
testCase.visit(self.visitor)
self.assertEqual(self.visitor.calls, [testCase])
def test_visitSuite(self):
"""
Test that C{visit} hits all tests in a suite.
"""
tests = [TestTestVisitor('test_visitCase'),
TestTestVisitor('test_visitSuite')]
testSuite = TestSuite(tests)
testSuite.visit(self.visitor)
self.assertEqual(self.visitor.calls, tests)
def test_visitEmptySuite(self):
"""
Test that C{visit} on an empty suite hits nothing.
"""
TestSuite().visit(self.visitor)
self.assertEqual(self.visitor.calls, [])
def test_visitNestedSuite(self):
"""
Test that C{visit} recurses through suites.
"""
tests = [TestTestVisitor('test_visitCase'),
TestTestVisitor('test_visitSuite')]
testSuite = TestSuite([TestSuite([test]) for test in tests])
testSuite.visit(self.visitor)
self.assertEqual(self.visitor.calls, tests)
def test_visitPyunitSuite(self):
"""
Test that C{suiteVisit} visits stdlib unittest suites
"""
test = TestTestVisitor('test_visitPyunitSuite')
suite = pyunit.TestSuite([test])
suiteVisit(suite, self.visitor)
self.assertEqual(self.visitor.calls, [test])
def test_visitPyunitCase(self):
"""
Test that a stdlib test case in a suite gets visited.
"""
class PyunitCase(pyunit.TestCase):
def test_foo(self):
pass
test = PyunitCase('test_foo')
TestSuite([test]).visit(self.visitor)
self.assertEqual(
[call.id() for call in self.visitor.calls], [test.id()])
| mit | -8,829,932,341,644,161,000 | -55,194,661,511,597,670 | 26.829268 | 68 | 0.601227 | false |
abztrakt/django-badger | badger/migrations/0006_auto__add_field_nomination_rejecter__add_field_nomination_rejection_re.py | 9 | 11582 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Nomination.rejected_by'
db.add_column('badger_nomination', 'rejected_by',
self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='nomination_rejected_by', null=True, to=orm['auth.User']),
keep_default=False)
# Adding field 'Nomination.rejected_reason'
db.add_column('badger_nomination', 'rejected_reason',
self.gf('django.db.models.fields.TextField')(default='', blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Nomination.rejected_by'
db.delete_column('badger_nomination', 'rejected_by_id')
# Deleting field 'Nomination.rejected_reason'
db.delete_column('badger_nomination', 'rejected_reason')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'badger.award': {
'Meta': {'ordering': "['-modified', '-created']", 'object_name': 'Award'},
'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['badger.Badge']"}),
'claim_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'db_index': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'award_creator'", 'null': 'True', 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'badger.badge': {
'Meta': {'ordering': "['-modified', '-created']", 'unique_together': "(('title', 'slug'),)", 'object_name': 'Badge'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'nominations_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'prerequisites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['badger.Badge']", 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'unique': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'badger.deferredaward': {
'Meta': {'ordering': "['-modified', '-created']", 'object_name': 'DeferredAward'},
'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['badger.Badge']"}),
'claim_code': ('django.db.models.fields.CharField', [], {'default': "'xamuuk'", 'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'claim_group': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'db_index': 'True', 'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'reusable': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'badger.nomination': {
'Meta': {'object_name': 'Nomination'},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'approver': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'nomination_approver'", 'null': 'True', 'to': "orm['auth.User']"}),
'award': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['badger.Award']", 'null': 'True', 'blank': 'True'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['badger.Badge']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'nomination_creator'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'nominee': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'nomination_nominee'", 'to': "orm['auth.User']"}),
'rejected_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'nomination_rejected_by'", 'null': 'True', 'to': "orm['auth.User']"}),
'rejected_reason': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'badger.progress': {
'Meta': {'unique_together': "(('badge', 'user'),)", 'object_name': 'Progress'},
'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['badger.Badge']"}),
'counter': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'notes': ('badger.models.JSONField', [], {'null': 'True', 'blank': 'True'}),
'percent': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'progress_user'", 'to': "orm['auth.User']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
}
}
complete_apps = ['badger']
| bsd-3-clause | -4,403,783,001,536,741,000 | -7,625,071,050,684,849,000 | 78.875862 | 183 | 0.551718 | false |
smart-developerr/my-first-blog | Lib/site-packages/django/core/management/sql.py | 108 | 1972 | from __future__ import unicode_literals
from django.apps import apps
from django.db import models
def sql_flush(style, connection, only_django=False, reset_sequences=True, allow_cascade=False):
"""
Returns a list of the SQL statements used to flush the database.
If only_django is True, then only table names that have associated Django
models and are in INSTALLED_APPS will be included.
"""
if only_django:
tables = connection.introspection.django_table_names(only_existing=True, include_views=False)
else:
tables = connection.introspection.table_names(include_views=False)
seqs = connection.introspection.sequence_list() if reset_sequences else ()
statements = connection.ops.sql_flush(style, tables, seqs, allow_cascade)
return statements
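# A minimal sketch of how sql_flush is typically driven (this mirrors what the
# flush management command does; `connection` is the default-alias handle):
#
#   from django.core.management.color import no_style
#   from django.db import connection
#
#   for statement in sql_flush(no_style(), connection, allow_cascade=False):
#       print(statement)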
def emit_pre_migrate_signal(verbosity, interactive, db, **kwargs):
# Emit the pre_migrate signal for every application.
for app_config in apps.get_app_configs():
if app_config.models_module is None:
continue
if verbosity >= 2:
print("Running pre-migrate handlers for application %s" % app_config.label)
models.signals.pre_migrate.send(
sender=app_config,
app_config=app_config,
verbosity=verbosity,
interactive=interactive,
using=db,
**kwargs
)
def emit_post_migrate_signal(verbosity, interactive, db, **kwargs):
# Emit the post_migrate signal for every application.
for app_config in apps.get_app_configs():
if app_config.models_module is None:
continue
if verbosity >= 2:
print("Running post-migrate handlers for application %s" % app_config.label)
models.signals.post_migrate.send(
sender=app_config,
app_config=app_config,
verbosity=verbosity,
interactive=interactive,
using=db,
**kwargs
)
| gpl-3.0 | -553,998,357,976,888,100 | -8,411,187,365,635,385,000 | 35.518519 | 101 | 0.651623 | false |
angela278/UPDream | lib/requests/packages/chardet/euckrfreq.py | 3121 | 45978 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Sampling from about 20M text materials including literature and computer technology
# 128 --> 0.79
# 256 --> 0.92
# 512 --> 0.986
# 1024 --> 0.99944
# 2048 --> 0.99999
#
# Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
# Random Distribution Ratio = 512 / (2350-512) = 0.279.
#
# Typical Distribution Ratio
EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
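# Sanity check of the arithmetic above: 0.98653 / (1 - 0.98653) ~= 73.24 and
# 512 / (2350 - 512) ~= 0.279. The working constant is set far below the ideal
# ratio, presumably as a conservative margin for short or atypical inputs.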
EUCKR_TABLE_SIZE = 2352
# Char to FreqOrder table
EUCKRCharToFreqOrder = ( \
13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87,
1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734,
945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622,
708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750,
1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19,
1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885,
127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841,
1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910,
1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610,
269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939,
887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934,
1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7,
103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250,
423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824,
532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745,
619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61,
191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032,
2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075,
1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274,
962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721,
1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463,
2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285,
2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10,
2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350,
1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247,
1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259,
1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262,
2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273,
295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117,
432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312,
2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229,
2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315,
501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170,
1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485,
1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494,
434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544,
2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562,
2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431,
202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406,
2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256
#Everything below is of no interest for detection purposes
2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658,
2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674,
2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690,
2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 880,2700,2701,2702,2703,2704,
2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720,
2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734,
2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750,
2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765,
2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779,
2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793,
2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809,
2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824,
2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840,
2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856,
1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869,
2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883,
2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899,
2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915,
2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331,
2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945,
2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961,
2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976,
2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992,
2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008,
3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021,
3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037,
3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052,
3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066,
3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3076,1559,3077,3078,3079,3080,
3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095,
3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110,
3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124,
3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140,
3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156,
3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172,
3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187,
3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201,
3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217,
3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233,
3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248,
3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264,
3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279,
3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295,
3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311,
3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327,
3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343,
3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359,
3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374,
3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389,
3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405,
3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338,
3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432,
3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446,
3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3456,3457,3458,3459,1567,1191,
3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471,
3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486,
1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499,
1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513,
3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525,
3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541,
3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557,
3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573,
3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587,
3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603,
3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618,
3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632,
3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648,
3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663,
3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679,
3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695,
3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583,
1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722,
3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738,
3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753,
3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767,
3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782,
3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796,
3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810,
3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3820,3821,1590,3822,3823,1591,
1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836,
3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851,
3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866,
3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880,
3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895,
1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905,
3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921,
3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934,
3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603,
3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964,
3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978,
3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993,
3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009,
4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024,
4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040,
1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055,
4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069,
4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083,
4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098,
4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113,
4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610,
4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142,
4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157,
4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173,
4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4184,4185,4186,4187,4188,4189,
4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205,
4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220,
4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234,
4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249,
4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265,
4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279,
4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294,
4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310,
4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326,
4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341,
4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357,
4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371,
4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387,
4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403,
4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418,
4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432,
4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446,
4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461,
4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476,
4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491,
4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507,
4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623,
4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536,
4537,4538,4539,4540,4541,4542,4543, 975,4544,4545,4546,4547,4548,4549,4550,4551,
4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567,
4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581,
4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627,
4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611,
4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626,
4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642,
4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657,
4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672,
4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687,
1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700,
4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715,
4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731,
4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633,
4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758,
4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773,
4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788,
4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803,
4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817,
4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832,
4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847,
4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863,
4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879,
4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893,
4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909,
4910,4911,1642,4912,4913,4914,1364,4915,4916,4917,4918,4919,4920,4921,4922,4923,
4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938,
4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954,
4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970,
4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645,
4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999,
5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078,
5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028,
1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042,
5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056,
5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072,
5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087,
5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103,
5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118,
1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132,
5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,
5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161,
5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177,
5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192,
5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206,
1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218,
5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,
5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249,
5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262,
5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,
5279,5280,5281,5282,5283,1659,5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,
5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308,
5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323,
5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338,
5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353,
5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369,
5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385,
5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400,
5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415,
5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430,
5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445,
5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461,
5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477,
5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491,
5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507,
5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523,
5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539,
5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554,
5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570,
1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585,
5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600,
5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615,
5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,
5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,
5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,
1666,5661,5662,5663,5664,5665,5666,5667,5668,1667,5669,1668,5670,5671,5672,5673,
5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688,
5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703,
5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716,
5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729,
5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744,
1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758,
5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773,
1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786,
5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801,
5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815,
5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831,
5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847,
5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862,
5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876,
5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889,
5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,
5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,
5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687,
5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,
5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963,
5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,
5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993,
5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009,
6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025,
6026,6027,1265,6028,6029,1691,6030,6031,6032,6033,6034,6035,6036,6037,6038,6039,
6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055,
6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071,
6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086,
6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102,
6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118,
6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133,
6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147,
6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,
6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,
6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194,
6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,
6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225,
6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,
6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256,
6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271, #1024
6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287,
6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699,
6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317,
6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,
6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347,
6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,
6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,
6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,
6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411,
6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425,
6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440,
6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456,
6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,
6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488,
6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266,
6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,
6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535,
6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551,
1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565,
6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581,
6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597,
6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613,
6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629,
6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644,
1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,6659,
6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674,
1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689,
6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705,
6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721,
6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736,
1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748,
6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763,
6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779,
6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794,
6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711,
6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825,
6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840,
6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856,
6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872,
6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888,
6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903,
6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918,
6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934,
6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950,
6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,
6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981,
6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996,
6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011,
7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,
7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,7042,
7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,
7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074,
7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090,
7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106,
7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122,
7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138,
7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154,
7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170,
7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186,
7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202,
7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216,
7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232,
7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248,
7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264,
7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280,
7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296,
7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312,
7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327,
7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343,
7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359,
7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375,
7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391,
7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407,
7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423,
7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,7439,
7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455,
7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,
7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,
7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503,
7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,
7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,
7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551,
7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567,
7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583,
7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599,
7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615,
7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631,
7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647,
7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663,
7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679,
7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695,
7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711,
7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727,
7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743,
7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759,
7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,
7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,
7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807,
7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,
7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,
7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,
7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,
7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,
7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,
7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,
7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935,
7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951,
7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967,
7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983,
7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999,
8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,
8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031,
8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047,
8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,
8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,
8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,
8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111,
8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127,
8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,
8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,
8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,
8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,
8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,
8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,
8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,
8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,
8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,
8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,
8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,
8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,
8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,
8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,
8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,
8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,
8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,
8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,
8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,
8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,
8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,
8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,
8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,
8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,
8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,
8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,
8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,
8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,
8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,
8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,
8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,
8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,
8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,
8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,
8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,
8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,
8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,
8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,
8736,8737,8738,8739,8740,8741)
# flake8: noqa
| apache-2.0 | -1,519,528,141,597,379,800 | -5,597,367,078,816,182,000 | 76.144295 | 92 | 0.762452 | false |
stonegithubs/odoo | addons/board/__openerp__.py | 261 | 1647 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2012 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Dashboards',
'version': '1.0',
'category': 'Hidden',
'description': """
Lets the user create a custom dashboard.
========================================
Allows users to create custom dashboards.
""",
'author': 'OpenERP SA',
'depends': ['base', 'web'],
'data': [
'security/ir.model.access.csv',
'board_view.xml',
'board_mydashboard_view.xml',
'views/board.xml',
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -4,346,815,813,543,549,400 | 6,575,748,324,982,140,000 | 35.6 | 78 | 0.576806 | false |
olivierdalang/QGIS | tests/src/python/test_qgsrasterlayer.py | 4 | 63707 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsRasterLayer.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
from builtins import str
__author__ = 'Tim Sutton'
__date__ = '20/08/2012'
__copyright__ = 'Copyright 2012, The QGIS Project'
import qgis # NOQA
import os
import filecmp
from shutil import copyfile
from qgis.PyQt.QtCore import QSize, QFileInfo, Qt, QTemporaryDir
from qgis.PyQt.QtGui import (
QColor,
QImage,
QPainter,
QResizeEvent
)
from qgis.PyQt.QtXml import QDomDocument
from qgis.core import (QgsRaster,
QgsRasterLayer,
QgsReadWriteContext,
QgsColorRampShader,
QgsContrastEnhancement,
QgsDataProvider,
QgsProject,
QgsMapSettings,
QgsPointXY,
QgsRasterMinMaxOrigin,
QgsRasterShader,
QgsRasterTransparency,
QgsRenderChecker,
QgsPalettedRasterRenderer,
QgsSingleBandGrayRenderer,
QgsSingleBandPseudoColorRenderer,
QgsLimitedRandomColorRamp,
QgsGradientColorRamp,
QgsHueSaturationFilter,
QgsCoordinateTransformContext,
QgsCoordinateReferenceSystem,
QgsRasterHistogram,
QgsCubicRasterResampler,
QgsBilinearRasterResampler,
QgsLayerDefinition
)
from utilities import unitTestDataPath
from qgis.testing import start_app, unittest
from qgis.testing.mocked import get_iface
# Convenience instances in case you need them
# (not used in this test)
start_app()
class TestQgsRasterLayer(unittest.TestCase):
def setUp(self):
self.iface = get_iface()
QgsProject.instance().removeAllMapLayers()
self.iface.mapCanvas().viewport().resize(400, 400)
# For some reason the resizeEvent is not delivered, fake it
self.iface.mapCanvas().resizeEvent(QResizeEvent(QSize(400, 400), self.iface.mapCanvas().size()))
def testIdentify(self):
myPath = os.path.join(unitTestDataPath(), 'landsat.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
myRasterLayer = QgsRasterLayer(myPath, myBaseName)
myMessage = 'Raster not loaded: %s' % myPath
assert myRasterLayer.isValid(), myMessage
myPoint = QgsPointXY(786690, 3345803)
# print 'Extents: %s' % myRasterLayer.extent().toString()
# myResult, myRasterValues = myRasterLayer.identify(myPoint)
# assert myResult
myRasterValues = myRasterLayer.dataProvider().identify(myPoint, QgsRaster.IdentifyFormatValue).results()
assert len(myRasterValues) > 0
# Get the name of the first band
myBand = list(myRasterValues.keys())[0]
# myExpectedName = 'Band 1'
myExpectedBand = 1
myMessage = 'Expected "%s" got "%s" for first raster band name' % (
myExpectedBand, myBand)
assert myExpectedBand == myBand, myMessage
# Convert each band value to a list of ints then to a string
myValues = list(myRasterValues.values())
myIntValues = []
for myValue in myValues:
myIntValues.append(int(myValue))
myValues = str(myIntValues)
myExpectedValues = '[127, 141, 112, 72, 86, 126, 156, 211, 170]'
myMessage = 'Expected: %s\nGot: %s' % (myExpectedValues, myValues)
self.assertEqual(myValues, myExpectedValues, myMessage)
def testTransparency(self):
myPath = os.path.join(unitTestDataPath('raster'),
'band1_float32_noct_epsg4326.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
myRasterLayer = QgsRasterLayer(myPath, myBaseName)
myMessage = 'Raster not loaded: %s' % myPath
assert myRasterLayer.isValid(), myMessage
renderer = QgsSingleBandGrayRenderer(myRasterLayer.dataProvider(), 1)
myRasterLayer.setRenderer(renderer)
myRasterLayer.setContrastEnhancement(
QgsContrastEnhancement.StretchToMinimumMaximum,
QgsRasterMinMaxOrigin.MinMax)
myContrastEnhancement = myRasterLayer.renderer().contrastEnhancement()
# print ("myContrastEnhancement.minimumValue = %.17g" %
# myContrastEnhancement.minimumValue())
# print ("myContrastEnhancement.maximumValue = %.17g" %
# myContrastEnhancement.maximumValue())
# Unfortunately the minimum/maximum values calculated in C++ and Python
# are slightly different (e.g. 3.3999999521443642e+38 x
# 3.3999999521444001e+38)
# It is not clear where the precision is lost.
# We set the same values as C++.
myContrastEnhancement.setMinimumValue(-3.3319999287625854e+38)
myContrastEnhancement.setMaximumValue(3.3999999521443642e+38)
# myType = myRasterLayer.dataProvider().dataType(1);
# myEnhancement = QgsContrastEnhancement(myType);
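# Build two single-value transparency ranges: pixels whose values fall
# in the first range render 50% transparent, pixels in the second
# range render 70% transparent.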
myTransparentSingleValuePixelList = []
rasterTransparency = QgsRasterTransparency()
myTransparentPixel1 = \
QgsRasterTransparency.TransparentSingleValuePixel()
myTransparentPixel1.min = -2.5840000772112106e+38
myTransparentPixel1.max = -1.0879999684602689e+38
myTransparentPixel1.percentTransparent = 50
myTransparentSingleValuePixelList.append(myTransparentPixel1)
myTransparentPixel2 = \
QgsRasterTransparency.TransparentSingleValuePixel()
myTransparentPixel2.min = 1.359999960575336e+37
myTransparentPixel2.max = 9.520000231087593e+37
myTransparentPixel2.percentTransparent = 70
myTransparentSingleValuePixelList.append(myTransparentPixel2)
rasterTransparency.setTransparentSingleValuePixelList(
myTransparentSingleValuePixelList)
rasterRenderer = myRasterLayer.renderer()
assert rasterRenderer
rasterRenderer.setRasterTransparency(rasterTransparency)
QgsProject.instance().addMapLayers([myRasterLayer, ])
myMapSettings = QgsMapSettings()
myMapSettings.setLayers([myRasterLayer])
myMapSettings.setExtent(myRasterLayer.extent())
myChecker = QgsRenderChecker()
myChecker.setControlName("expected_raster_transparency")
myChecker.setMapSettings(myMapSettings)
myResultFlag = myChecker.runTest("raster_transparency_python")
assert myResultFlag, "Raster transparency rendering test failed"
def testIssue7023(self):
"""Check if converting a raster from 1.8 to 2 works."""
myPath = os.path.join(unitTestDataPath('raster'),
'raster-palette-crash2.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
myRasterLayer = QgsRasterLayer(myPath, myBaseName)
myMessage = 'Raster not loaded: %s' % myPath
assert myRasterLayer.isValid(), myMessage
# crash on next line
QgsProject.instance().addMapLayers([myRasterLayer])
def testShaderCrash(self):
"""Check if we assign a shader and then reassign it no crash occurs."""
myPath = os.path.join(unitTestDataPath('raster'),
'band1_float32_noct_epsg4326.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
myRasterLayer = QgsRasterLayer(myPath, myBaseName)
myMessage = 'Raster not loaded: %s' % myPath
assert myRasterLayer.isValid(), myMessage
myRasterShader = QgsRasterShader()
myColorRampShader = QgsColorRampShader()
myColorRampShader.setColorRampType(QgsColorRampShader.Interpolated)
myItems = []
myItem = QgsColorRampShader.ColorRampItem(
10, QColor('#ffff00'), 'foo')
myItems.append(myItem)
myItem = QgsColorRampShader.ColorRampItem(
100, QColor('#ff00ff'), 'bar')
myItems.append(myItem)
myItem = QgsColorRampShader.ColorRampItem(
1000, QColor('#00ff00'), 'kazam')
myItems.append(myItem)
myColorRampShader.setColorRampItemList(myItems)
myRasterShader.setRasterShaderFunction(myColorRampShader)
myPseudoRenderer = QgsSingleBandPseudoColorRenderer(
myRasterLayer.dataProvider(), 1, myRasterShader)
myRasterLayer.setRenderer(myPseudoRenderer)
return
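# NOTE: everything below this early return is intentionally
# unreachable; it preserves the original assign/reassign sequence
# that used to trigger the crash this test guards against.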
# ####### works first time #############
myRasterShader = QgsRasterShader()
myColorRampShader = QgsColorRampShader()
myColorRampShader.setColorRampType(QgsColorRampShader.Interpolated)
myItems = []
myItem = QgsColorRampShader.ColorRampItem(10,
QColor('#ffff00'), 'foo')
myItems.append(myItem)
myItem = QgsColorRampShader.ColorRampItem(100,
QColor('#ff00ff'), 'bar')
myItems.append(myItem)
myItem = QgsColorRampShader.ColorRampItem(1000,
QColor('#00ff00'), 'kazam')
myItems.append(myItem)
myColorRampShader.setColorRampItemList(myItems)
myRasterShader.setRasterShaderFunction(myColorRampShader)
# ####### crash on next line (fixed now)##################
myPseudoRenderer = QgsSingleBandPseudoColorRenderer(
myRasterLayer.dataProvider(), 1, myRasterShader)
myRasterLayer.setRenderer(myPseudoRenderer)
def onRendererChanged(self):
self.rendererChanged = True
def test_setRenderer(self):
myPath = os.path.join(unitTestDataPath('raster'),
'band1_float32_noct_epsg4326.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
layer = QgsRasterLayer(myPath, myBaseName)
self.rendererChanged = False
layer.rendererChanged.connect(self.onRendererChanged)
rShader = QgsRasterShader()
r = QgsSingleBandPseudoColorRenderer(layer.dataProvider(), 1, rShader)
layer.setRenderer(r)
assert self.rendererChanged
assert layer.renderer() == r
def testQgsRasterMinMaxOrigin(self):
mmo = QgsRasterMinMaxOrigin()
mmo_default = QgsRasterMinMaxOrigin()
self.assertEqual(mmo, mmo_default)
mmo = QgsRasterMinMaxOrigin()
self.assertEqual(mmo.limits(), QgsRasterMinMaxOrigin.None_)
mmo.setLimits(QgsRasterMinMaxOrigin.CumulativeCut)
self.assertEqual(mmo.limits(), QgsRasterMinMaxOrigin.CumulativeCut)
self.assertNotEqual(mmo, mmo_default)
mmo = QgsRasterMinMaxOrigin()
self.assertEqual(mmo.extent(), QgsRasterMinMaxOrigin.WholeRaster)
mmo.setExtent(QgsRasterMinMaxOrigin.UpdatedCanvas)
self.assertEqual(mmo.extent(), QgsRasterMinMaxOrigin.UpdatedCanvas)
self.assertNotEqual(mmo, mmo_default)
mmo = QgsRasterMinMaxOrigin()
self.assertEqual(mmo.statAccuracy(), QgsRasterMinMaxOrigin.Estimated)
mmo.setStatAccuracy(QgsRasterMinMaxOrigin.Exact)
self.assertEqual(mmo.statAccuracy(), QgsRasterMinMaxOrigin.Exact)
self.assertNotEqual(mmo, mmo_default)
mmo = QgsRasterMinMaxOrigin()
self.assertAlmostEqual(mmo.cumulativeCutLower(), 0.02)
mmo.setCumulativeCutLower(0.1)
self.assertAlmostEqual(mmo.cumulativeCutLower(), 0.1)
self.assertNotEqual(mmo, mmo_default)
mmo = QgsRasterMinMaxOrigin()
self.assertAlmostEqual(mmo.cumulativeCutUpper(), 0.98)
mmo.setCumulativeCutUpper(0.9)
self.assertAlmostEqual(mmo.cumulativeCutUpper(), 0.9)
self.assertNotEqual(mmo, mmo_default)
mmo = QgsRasterMinMaxOrigin()
self.assertAlmostEqual(mmo.stdDevFactor(), 2.0)
mmo.setStdDevFactor(2.5)
self.assertAlmostEqual(mmo.stdDevFactor(), 2.5)
self.assertNotEqual(mmo, mmo_default)
mmo = QgsRasterMinMaxOrigin()
mmo.setLimits(QgsRasterMinMaxOrigin.CumulativeCut)
mmo.setExtent(QgsRasterMinMaxOrigin.UpdatedCanvas)
mmo.setStatAccuracy(QgsRasterMinMaxOrigin.Exact)
mmo.setCumulativeCutLower(0.1)
mmo.setCumulativeCutUpper(0.9)
mmo.setStdDevFactor(2.5)
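# Round-trip the fully customised settings through XML and verify
# that the restored object compares equal.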
doc = QDomDocument()
parentElem = doc.createElement("test")
mmo.writeXml(doc, parentElem)
mmoUnserialized = QgsRasterMinMaxOrigin()
mmoUnserialized.readXml(parentElem)
self.assertEqual(mmo, mmoUnserialized)
def testPaletted(self):
""" test paletted raster renderer with raster with color table"""
path = os.path.join(unitTestDataPath('raster'),
'with_color_table.tif')
info = QFileInfo(path)
base_name = info.baseName()
layer = QgsRasterLayer(path, base_name)
self.assertTrue(layer.isValid(), 'Raster not loaded: {}'.format(path))
renderer = QgsPalettedRasterRenderer(layer.dataProvider(), 1,
[QgsPalettedRasterRenderer.Class(1, QColor(0, 255, 0), 'class 2'),
QgsPalettedRasterRenderer.Class(3, QColor(255, 0, 0), 'class 1')])
self.assertEqual(renderer.nColors(), 2)
self.assertEqual(renderer.usesBands(), [1])
# test labels
self.assertEqual(renderer.label(1), 'class 2')
self.assertEqual(renderer.label(3), 'class 1')
self.assertFalse(renderer.label(101))
# test legend symbology - should be sorted by value
legend = renderer.legendSymbologyItems()
self.assertEqual(legend[0][0], 'class 2')
self.assertEqual(legend[1][0], 'class 1')
self.assertEqual(legend[0][1].name(), '#00ff00')
self.assertEqual(legend[1][1].name(), '#ff0000')
# test retrieving classes
classes = renderer.classes()
self.assertEqual(classes[0].value, 1)
self.assertEqual(classes[1].value, 3)
self.assertEqual(classes[0].label, 'class 2')
self.assertEqual(classes[1].label, 'class 1')
self.assertEqual(classes[0].color.name(), '#00ff00')
self.assertEqual(classes[1].color.name(), '#ff0000')
# test set label
# bad index
renderer.setLabel(1212, 'bad')
renderer.setLabel(3, 'new class')
self.assertEqual(renderer.label(3), 'new class')
# color ramp
r = QgsLimitedRandomColorRamp(5)
renderer.setSourceColorRamp(r)
self.assertEqual(renderer.sourceColorRamp().type(), 'random')
self.assertEqual(renderer.sourceColorRamp().count(), 5)
# clone
new_renderer = renderer.clone()
classes = new_renderer.classes()
self.assertEqual(classes[0].value, 1)
self.assertEqual(classes[1].value, 3)
self.assertEqual(classes[0].label, 'class 2')
self.assertEqual(classes[1].label, 'new class')
self.assertEqual(classes[0].color.name(), '#00ff00')
self.assertEqual(classes[1].color.name(), '#ff0000')
self.assertEqual(new_renderer.sourceColorRamp().type(), 'random')
self.assertEqual(new_renderer.sourceColorRamp().count(), 5)
# write to xml and read
doc = QDomDocument('testdoc')
elem = doc.createElement('qgis')
renderer.writeXml(doc, elem)
restored = QgsPalettedRasterRenderer.create(elem.firstChild().toElement(), layer.dataProvider())
self.assertTrue(restored)
self.assertEqual(restored.usesBands(), [1])
classes = restored.classes()
self.assertTrue(classes)
self.assertEqual(classes[0].value, 1)
self.assertEqual(classes[1].value, 3)
self.assertEqual(classes[0].label, 'class 2')
self.assertEqual(classes[1].label, 'new class')
self.assertEqual(classes[0].color.name(), '#00ff00')
self.assertEqual(classes[1].color.name(), '#ff0000')
self.assertEqual(restored.sourceColorRamp().type(), 'random')
self.assertEqual(restored.sourceColorRamp().count(), 5)
# render test
layer.setRenderer(renderer)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_paletted_renderer")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_paletted_renderer"), "Paletted rendering test failed")
def testPalettedBand(self):
""" test paletted raster render band"""
path = os.path.join(unitTestDataPath(),
'landsat_4326.tif')
info = QFileInfo(path)
base_name = info.baseName()
layer = QgsRasterLayer(path, base_name)
self.assertTrue(layer.isValid(), 'Raster not loaded: {}'.format(path))
renderer = QgsPalettedRasterRenderer(layer.dataProvider(), 2,
[QgsPalettedRasterRenderer.Class(137, QColor(0, 255, 0), 'class 2'),
QgsPalettedRasterRenderer.Class(138, QColor(255, 0, 0), 'class 1'),
QgsPalettedRasterRenderer.Class(139, QColor(0, 0, 255), 'class 1')])
layer.setRenderer(renderer)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_paletted_renderer_band2")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_paletted_renderer_band2"), "Paletted rendering test failed")
renderer = QgsPalettedRasterRenderer(layer.dataProvider(), 3,
[QgsPalettedRasterRenderer.Class(120, QColor(0, 255, 0), 'class 2'),
QgsPalettedRasterRenderer.Class(123, QColor(255, 0, 0), 'class 1'),
QgsPalettedRasterRenderer.Class(124, QColor(0, 0, 255), 'class 1')])
layer.setRenderer(renderer)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_paletted_renderer_band3")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_paletted_renderer_band3"), "Paletted rendering test failed")
def testBrightnessContrastGamma(self):
""" test raster brightness/contrast/gamma filter"""
path = os.path.join(unitTestDataPath(),
'landsat_4326.tif')
info = QFileInfo(path)
base_name = info.baseName()
layer = QgsRasterLayer(path, base_name)
self.assertTrue(layer.isValid(), 'Raster not loaded: {}'.format(path))
layer.brightnessFilter().setContrast(100)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_raster_contrast100")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_raster_contrast100"), "Contrast (c = 100) rendering test failed")
layer.brightnessFilter().setContrast(-30)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_raster_contrast30")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_raster_contrast30"), "Contrast (c = -30) rendering test failed")
layer.brightnessFilter().setContrast(0)
layer.brightnessFilter().setBrightness(50)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_raster_brightness50")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_raster_brightness50"), "Brightness (b = 50) rendering test failed")
layer.brightnessFilter().setBrightness(-20)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_raster_brightness20")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_raster_brightness20"), "Brightness (b = -20) rendering test failed")
path = os.path.join(unitTestDataPath(),
'landsat-int16-b1.tif')
info = QFileInfo(path)
base_name = info.baseName()
layer = QgsRasterLayer(path, base_name)
self.assertTrue(layer.isValid(), 'Raster not loaded: {}'.format(path))
layer.brightnessFilter().setGamma(0.22)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_raster_gamma022")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_raster_gamma022"), "Gamma correction (gamma = 0.22) rendering test failed")
layer.brightnessFilter().setGamma(2.22)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_raster_gamma222")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_raster_gamma222"), "Gamma correction (gamma = 2.22) rendering test failed")
def testPalettedColorTableToClassData(self):
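"""Convert color ramp items (a color table) to paletted class data."""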
entries = [QgsColorRampShader.ColorRampItem(5, QColor(255, 0, 0), 'item1'),
QgsColorRampShader.ColorRampItem(3, QColor(0, 255, 0), 'item2'),
QgsColorRampShader.ColorRampItem(6, QColor(0, 0, 255), 'item3'),
]
classes = QgsPalettedRasterRenderer.colorTableToClassData(entries)
self.assertEqual(classes[0].value, 5)
self.assertEqual(classes[1].value, 3)
self.assertEqual(classes[2].value, 6)
self.assertEqual(classes[0].label, 'item1')
self.assertEqual(classes[1].label, 'item2')
self.assertEqual(classes[2].label, 'item3')
self.assertEqual(classes[0].color.name(), '#ff0000')
self.assertEqual(classes[1].color.name(), '#00ff00')
self.assertEqual(classes[2].color.name(), '#0000ff')
# test #13263
path = os.path.join(unitTestDataPath('raster'),
'hub13263.vrt')
info = QFileInfo(path)
base_name = info.baseName()
layer = QgsRasterLayer(path, base_name)
self.assertTrue(layer.isValid(), 'Raster not loaded: {}'.format(path))
classes = QgsPalettedRasterRenderer.colorTableToClassData(layer.dataProvider().colorTable(1))
self.assertEqual(len(classes), 4)
classes = QgsPalettedRasterRenderer.colorTableToClassData(layer.dataProvider().colorTable(15))
self.assertEqual(len(classes), 256)
def testLoadPalettedColorDataFromString(self):
"""
Test interpreting a bunch of color data format strings
"""
esri_clr_format = '1 255 255 0\n2 64 0 128\n3 255 32 32\n4 0 255 0\n5 0 0 255'
esri_clr_format_win = '1 255 255 0\r\n2 64 0 128\r\n3 255 32 32\r\n4 0 255 0\r\n5 0 0 255'
esri_clr_format_tab = '1\t255\t255\t0\n2\t64\t0\t128\n3\t255\t32\t32\n4\t0\t255\t0\n5\t0\t0\t255'
esri_clr_spaces = '1 255 255 0\n2 64 0 128\n3 255 32 32\n4 0 255 0\n5 0 0 255'
gdal_clr_comma = '1,255,255,0\n2,64,0,128\n3,255,32,32\n4,0,255,0\n5,0,0,255'
gdal_clr_colon = '1:255:255:0\n2:64:0:128\n3:255:32:32\n4:0:255:0\n5:0:0:255'
for f in [esri_clr_format,
esri_clr_format_win,
esri_clr_format_tab,
esri_clr_spaces,
gdal_clr_comma,
gdal_clr_colon]:
classes = QgsPalettedRasterRenderer.classDataFromString(f)
self.assertEqual(len(classes), 5)
self.assertEqual(classes[0].value, 1)
self.assertEqual(classes[0].color.name(), '#ffff00')
self.assertEqual(classes[1].value, 2)
self.assertEqual(classes[1].color.name(), '#400080')
self.assertEqual(classes[2].value, 3)
self.assertEqual(classes[2].color.name(), '#ff2020')
self.assertEqual(classes[3].value, 4)
self.assertEqual(classes[3].color.name(), '#00ff00')
self.assertEqual(classes[4].value, 5)
self.assertEqual(classes[4].color.name(), '#0000ff')
grass_named_colors = '0 white\n1 yellow\n3 black\n6 blue\n9 magenta\n11 aqua\n13 grey\n14 gray\n15 orange\n19 brown\n21 purple\n22 violet\n24 indigo\n90 green\n180 cyan\n270 red\n'
classes = QgsPalettedRasterRenderer.classDataFromString(grass_named_colors)
self.assertEqual(len(classes), 16)
self.assertEqual(classes[0].value, 0)
self.assertEqual(classes[0].color.name(), '#ffffff')
self.assertEqual(classes[1].value, 1)
self.assertEqual(classes[1].color.name(), '#ffff00')
self.assertEqual(classes[2].value, 3)
self.assertEqual(classes[2].color.name(), '#000000')
self.assertEqual(classes[3].value, 6)
self.assertEqual(classes[3].color.name(), '#0000ff')
self.assertEqual(classes[4].value, 9)
self.assertEqual(classes[4].color.name(), '#ff00ff')
self.assertEqual(classes[5].value, 11)
self.assertEqual(classes[5].color.name(), '#00ffff')
self.assertEqual(classes[6].value, 13)
self.assertEqual(classes[6].color.name(), '#808080')
self.assertEqual(classes[7].value, 14)
self.assertEqual(classes[7].color.name(), '#808080')
self.assertEqual(classes[8].value, 15)
self.assertEqual(classes[8].color.name(), '#ffa500')
self.assertEqual(classes[9].value, 19)
self.assertEqual(classes[9].color.name(), '#a52a2a')
self.assertEqual(classes[10].value, 21)
self.assertEqual(classes[10].color.name(), '#800080')
self.assertEqual(classes[11].value, 22)
self.assertEqual(classes[11].color.name(), '#ee82ee')
self.assertEqual(classes[12].value, 24)
self.assertEqual(classes[12].color.name(), '#4b0082')
self.assertEqual(classes[13].value, 90)
self.assertEqual(classes[13].color.name(), '#008000')
self.assertEqual(classes[14].value, 180)
self.assertEqual(classes[14].color.name(), '#00ffff')
self.assertEqual(classes[15].value, 270)
self.assertEqual(classes[15].color.name(), '#ff0000')
gdal_alpha = '1:255:255:0:0\n2:64:0:128:50\n3:255:32:32:122\n4:0:255:0:200\n5:0:0:255:255'
classes = QgsPalettedRasterRenderer.classDataFromString(gdal_alpha)
self.assertEqual(len(classes), 5)
self.assertEqual(classes[0].value, 1)
self.assertEqual(classes[0].color.name(), '#ffff00')
self.assertEqual(classes[0].color.alpha(), 0)
self.assertEqual(classes[1].value, 2)
self.assertEqual(classes[1].color.name(), '#400080')
self.assertEqual(classes[1].color.alpha(), 50)
self.assertEqual(classes[2].value, 3)
self.assertEqual(classes[2].color.name(), '#ff2020')
self.assertEqual(classes[2].color.alpha(), 122)
self.assertEqual(classes[3].value, 4)
self.assertEqual(classes[3].color.name(), '#00ff00')
self.assertEqual(classes[3].color.alpha(), 200)
self.assertEqual(classes[4].value, 5)
self.assertEqual(classes[4].color.name(), '#0000ff')
self.assertEqual(classes[4].color.alpha(), 255)
# qgis style, with labels
qgis = '3 255 0 0 255 class 1\n4 0 255 0 200 class 2'
classes = QgsPalettedRasterRenderer.classDataFromString(qgis)
self.assertEqual(len(classes), 2)
self.assertEqual(classes[0].value, 3)
self.assertEqual(classes[0].color.name(), '#ff0000')
self.assertEqual(classes[0].color.alpha(), 255)
self.assertEqual(classes[0].label, 'class 1')
self.assertEqual(classes[1].value, 4)
self.assertEqual(classes[1].color.name(), '#00ff00')
self.assertEqual(classes[1].color.alpha(), 200)
self.assertEqual(classes[1].label, 'class 2')
# some bad inputs
bad = ''
classes = QgsPalettedRasterRenderer.classDataFromString(bad)
self.assertEqual(len(classes), 0)
bad = '\n\n\n'
classes = QgsPalettedRasterRenderer.classDataFromString(bad)
self.assertEqual(len(classes), 0)
bad = 'x x x x'
classes = QgsPalettedRasterRenderer.classDataFromString(bad)
self.assertEqual(len(classes), 0)
bad = '1 255 0 0\n2 255 255\n3 255 0 255'
classes = QgsPalettedRasterRenderer.classDataFromString(bad)
self.assertEqual(len(classes), 2)
bad = '1 255 a 0'
classes = QgsPalettedRasterRenderer.classDataFromString(bad)
self.assertEqual(len(classes), 1)
def testLoadPalettedClassDataFromFile(self):
# bad file
classes = QgsPalettedRasterRenderer.classDataFromFile('ajdhjashjkdh kjahjkdhk')
self.assertEqual(len(classes), 0)
# good file!
path = os.path.join(unitTestDataPath('raster'),
'test.clr')
classes = QgsPalettedRasterRenderer.classDataFromFile(path)
self.assertEqual(len(classes), 10)
self.assertEqual(classes[0].value, 1)
self.assertEqual(classes[0].color.name(), '#000000')
self.assertEqual(classes[0].color.alpha(), 255)
self.assertEqual(classes[1].value, 2)
self.assertEqual(classes[1].color.name(), '#c8c8c8')
self.assertEqual(classes[2].value, 3)
self.assertEqual(classes[2].color.name(), '#006e00')
self.assertEqual(classes[3].value, 4)
self.assertEqual(classes[3].color.name(), '#6e4100')
self.assertEqual(classes[4].value, 5)
self.assertEqual(classes[4].color.name(), '#0000ff')
self.assertEqual(classes[4].color.alpha(), 255)
self.assertEqual(classes[5].value, 6)
self.assertEqual(classes[5].color.name(), '#0059ff')
self.assertEqual(classes[6].value, 7)
self.assertEqual(classes[6].color.name(), '#00aeff')
self.assertEqual(classes[7].value, 8)
self.assertEqual(classes[7].color.name(), '#00fff6')
self.assertEqual(classes[8].value, 9)
self.assertEqual(classes[8].color.name(), '#eeff00')
self.assertEqual(classes[9].value, 10)
self.assertEqual(classes[9].color.name(), '#ffb600')
def testPalettedClassDataToString(self):
classes = [QgsPalettedRasterRenderer.Class(1, QColor(0, 255, 0), 'class 2'),
QgsPalettedRasterRenderer.Class(3, QColor(255, 0, 0), 'class 1')]
self.assertEqual(QgsPalettedRasterRenderer.classDataToString(classes),
'1 0 255 0 255 class 2\n3 255 0 0 255 class 1')
# must be sorted by value to work OK in ArcMap
classes = [QgsPalettedRasterRenderer.Class(4, QColor(0, 255, 0), 'class 2'),
QgsPalettedRasterRenderer.Class(3, QColor(255, 0, 0), 'class 1')]
self.assertEqual(QgsPalettedRasterRenderer.classDataToString(classes),
'3 255 0 0 255 class 1\n4 0 255 0 255 class 2')
def testPalettedClassDataFromLayer(self):
# no layer
classes = QgsPalettedRasterRenderer.classDataFromRaster(None, 1)
self.assertFalse(classes)
# 10 class layer
path = os.path.join(unitTestDataPath('raster'),
'with_color_table.tif')
info = QFileInfo(path)
base_name = info.baseName()
layer10 = QgsRasterLayer(path, base_name)
classes = QgsPalettedRasterRenderer.classDataFromRaster(layer10.dataProvider(), 1)
self.assertEqual(len(classes), 10)
self.assertEqual(classes[0].value, 1)
self.assertEqual(classes[0].label, '1')
self.assertEqual(classes[1].value, 2)
self.assertEqual(classes[1].label, '2')
self.assertEqual(classes[2].value, 3)
self.assertEqual(classes[2].label, '3')
self.assertEqual(classes[3].value, 4)
self.assertEqual(classes[3].label, '4')
self.assertEqual(classes[4].value, 5)
self.assertEqual(classes[4].label, '5')
self.assertEqual(classes[5].value, 6)
self.assertEqual(classes[5].label, '6')
self.assertEqual(classes[6].value, 7)
self.assertEqual(classes[6].label, '7')
self.assertEqual(classes[7].value, 8)
self.assertEqual(classes[7].label, '8')
self.assertEqual(classes[8].value, 9)
self.assertEqual(classes[8].label, '9')
self.assertEqual(classes[9].value, 10)
self.assertEqual(classes[9].label, '10')
# bad band
self.assertFalse(QgsPalettedRasterRenderer.classDataFromRaster(layer10.dataProvider(), 10101010))
# with ramp
r = QgsGradientColorRamp(QColor(200, 0, 0, 100), QColor(0, 200, 0, 200))
classes = QgsPalettedRasterRenderer.classDataFromRaster(layer10.dataProvider(), 1, r)
self.assertEqual(len(classes), 10)
self.assertEqual(classes[0].color.name(), '#c80000')
self.assertEqual(classes[1].color.name(), '#b21600')
self.assertEqual(classes[2].color.name(), '#9c2c00')
self.assertEqual(classes[3].color.name(), '#854200')
self.assertEqual(classes[4].color.name(), '#6f5900')
self.assertEqual(classes[5].color.name(), '#596f00')
self.assertEqual(classes[6].color.name(), '#428500')
self.assertEqual(classes[7].color.name(), '#2c9c00')
self.assertEqual(classes[8].color.name(), '#16b200')
self.assertEqual(classes[9].color.name(), '#00c800')
# 30 class layer
path = os.path.join(unitTestDataPath('raster'),
'unique_1.tif')
info = QFileInfo(path)
base_name = info.baseName()
layer10 = QgsRasterLayer(path, base_name)
classes = QgsPalettedRasterRenderer.classDataFromRaster(layer10.dataProvider(), 1)
self.assertEqual(len(classes), 30)
expected = [11, 21, 22, 24, 31, 82, 2002, 2004, 2014, 2019, 2027, 2029, 2030, 2080, 2081, 2082, 2088, 2092,
2097, 2098, 2099, 2105, 2108, 2110, 2114, 2118, 2126, 2152, 2184, 2220]
self.assertEqual([c.value for c in classes], expected)
# bad layer
path = os.path.join(unitTestDataPath('raster'),
'hub13263.vrt')
info = QFileInfo(path)
base_name = info.baseName()
layer = QgsRasterLayer(path, base_name)
classes = QgsPalettedRasterRenderer.classDataFromRaster(layer.dataProvider(), 1)
self.assertFalse(classes)
def testPalettedRendererWithNegativeColorValue(self):
""" test paletted raster renderer with negative values in color table"""
path = os.path.join(unitTestDataPath('raster'),
'hub13263.vrt')
info = QFileInfo(path)
base_name = info.baseName()
layer = QgsRasterLayer(path, base_name)
self.assertTrue(layer.isValid(), 'Raster not loaded: {}'.format(path))
renderer = QgsPalettedRasterRenderer(layer.dataProvider(), 1,
[QgsPalettedRasterRenderer.Class(-1, QColor(0, 255, 0), 'class 2'),
QgsPalettedRasterRenderer.Class(3, QColor(255, 0, 0), 'class 1')])
self.assertEqual(renderer.nColors(), 2)
self.assertEqual(renderer.usesBands(), [1])
def testClone(self):
myPath = os.path.join(unitTestDataPath('raster'),
'band1_float32_noct_epsg4326.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
layer = QgsRasterLayer(myPath, myBaseName)
renderer = layer.renderer().clone()
renderer.setOpacity(33.3)
layer.setRenderer(renderer)
# clone layer
clone = layer.clone()
# generate xml from layer
layer_doc = QDomDocument("doc")
layer_elem = layer_doc.createElement("maplayer")
layer.writeLayerXml(layer_elem, layer_doc, QgsReadWriteContext())
# generate xml from clone
clone_doc = QDomDocument("doc")
clone_elem = clone_doc.createElement("maplayer")
clone.writeLayerXml(clone_elem, clone_doc, QgsReadWriteContext())
# replace id within xml of clone
clone_id_elem = clone_elem.firstChildElement("id")
clone_id_elem_patch = clone_doc.createElement("id")
clone_id_elem_patch_value = clone_doc.createTextNode(layer.id())
clone_id_elem_patch.appendChild(clone_id_elem_patch_value)
clone_elem.replaceChild(clone_id_elem_patch, clone_id_elem)
# update doc
clone_doc.appendChild(clone_elem)
layer_doc.appendChild(layer_elem)
# compare xml documents
self.assertEqual(layer_doc.toString(), clone_doc.toString())
def testSetDataSource(self):
"""Test change data source"""
temp_dir = QTemporaryDir()
options = QgsDataProvider.ProviderOptions()
myPath = os.path.join(unitTestDataPath('raster'),
'band1_float32_noct_epsg4326.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
layer = QgsRasterLayer(myPath, myBaseName)
renderer = QgsSingleBandGrayRenderer(layer.dataProvider(), 2)
image = layer.previewAsImage(QSize(400, 400))
self.assertFalse(image.isNull())
self.assertTrue(image.save(os.path.join(temp_dir.path(), 'expected.png'), "PNG"))
layer.setDataSource(myPath.replace('4326.tif', '4326-BAD_SOURCE.tif'), 'bad_layer', 'gdal', options)
self.assertFalse(layer.isValid())
image = layer.previewAsImage(QSize(400, 400))
self.assertTrue(image.isNull())
layer.setDataSource(myPath.replace('4326-BAD_SOURCE.tif', '4326.tif'), 'bad_layer', 'gdal', options)
self.assertTrue(layer.isValid())
image = layer.previewAsImage(QSize(400, 400))
self.assertFalse(image.isNull())
self.assertTrue(image.save(os.path.join(temp_dir.path(), 'actual.png'), "PNG"))
        self.assertTrue(
            filecmp.cmp(os.path.join(temp_dir.path(), 'actual.png'),
                        os.path.join(temp_dir.path(), 'expected.png')))
def testWriteSld(self):
"""Test SLD generation for the XMLS fields geneerated at RasterLayer level and not to the deeper renderer level."""
myPath = os.path.join(unitTestDataPath(), 'landsat.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
myRasterLayer = QgsRasterLayer(myPath, myBaseName)
myMessage = 'Raster not loaded: %s' % myPath
assert myRasterLayer.isValid(), myMessage
# do generic export with default layer values
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = root.elementsByTagName('sld:LayerFeatureConstraints')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
elements = element.elementsByTagName('sld:FeatureTypeConstraint')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
elements = root.elementsByTagName('sld:UserStyle')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
name = element.firstChildElement('sld:Name')
self.assertFalse(name.isNull())
self.assertEqual(name.text(), 'landsat')
abstract = element.firstChildElement('sld:Abstract')
self.assertTrue(abstract.isNull())
title = element.firstChildElement('sld:Title')
self.assertTrue(title.isNull())
featureTypeStyle = element.firstChildElement('sld:FeatureTypeStyle')
self.assertFalse(featureTypeStyle.isNull())
rule = featureTypeStyle.firstChildElement('sld:Rule')
self.assertFalse(rule.isNull())
temp = rule.firstChildElement('sld:MinScaleDenominator')
self.assertTrue(temp.isNull())
temp = rule.firstChildElement('sld:MaxScaleDenominator')
self.assertTrue(temp.isNull())
rasterSymbolizer = rule.firstChildElement('sld:RasterSymbolizer')
self.assertFalse(rule.isNull())
vendorOptions = rasterSymbolizer.elementsByTagName('sld:VendorOption')
self.assertTrue(vendorOptions.size() == 0)
# set no default values and check exported sld
myRasterLayer.setName('')
myRasterLayer.setAbstract('fake')
myRasterLayer.setTitle('fake')
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = root.elementsByTagName('sld:LayerFeatureConstraints')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
elements = element.elementsByTagName('sld:FeatureTypeConstraint')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
elements = root.elementsByTagName('sld:UserStyle')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
# no generated if empty
name = element.firstChildElement('sld:Name')
self.assertTrue(name.isNull())
# generated if not empty
abstract = element.firstChildElement('sld:Abstract')
self.assertFalse(abstract.isNull())
self.assertEqual(abstract.text(), 'fake')
title = element.firstChildElement('sld:Title')
self.assertFalse(title.isNull())
self.assertEqual(title.text(), 'fake')
# if setScaleBasedVisibility is true print scales
myRasterLayer.setScaleBasedVisibility(True)
myRasterLayer.setMaximumScale(0.0001)
myRasterLayer.setMinimumScale(0.01)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:Rule')
self.assertEqual(len(elements), 1)
rule = elements.at(0).toElement()
self.assertFalse(rule.isNull())
temp = rule.firstChildElement('sld:MinScaleDenominator')
self.assertFalse(temp.isNull())
self.assertEqual(temp.text(), '0.0001')
temp = rule.firstChildElement('sld:MaxScaleDenominator')
self.assertFalse(temp.isNull())
self.assertEqual(temp.text(), '0.01')
# check non default hueSaturationFilter values
hue = myRasterLayer.hueSaturationFilter()
hue.setGrayscaleMode(QgsHueSaturationFilter.GrayscaleLightness)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'grayScale', 'lightness')
hue = myRasterLayer.hueSaturationFilter()
hue.setGrayscaleMode(QgsHueSaturationFilter.GrayscaleLuminosity)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'grayScale', 'luminosity')
hue = myRasterLayer.hueSaturationFilter()
hue.setGrayscaleMode(QgsHueSaturationFilter.GrayscaleAverage)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'grayScale', 'average')
hue = myRasterLayer.hueSaturationFilter()
hue.setGrayscaleMode(QgsHueSaturationFilter.GrayscaleOff)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'grayScale', None)
# manage colorize vendorOption tags
hue = myRasterLayer.hueSaturationFilter()
hue.setColorizeOn(True)
hue.setColorizeStrength(50)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'colorizeOn', '1')
self.assertVendorOption(element, 'colorizeRed', '255')
self.assertVendorOption(element, 'colorizeGreen', '128')
self.assertVendorOption(element, 'colorizeBlue', '128')
self.assertVendorOption(element, 'colorizeStrength', '0.5')
self.assertVendorOption(element, 'saturation', '0.498039')
# other hue non default values, no colorize and saturation = 0
hue = myRasterLayer.hueSaturationFilter()
hue.setColorizeOn(False)
hue.setSaturation(0)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'colorizeOn', None)
self.assertVendorOption(element, 'colorizeRed', None)
self.assertVendorOption(element, 'colorizeGreen', None)
self.assertVendorOption(element, 'colorizeBlue', None)
self.assertVendorOption(element, 'colorizeStrength', None)
self.assertVendorOption(element, 'saturation', None)
self.assertVendorOption(element, 'brightness', None)
self.assertVendorOption(element, 'contrast', None)
# other hue non default values, no colorize and saturation = 100
hue = myRasterLayer.hueSaturationFilter()
hue.setColorizeOn(False)
hue.setSaturation(100)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'colorizeOn', None)
self.assertVendorOption(element, 'colorizeRed', None)
self.assertVendorOption(element, 'colorizeGreen', None)
self.assertVendorOption(element, 'colorizeBlue', None)
self.assertVendorOption(element, 'colorizeStrength', None)
self.assertVendorOption(element, 'saturation', '1')
hue.setSaturation(-100)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
self.assertVendorOption(root, 'saturation', '0')
# brightness filter default values
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertTrue(myRasterLayer.brightnessFilter().brightness() == 0)
self.assertTrue(myRasterLayer.brightnessFilter().contrast() == 0)
self.assertVendorOption(element, 'brightness', None)
self.assertVendorOption(element, 'contrast', None)
# brightness filter no default values
bf = myRasterLayer.brightnessFilter()
bf.setBrightness(-255)
bf.setContrast(-100)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'brightness', '0')
self.assertVendorOption(element, 'contrast', '0')
bf.setBrightness(255)
bf.setContrast(100)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'brightness', '1')
self.assertVendorOption(element, 'contrast', '1')
def assertVendorOption(self, root, name, expectedValue):
"""Set expectedValue=None to check that the vendor option is not present."""
vendorOptions = root.elementsByTagName('sld:VendorOption')
found = False
for vendorOptionIndex in range(vendorOptions.count()):
vendorOption = vendorOptions.at(vendorOptionIndex)
self.assertEqual('sld:VendorOption', vendorOption.nodeName())
if (vendorOption.attributes().namedItem('name').nodeValue() == name):
found = True
self.assertEqual(vendorOption.firstChild().nodeValue(), expectedValue)
        if (expectedValue is None) and found:
            self.fail("found VendorOption: {} which was supposed to be absent".format(name))
        if expectedValue and not found:
            self.fail("VendorOption not found: {}".format(name))
def layerToSld(self, layer, properties={}):
dom = QDomDocument()
root = dom.createElement("FakeRoot")
dom.appendChild(root)
errorMessage = ''
layer.writeSld(root, dom, errorMessage, properties)
return dom, root, errorMessage
def testHistogram(self):
"""Test histogram bindings regression GH #29700"""
l = QgsRasterLayer(unitTestDataPath('raster/landcover.img'), 'landcover')
self.assertTrue(l.isValid())
p = l.dataProvider()
# Note that this is not a correct use of the API: there is no
# need to call initHistogram(): it is called internally
# from p.histogram()
p.initHistogram(QgsRasterHistogram(), 1, 100)
h = p.histogram(1)
        self.assertEqual(len(h.histogramVector), 100)
        # Check it twice because it crashed in some circumstances with the old implementation
        self.assertEqual(len(h.histogramVector), 100)
def testInvalidLayerStyleRestoration(self):
"""
Test that styles are correctly restored from invalid layers
"""
source_path = os.path.join(unitTestDataPath('raster'),
'band1_float32_noct_epsg4326.tif')
# copy to temp path
tmp_dir = QTemporaryDir()
tmp_path = os.path.join(tmp_dir.path(), 'test_raster.tif')
copyfile(source_path, tmp_path)
rl = QgsRasterLayer(tmp_path, 'test_raster', 'gdal')
self.assertTrue(rl.isValid())
renderer = QgsSingleBandPseudoColorRenderer(rl.dataProvider(), 1)
color_ramp = QgsGradientColorRamp(QColor(255, 255, 0), QColor(0, 0, 255))
renderer.setClassificationMin(101)
renderer.setClassificationMax(131)
renderer.createShader(color_ramp)
renderer.setOpacity(0.6)
rl.setRenderer(renderer)
rl.resampleFilter().setZoomedInResampler(QgsCubicRasterResampler())
rl.resampleFilter().setZoomedOutResampler(QgsBilinearRasterResampler())
p = QgsProject()
p.addMapLayer(rl)
project_path = os.path.join(tmp_dir.path(), 'test_project.qgs')
self.assertTrue(p.write(project_path))
# simple case, layer still exists in same path
p2 = QgsProject()
self.assertTrue(p2.read(project_path))
self.assertEqual(len(p2.mapLayers()), 1)
rl2 = list(p2.mapLayers().values())[0]
self.assertTrue(rl2.isValid())
self.assertEqual(rl2.name(), 'test_raster')
self.assertIsInstance(rl2.renderer(), QgsSingleBandPseudoColorRenderer)
self.assertEqual(rl2.renderer().classificationMin(), 101)
self.assertEqual(rl2.renderer().classificationMax(), 131)
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color1().name(), '#ffff00')
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color2().name(), '#0000ff')
self.assertIsInstance(rl2.resampleFilter().zoomedInResampler(), QgsCubicRasterResampler)
self.assertIsInstance(rl2.resampleFilter().zoomedOutResampler(), QgsBilinearRasterResampler)
self.assertEqual(rl2.renderer().opacity(), 0.6)
# now, remove raster
os.remove(tmp_path)
# reload project
p2 = QgsProject()
self.assertTrue(p2.read(project_path))
self.assertEqual(len(p2.mapLayers()), 1)
rl2 = list(p2.mapLayers().values())[0]
self.assertFalse(rl2.isValid())
self.assertEqual(rl2.name(), 'test_raster')
# invalid layers should still have renderer available
self.assertIsInstance(rl2.renderer(), QgsSingleBandPseudoColorRenderer)
self.assertEqual(rl2.renderer().classificationMin(), 101)
self.assertEqual(rl2.renderer().classificationMax(), 131)
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color1().name(), '#ffff00')
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color2().name(), '#0000ff')
self.assertIsInstance(rl2.resampleFilter().zoomedInResampler(), QgsCubicRasterResampler)
self.assertIsInstance(rl2.resampleFilter().zoomedOutResampler(), QgsBilinearRasterResampler)
self.assertEqual(rl2.renderer().opacity(), 0.6)
# make a little change
rl2.renderer().setOpacity(0.8)
# now, fix path
rl2.setDataSource(source_path, 'test_raster', 'gdal', QgsDataProvider.ProviderOptions())
self.assertTrue(rl2.isValid())
self.assertEqual(rl2.name(), 'test_raster')
# at this stage, the original style should be retained...
self.assertIsInstance(rl2.renderer(), QgsSingleBandPseudoColorRenderer)
self.assertEqual(rl2.renderer().classificationMin(), 101)
self.assertEqual(rl2.renderer().classificationMax(), 131)
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color1().name(), '#ffff00')
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color2().name(), '#0000ff')
self.assertIsInstance(rl2.resampleFilter().zoomedInResampler(), QgsCubicRasterResampler)
self.assertIsInstance(rl2.resampleFilter().zoomedOutResampler(), QgsBilinearRasterResampler)
# the opacity change (and other renderer changes made while the layer was invalid) should be retained
self.assertEqual(rl2.renderer().opacity(), 0.8)
# break path
rl2.setDataSource(tmp_path, 'test_raster', 'gdal', QgsDataProvider.ProviderOptions())
# and restore
rl2.setDataSource(source_path, 'test_raster', 'gdal', QgsDataProvider.ProviderOptions())
self.assertTrue(rl2.isValid())
self.assertEqual(rl2.name(), 'test_raster')
# at this stage, the original style should be recreated...
self.assertIsInstance(rl2.renderer(), QgsSingleBandPseudoColorRenderer)
self.assertEqual(rl2.renderer().classificationMin(), 101)
self.assertEqual(rl2.renderer().classificationMax(), 131)
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color1().name(), '#ffff00')
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color2().name(), '#0000ff')
self.assertIsInstance(rl2.resampleFilter().zoomedInResampler(), QgsCubicRasterResampler)
self.assertIsInstance(rl2.resampleFilter().zoomedOutResampler(), QgsBilinearRasterResampler)
self.assertEqual(rl2.renderer().opacity(), 0.8)
# break again
rl2.setDataSource(tmp_path, 'test_raster', 'gdal', QgsDataProvider.ProviderOptions())
# export via qlr, with broken path (but hopefully correct style)
doc = QgsLayerDefinition.exportLayerDefinitionLayers([rl2], QgsReadWriteContext())
layers = QgsLayerDefinition.loadLayerDefinitionLayers(doc, QgsReadWriteContext())
self.assertEqual(len(layers), 1)
rl2 = layers[0]
self.assertFalse(rl2.isValid())
# fix path
rl2.setDataSource(source_path, 'test_raster', 'gdal', QgsDataProvider.ProviderOptions())
self.assertTrue(rl2.isValid())
self.assertEqual(rl2.name(), 'test_raster')
# at this stage, the original style should be recreated...
self.assertIsInstance(rl2.renderer(), QgsSingleBandPseudoColorRenderer)
self.assertEqual(rl2.renderer().classificationMin(), 101)
self.assertEqual(rl2.renderer().classificationMax(), 131)
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color1().name(), '#ffff00')
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color2().name(), '#0000ff')
self.assertIsInstance(rl2.resampleFilter().zoomedInResampler(), QgsCubicRasterResampler)
self.assertIsInstance(rl2.resampleFilter().zoomedOutResampler(), QgsBilinearRasterResampler)
self.assertEqual(rl2.renderer().opacity(), 0.8)
# another test
rl = QgsRasterLayer(source_path, 'test_raster', 'gdal')
self.assertTrue(rl.isValid())
renderer = QgsSingleBandPseudoColorRenderer(rl.dataProvider(), 1)
color_ramp = QgsGradientColorRamp(QColor(255, 255, 0), QColor(0, 0, 255))
renderer.setClassificationMin(101)
renderer.setClassificationMax(131)
renderer.createShader(color_ramp)
renderer.setOpacity(0.6)
rl.setRenderer(renderer)
rl.resampleFilter().setZoomedInResampler(QgsCubicRasterResampler())
rl.resampleFilter().setZoomedOutResampler(QgsBilinearRasterResampler())
# break path
rl.setDataSource(tmp_path, 'test_raster', 'gdal', QgsDataProvider.ProviderOptions())
# fix path
rl.setDataSource(source_path, 'test_raster', 'gdal', QgsDataProvider.ProviderOptions())
self.assertIsInstance(rl.renderer(), QgsSingleBandPseudoColorRenderer)
self.assertEqual(rl.renderer().classificationMin(), 101)
self.assertEqual(rl.renderer().classificationMax(), 131)
self.assertEqual(rl.renderer().shader().rasterShaderFunction().sourceColorRamp().color1().name(), '#ffff00')
self.assertEqual(rl.renderer().shader().rasterShaderFunction().sourceColorRamp().color2().name(), '#0000ff')
self.assertIsInstance(rl.resampleFilter().zoomedInResampler(), QgsCubicRasterResampler)
self.assertIsInstance(rl.resampleFilter().zoomedOutResampler(), QgsBilinearRasterResampler)
self.assertEqual(rl.renderer().opacity(), 0.6)
class TestQgsRasterLayerTransformContext(unittest.TestCase):
def setUp(self):
"""Prepare tc"""
super(TestQgsRasterLayerTransformContext, self).setUp()
self.ctx = QgsCoordinateTransformContext()
self.ctx.addSourceDestinationDatumTransform(QgsCoordinateReferenceSystem('EPSG:4326'),
QgsCoordinateReferenceSystem('EPSG:3857'), 1234, 1235)
self.ctx.addCoordinateOperation(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857'), 'test')
self.rpath = os.path.join(unitTestDataPath(), 'landsat.tif')
def testTransformContextIsSetInCtor(self):
"""Test transform context can be set from ctor"""
rl = QgsRasterLayer(self.rpath, 'raster')
self.assertFalse(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
options = QgsRasterLayer.LayerOptions(transformContext=self.ctx)
rl = QgsRasterLayer(self.rpath, 'raster', 'gdal', options)
self.assertTrue(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
def testTransformContextInheritsFromProject(self):
"""Test that when a layer is added to a project it inherits its context"""
rl = QgsRasterLayer(self.rpath, 'raster')
self.assertFalse(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
p = QgsProject()
self.assertFalse(
p.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
p.setTransformContext(self.ctx)
self.assertTrue(
p.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
p.addMapLayers([rl])
self.assertTrue(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
def testTransformContextIsSyncedFromProject(self):
"""Test that when a layer is synced when project context changes"""
rl = QgsRasterLayer(self.rpath, 'raster')
self.assertFalse(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
p = QgsProject()
self.assertFalse(
p.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
p.setTransformContext(self.ctx)
self.assertTrue(
p.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
p.addMapLayers([rl])
self.assertTrue(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
# Now change the project context
tc2 = QgsCoordinateTransformContext()
p.setTransformContext(tc2)
self.assertFalse(
p.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
self.assertFalse(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
p.setTransformContext(self.ctx)
self.assertTrue(
p.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
self.assertTrue(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 111,609,611,924,338,190 | 2,995,757,152,928,424,000 | 45.332364 | 188 | 0.651687 | false |
aristotle-tek/cuny-bdif | AWS/ec2/lib/boto-2.34.0/boto/ec2/cloudwatch/metric.py | 94 | 7491 | # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.ec2.cloudwatch.alarm import MetricAlarm
from boto.ec2.cloudwatch.dimension import Dimension
class Metric(object):
Statistics = ['Minimum', 'Maximum', 'Sum', 'Average', 'SampleCount']
Units = ['Seconds', 'Microseconds', 'Milliseconds', 'Bytes', 'Kilobytes',
'Megabytes', 'Gigabytes', 'Terabytes', 'Bits', 'Kilobits',
'Megabits', 'Gigabits', 'Terabits', 'Percent', 'Count',
'Bytes/Second', 'Kilobytes/Second', 'Megabytes/Second',
'Gigabytes/Second', 'Terabytes/Second', 'Bits/Second',
'Kilobits/Second', 'Megabits/Second', 'Gigabits/Second',
'Terabits/Second', 'Count/Second', None]
def __init__(self, connection=None):
self.connection = connection
self.name = None
self.namespace = None
self.dimensions = None
def __repr__(self):
return 'Metric:%s' % self.name
def startElement(self, name, attrs, connection):
if name == 'Dimensions':
self.dimensions = Dimension()
return self.dimensions
def endElement(self, name, value, connection):
if name == 'MetricName':
self.name = value
elif name == 'Namespace':
self.namespace = value
else:
setattr(self, name, value)
def query(self, start_time, end_time, statistics, unit=None, period=60):
"""
:type start_time: datetime
:param start_time: The time stamp to use for determining the
first datapoint to return. The value specified is
inclusive; results include datapoints with the time stamp
specified.
:type end_time: datetime
:param end_time: The time stamp to use for determining the
last datapoint to return. The value specified is
exclusive; results will include datapoints up to the time
stamp specified.
:type statistics: list
        :param statistics: A list of statistic names. Valid values:
            Average | Sum | SampleCount | Maximum | Minimum
:type unit: string
        :param unit: The unit for the metric. Valid values are:
Seconds | Microseconds | Milliseconds | Bytes | Kilobytes |
Megabytes | Gigabytes | Terabytes | Bits | Kilobits |
Megabits | Gigabits | Terabits | Percent | Count |
Bytes/Second | Kilobytes/Second | Megabytes/Second |
Gigabytes/Second | Terabytes/Second | Bits/Second |
Kilobits/Second | Megabits/Second | Gigabits/Second |
Terabits/Second | Count/Second | None
:type period: integer
:param period: The granularity, in seconds, of the returned datapoints.
Period must be at least 60 seconds and must be a multiple
of 60. The default value is 60.
"""
if not isinstance(statistics, list):
statistics = [statistics]
return self.connection.get_metric_statistics(period,
start_time,
end_time,
self.name,
self.namespace,
statistics,
self.dimensions,
unit)
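    # Illustrative sketch (not part of the original module): querying one
    # hour of five-minute averages.  ``metric`` is assumed to be a Metric
    # instance, e.g. one returned by CloudWatchConnection.list_metrics().
    #
    #   import datetime
    #   end = datetime.datetime.utcnow()
    #   start = end - datetime.timedelta(hours=1)
    #   datapoints = metric.query(start, end, ['Average'], unit='Percent',
    #                             period=300)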
def create_alarm(self, name, comparison, threshold,
period, evaluation_periods,
statistic, enabled=True, description=None,
dimensions=None, alarm_actions=None, ok_actions=None,
insufficient_data_actions=None, unit=None):
"""
Creates or updates an alarm and associates it with this metric.
Optionally, this operation can associate one or more
Amazon Simple Notification Service resources with the alarm.
When this operation creates an alarm, the alarm state is immediately
set to INSUFFICIENT_DATA. The alarm is evaluated and its StateValue is
set appropriately. Any actions associated with the StateValue is then
executed.
When updating an existing alarm, its StateValue is left unchanged.
        :rtype: :class:`boto.ec2.cloudwatch.alarm.MetricAlarm`
        :return: The alarm, if it was successfully created or updated.
        """
if not dimensions:
dimensions = self.dimensions
alarm = MetricAlarm(self.connection, name, self.name,
self.namespace, statistic, comparison,
threshold, period, evaluation_periods,
unit, description, dimensions,
alarm_actions, insufficient_data_actions,
ok_actions)
if self.connection.put_metric_alarm(alarm):
return alarm
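    # Illustrative sketch (hypothetical names and values): an alarm that
    # fires when the 5-minute average of this metric exceeds 90 for two
    # consecutive periods.
    #
    #   alarm = metric.create_alarm(
    #       name='my-high-metric', comparison='>', threshold=90.0,
    #       period=300, evaluation_periods=2, statistic='Average')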
def describe_alarms(self, period=None, statistic=None,
dimensions=None, unit=None):
"""
Retrieves all alarms for this metric. Specify a statistic, period,
or unit to filter the set of alarms further.
:type period: int
:param period: The period in seconds over which the statistic
is applied.
:type statistic: string
:param statistic: The statistic for the metric.
        :type dimensions: dict
        :param dimensions: A dictionary containing name/value
            pairs that will be used to filter the results. The key in
            the dictionary is the name of a Dimension. The value in
            the dictionary is either a scalar value of that Dimension
            name that you want to filter on, a list of values to
            filter on or None if you want all metrics with that
            Dimension name.
:type unit: string
        :rtype: list
"""
return self.connection.describe_alarms_for_metric(self.name,
self.namespace,
period,
statistic,
dimensions,
unit)
| mit | -7,716,092,623,113,754,000 | 2,506,500,175,587,727,000 | 43.589286 | 79 | 0.584435 | false |
jyundt/oval | app/main/views.py | 1 | 16557 | from collections import OrderedDict
from itertools import groupby
from operator import itemgetter, and_
import datetime
from flask import render_template, redirect, request, url_for, current_app, flash
from slackclient import SlackClient
from sqlalchemy import extract, or_
from sqlalchemy import func
from app import db
from app.util import sort_and_rank
from . import main
from .forms import FeedbackForm
from ..email import send_feedback_email
from ..models import Course, RaceClass, Racer, Team, Race, Participant
def _gen_default(year, race_class_id, race_calendar):
"""Default error case for standings type parameter
It seems useful to create a full function here in case any logging,
or more important work should be done on error.
"""
return None
def _gen_race_calendar(year, race_class_id):
"""Returns the full calendar of dates for a class and year of racing
This is necessary because dates where individuals do not participate will
not exist in their individual results otherwise.
"""
dates = Race.query.with_entities(Race.date, Race.id)\
.filter(extract("year", Race.date) == year)\
.filter(Race.points_race == True)\
.filter(Race.class_id == race_class_id).all()
dates = sorted(dates, key=lambda x: x[0])
return dates
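# _gen_race_calendar returns a date-sorted list of (date, race_id) pairs,
# e.g. (illustrative values) [(datetime.date(2016, 4, 5), 12),
# (datetime.date(2016, 4, 12), 15), ...]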
def _make_result(name, id_, rank, total_pts, pts, race_calendar, team_name, team_id):
"""Create result dictionary to make html templates more readable
"""
result = {"name": name,
"id": id_,
"rank": rank,
"total_pts": total_pts,
"race_pts": OrderedDict([(date, "-") for date,_ in race_calendar]),
"team_name": team_name,
"team_id": team_id}
for point, date in pts:
if point:
result["race_pts"][date] = point
return result
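# Illustrative shape of the dict built by _make_result (values made up):
#   {"name": "Jane Racer", "id": 7, "rank": 1, "total_pts": 42,
#    "race_pts": OrderedDict([(date1, 10), (date2, "-")]),
#    "team_name": "Team X", "team_id": 3}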
def _gen_team_standings(race_info, race_calendar):
"""Return team standings with individual race and total points
"""
# Sort race info first by team (for grouping below) then by date
# for table construction. Filter results not associated with a team.
team_race_info = sorted(
[ri for ri in race_info if ri.team_id],
key=lambda ri: (ri.team_id, ri.race_date))
def sum_team_points_by_date(team_results):
return [
(sum(ri.team_points or 0 for ri in dg), date)
for (team_id, date), dg in
groupby(team_results, key=lambda ri: (ri.team_id, ri.race_date))]
team_points_by_date = {
team_id: sum_team_points_by_date(g) for team_id, g
in groupby(team_race_info, key=lambda ri: ri.team_id)}
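    # team_points_by_date maps team_id -> [(points_scored, race_date), ...],
    # with one entry per race date the team appears in (illustrative shape).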
# Aggregate results by team
team_agg_info = [
(team_id, team_name, sum(ri.team_points or 0 for ri in g))
for ((team_id, team_name), g) in
groupby(team_race_info, key=lambda ri: (ri.team_id, ri.team_name))
]
# Filter to only teams that have points, and
# rank by total team points.
ranked_teams = sort_and_rank(
filter(itemgetter(2), team_agg_info),
key=itemgetter(2))
results = []
for rank, (team_id, team_name, total_pts) in ranked_teams:
result = _make_result(name=team_name, id_=team_id, rank=rank, total_pts=total_pts,
pts=team_points_by_date[team_id], race_calendar=race_calendar,
team_name=None, team_id=None)
results.append(result)
return results
def _gen_ind_standings(race_info, race_calendar):
"""Return top individual racer standings with individual race and total points
    Note: the individual placing tiebreak is by number of wins, followed by
    the number of second places, etc.
"""
# Sort race info first by racer (for grouping below) then by date
# for table construction.
racer_race_info = sorted(race_info, key=lambda ri: (ri.racer_id, ri.race_date))
# A list of per-race points for each racer
racer_race_points = {
racer_id: list((ri.points if not ri.points_dropped else '(%d)' % ri.points or 0, ri.race_date) for ri in g)
for racer_id, g in groupby(racer_race_info, key=lambda ri: ri.racer_id)}
# Team info for each racer
racer_teams = {
racer_id: [(ri.team_name, ri.team_id) for ri in g]
for racer_id, g in groupby(racer_race_info, key=lambda ri: ri.racer_id)
}
def placing_counts(placings):
# Helper to count placings
# Returns a tuple with the count of number of first places, then number
# of seconds, etc., up to the 8th place.
placings = filter(None, placings)
if not placings:
return ()
counts_by_place = {place: sum(1 for _ in g) for place, g in groupby(sorted(placings))}
assert min(counts_by_place.keys()) >= 1
return tuple(counts_by_place.get(place) or 0 for place in xrange(1, 9))
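    # e.g. placing_counts([1, 2, 1, 3]) -> (2, 1, 1, 0, 0, 0, 0, 0):
    # two wins, one second place, one third place (illustrative).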
# Group race results by racer
race_info_gby_racer = [
((racer_id, racer_name), list(g))
for ((racer_id, racer_name), g) in
groupby(racer_race_info, key=lambda ri: (ri.racer_id, ri.racer_name))]
# Aggregate points and placings by racer
racer_agg_info = [(
racer_id,
racer_name,
sum(r.points if r.points and not r.points_dropped else 0 for r in g),
placing_counts(r.place for r in g))
for (racer_id, racer_name), g in race_info_gby_racer]
# Filter to only racers that have any points,
# rank by total points then by placings.
ranked_racers = sort_and_rank(
filter(itemgetter(2), racer_agg_info),
key=itemgetter(2, 3))
results = []
for rank, (racer_id, racer_name, racer_points, _) in ranked_racers:
team = racer_teams[racer_id][-1] if racer_id in racer_teams else (None, None)
result = _make_result(name=racer_name, id_=racer_id, rank=rank, total_pts=racer_points,
pts=racer_race_points[racer_id], race_calendar=race_calendar,
team_name=team[0], team_id=team[1])
results.append(result)
return results
def _gen_mar_standings(race_info, race_calendar):
"""Return top MAR standings with individual race and total points
"""
# Sort race info first by racer (for grouping below) then by date
# for table construction.
racer_race_info = sorted(race_info, key=lambda ri: (ri.racer_id, ri.race_date))
# A list of per-race mar points for each racer
racer_race_mar_points = {
racer_id: list((ri.mar_points, ri.race_date) for ri in g)
for racer_id, g in groupby(racer_race_info, key=lambda ri: ri.racer_id)}
# Team info for each racer
racer_teams = {
racer_id: list((ri.team_name, ri.team_id) for ri in g)
for racer_id, g in groupby(racer_race_info, key=itemgetter(0))
}
# Aggregate mar points by racer
racer_agg_info = [
(racer_id, racer_name, sum(ri.mar_points or 0 for ri in g))
for (racer_id, racer_name), g in
groupby(racer_race_info, key=lambda ri: (ri.racer_id, ri.racer_name))]
# Filter to only racers that have any mar points,
# rank by total points.
ranked_racers = sort_and_rank(
filter(itemgetter(2), racer_agg_info),
key=itemgetter(2))
results = []
for rank, (racer_id, racer_name, racer_points) in ranked_racers:
team = racer_teams[racer_id][-1] if racer_id in racer_teams else (None, None)
result = _make_result(name=racer_name, id_=racer_id, rank=rank, total_pts=racer_points,
pts=racer_race_mar_points[racer_id], race_calendar=race_calendar,
team_name=team[0], team_id=team[1])
results.append(result)
return results
@main.route('/')
def index():
"""Fills and renders the front page index.html template
Only display recent results when they're within the past ~three months.
"""
recent_time = datetime.datetime.now() - datetime.timedelta(days=90)
recent_results = (
Race.query
.join(Participant, Race.id == Participant.race_id)
.filter(Race.date > recent_time)
.group_by(Race.id)
.having(func.count(Participant.id) > 0))
r1 = recent_results.subquery('r1')
r2 = recent_results.subquery('r2')
latest_races = (
db.session.query(r1)
.with_entities(
r1.c.id.label('id'),
r1.c.date.label('date'),
RaceClass.name.label('class_name'))
.join(r2, and_(r1.c.class_id == r2.c.class_id, r1.c.date < r2.c.date), isouter=True)
.join(RaceClass, RaceClass.id == r1.c.class_id)
.filter(r2.c.id.is_(None))
.order_by(r1.c.date.desc(), RaceClass.id))
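    # The left outer self-join pairs each race (r1) with any later race (r2)
    # in the same class; keeping only rows with no r2 match is a standard
    # "latest row per group" anti-join, i.e. the most recent race per class.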
races = latest_races.all()
return render_template('index.html', races=races)
@main.route('/standings/')
def standings():
years = sorted(set(
int(date.year) for (date,) in Race.query.with_entities(Race.date)
.filter_by(points_race=True)),
reverse=True)
try:
req_year = int(request.args.get('year'))
except (ValueError, TypeError):
req_year = None
year = req_year if req_year is not None else (years[0] if years else None)
race_classes = [
(race_class_id.id, race_class_id.name)
for race_class_id in
RaceClass.query.with_entities(
RaceClass.id, RaceClass.name)
.join(Race)
.join(Participant)
.filter(extract("year", Race.date) == year)
.filter(Race.points_race == True)
.group_by(RaceClass.id)
.order_by(RaceClass.name)]
year_race_class_ids = [race_class_id for race_class_id, _ in race_classes]
try:
req_race_class_id = int(request.args.get('race_class_id'))
except (ValueError, TypeError):
req_race_class_id = None
race_class_id = (
req_race_class_id if req_race_class_id in year_race_class_ids
else (year_race_class_ids[0] if year_race_class_ids else None))
if year is not None and race_class_id is not None:
race_info = (
Racer.query.with_entities(
Racer.id.label('racer_id'), Racer.name.label('racer_name'),
Race.date.label('race_date'), Participant.points,
Participant.team_points, Participant.mar_points,
Team.id.label('team_id'), Team.name.label('team_name'), Participant.place,
Participant.points_dropped)
.join(Participant)
.join(Team, isouter=True)
.join(Race)
.filter(Race.points_race == True)
.filter(extract("year", Race.date) == year)
.filter(Race.class_id == race_class_id)
.order_by(Racer.id, Race.date.desc())
.all())
race_calendar = _gen_race_calendar(year, race_class_id)
ind_standings = _gen_ind_standings(race_info, race_calendar)
team_standings = _gen_team_standings(race_info, race_calendar)
mar_standings = _gen_mar_standings(race_info, race_calendar)
results = (
('Individual', ind_standings),
('Team', team_standings),
('MAR', mar_standings))
return render_template(
'standings.html',
selected_year=year, selected_race_class_id=race_class_id,
years=years, race_classes=race_classes,
results=results, race_calendar=race_calendar)
return render_template('standings.html', selected_year=year, years=years)
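# Example request (hypothetical values): /standings/?year=2016&race_class_id=3
# Missing or invalid parameters fall back to the most recent year with
# points races and the first listed race class with results for that year.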
@main.route('/results/')
def results():
years = sorted(set(
int(date.year) for (date,) in Race.query.with_entities(Race.date).all()),
reverse=True)
try:
req_year = int(request.args.get('year'))
except (ValueError, TypeError):
req_year = None
year = req_year if req_year is not None else (years[0] if years else None)
race_classes = [
(race_class_id.id, race_class_id.name)
for race_class_id in
RaceClass.query.with_entities(
RaceClass.id, RaceClass.name)
.join(Race)
.join(Participant)
.filter(extract("year", Race.date) == year)
.group_by(RaceClass.id)
.order_by(RaceClass.name)]
year_race_class_ids = [race_class_id for race_class_id, _ in race_classes]
try:
req_race_class_id = int(request.args.get('race_class_id'))
except (ValueError, TypeError):
req_race_class_id = None
race_class_id = (
req_race_class_id if req_race_class_id in year_race_class_ids
else (year_race_class_ids[0] if year_race_class_ids else None))
if year is not None and race_class_id is not None:
race_info = (Racer.query.with_entities(
Racer.id, Racer.name,
Team.id, Team.name,
Participant.place, Participant.mar_place,
Race.id, Race.date,
Race.course_id, Race.average_lap, Race.fast_lap,
Race.winning_time, Race.laps, Race.starters, Race.points_race,
RaceClass.id, RaceClass.name,
Course.name, Course.length_miles)
.join(Participant, Participant.racer_id == Racer.id)
.join(Team, Team.id == Participant.team_id, isouter=True)
.join(Race, Race.id == Participant.race_id)
.join(RaceClass, RaceClass.id == Race.class_id)
.join(Course, Course.id == Race.course_id)
.filter(or_(Participant.place == 1, Participant.mar_place == 1))
.filter(extract("year", Race.date) == year)
.filter(Race.class_id == race_class_id)
.order_by(Race.date)
.all())
race_info_by_date = [
(date, list(date_group))
for date, date_group in groupby(race_info, key=itemgetter(7))]
results = []
for date, date_group in race_info_by_date:
(race_id, race_date, course_id, average_lap, fast_lap, winning_time,
laps, starters, points_race, race_class_id, race_class_name,
course_name, course_length_miles) = date_group[0][6:]
winner = None
mar_winner = None
for maybe_winner in date_group:
racer_id, racer_name, team_id, team_name, place, mar_place = maybe_winner[0:6]
if place == 1:
winner = (racer_id, racer_name, team_id, team_name)
if mar_place == 1:
mar_winner = (racer_id, racer_name, team_id, team_name)
avg_lap = (average_lap.total_seconds()) if average_lap else (
(winning_time.total_seconds() / laps)
if (winning_time and laps) else None)
avg_speed = (
course_length_miles / (avg_lap / 3600)
if course_length_miles and avg_lap
else None)
results.insert(0, {
'race_id': race_id,
'date': date,
'course_name': course_name,
'winner': winner,
'mar_winner': mar_winner,
'fast_lap': fast_lap,
'avg_speed': avg_speed,
'starters': starters,
'points_race': points_race})
return render_template(
'results.html',
selected_year=year, selected_race_class_id=race_class_id,
years=years, race_classes=race_classes, results=results)
return render_template('results.html', selected_year=year, years=years)
@main.route('/feedback/', methods=['GET', 'POST'])
def send_feedback():
form = FeedbackForm()
if form.validate_on_submit():
name = form.name.data
replyaddress = form.replyaddress.data
subject = form.subject.data
feedback = form.feedback.data
send_feedback_email(name, replyaddress, subject, feedback)
message = "%s <%s> - %s: %s" % (name, replyaddress, subject, feedback)
token = current_app.config['SLACK_OAUTH_API_TOKEN']
sc = SlackClient(token)
sc.api_call("chat.postMessage", channel="#feedback", text=message,
username="Flask")
flash('Feedback sent!')
return redirect(url_for('main.index'))
return render_template('feedback.html', form=form)
@main.route('/robots.txt')
def serve_static():
return current_app.send_static_file('robots.txt')
@main.route('/favicon.ico')
def serve_favicon():
return current_app.send_static_file('favicon.ico')
| gpl-2.0 | 7,834,835,089,934,758,000 | -5,690,846,384,426,590,000 | 38.705036 | 115 | 0.59872 | false |
rec/echomesh | code/python/external/platform/darwin/numpy/distutils/environment.py | 13 | 2280 | import os
from distutils.dist import Distribution
__metaclass__ = type
class EnvironmentConfig(object):
def __init__(self, distutils_section='ALL', **kw):
self._distutils_section = distutils_section
self._conf_keys = kw
self._conf = None
self._hook_handler = None
def dump_variable(self, name):
conf_desc = self._conf_keys[name]
hook, envvar, confvar, convert = conf_desc
if not convert:
convert = lambda x : x
print('%s.%s:' % (self._distutils_section, name))
v = self._hook_handler(name, hook)
print(' hook : %s' % (convert(v),))
if envvar:
v = os.environ.get(envvar, None)
print(' environ: %s' % (convert(v),))
if confvar and self._conf:
v = self._conf.get(confvar, (None, None))[1]
print(' config : %s' % (convert(v),))
def dump_variables(self):
for name in self._conf_keys:
self.dump_variable(name)
def __getattr__(self, name):
try:
conf_desc = self._conf_keys[name]
except KeyError:
raise AttributeError(name)
return self._get_var(name, conf_desc)
def get(self, name, default=None):
try:
conf_desc = self._conf_keys[name]
except KeyError:
return default
var = self._get_var(name, conf_desc)
if var is None:
var = default
return var
def _get_var(self, name, conf_desc):
hook, envvar, confvar, convert = conf_desc
var = self._hook_handler(name, hook)
if envvar is not None:
var = os.environ.get(envvar, var)
if confvar is not None and self._conf:
var = self._conf.get(confvar, (None, var))[1]
if convert is not None:
var = convert(var)
return var
def clone(self, hook_handler):
ec = self.__class__(distutils_section=self._distutils_section,
**self._conf_keys)
ec._hook_handler = hook_handler
return ec
def use_distribution(self, dist):
if isinstance(dist, Distribution):
self._conf = dist.get_option_dict(self._distutils_section)
else:
self._conf = dist
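# Illustrative sketch (hypothetical names, not part of numpy.distutils):
# each keyword maps to a (hook, envvar, confvar, convert) tuple consumed
# by _get_var().
#
#   fc_config = EnvironmentConfig(
#       distutils_section='config_fc',
#       compiler=(None, 'F90', 'f90exec', str),
#   )
#   # Reading fc_config.compiler computes the hook result first, then
#   # overrides it with $F90 if set, then with the 'f90exec' key of the
#   # 'config_fc' section if present.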
| mit | 4,706,455,002,696,025,000 | 2,781,140,429,654,150,700 | 31.571429 | 70 | 0.549123 | false |
zstyblik/infernal-twin | build/pillow/build/lib.linux-i686-2.7/PIL/MpegImagePlugin.py | 26 | 1823 | #
# The Python Imaging Library.
# $Id$
#
# MPEG file handling
#
# History:
# 95-09-09 fl Created
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1995.
#
# See the README file for information on usage and redistribution.
#
__version__ = "0.1"
from PIL import Image, ImageFile
from PIL._binary import i8
#
# Bitstream parser
class BitStream(object):
def __init__(self, fp):
self.fp = fp
self.bits = 0
self.bitbuffer = 0
def next(self):
return i8(self.fp.read(1))
def peek(self, bits):
while self.bits < bits:
c = self.next()
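            # defensive EOF check carried over from the C decoder; PIL's
            # i8() raises at end-of-stream instead of returning a negative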
if c < 0:
self.bits = 0
continue
self.bitbuffer = (self.bitbuffer << 8) + c
self.bits += 8
return self.bitbuffer >> (self.bits - bits) & (1 << bits) - 1
def skip(self, bits):
while self.bits < bits:
self.bitbuffer = (self.bitbuffer << 8) + i8(self.fp.read(1))
self.bits += 8
self.bits = self.bits - bits
def read(self, bits):
v = self.peek(bits)
self.bits = self.bits - bits
return v
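# A quick sanity sketch of BitStream (not from the original module): it
# accumulates whole bytes into bitbuffer and serves big-endian bit fields.
#
#   import io
#   bs = BitStream(io.BytesIO(b"\x00\x00\x01\xb3"))
#   assert bs.read(32) == 0x1B3   # the MPEG sequence-header start code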
##
# Image plugin for MPEG streams. This plugin can identify a stream,
# but it cannot read it.
class MpegImageFile(ImageFile.ImageFile):
format = "MPEG"
format_description = "MPEG"
def _open(self):
s = BitStream(self.fp)
if s.read(32) != 0x1B3:
raise SyntaxError("not an MPEG file")
self.mode = "RGB"
self.size = s.read(12), s.read(12)
# --------------------------------------------------------------------
# Registry stuff
Image.register_open("MPEG", MpegImageFile)
Image.register_extension("MPEG", ".mpg")
Image.register_extension("MPEG", ".mpeg")
Image.register_mime("MPEG", "video/mpeg")
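# With these registrations, Image.open() on an .mpg/.mpeg file returns a
# MpegImageFile whose size comes from the sequence header; as noted above,
# the stream itself is never decoded.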
| gpl-3.0 | -4,593,755,488,526,659,000 | 3,274,775,229,987,488,300 | 20.447059 | 72 | 0.550192 | false |
fitzgen/servo | tests/wpt/css-tests/tools/pytest/testing/test_unittest.py | 171 | 22302 | from _pytest.main import EXIT_NOTESTSCOLLECTED
import pytest
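# These tests exercise pytest's unittest integration through the "testdir"
# fixture from pytest's own pytester plugin: makepyfile() writes a throwaway
# test module, and inline_run()/runpytest() collect and run it.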
def test_simple_unittest(testdir):
testpath = testdir.makepyfile("""
import unittest
class MyTestCase(unittest.TestCase):
def testpassing(self):
self.assertEquals('foo', 'foo')
def test_failing(self):
self.assertEquals('foo', 'bar')
""")
reprec = testdir.inline_run(testpath)
assert reprec.matchreport("testpassing").passed
assert reprec.matchreport("test_failing").failed
def test_runTest_method(testdir):
testdir.makepyfile("""
import unittest
class MyTestCaseWithRunTest(unittest.TestCase):
def runTest(self):
self.assertEquals('foo', 'foo')
class MyTestCaseWithoutRunTest(unittest.TestCase):
def runTest(self):
self.assertEquals('foo', 'foo')
def test_something(self):
pass
""")
result = testdir.runpytest("-v")
result.stdout.fnmatch_lines("""
*MyTestCaseWithRunTest::runTest*
*MyTestCaseWithoutRunTest::test_something*
*2 passed*
""")
def test_isclasscheck_issue53(testdir):
testpath = testdir.makepyfile("""
import unittest
class _E(object):
def __getattr__(self, tag):
pass
E = _E()
""")
result = testdir.runpytest(testpath)
assert result.ret == EXIT_NOTESTSCOLLECTED
def test_setup(testdir):
testpath = testdir.makepyfile("""
import unittest
class MyTestCase(unittest.TestCase):
def setUp(self):
self.foo = 1
def setup_method(self, method):
self.foo2 = 1
def test_both(self):
self.assertEquals(1, self.foo)
assert self.foo2 == 1
def teardown_method(self, method):
assert 0, "42"
""")
reprec = testdir.inline_run("-s", testpath)
assert reprec.matchreport("test_both", when="call").passed
rep = reprec.matchreport("test_both", when="teardown")
assert rep.failed and '42' in str(rep.longrepr)
def test_setUpModule(testdir):
testpath = testdir.makepyfile("""
l = []
def setUpModule():
l.append(1)
def tearDownModule():
del l[0]
def test_hello():
assert l == [1]
def test_world():
assert l == [1]
""")
result = testdir.runpytest(testpath)
result.stdout.fnmatch_lines([
"*2 passed*",
])
def test_setUpModule_failing_no_teardown(testdir):
testpath = testdir.makepyfile("""
l = []
def setUpModule():
0/0
def tearDownModule():
l.append(1)
def test_hello():
pass
""")
reprec = testdir.inline_run(testpath)
reprec.assertoutcome(passed=0, failed=1)
call = reprec.getcalls("pytest_runtest_setup")[0]
assert not call.item.module.l
def test_new_instances(testdir):
testpath = testdir.makepyfile("""
import unittest
class MyTestCase(unittest.TestCase):
def test_func1(self):
self.x = 2
def test_func2(self):
assert not hasattr(self, 'x')
""")
reprec = testdir.inline_run(testpath)
reprec.assertoutcome(passed=2)
def test_teardown(testdir):
testpath = testdir.makepyfile("""
import unittest
class MyTestCase(unittest.TestCase):
l = []
def test_one(self):
pass
def tearDown(self):
self.l.append(None)
class Second(unittest.TestCase):
def test_check(self):
self.assertEquals(MyTestCase.l, [None])
""")
reprec = testdir.inline_run(testpath)
passed, skipped, failed = reprec.countoutcomes()
assert failed == 0, failed
assert passed == 2
assert passed + skipped + failed == 2
@pytest.mark.skipif("sys.version_info < (2,7)")
def test_unittest_skip_issue148(testdir):
testpath = testdir.makepyfile("""
import unittest
@unittest.skip("hello")
class MyTestCase(unittest.TestCase):
@classmethod
def setUpClass(self):
xxx
def test_one(self):
pass
@classmethod
def tearDownClass(self):
xxx
""")
reprec = testdir.inline_run(testpath)
reprec.assertoutcome(skipped=1)
def test_method_and_teardown_failing_reporting(testdir):
testdir.makepyfile("""
import unittest, pytest
class TC(unittest.TestCase):
def tearDown(self):
assert 0, "down1"
def test_method(self):
assert False, "down2"
""")
result = testdir.runpytest("-s")
assert result.ret == 1
result.stdout.fnmatch_lines([
"*tearDown*",
"*assert 0*",
"*test_method*",
"*assert False*",
"*1 failed*1 error*",
])
def test_setup_failure_is_shown(testdir):
testdir.makepyfile("""
import unittest
import pytest
class TC(unittest.TestCase):
def setUp(self):
assert 0, "down1"
def test_method(self):
print ("never42")
xyz
""")
result = testdir.runpytest("-s")
assert result.ret == 1
result.stdout.fnmatch_lines([
"*setUp*",
"*assert 0*down1*",
"*1 failed*",
])
assert 'never42' not in result.stdout.str()
def test_setup_setUpClass(testdir):
testpath = testdir.makepyfile("""
import unittest
import pytest
class MyTestCase(unittest.TestCase):
x = 0
@classmethod
def setUpClass(cls):
cls.x += 1
def test_func1(self):
assert self.x == 1
def test_func2(self):
assert self.x == 1
@classmethod
def tearDownClass(cls):
cls.x -= 1
def test_teareddown():
assert MyTestCase.x == 0
""")
reprec = testdir.inline_run(testpath)
reprec.assertoutcome(passed=3)
def test_setup_class(testdir):
testpath = testdir.makepyfile("""
import unittest
import pytest
class MyTestCase(unittest.TestCase):
x = 0
def setup_class(cls):
cls.x += 1
def test_func1(self):
assert self.x == 1
def test_func2(self):
assert self.x == 1
def teardown_class(cls):
cls.x -= 1
def test_teareddown():
assert MyTestCase.x == 0
""")
reprec = testdir.inline_run(testpath)
reprec.assertoutcome(passed=3)
@pytest.mark.parametrize("type", ['Error', 'Failure'])
def test_testcase_adderrorandfailure_defers(testdir, type):
testdir.makepyfile("""
from unittest import TestCase
import pytest
class MyTestCase(TestCase):
def run(self, result):
excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0)
try:
result.add%s(self, excinfo._excinfo)
except KeyboardInterrupt:
raise
except:
pytest.fail("add%s should not raise")
def test_hello(self):
pass
""" % (type, type))
result = testdir.runpytest()
assert 'should not raise' not in result.stdout.str()
@pytest.mark.parametrize("type", ['Error', 'Failure'])
def test_testcase_custom_exception_info(testdir, type):
testdir.makepyfile("""
from unittest import TestCase
import py, pytest
import _pytest._code
class MyTestCase(TestCase):
def run(self, result):
excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0)
# we fake an incompatible exception info
from _pytest.monkeypatch import monkeypatch
mp = monkeypatch()
def t(*args):
mp.undo()
raise TypeError()
mp.setattr(_pytest._code, 'ExceptionInfo', t)
try:
excinfo = excinfo._excinfo
result.add%(type)s(self, excinfo)
finally:
mp.undo()
def test_hello(self):
pass
""" % locals())
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"NOTE: Incompatible Exception Representation*",
"*ZeroDivisionError*",
"*1 failed*",
])
def test_testcase_totally_incompatible_exception_info(testdir):
item, = testdir.getitems("""
from unittest import TestCase
class MyTestCase(TestCase):
def test_hello(self):
pass
""")
item.addError(None, 42)
excinfo = item._excinfo.pop(0)
assert 'ERROR: Unknown Incompatible' in str(excinfo.getrepr())
def test_module_level_pytestmark(testdir):
testpath = testdir.makepyfile("""
import unittest
import pytest
pytestmark = pytest.mark.xfail
class MyTestCase(unittest.TestCase):
def test_func1(self):
assert 0
""")
reprec = testdir.inline_run(testpath, "-s")
reprec.assertoutcome(skipped=1)
def test_trial_testcase_skip_property(testdir):
pytest.importorskip('twisted.trial.unittest')
testpath = testdir.makepyfile("""
from twisted.trial import unittest
class MyTestCase(unittest.TestCase):
skip = 'dont run'
def test_func(self):
pass
""")
reprec = testdir.inline_run(testpath, "-s")
reprec.assertoutcome(skipped=1)
def test_trial_testfunction_skip_property(testdir):
pytest.importorskip('twisted.trial.unittest')
testpath = testdir.makepyfile("""
from twisted.trial import unittest
class MyTestCase(unittest.TestCase):
def test_func(self):
pass
test_func.skip = 'dont run'
""")
reprec = testdir.inline_run(testpath, "-s")
reprec.assertoutcome(skipped=1)
def test_trial_testcase_todo_property(testdir):
pytest.importorskip('twisted.trial.unittest')
testpath = testdir.makepyfile("""
from twisted.trial import unittest
class MyTestCase(unittest.TestCase):
todo = 'dont run'
def test_func(self):
assert 0
""")
reprec = testdir.inline_run(testpath, "-s")
reprec.assertoutcome(skipped=1)
def test_trial_testfunction_todo_property(testdir):
pytest.importorskip('twisted.trial.unittest')
testpath = testdir.makepyfile("""
from twisted.trial import unittest
class MyTestCase(unittest.TestCase):
def test_func(self):
assert 0
test_func.todo = 'dont run'
""")
reprec = testdir.inline_run(testpath, "-s")
reprec.assertoutcome(skipped=1)
class TestTrialUnittest:
def setup_class(cls):
cls.ut = pytest.importorskip("twisted.trial.unittest")
def test_trial_testcase_runtest_not_collected(self, testdir):
testdir.makepyfile("""
from twisted.trial.unittest import TestCase
class TC(TestCase):
def test_hello(self):
pass
""")
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
testdir.makepyfile("""
from twisted.trial.unittest import TestCase
class TC(TestCase):
def runTest(self):
pass
""")
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_trial_exceptions_with_skips(self, testdir):
testdir.makepyfile("""
from twisted.trial import unittest
import pytest
class TC(unittest.TestCase):
def test_hello(self):
pytest.skip("skip_in_method")
@pytest.mark.skipif("sys.version_info != 1")
def test_hello2(self):
pass
@pytest.mark.xfail(reason="iwanto")
def test_hello3(self):
assert 0
def test_hello4(self):
pytest.xfail("i2wanto")
def test_trial_skip(self):
pass
test_trial_skip.skip = "trialselfskip"
def test_trial_todo(self):
assert 0
test_trial_todo.todo = "mytodo"
def test_trial_todo_success(self):
pass
test_trial_todo_success.todo = "mytodo"
class TC2(unittest.TestCase):
def setup_class(cls):
pytest.skip("skip_in_setup_class")
def test_method(self):
pass
""")
result = testdir.runpytest("-rxs")
assert result.ret == 0
result.stdout.fnmatch_lines_random([
"*XFAIL*test_trial_todo*",
"*trialselfskip*",
"*skip_in_setup_class*",
"*iwanto*",
"*i2wanto*",
"*sys.version_info*",
"*skip_in_method*",
"*4 skipped*3 xfail*1 xpass*",
])
def test_trial_error(self, testdir):
testdir.makepyfile("""
from twisted.trial.unittest import TestCase
from twisted.internet.defer import Deferred
from twisted.internet import reactor
class TC(TestCase):
def test_one(self):
crash
def test_two(self):
def f(_):
crash
d = Deferred()
d.addCallback(f)
reactor.callLater(0.3, d.callback, None)
return d
def test_three(self):
def f():
pass # will never get called
reactor.callLater(0.3, f)
# will crash at teardown
def test_four(self):
def f(_):
reactor.callLater(0.3, f)
crash
d = Deferred()
d.addCallback(f)
reactor.callLater(0.3, d.callback, None)
return d
# will crash both at test time and at teardown
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*ERRORS*",
"*DelayedCalls*",
"*test_four*",
"*NameError*crash*",
"*test_one*",
"*NameError*crash*",
"*test_three*",
"*DelayedCalls*",
"*test_two*",
"*crash*",
])
def test_trial_pdb(self, testdir):
p = testdir.makepyfile("""
from twisted.trial import unittest
import pytest
class TC(unittest.TestCase):
def test_hello(self):
assert 0, "hellopdb"
""")
child = testdir.spawn_pytest(p)
child.expect("hellopdb")
child.sendeof()
def test_djangolike_testcase(testdir):
    # contributed by Morten Breekevold
testdir.makepyfile("""
from unittest import TestCase, main
class DjangoLikeTestCase(TestCase):
def setUp(self):
print ("setUp()")
def test_presetup_has_been_run(self):
print ("test_thing()")
self.assertTrue(hasattr(self, 'was_presetup'))
def tearDown(self):
print ("tearDown()")
def __call__(self, result=None):
try:
self._pre_setup()
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
import sys
result.addError(self, sys.exc_info())
return
super(DjangoLikeTestCase, self).__call__(result)
try:
self._post_teardown()
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
import sys
result.addError(self, sys.exc_info())
return
def _pre_setup(self):
print ("_pre_setup()")
self.was_presetup = True
def _post_teardown(self):
print ("_post_teardown()")
""")
result = testdir.runpytest("-s")
assert result.ret == 0
result.stdout.fnmatch_lines([
"*_pre_setup()*",
"*setUp()*",
"*test_thing()*",
"*tearDown()*",
"*_post_teardown()*",
])
def test_unittest_not_shown_in_traceback(testdir):
testdir.makepyfile("""
import unittest
class t(unittest.TestCase):
def test_hello(self):
x = 3
self.assertEquals(x, 4)
""")
res = testdir.runpytest()
assert "failUnlessEqual" not in res.stdout.str()
def test_unorderable_types(testdir):
testdir.makepyfile("""
import unittest
class TestJoinEmpty(unittest.TestCase):
pass
def make_test():
class Test(unittest.TestCase):
pass
Test.__name__ = "TestFoo"
return Test
TestFoo = make_test()
""")
result = testdir.runpytest()
assert "TypeError" not in result.stdout.str()
assert result.ret == EXIT_NOTESTSCOLLECTED
def test_unittest_typerror_traceback(testdir):
testdir.makepyfile("""
import unittest
class TestJoinEmpty(unittest.TestCase):
def test_hello(self, arg1):
pass
""")
result = testdir.runpytest()
assert "TypeError" in result.stdout.str()
assert result.ret == 1
@pytest.mark.skipif("sys.version_info < (2,7)")
def test_unittest_unexpected_failure(testdir):
testdir.makepyfile("""
import unittest
class MyTestCase(unittest.TestCase):
@unittest.expectedFailure
def test_func1(self):
assert 0
@unittest.expectedFailure
def test_func2(self):
assert 1
""")
result = testdir.runpytest("-rxX")
result.stdout.fnmatch_lines([
"*XFAIL*MyTestCase*test_func1*",
"*XPASS*MyTestCase*test_func2*",
"*1 xfailed*1 xpass*",
])
@pytest.mark.parametrize('fix_type, stmt', [
('fixture', 'return'),
('yield_fixture', 'yield'),
])
def test_unittest_setup_interaction(testdir, fix_type, stmt):
testdir.makepyfile("""
import unittest
import pytest
class MyTestCase(unittest.TestCase):
@pytest.{fix_type}(scope="class", autouse=True)
def perclass(self, request):
request.cls.hello = "world"
{stmt}
@pytest.{fix_type}(scope="function", autouse=True)
def perfunction(self, request):
request.instance.funcname = request.function.__name__
{stmt}
def test_method1(self):
assert self.funcname == "test_method1"
assert self.hello == "world"
def test_method2(self):
assert self.funcname == "test_method2"
def test_classattr(self):
assert self.__class__.hello == "world"
""".format(fix_type=fix_type, stmt=stmt))
result = testdir.runpytest()
result.stdout.fnmatch_lines("*3 passed*")
def test_non_unittest_no_setupclass_support(testdir):
testpath = testdir.makepyfile("""
class TestFoo:
x = 0
@classmethod
def setUpClass(cls):
cls.x = 1
def test_method1(self):
assert self.x == 0
@classmethod
def tearDownClass(cls):
cls.x = 1
def test_not_teareddown():
assert TestFoo.x == 0
""")
reprec = testdir.inline_run(testpath)
reprec.assertoutcome(passed=2)
def test_no_teardown_if_setupclass_failed(testdir):
testpath = testdir.makepyfile("""
import unittest
class MyTestCase(unittest.TestCase):
x = 0
@classmethod
def setUpClass(cls):
cls.x = 1
assert False
def test_func1(self):
cls.x = 10
@classmethod
def tearDownClass(cls):
cls.x = 100
def test_notTornDown():
assert MyTestCase.x == 1
""")
reprec = testdir.inline_run(testpath)
reprec.assertoutcome(passed=1, failed=1)
def test_issue333_result_clearing(testdir):
testdir.makeconftest("""
def pytest_runtest_call(__multicall__, item):
__multicall__.execute()
assert 0
""")
testdir.makepyfile("""
import unittest
class TestIt(unittest.TestCase):
def test_func(self):
0/0
""")
reprec = testdir.inline_run()
reprec.assertoutcome(failed=1)
@pytest.mark.skipif("sys.version_info < (2,7)")
def test_unittest_raise_skip_issue748(testdir):
testdir.makepyfile(test_foo="""
import unittest
class MyTestCase(unittest.TestCase):
def test_one(self):
raise unittest.SkipTest('skipping due to reasons')
""")
result = testdir.runpytest("-v", '-rs')
result.stdout.fnmatch_lines("""
*SKIP*[1]*test_foo.py*skipping due to reasons*
*1 skipped*
""")
@pytest.mark.skipif("sys.version_info < (2,7)")
def test_unittest_skip_issue1169(testdir):
testdir.makepyfile(test_foo="""
import unittest
class MyTestCase(unittest.TestCase):
@unittest.skip("skipping due to reasons")
def test_skip(self):
self.fail()
""")
result = testdir.runpytest("-v", '-rs')
result.stdout.fnmatch_lines("""
*SKIP*[1]*skipping due to reasons*
*1 skipped*
""")
| mpl-2.0 | 2,233,509,671,031,628,500 | -4,555,387,990,792,914,400 | 29.260516 | 71 | 0.533002 | false |
napkindrawing/ansible | contrib/inventory/zone.py | 196 | 1490 | #!/usr/bin/env python
# (c) 2015, Dagobert Michelsen <[email protected]>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from subprocess import Popen, PIPE
import sys
import json
result = {}
result['all'] = {}
pipe = Popen(['zoneadm', 'list', '-ip'], stdout=PIPE, universal_newlines=True)
result['all']['hosts'] = []
for l in pipe.stdout.readlines():
# 1:work:running:/zones/work:3126dc59-9a07-4829-cde9-a816e4c5040e:native:shared
s = l.split(':')
if s[1] != 'global':
result['all']['hosts'].append(s[1])
result['all']['vars'] = {}
result['all']['vars']['ansible_connection'] = 'zone'
if len(sys.argv) == 2 and sys.argv[1] == '--list':
print(json.dumps(result))
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
print(json.dumps({'ansible_connection': 'zone'}))
else:
sys.stderr.write("Need an argument, either --list or --host <host>\n")
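# Sample output for a hypothetical host with one non-global zone "work"
# (key order may differ):
#
#   $ ./zone.py --list
#   {"all": {"hosts": ["work"], "vars": {"ansible_connection": "zone"}}}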
| gpl-3.0 | -2,888,301,002,830,469,600 | 6,914,277,815,920,776,000 | 33.651163 | 83 | 0.686577 | false |
dpiers/coderang-meteor | public/jsrepl/extern/python/unclosured/lib/python2.7/UserList.py | 327 | 3644 | """A more or less complete user-defined wrapper around list objects."""
import collections
class UserList(collections.MutableSequence):
def __init__(self, initlist=None):
self.data = []
if initlist is not None:
# XXX should this accept an arbitrary sequence?
if type(initlist) == type(self.data):
self.data[:] = initlist
elif isinstance(initlist, UserList):
self.data[:] = initlist.data[:]
else:
self.data = list(initlist)
def __repr__(self): return repr(self.data)
def __lt__(self, other): return self.data < self.__cast(other)
def __le__(self, other): return self.data <= self.__cast(other)
def __eq__(self, other): return self.data == self.__cast(other)
def __ne__(self, other): return self.data != self.__cast(other)
def __gt__(self, other): return self.data > self.__cast(other)
def __ge__(self, other): return self.data >= self.__cast(other)
def __cast(self, other):
if isinstance(other, UserList): return other.data
else: return other
def __cmp__(self, other):
return cmp(self.data, self.__cast(other))
__hash__ = None # Mutable sequence, so not hashable
def __contains__(self, item): return item in self.data
def __len__(self): return len(self.data)
def __getitem__(self, i): return self.data[i]
def __setitem__(self, i, item): self.data[i] = item
def __delitem__(self, i): del self.data[i]
def __getslice__(self, i, j):
i = max(i, 0); j = max(j, 0)
return self.__class__(self.data[i:j])
def __setslice__(self, i, j, other):
i = max(i, 0); j = max(j, 0)
if isinstance(other, UserList):
self.data[i:j] = other.data
elif isinstance(other, type(self.data)):
self.data[i:j] = other
else:
self.data[i:j] = list(other)
def __delslice__(self, i, j):
i = max(i, 0); j = max(j, 0)
del self.data[i:j]
def __add__(self, other):
if isinstance(other, UserList):
return self.__class__(self.data + other.data)
elif isinstance(other, type(self.data)):
return self.__class__(self.data + other)
else:
return self.__class__(self.data + list(other))
def __radd__(self, other):
if isinstance(other, UserList):
return self.__class__(other.data + self.data)
elif isinstance(other, type(self.data)):
return self.__class__(other + self.data)
else:
return self.__class__(list(other) + self.data)
def __iadd__(self, other):
if isinstance(other, UserList):
self.data += other.data
elif isinstance(other, type(self.data)):
self.data += other
else:
self.data += list(other)
return self
def __mul__(self, n):
return self.__class__(self.data*n)
__rmul__ = __mul__
def __imul__(self, n):
self.data *= n
return self
def append(self, item): self.data.append(item)
def insert(self, i, item): self.data.insert(i, item)
def pop(self, i=-1): return self.data.pop(i)
def remove(self, item): self.data.remove(item)
def count(self, item): return self.data.count(item)
def index(self, item, *args): return self.data.index(item, *args)
def reverse(self): self.data.reverse()
def sort(self, *args, **kwds): self.data.sort(*args, **kwds)
def extend(self, other):
if isinstance(other, UserList):
self.data.extend(other.data)
else:
self.data.extend(other)
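# A minimal sketch (not part of the module) of the intended use: subclass
# UserList and override only the mutators you care about, reusing the rest.
#
#   class UniqueList(UserList):
#       def append(self, item):
#           if item not in self.data:
#               UserList.append(self, item)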
| mit | -3,298,578,448,412,530,000 | -5,656,248,794,895,835,000 | 40.409091 | 71 | 0.565313 | false |
efiring/numpy-work | numpy/f2py/common_rules.py | 48 | 4630 | #!/usr/bin/env python
"""
Build common block mechanism for f2py2e.
Copyright 2000 Pearu Peterson all rights reserved,
Pearu Peterson <[email protected]>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy License
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Date: 2005/05/06 10:57:33 $
Pearu Peterson
"""
__version__ = "$Revision: 1.19 $"[10:-1]
import __version__
f2py_version = __version__.version
import pprint
import sys
errmess=sys.stderr.write
outmess=sys.stdout.write
show=pprint.pprint
from auxfuncs import *
import capi_maps
import func2subr
from crackfortran import rmbadname
##############
def findcommonblocks(block,top=1):
ret = []
if hascommon(block):
for n in block['common'].keys():
vars={}
for v in block['common'][n]:
vars[v]=block['vars'][v]
ret.append((n,block['common'][n],vars))
elif hasbody(block):
for b in block['body']:
ret=ret+findcommonblocks(b,0)
if top:
tret=[]
names=[]
for t in ret:
if t[0] not in names:
names.append(t[0])
tret.append(t)
return tret
return ret
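# findcommonblocks() returns (name, variable-name-list, variable-dict)
# triples; e.g. for "common /cdata/ x, n" roughly
# [('cdata', ['x', 'n'], {'x': {...}, 'n': {...}})].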
def buildhooks(m):
ret = {'commonhooks':[],'initcommonhooks':[],'docs':['"COMMON blocks:\\n"']}
fwrap = ['']
def fadd(line,s=fwrap): s[0] = '%s\n %s'%(s[0],line)
chooks = ['']
def cadd(line,s=chooks): s[0] = '%s\n%s'%(s[0],line)
ihooks = ['']
def iadd(line,s=ihooks): s[0] = '%s\n%s'%(s[0],line)
doc = ['']
def dadd(line,s=doc): s[0] = '%s\n%s'%(s[0],line)
for (name,vnames,vars) in findcommonblocks(m):
lower_name = name.lower()
hnames,inames = [],[]
for n in vnames:
if isintent_hide(vars[n]): hnames.append(n)
else: inames.append(n)
if hnames:
outmess('\t\tConstructing COMMON block support for "%s"...\n\t\t %s\n\t\t Hidden: %s\n'%(name,','.join(inames),','.join(hnames)))
else:
outmess('\t\tConstructing COMMON block support for "%s"...\n\t\t %s\n'%(name,','.join(inames)))
fadd('subroutine f2pyinit%s(setupfunc)'%name)
fadd('external setupfunc')
for n in vnames:
fadd(func2subr.var2fixfortran(vars,n))
if name=='_BLNK_':
fadd('common %s'%(','.join(vnames)))
else:
fadd('common /%s/ %s'%(name,','.join(vnames)))
fadd('call setupfunc(%s)'%(','.join(inames)))
fadd('end\n')
cadd('static FortranDataDef f2py_%s_def[] = {'%(name))
idims=[]
for n in inames:
ct = capi_maps.getctype(vars[n])
at = capi_maps.c2capi_map[ct]
dm = capi_maps.getarrdims(n,vars[n])
if dm['dims']: idims.append('(%s)'%(dm['dims']))
else: idims.append('')
dms=dm['dims'].strip()
if not dms: dms='-1'
cadd('\t{\"%s\",%s,{{%s}},%s},'%(n,dm['rank'],dms,at))
cadd('\t{NULL}\n};')
inames1 = rmbadname(inames)
inames1_tps = ','.join(map(lambda s:'char *'+s,inames1))
cadd('static void f2py_setup_%s(%s) {'%(name,inames1_tps))
cadd('\tint i_f2py=0;')
for n in inames1:
cadd('\tf2py_%s_def[i_f2py++].data = %s;'%(name,n))
cadd('}')
if '_' in lower_name:
F_FUNC='F_FUNC_US'
else:
F_FUNC='F_FUNC'
cadd('extern void %s(f2pyinit%s,F2PYINIT%s)(void(*)(%s));'\
%(F_FUNC,lower_name,name.upper(),
','.join(['char*']*len(inames1))))
cadd('static void f2py_init_%s(void) {'%name)
cadd('\t%s(f2pyinit%s,F2PYINIT%s)(f2py_setup_%s);'\
%(F_FUNC,lower_name,name.upper(),name))
cadd('}\n')
iadd('\tF2PyDict_SetItemString(d, \"%s\", PyFortranObject_New(f2py_%s_def,f2py_init_%s));'%(name,name,name))
tname = name.replace('_','\\_')
dadd('\\subsection{Common block \\texttt{%s}}\n'%(tname))
dadd('\\begin{description}')
for n in inames:
dadd('\\item[]{{}\\verb@%s@{}}'%(capi_maps.getarrdocsign(n,vars[n])))
if hasnote(vars[n]):
note = vars[n]['note']
if type(note) is type([]): note='\n'.join(note)
dadd('--- %s'%(note))
dadd('\\end{description}')
ret['docs'].append('"\t/%s/ %s\\n"'%(name,','.join(map(lambda v,d:v+d,inames,idims))))
ret['commonhooks']=chooks
ret['initcommonhooks']=ihooks
ret['latexdoc']=doc[0]
if len(ret['docs'])<=1: ret['docs']=''
return ret,fwrap[0]
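# The Fortran glue assembled by fadd() above looks roughly like this for a
# COMMON block /cdata/ with one visible variable x (the declaration line is
# produced by func2subr.var2fixfortran and only assumed here):
#
#       subroutine f2pyinitcdata(setupfunc)
#       external setupfunc
#       real x
#       common /cdata/ x
#       call setupfunc(x)
#       end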
| bsd-3-clause | -813,629,891,259,114,400 | 8,602,438,914,153,555,000 | 34.615385 | 143 | 0.528942 | false |
Smart-Torvy/torvy-home-assistant | tests/components/lock/test_demo.py | 23 | 1484 | """The tests for the Demo lock platform."""
import unittest
from homeassistant.bootstrap import setup_component
from homeassistant.components import lock
from tests.common import get_test_home_assistant
FRONT = 'lock.front_door'
KITCHEN = 'lock.kitchen_door'
class TestLockDemo(unittest.TestCase):
"""Test the demo lock."""
def setUp(self): # pylint: disable=invalid-name
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.assertTrue(setup_component(self.hass, lock.DOMAIN, {
'lock': {
'platform': 'demo'
}
}))
def tearDown(self): # pylint: disable=invalid-name
"""Stop everything that was started."""
self.hass.stop()
def test_is_locked(self):
"""Test if lock is locked."""
self.assertTrue(lock.is_locked(self.hass, FRONT))
self.hass.states.is_state(FRONT, 'locked')
self.assertFalse(lock.is_locked(self.hass, KITCHEN))
self.hass.states.is_state(KITCHEN, 'unlocked')
def test_locking(self):
"""Test the locking of a lock."""
lock.lock(self.hass, KITCHEN)
self.hass.block_till_done()
self.assertTrue(lock.is_locked(self.hass, KITCHEN))
def test_unlocking(self):
"""Test the unlocking of a lock."""
lock.unlock(self.hass, FRONT)
self.hass.block_till_done()
self.assertFalse(lock.is_locked(self.hass, FRONT))
| mit | -2,509,729,510,781,074,400 | -5,229,465,073,482,189,000 | 28.68 | 65 | 0.630054 | false |
ench0/external_chromium_org_third_party_skia | platform_tools/android/tests/makefile_writer_tests.py | 65 | 7127 | #!/usr/bin/python
# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Test makefile_writer.py
"""
import argparse
import os
import shutil
import sys
import tempfile
import test_variables
import unittest
import utils
sys.path.append(test_variables.GYP_GEN_DIR)
import makefile_writer
import tool_makefile_writer
import vars_dict_lib
MAKEFILE_NAME = test_variables.ANDROID_MK
REBASELINE_MSG = ('If you\'ve modified makefile_writer.py, run '
'"makefile_writer_tests.py --rebaseline" to rebaseline')
TOOL_DIR = 'tool'
def generate_dummy_vars_dict(name):
"""Create a VarsDict and fill it with dummy entries.
Args:
name: string to be appended to each entry, if not None.
Returns:
A VarsDict with dummy entries.
"""
vars_dict = vars_dict_lib.VarsDict()
for key in vars_dict.keys():
entry = key.lower()
if name:
entry += '_' + name
vars_dict[key].add(entry)
return vars_dict
def generate_write_local_vars_params():
"""Generator to compute params for write_local_vars tests.
  Each iteration yields a new tuple (filename, append, name), one for each
  way of calling write_local_vars exercised by the tests.
Yields:
filename: filename corresponding to the expectation file for this
combination of params to write_local_vars.
append: boolean to pass as append parameter to write_local_vars.
name: string to pass as name parameter to write_local_vars.
"""
for append in [ True, False ]:
for name in [ None, 'arm', 'foo' ]:
filename = 'write_local_vars'
if append:
filename += '_append'
else:
filename += '_no_append'
if name:
filename += '_' + name
else:
filename += '_no_name'
yield (filename, append, name)
def generate_dummy_vars_dict_data(name, condition):
"""Create a dummy VarsDictData.
Create a dummy VarsDictData, using the name for both the contained
VarsDict and the VarsDictData
Args:
name: name used by both the returned VarsDictData and its contained
VarsDict.
condition: condition used by the returned VarsDictData.
Returns:
A VarsDictData with dummy values, using the passed in info.
"""
vars_dict = generate_dummy_vars_dict(name)
return makefile_writer.VarsDictData(vars_dict=vars_dict, name=name,
condition=condition)
def generate_dummy_makefile(target_dir):
"""Create a dummy makefile to demonstrate how it works.
Use dummy values unrelated to any gyp files. Its output should remain the
same unless/until makefile_writer.write_android_mk changes.
Args:
target_dir: directory in which to write the resulting Android.mk
"""
common_vars_dict = generate_dummy_vars_dict(None)
deviation_params = [('foo', 'COND'), ('bar', None)]
deviations = [generate_dummy_vars_dict_data(name, condition)
for (name, condition) in deviation_params]
makefile_writer.write_android_mk(target_dir=target_dir,
common=common_vars_dict,
deviations_from_common=deviations)
def generate_dummy_tool_makefile(target_dir):
"""Create a dummy makefile for a tool.
Args:
target_dir: directory in which to write the resulting Android.mk
"""
vars_dict = generate_dummy_vars_dict(None)
tool_makefile_writer.write_tool_android_mk(target_dir=target_dir,
var_dict=vars_dict)
class MakefileWriterTest(unittest.TestCase):
def test_write_group_empty(self):
f = tempfile.TemporaryFile()
assert f.tell() == 0
for empty in (None, []):
for truth in (True, False):
makefile_writer.write_group(f, 'name', empty, truth)
self.assertEqual(f.tell(), 0)
f.close()
def test_write_group(self):
animals = ('dog', 'cat', 'mouse', 'elephant')
fd, filename = tempfile.mkstemp()
with open(filename, 'w') as f:
makefile_writer.write_group(f, 'animals', animals, False)
os.close(fd)
# Now confirm that it matches expectations
utils.compare_to_expectation(filename, 'animals.txt', self.assertTrue)
with open(filename, 'w') as f:
makefile_writer.write_group(f, 'animals_append', animals, True)
# Now confirm that it matches expectations
utils.compare_to_expectation(filename, 'animals_append.txt',
self.assertTrue)
os.remove(filename)
def test_write_local_vars(self):
vars_dict = generate_dummy_vars_dict(None)
# Compare various ways of calling write_local_vars to expectations.
for (filename, append, name) in generate_write_local_vars_params():
fd, outfile = tempfile.mkstemp()
with open(outfile, 'w') as f:
makefile_writer.write_local_vars(f, vars_dict, append, name)
os.close(fd)
# Compare to the expected file.
utils.compare_to_expectation(outfile, filename, self.assertTrue,
REBASELINE_MSG)
# KNOWN_TARGETS is always a key in the input VarsDict, but it should not
# be written to the resulting file.
# Note that this assumes none of our dummy entries is 'KNOWN_TARGETS'.
known_targets_name = 'KNOWN_TARGETS'
self.assertEqual(len(vars_dict[known_targets_name]), 1)
with open(outfile, 'r') as f:
self.assertNotIn(known_targets_name, f.read())
os.remove(outfile)
def test_write_android_mk(self):
outdir = tempfile.mkdtemp()
generate_dummy_makefile(outdir)
utils.compare_to_expectation(os.path.join(outdir, MAKEFILE_NAME),
MAKEFILE_NAME, self.assertTrue, REBASELINE_MSG)
shutil.rmtree(outdir)
def test_tool_writer(self):
outdir = tempfile.mkdtemp()
tool_dir = os.path.join(outdir, TOOL_DIR)
os.mkdir(tool_dir)
generate_dummy_tool_makefile(tool_dir)
utils.compare_to_expectation(os.path.join(tool_dir, MAKEFILE_NAME),
os.path.join(TOOL_DIR, MAKEFILE_NAME),
self.assertTrue, REBASELINE_MSG)
def main():
loader = unittest.TestLoader()
suite = loader.loadTestsFromTestCase(MakefileWriterTest)
results = unittest.TextTestRunner(verbosity=2).run(suite)
print repr(results)
if not results.wasSuccessful():
raise Exception('failed one or more unittests')
def rebaseline():
generate_dummy_makefile(utils.EXPECTATIONS_DIR)
vars_dict = generate_dummy_vars_dict(None)
for (filename, append, name) in generate_write_local_vars_params():
with open(os.path.join(utils.EXPECTATIONS_DIR, filename), 'w') as f:
makefile_writer.write_local_vars(f, vars_dict, append, name)
generate_dummy_tool_makefile(os.path.join(utils.EXPECTATIONS_DIR, TOOL_DIR))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--rebaseline', help='Rebaseline expectations.',
action='store_true')
args = parser.parse_args()
if args.rebaseline:
rebaseline()
else:
main()
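# Typical invocations:
#   python makefile_writer_tests.py               # run the unit tests
#   python makefile_writer_tests.py --rebaseline  # regenerate expectations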
| bsd-3-clause | 8,231,598,008,942,421,000 | -8,086,827,479,414,153,000 | 30.816964 | 80 | 0.659885 | false |
ykaneko/quantum | quantum/scheduler/dhcp_agent_scheduler.py | 2 | 4728 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
from sqlalchemy.orm import exc
from sqlalchemy.sql import exists
from quantum.common import constants
from quantum.db import agents_db
from quantum.db import agentschedulers_db
from quantum.db import models_v2
from quantum.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class ChanceScheduler(object):
"""Allocate a DHCP agent for a network in a random way.
    A more sophisticated scheduler (similar to the filter scheduler in
    nova?) can be introduced later.
"""
def schedule(self, plugin, context, network):
"""Schedule the network to an active DHCP agent if there
is no active DHCP agent hosting it.
"""
#TODO(gongysh) don't schedule the networks with only
# subnets whose enable_dhcp is false
with context.session.begin(subtransactions=True):
dhcp_agents = plugin.get_dhcp_agents_hosting_networks(
context, [network['id']], active=True)
if dhcp_agents:
LOG.debug(_('Network %s is hosted already'),
network['id'])
return
enabled_dhcp_agents = plugin.get_agents_db(
context, filters={
'agent_type': [constants.AGENT_TYPE_DHCP],
'admin_state_up': [True]})
if not enabled_dhcp_agents:
LOG.warn(_('No enabled DHCP agents'))
return
active_dhcp_agents = [enabled_dhcp_agent for enabled_dhcp_agent in
enabled_dhcp_agents if not
agents_db.AgentDbMixin.is_agent_down(
enabled_dhcp_agent['heartbeat_timestamp'])]
if not active_dhcp_agents:
LOG.warn(_('No active DHCP agents'))
return
chosen_agent = random.choice(active_dhcp_agents)
binding = agentschedulers_db.NetworkDhcpAgentBinding()
binding.dhcp_agent = chosen_agent
binding.network_id = network['id']
context.session.add(binding)
LOG.debug(_('Network %(network_id)s is scheduled to be hosted by '
'DHCP agent %(agent_id)s'),
{'network_id': network['id'],
'agent_id': chosen_agent['id']})
return chosen_agent
def auto_schedule_networks(self, plugin, context, host):
"""Schedule non-hosted networks to the DHCP agent on
the specified host.
"""
with context.session.begin(subtransactions=True):
query = context.session.query(agents_db.Agent)
query = query.filter(agents_db.Agent.agent_type ==
constants.AGENT_TYPE_DHCP,
agents_db.Agent.host == host,
agents_db.Agent.admin_state_up == True)
try:
dhcp_agent = query.one()
except (exc.MultipleResultsFound, exc.NoResultFound):
LOG.warn(_('No enabled DHCP agent on host %s'),
host)
return False
if agents_db.AgentDbMixin.is_agent_down(
dhcp_agent.heartbeat_timestamp):
LOG.warn(_('DHCP agent %s is not active'), dhcp_agent.id)
#TODO(gongysh) consider the disabled agent's network
net_stmt = ~exists().where(
models_v2.Network.id ==
agentschedulers_db.NetworkDhcpAgentBinding.network_id)
net_ids = context.session.query(
models_v2.Network.id).filter(net_stmt).all()
if not net_ids:
LOG.debug(_('No non-hosted networks'))
return False
for net_id in net_ids:
binding = agentschedulers_db.NetworkDhcpAgentBinding()
binding.dhcp_agent = dhcp_agent
binding.network_id = net_id[0]
context.session.add(binding)
return True
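# A minimal sketch (names assumed) of how a plugin mixin drives this class;
# schedule() returns the chosen agent, or None when the network is already
# hosted or no active DHCP agent is available:
#
#   scheduler = ChanceScheduler()
#   agent = scheduler.schedule(plugin, context, {'id': network_id})
#   if agent is None:
#       pass  # nothing scheduled this round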
| apache-2.0 | -4,432,180,635,112,629,000 | 2,756,286,798,019,926,000 | 41.981818 | 78 | 0.58143 | false |
dennis-sheil/commandergenius | project/jni/python/src/Lib/plat-mac/Carbon/QuickTime.py | 81 | 129090 | # Generated from 'Movies.h'
def FOUR_CHAR_CODE(x): return x
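# QuickTime "OSType" codes are four-character strings; the C headers pack
# them into 32-bit integers, but this port keeps them as-is, so
# FOUR_CHAR_CODE is the identity function.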
xmlIdentifierUnrecognized = -1
kControllerMinimum = -0xf777
notImplementedMusicOSErr = -2071
cantSendToSynthesizerOSErr = -2072
cantReceiveFromSynthesizerOSErr = -2073
illegalVoiceAllocationOSErr = -2074
illegalPartOSErr = -2075
illegalChannelOSErr = -2076
illegalKnobOSErr = -2077
illegalKnobValueOSErr = -2078
illegalInstrumentOSErr = -2079
illegalControllerOSErr = -2080
midiManagerAbsentOSErr = -2081
synthesizerNotRespondingOSErr = -2082
synthesizerOSErr = -2083
illegalNoteChannelOSErr = -2084
noteChannelNotAllocatedOSErr = -2085
tunePlayerFullOSErr = -2086
tuneParseOSErr = -2087
MovieFileType = FOUR_CHAR_CODE('MooV')
MovieScrapType = FOUR_CHAR_CODE('moov')
MovieResourceType = FOUR_CHAR_CODE('moov')
MovieForwardPointerResourceType = FOUR_CHAR_CODE('fore')
MovieBackwardPointerResourceType = FOUR_CHAR_CODE('back')
MovieResourceAtomType = FOUR_CHAR_CODE('moov')
MovieDataAtomType = FOUR_CHAR_CODE('mdat')
FreeAtomType = FOUR_CHAR_CODE('free')
SkipAtomType = FOUR_CHAR_CODE('skip')
WideAtomPlaceholderType = FOUR_CHAR_CODE('wide')
MediaHandlerType = FOUR_CHAR_CODE('mhlr')
DataHandlerType = FOUR_CHAR_CODE('dhlr')
VideoMediaType = FOUR_CHAR_CODE('vide')
SoundMediaType = FOUR_CHAR_CODE('soun')
TextMediaType = FOUR_CHAR_CODE('text')
BaseMediaType = FOUR_CHAR_CODE('gnrc')
MPEGMediaType = FOUR_CHAR_CODE('MPEG')
MusicMediaType = FOUR_CHAR_CODE('musi')
TimeCodeMediaType = FOUR_CHAR_CODE('tmcd')
SpriteMediaType = FOUR_CHAR_CODE('sprt')
FlashMediaType = FOUR_CHAR_CODE('flsh')
MovieMediaType = FOUR_CHAR_CODE('moov')
TweenMediaType = FOUR_CHAR_CODE('twen')
ThreeDeeMediaType = FOUR_CHAR_CODE('qd3d')
SkinMediaType = FOUR_CHAR_CODE('skin')
HandleDataHandlerSubType = FOUR_CHAR_CODE('hndl')
PointerDataHandlerSubType = FOUR_CHAR_CODE('ptr ')
NullDataHandlerSubType = FOUR_CHAR_CODE('null')
ResourceDataHandlerSubType = FOUR_CHAR_CODE('rsrc')
URLDataHandlerSubType = FOUR_CHAR_CODE('url ')
WiredActionHandlerType = FOUR_CHAR_CODE('wire')
VisualMediaCharacteristic = FOUR_CHAR_CODE('eyes')
AudioMediaCharacteristic = FOUR_CHAR_CODE('ears')
kCharacteristicCanSendVideo = FOUR_CHAR_CODE('vsnd')
kCharacteristicProvidesActions = FOUR_CHAR_CODE('actn')
kCharacteristicNonLinear = FOUR_CHAR_CODE('nonl')
kCharacteristicCanStep = FOUR_CHAR_CODE('step')
kCharacteristicHasNoDuration = FOUR_CHAR_CODE('noti')
kCharacteristicHasSkinData = FOUR_CHAR_CODE('skin')
kCharacteristicProvidesKeyFocus = FOUR_CHAR_CODE('keyf')
kUserDataMovieControllerType = FOUR_CHAR_CODE('ctyp')
kUserDataName = FOUR_CHAR_CODE('name')
kUserDataTextAlbum = FOUR_CHAR_CODE('\xa9alb')
kUserDataTextArtist = FOUR_CHAR_CODE('\xa9ART')
kUserDataTextAuthor = FOUR_CHAR_CODE('\xa9aut')
kUserDataTextChapter = FOUR_CHAR_CODE('\xa9chp')
kUserDataTextComment = FOUR_CHAR_CODE('\xa9cmt')
kUserDataTextComposer = FOUR_CHAR_CODE('\xa9com')
kUserDataTextCopyright = FOUR_CHAR_CODE('\xa9cpy')
kUserDataTextCreationDate = FOUR_CHAR_CODE('\xa9day')
kUserDataTextDescription = FOUR_CHAR_CODE('\xa9des')
kUserDataTextDirector = FOUR_CHAR_CODE('\xa9dir')
kUserDataTextDisclaimer = FOUR_CHAR_CODE('\xa9dis')
kUserDataTextEncodedBy = FOUR_CHAR_CODE('\xa9enc')
kUserDataTextFullName = FOUR_CHAR_CODE('\xa9nam')
kUserDataTextGenre = FOUR_CHAR_CODE('\xa9gen')
kUserDataTextHostComputer = FOUR_CHAR_CODE('\xa9hst')
kUserDataTextInformation = FOUR_CHAR_CODE('\xa9inf')
kUserDataTextKeywords = FOUR_CHAR_CODE('\xa9key')
kUserDataTextMake = FOUR_CHAR_CODE('\xa9mak')
kUserDataTextModel = FOUR_CHAR_CODE('\xa9mod')
kUserDataTextOriginalArtist = FOUR_CHAR_CODE('\xa9ope')
kUserDataTextOriginalFormat = FOUR_CHAR_CODE('\xa9fmt')
kUserDataTextOriginalSource = FOUR_CHAR_CODE('\xa9src')
kUserDataTextPerformers = FOUR_CHAR_CODE('\xa9prf')
kUserDataTextProducer = FOUR_CHAR_CODE('\xa9prd')
kUserDataTextProduct = FOUR_CHAR_CODE('\xa9PRD')
kUserDataTextSoftware = FOUR_CHAR_CODE('\xa9swr')
kUserDataTextSpecialPlaybackRequirements = FOUR_CHAR_CODE('\xa9req')
kUserDataTextTrack = FOUR_CHAR_CODE('\xa9trk')
kUserDataTextWarning = FOUR_CHAR_CODE('\xa9wrn')
kUserDataTextWriter = FOUR_CHAR_CODE('\xa9wrt')
kUserDataTextURLLink = FOUR_CHAR_CODE('\xa9url')
kUserDataTextEditDate1 = FOUR_CHAR_CODE('\xa9ed1')
kUserDataUnicodeBit = 1L << 7
DoTheRightThing = 0
kQTNetworkStatusNoNetwork = -2
kQTNetworkStatusUncertain = -1
kQTNetworkStatusNotConnected = 0
kQTNetworkStatusConnected = 1
kMusicFlagDontPlay2Soft = 1L << 0
kMusicFlagDontSlaveToMovie = 1L << 1
dfDontDisplay = 1 << 0
dfDontAutoScale = 1 << 1
dfClipToTextBox = 1 << 2
dfUseMovieBGColor = 1 << 3
dfShrinkTextBoxToFit = 1 << 4
dfScrollIn = 1 << 5
dfScrollOut = 1 << 6
dfHorizScroll = 1 << 7
dfReverseScroll = 1 << 8
dfContinuousScroll = 1 << 9
dfFlowHoriz = 1 << 10
dfContinuousKaraoke = 1 << 11
dfDropShadow = 1 << 12
dfAntiAlias = 1 << 13
dfKeyedText = 1 << 14
dfInverseHilite = 1 << 15
dfTextColorHilite = 1 << 16
searchTextDontGoToFoundTime = 1L << 16
searchTextDontHiliteFoundText = 1L << 17
searchTextOneTrackOnly = 1L << 18
searchTextEnabledTracksOnly = 1L << 19
kTextTextHandle = 1
kTextTextPtr = 2
kTextTEStyle = 3
kTextSelection = 4
kTextBackColor = 5
kTextForeColor = 6
kTextFace = 7
kTextFont = 8
kTextSize = 9
kTextAlignment = 10
kTextHilite = 11
kTextDropShadow = 12
kTextDisplayFlags = 13
kTextScroll = 14
kTextRelativeScroll = 15
kTextHyperTextFace = 16
kTextHyperTextColor = 17
kTextKeyEntry = 18
kTextMouseDown = 19
kTextTextBox = 20
kTextEditState = 21
kTextLength = 22
k3DMediaRendererEntry = FOUR_CHAR_CODE('rend')
k3DMediaRendererName = FOUR_CHAR_CODE('name')
k3DMediaRendererCode = FOUR_CHAR_CODE('rcod')
movieProgressOpen = 0
movieProgressUpdatePercent = 1
movieProgressClose = 2
progressOpFlatten = 1
progressOpInsertTrackSegment = 2
progressOpInsertMovieSegment = 3
progressOpPaste = 4
progressOpAddMovieSelection = 5
progressOpCopy = 6
progressOpCut = 7
progressOpLoadMovieIntoRam = 8
progressOpLoadTrackIntoRam = 9
progressOpLoadMediaIntoRam = 10
progressOpImportMovie = 11
progressOpExportMovie = 12
mediaQualityDraft = 0x0000
mediaQualityNormal = 0x0040
mediaQualityBetter = 0x0080
mediaQualityBest = 0x00C0
kQTEventPayloadIsQTList = 1L << 0
kActionMovieSetVolume = 1024
kActionMovieSetRate = 1025
kActionMovieSetLoopingFlags = 1026
kActionMovieGoToTime = 1027
kActionMovieGoToTimeByName = 1028
kActionMovieGoToBeginning = 1029
kActionMovieGoToEnd = 1030
kActionMovieStepForward = 1031
kActionMovieStepBackward = 1032
kActionMovieSetSelection = 1033
kActionMovieSetSelectionByName = 1034
kActionMoviePlaySelection = 1035
kActionMovieSetLanguage = 1036
kActionMovieChanged = 1037
kActionMovieRestartAtTime = 1038
kActionMovieGotoNextChapter = 1039
kActionMovieGotoPreviousChapter = 1040
kActionMovieGotoFirstChapter = 1041
kActionMovieGotoLastChapter = 1042
kActionMovieGotoChapterByIndex = 1043
kActionMovieSetScale = 1044
kActionTrackSetVolume = 2048
kActionTrackSetBalance = 2049
kActionTrackSetEnabled = 2050
kActionTrackSetMatrix = 2051
kActionTrackSetLayer = 2052
kActionTrackSetClip = 2053
kActionTrackSetCursor = 2054
kActionTrackSetGraphicsMode = 2055
kActionTrackSetIdleFrequency = 2056
kActionTrackSetBassTreble = 2057
kActionSpriteSetMatrix = 3072
kActionSpriteSetImageIndex = 3073
kActionSpriteSetVisible = 3074
kActionSpriteSetLayer = 3075
kActionSpriteSetGraphicsMode = 3076
kActionSpritePassMouseToCodec = 3078
kActionSpriteClickOnCodec = 3079
kActionSpriteTranslate = 3080
kActionSpriteScale = 3081
kActionSpriteRotate = 3082
kActionSpriteStretch = 3083
kActionSpriteSetCanBeHitTested = 3094
kActionQTVRSetPanAngle = 4096
kActionQTVRSetTiltAngle = 4097
kActionQTVRSetFieldOfView = 4098
kActionQTVRShowDefaultView = 4099
kActionQTVRGoToNodeID = 4100
kActionQTVREnableHotSpot = 4101
kActionQTVRShowHotSpots = 4102
kActionQTVRTranslateObject = 4103
kActionQTVRSetViewState = 4109
kActionMusicPlayNote = 5120
kActionMusicSetController = 5121
kActionCase = 6144
kActionWhile = 6145
kActionGoToURL = 6146
kActionSendQTEventToSprite = 6147
kActionDebugStr = 6148
kActionPushCurrentTime = 6149
kActionPushCurrentTimeWithLabel = 6150
kActionPopAndGotoTopTime = 6151
kActionPopAndGotoLabeledTime = 6152
kActionStatusString = 6153
kActionSendQTEventToTrackObject = 6154
kActionAddChannelSubscription = 6155
kActionRemoveChannelSubscription = 6156
kActionOpenCustomActionHandler = 6157
kActionDoScript = 6158
kActionDoCompressedActions = 6159
kActionSendAppMessage = 6160
kActionLoadComponent = 6161
kActionSetFocus = 6162
kActionDontPassKeyEvent = 6163
kActionSetRandomSeed = 6164
kActionSpriteTrackSetVariable = 7168
kActionSpriteTrackNewSprite = 7169
kActionSpriteTrackDisposeSprite = 7170
kActionSpriteTrackSetVariableToString = 7171
kActionSpriteTrackConcatVariables = 7172
kActionSpriteTrackSetVariableToMovieURL = 7173
kActionSpriteTrackSetVariableToMovieBaseURL = 7174
kActionSpriteTrackSetAllSpritesHitTestingMode = 7181
kActionSpriteTrackNewImage = 7182
kActionSpriteTrackDisposeImage = 7183
kActionApplicationNumberAndString = 8192
kActionQD3DNamedObjectTranslateTo = 9216
kActionQD3DNamedObjectScaleTo = 9217
kActionQD3DNamedObjectRotateTo = 9218
kActionFlashTrackSetPan = 10240
kActionFlashTrackSetZoom = 10241
kActionFlashTrackSetZoomRect = 10242
kActionFlashTrackGotoFrameNumber = 10243
kActionFlashTrackGotoFrameLabel = 10244
kActionFlashTrackSetFlashVariable = 10245
kActionFlashTrackDoButtonActions = 10246
kActionMovieTrackAddChildMovie = 11264
kActionMovieTrackLoadChildMovie = 11265
kActionMovieTrackLoadChildMovieWithQTListParams = 11266
kActionTextTrackPasteText = 12288
kActionTextTrackSetTextBox = 12291
kActionTextTrackSetTextStyle = 12292
kActionTextTrackSetSelection = 12293
kActionTextTrackSetBackgroundColor = 12294
kActionTextTrackSetForegroundColor = 12295
kActionTextTrackSetFace = 12296
kActionTextTrackSetFont = 12297
kActionTextTrackSetSize = 12298
kActionTextTrackSetAlignment = 12299
kActionTextTrackSetHilite = 12300
kActionTextTrackSetDropShadow = 12301
kActionTextTrackSetDisplayFlags = 12302
kActionTextTrackSetScroll = 12303
kActionTextTrackRelativeScroll = 12304
kActionTextTrackFindText = 12305
kActionTextTrackSetHyperTextFace = 12306
kActionTextTrackSetHyperTextColor = 12307
kActionTextTrackKeyEntry = 12308
kActionTextTrackMouseDown = 12309
kActionTextTrackSetEditable = 12310
kActionListAddElement = 13312
kActionListRemoveElements = 13313
kActionListSetElementValue = 13314
kActionListPasteFromXML = 13315
kActionListSetMatchingFromXML = 13316
kActionListSetFromURL = 13317
kActionListExchangeLists = 13318
kActionListServerQuery = 13319
kOperandExpression = 1
kOperandConstant = 2
kOperandSubscribedToChannel = 3
kOperandUniqueCustomActionHandlerID = 4
kOperandCustomActionHandlerIDIsOpen = 5
kOperandConnectionSpeed = 6
kOperandGMTDay = 7
kOperandGMTMonth = 8
kOperandGMTYear = 9
kOperandGMTHours = 10
kOperandGMTMinutes = 11
kOperandGMTSeconds = 12
kOperandLocalDay = 13
kOperandLocalMonth = 14
kOperandLocalYear = 15
kOperandLocalHours = 16
kOperandLocalMinutes = 17
kOperandLocalSeconds = 18
kOperandRegisteredForQuickTimePro = 19
kOperandPlatformRunningOn = 20
kOperandQuickTimeVersion = 21
kOperandComponentVersion = 22
kOperandOriginalHandlerRefcon = 23
kOperandTicks = 24
kOperandMaxLoadedTimeInMovie = 25
kOperandEventParameter = 26
kOperandFreeMemory = 27
kOperandNetworkStatus = 28
kOperandQuickTimeVersionRegistered = 29
kOperandSystemVersion = 30
kOperandMovieVolume = 1024
kOperandMovieRate = 1025
kOperandMovieIsLooping = 1026
kOperandMovieLoopIsPalindrome = 1027
kOperandMovieTime = 1028
kOperandMovieDuration = 1029
kOperandMovieTimeScale = 1030
kOperandMovieWidth = 1031
kOperandMovieHeight = 1032
kOperandMovieLoadState = 1033
kOperandMovieTrackCount = 1034
kOperandMovieIsActive = 1035
kOperandMovieName = 1036
kOperandMovieID = 1037
kOperandMovieChapterCount = 1038
kOperandMovieChapterIndex = 1039
kOperandMovieChapterName = 1040
kOperandMovieChapterNameByIndex = 1041
kOperandMovieChapterIndexByName = 1042
kOperandMovieAnnotation = 1043
kOperandMovieConnectionFlags = 1044
kOperandMovieConnectionString = 1045
kOperandTrackVolume = 2048
kOperandTrackBalance = 2049
kOperandTrackEnabled = 2050
kOperandTrackLayer = 2051
kOperandTrackWidth = 2052
kOperandTrackHeight = 2053
kOperandTrackDuration = 2054
kOperandTrackName = 2055
kOperandTrackID = 2056
kOperandTrackIdleFrequency = 2057
kOperandTrackBass = 2058
kOperandTrackTreble = 2059
kOperandSpriteBoundsLeft = 3072
kOperandSpriteBoundsTop = 3073
kOperandSpriteBoundsRight = 3074
kOperandSpriteBoundsBottom = 3075
kOperandSpriteImageIndex = 3076
kOperandSpriteVisible = 3077
kOperandSpriteLayer = 3078
kOperandSpriteTrackVariable = 3079
kOperandSpriteTrackNumSprites = 3080
kOperandSpriteTrackNumImages = 3081
kOperandSpriteID = 3082
kOperandSpriteIndex = 3083
kOperandSpriteFirstCornerX = 3084
kOperandSpriteFirstCornerY = 3085
kOperandSpriteSecondCornerX = 3086
kOperandSpriteSecondCornerY = 3087
kOperandSpriteThirdCornerX = 3088
kOperandSpriteThirdCornerY = 3089
kOperandSpriteFourthCornerX = 3090
kOperandSpriteFourthCornerY = 3091
kOperandSpriteImageRegistrationPointX = 3092
kOperandSpriteImageRegistrationPointY = 3093
kOperandSpriteTrackSpriteIDAtPoint = 3094
kOperandSpriteName = 3095
kOperandSpriteCanBeHitTested = 3105
kOperandSpriteTrackAllSpritesHitTestingMode = 3106
kOperandSpriteTrackImageIDByIndex = 3107
kOperandSpriteTrackImageIndexByID = 3108
kOperandQTVRPanAngle = 4096
kOperandQTVRTiltAngle = 4097
kOperandQTVRFieldOfView = 4098
kOperandQTVRNodeID = 4099
kOperandQTVRHotSpotsVisible = 4100
kOperandQTVRViewCenterH = 4101
kOperandQTVRViewCenterV = 4102
kOperandQTVRViewStateCount = 4103
kOperandQTVRViewState = 4104
kOperandMouseLocalHLoc = 5120
kOperandMouseLocalVLoc = 5121
kOperandKeyIsDown = 5122
kOperandRandom = 5123
kOperandCanHaveFocus = 5124
kOperandHasFocus = 5125
kOperandTextTrackEditable = 6144
kOperandTextTrackCopyText = 6145
kOperandTextTrackStartSelection = 6146
kOperandTextTrackEndSelection = 6147
kOperandTextTrackTextBoxLeft = 6148
kOperandTextTrackTextBoxTop = 6149
kOperandTextTrackTextBoxRight = 6150
kOperandTextTrackTextBoxBottom = 6151
kOperandTextTrackTextLength = 6152
kOperandListCountElements = 7168
kOperandListGetElementPathByIndex = 7169
kOperandListGetElementValue = 7170
kOperandListCopyToXML = 7171
kOperandSin = 8192
kOperandCos = 8193
kOperandTan = 8194
kOperandATan = 8195
kOperandATan2 = 8196
kOperandDegreesToRadians = 8197
kOperandRadiansToDegrees = 8198
kOperandSquareRoot = 8199
kOperandExponent = 8200
kOperandLog = 8201
kOperandFlashTrackVariable = 9216
kOperandStringLength = 10240
kOperandStringCompare = 10241
kOperandStringSubString = 10242
kOperandStringConcat = 10243
kFirstMovieAction = kActionMovieSetVolume
kLastMovieAction = kActionMovieSetScale
kFirstTrackAction = kActionTrackSetVolume
kLastTrackAction = kActionTrackSetBassTreble
kFirstSpriteAction = kActionSpriteSetMatrix
kLastSpriteAction = kActionSpriteSetCanBeHitTested
kFirstQTVRAction = kActionQTVRSetPanAngle
kLastQTVRAction = kActionQTVRSetViewState
kFirstMusicAction = kActionMusicPlayNote
kLastMusicAction = kActionMusicSetController
kFirstSystemAction = kActionCase
kLastSystemAction = kActionSetRandomSeed
kFirstSpriteTrackAction = kActionSpriteTrackSetVariable
kLastSpriteTrackAction = kActionSpriteTrackDisposeImage
kFirstApplicationAction = kActionApplicationNumberAndString
kLastApplicationAction = kActionApplicationNumberAndString
kFirstQD3DNamedObjectAction = kActionQD3DNamedObjectTranslateTo
kLastQD3DNamedObjectAction = kActionQD3DNamedObjectRotateTo
kFirstFlashTrackAction = kActionFlashTrackSetPan
kLastFlashTrackAction = kActionFlashTrackDoButtonActions
kFirstMovieTrackAction = kActionMovieTrackAddChildMovie
kLastMovieTrackAction = kActionMovieTrackLoadChildMovieWithQTListParams
kFirstTextTrackAction = kActionTextTrackPasteText
kLastTextTrackAction = kActionTextTrackSetEditable
kFirstMultiTargetAction = kActionListAddElement
kLastMultiTargetAction = kActionListServerQuery
kFirstAction = kFirstMovieAction
kLastAction = kLastMultiTargetAction
kTargetMovie = FOUR_CHAR_CODE('moov')
kTargetMovieName = FOUR_CHAR_CODE('mona')
kTargetMovieID = FOUR_CHAR_CODE('moid')
kTargetRootMovie = FOUR_CHAR_CODE('moro')
kTargetParentMovie = FOUR_CHAR_CODE('mopa')
kTargetChildMovieTrackName = FOUR_CHAR_CODE('motn')
kTargetChildMovieTrackID = FOUR_CHAR_CODE('moti')
kTargetChildMovieTrackIndex = FOUR_CHAR_CODE('motx')
kTargetChildMovieMovieName = FOUR_CHAR_CODE('momn')
kTargetChildMovieMovieID = FOUR_CHAR_CODE('momi')
kTargetTrackName = FOUR_CHAR_CODE('trna')
kTargetTrackID = FOUR_CHAR_CODE('trid')
kTargetTrackType = FOUR_CHAR_CODE('trty')
kTargetTrackIndex = FOUR_CHAR_CODE('trin')
kTargetSpriteName = FOUR_CHAR_CODE('spna')
kTargetSpriteID = FOUR_CHAR_CODE('spid')
kTargetSpriteIndex = FOUR_CHAR_CODE('spin')
kTargetQD3DNamedObjectName = FOUR_CHAR_CODE('nana')
kTargetCurrentQTEventParams = FOUR_CHAR_CODE('evpa')
kQTEventType = FOUR_CHAR_CODE('evnt')
kAction = FOUR_CHAR_CODE('actn')
kWhichAction = FOUR_CHAR_CODE('whic')
kActionParameter = FOUR_CHAR_CODE('parm')
kActionTarget = FOUR_CHAR_CODE('targ')
kActionFlags = FOUR_CHAR_CODE('flag')
kActionParameterMinValue = FOUR_CHAR_CODE('minv')
kActionParameterMaxValue = FOUR_CHAR_CODE('maxv')
kActionListAtomType = FOUR_CHAR_CODE('list')
kExpressionContainerAtomType = FOUR_CHAR_CODE('expr')
kConditionalAtomType = FOUR_CHAR_CODE('test')
kOperatorAtomType = FOUR_CHAR_CODE('oper')
kOperandAtomType = FOUR_CHAR_CODE('oprn')
kCommentAtomType = FOUR_CHAR_CODE('why ')
kCustomActionHandler = FOUR_CHAR_CODE('cust')
kCustomHandlerID = FOUR_CHAR_CODE('id ')
kCustomHandlerDesc = FOUR_CHAR_CODE('desc')
kQTEventRecordAtomType = FOUR_CHAR_CODE('erec')
kQTEventMouseClick = FOUR_CHAR_CODE('clik')
kQTEventMouseClickEnd = FOUR_CHAR_CODE('cend')
kQTEventMouseClickEndTriggerButton = FOUR_CHAR_CODE('trig')
kQTEventMouseEnter = FOUR_CHAR_CODE('entr')
kQTEventMouseExit = FOUR_CHAR_CODE('exit')
kQTEventMouseMoved = FOUR_CHAR_CODE('move')
kQTEventFrameLoaded = FOUR_CHAR_CODE('fram')
kQTEventIdle = FOUR_CHAR_CODE('idle')
kQTEventKey = FOUR_CHAR_CODE('key ')
kQTEventMovieLoaded = FOUR_CHAR_CODE('load')
kQTEventRequestToModifyMovie = FOUR_CHAR_CODE('reqm')
kQTEventListReceived = FOUR_CHAR_CODE('list')
kQTEventKeyUp = FOUR_CHAR_CODE('keyU')
kActionFlagActionIsDelta = 1L << 1
kActionFlagParameterWrapsAround = 1L << 2
kActionFlagActionIsToggle = 1L << 3
kStatusStringIsURLLink = 1L << 1
kStatusStringIsStreamingStatus = 1L << 2
kStatusHasCodeNumber = 1L << 3
kStatusIsError = 1L << 4
kScriptIsUnknownType = 1L << 0
kScriptIsJavaScript = 1L << 1
kScriptIsLingoEvent = 1L << 2
kScriptIsVBEvent = 1L << 3
kScriptIsProjectorCommand = 1L << 4
kScriptIsAppleScript = 1L << 5
kQTRegistrationDialogTimeOutFlag = 1 << 0
kQTRegistrationDialogShowDialog = 1 << 1
kQTRegistrationDialogForceDialog = 1 << 2
kOperatorAdd = FOUR_CHAR_CODE('add ')
kOperatorSubtract = FOUR_CHAR_CODE('sub ')
kOperatorMultiply = FOUR_CHAR_CODE('mult')
kOperatorDivide = FOUR_CHAR_CODE('div ')
kOperatorOr = FOUR_CHAR_CODE('or ')
kOperatorAnd = FOUR_CHAR_CODE('and ')
kOperatorNot = FOUR_CHAR_CODE('not ')
kOperatorLessThan = FOUR_CHAR_CODE('< ')
kOperatorLessThanEqualTo = FOUR_CHAR_CODE('<= ')
kOperatorEqualTo = FOUR_CHAR_CODE('= ')
kOperatorNotEqualTo = FOUR_CHAR_CODE('!= ')
kOperatorGreaterThan = FOUR_CHAR_CODE('> ')
kOperatorGreaterThanEqualTo = FOUR_CHAR_CODE('>= ')
kOperatorModulo = FOUR_CHAR_CODE('mod ')
kOperatorIntegerDivide = FOUR_CHAR_CODE('idiv')
kOperatorAbsoluteValue = FOUR_CHAR_CODE('abs ')
kOperatorNegate = FOUR_CHAR_CODE('neg ')
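
# --- Illustrative sketch (hypothetical, not from the QuickTime headers) ---
# The kOperator* atoms name the operators allowed inside a wired-action
# expression ('expr') atom tree; QuickTime itself evaluates them when the
# expression executes.  Purely for illustration, a rough mapping of a few
# operator codes onto Python callables:
import operator as _operator
_ILLUSTRATIVE_OPERATOR_MAP = {
    kOperatorAdd: _operator.add,
    kOperatorSubtract: _operator.sub,
    kOperatorMultiply: _operator.mul,
    kOperatorLessThan: _operator.lt,
    kOperatorEqualTo: _operator.eq,
}
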
kPlatformMacintosh = 1
kPlatformWindows = 2
kSystemIsWindows9x = 0x00010000
kSystemIsWindowsNT = 0x00020000
kMediaPropertyNonLinearAtomType = FOUR_CHAR_CODE('nonl')
kMediaPropertyHasActions = 105
loopTimeBase = 1
palindromeLoopTimeBase = 2
maintainTimeBaseZero = 4
triggerTimeFwd = 0x0001
triggerTimeBwd = 0x0002
triggerTimeEither = 0x0003
triggerRateLT = 0x0004
triggerRateGT = 0x0008
triggerRateEqual = 0x0010
triggerRateLTE = triggerRateLT | triggerRateEqual
triggerRateGTE = triggerRateGT | triggerRateEqual
triggerRateNotEqual = triggerRateGT | triggerRateEqual | triggerRateLT
triggerRateChange = 0
triggerAtStart = 0x0001
triggerAtStop = 0x0002
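
# --- Illustrative sketch ---
# The trigger* values are bit masks; composites such as triggerRateLTE above
# are plain OR-combinations of the single-condition bits.  A hypothetical
# test helper:
def _triggerFiresOnRateEqual(triggerMask):
    # True when the triggerRateEqual bit is set in the mask (holds for
    # triggerRateEqual, triggerRateLTE, triggerRateGTE and, bitwise,
    # triggerRateNotEqual as defined above).
    return (triggerMask & triggerRateEqual) != 0
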
timeBaseBeforeStartTime = 1
timeBaseAfterStopTime = 2
callBackAtTime = 1
callBackAtRate = 2
callBackAtTimeJump = 3
callBackAtExtremes = 4
callBackAtTimeBaseDisposed = 5
callBackAtInterrupt = 0x8000
callBackAtDeferredTask = 0x4000
qtcbNeedsRateChanges = 1
qtcbNeedsTimeChanges = 2
qtcbNeedsStartStopChanges = 4
keepInRam = 1 << 0
unkeepInRam = 1 << 1
flushFromRam = 1 << 2
loadForwardTrackEdits = 1 << 3
loadBackwardTrackEdits = 1 << 4
newMovieActive = 1 << 0
newMovieDontResolveDataRefs = 1 << 1
newMovieDontAskUnresolvedDataRefs = 1 << 2
newMovieDontAutoAlternates = 1 << 3
newMovieDontUpdateForeBackPointers = 1 << 4
newMovieDontAutoUpdateClock = 1 << 5
newMovieAsyncOK = 1 << 8
newMovieIdleImportOK = 1 << 10
newMovieDontInteractWithUser = 1 << 11
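
# --- Illustrative sketch ---
# The newMovie* bits are OR-ed into the single flags argument of the
# NewMovie* creation calls (e.g. NewMovieFromFile).  A typical, hypothetical
# combination for silent background loading:
_EXAMPLE_NEW_MOVIE_FLAGS = (newMovieActive |
                            newMovieDontAskUnresolvedDataRefs |
                            newMovieDontInteractWithUser)
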
trackUsageInMovie = 1 << 1
trackUsageInPreview = 1 << 2
trackUsageInPoster = 1 << 3
mediaSampleNotSync = 1 << 0
mediaSampleShadowSync = 1 << 1
pasteInParallel = 1 << 0
showUserSettingsDialog = 1 << 1
movieToFileOnlyExport = 1 << 2
movieFileSpecValid = 1 << 3
nextTimeMediaSample = 1 << 0
nextTimeMediaEdit = 1 << 1
nextTimeTrackEdit = 1 << 2
nextTimeSyncSample = 1 << 3
nextTimeStep = 1 << 4
nextTimeEdgeOK = 1 << 14
nextTimeIgnoreActiveSegment = 1 << 15
createMovieFileDeleteCurFile = 1L << 31
createMovieFileDontCreateMovie = 1L << 30
createMovieFileDontOpenFile = 1L << 29
createMovieFileDontCreateResFile = 1L << 28
flattenAddMovieToDataFork = 1L << 0
flattenActiveTracksOnly = 1L << 2
flattenDontInterleaveFlatten = 1L << 3
flattenFSSpecPtrIsDataRefRecordPtr = 1L << 4
flattenCompressMovieResource = 1L << 5
flattenForceMovieResourceBeforeMovieData = 1L << 6
movieInDataForkResID = -1
mcTopLeftMovie = 1 << 0
mcScaleMovieToFit = 1 << 1
mcWithBadge = 1 << 2
mcNotVisible = 1 << 3
mcWithFrame = 1 << 4
movieScrapDontZeroScrap = 1 << 0
movieScrapOnlyPutMovie = 1 << 1
dataRefSelfReference = 1 << 0
dataRefWasNotResolved = 1 << 1
kMovieAnchorDataRefIsDefault = 1 << 0
hintsScrubMode = 1 << 0
hintsLoop = 1 << 1
hintsDontPurge = 1 << 2
hintsUseScreenBuffer = 1 << 5
hintsAllowInterlace = 1 << 6
hintsUseSoundInterp = 1 << 7
hintsHighQuality = 1 << 8
hintsPalindrome = 1 << 9
hintsInactive = 1 << 11
hintsOffscreen = 1 << 12
hintsDontDraw = 1 << 13
hintsAllowBlacklining = 1 << 14
hintsDontUseVideoOverlaySurface = 1 << 16
hintsIgnoreBandwidthRestrictions = 1 << 17
hintsPlayingEveryFrame = 1 << 18
hintsAllowDynamicResize = 1 << 19
hintsSingleField = 1 << 20
hintsNoRenderingTimeOut = 1 << 21
hintsFlushVideoInsteadOfDirtying = 1 << 22
hintsEnableSubPixelPositioning = 1L << 23
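
# --- Illustrative sketch ---
# The hints* constants build play-hint flag words such as those passed to
# SetMoviePlayHints.  A hypothetical mask for looping, high-quality playback:
_EXAMPLE_PLAY_HINTS = hintsLoop | hintsHighQuality
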
mediaHandlerFlagBaseClient = 1
movieTrackMediaType = 1 << 0
movieTrackCharacteristic = 1 << 1
movieTrackEnabledOnly = 1 << 2
kMovieControlOptionHideController = (1L << 0)
kMovieControlOptionLocateTopLeft = (1L << 1)
kMovieControlOptionEnableEditing = (1L << 2)
kMovieControlOptionHandleEditingHI = (1L << 3)
kMovieControlOptionSetKeysEnabled = (1L << 4)
kMovieControlOptionManuallyIdled = (1L << 5)
kMovieControlDataMovieController = FOUR_CHAR_CODE('mc ')
kMovieControlDataMovie = FOUR_CHAR_CODE('moov')
kMovieControlDataManualIdling = FOUR_CHAR_CODE('manu')
movieDrawingCallWhenChanged = 0
movieDrawingCallAlways = 1
kQTCloneShareSamples = 1 << 0
kQTCloneDontCopyEdits = 1 << 1
kGetMovieImporterValidateToFind = 1L << 0
kGetMovieImporterAllowNewFile = 1L << 1
kGetMovieImporterDontConsiderGraphicsImporters = 1L << 2
kGetMovieImporterDontConsiderFileOnlyImporters = 1L << 6
kGetMovieImporterAutoImportOnly = 1L << 10
kQTGetMIMETypeInfoIsQuickTimeMovieType = FOUR_CHAR_CODE('moov')
kQTGetMIMETypeInfoIsUnhelpfulType = FOUR_CHAR_CODE('dumb')
kQTCopyUserDataReplace = FOUR_CHAR_CODE('rplc')
kQTCopyUserDataMerge = FOUR_CHAR_CODE('merg')
kMovieLoadStateError = -1L
kMovieLoadStateLoading = 1000
kMovieLoadStateLoaded = 2000
kMovieLoadStatePlayable = 10000
kMovieLoadStatePlaythroughOK = 20000
kMovieLoadStateComplete = 100000L
kQTDontUseDataToFindImporter = 1L << 0
kQTDontLookForMovieImporterIfGraphicsImporterFound = 1L << 1
kQTAllowOpeningStillImagesAsMovies = 1L << 2
kQTAllowImportersThatWouldCreateNewFile = 1L << 3
kQTAllowAggressiveImporters = 1L << 4
preloadAlways = 1L << 0
preloadOnlyIfEnabled = 1L << 1
fullScreenHideCursor = 1L << 0
fullScreenAllowEvents = 1L << 1
fullScreenDontChangeMenuBar = 1L << 2
fullScreenPreflightSize = 1L << 3
movieExecuteWiredActionDontExecute = 1L << 0
kRefConNavigationNext = 0
kRefConNavigationPrevious = 1
kRefConPropertyCanHaveFocus = 1
kRefConPropertyHasFocus = 2
kTrackFocusCanEditFlag = FOUR_CHAR_CODE('kedt')
kTrackDefaultFocusFlags = FOUR_CHAR_CODE('kfoc')
kTrackFocusDefaultRefcon = FOUR_CHAR_CODE('kref')
kTrackFocusOn = 1
kTrackHandlesTabs = 2
kFlashTrackPropertyAcceptAllClicks = FOUR_CHAR_CODE('clik')
kBackgroundSpriteLayerNum = 32767
kSpritePropertyMatrix = 1
kSpritePropertyImageDescription = 2
kSpritePropertyImageDataPtr = 3
kSpritePropertyVisible = 4
kSpritePropertyLayer = 5
kSpritePropertyGraphicsMode = 6
kSpritePropertyImageDataSize = 7
kSpritePropertyActionHandlingSpriteID = 8
kSpritePropertyCanBeHitTested = 9
kSpritePropertyImageIndex = 100
kSpriteTrackPropertyBackgroundColor = 101
kSpriteTrackPropertyOffscreenBitDepth = 102
kSpriteTrackPropertySampleFormat = 103
kSpriteTrackPropertyScaleSpritesToScaleWorld = 104
kSpriteTrackPropertyHasActions = 105
kSpriteTrackPropertyVisible = 106
kSpriteTrackPropertyQTIdleEventsFrequency = 107
kSpriteTrackPropertyAllSpritesHitTestingMode = 108
kSpriteTrackPropertyPreferredDepthInterpretationMode = 109
kSpriteImagePropertyRegistrationPoint = 1000
kSpriteImagePropertyGroupID = 1001
kSpriteTrackPreferredDepthCompatibilityMode = 0
kSpriteTrackPreferredDepthModernMode = 1
kSpriteHitTestUseSpritesOwnPropertiesMode = 0
kSpriteHitTestTreatAllSpritesAsHitTestableMode = 1
kSpriteHitTestTreatAllSpritesAsNotHitTestableMode = 2
kNoQTIdleEvents = -1
kGetSpriteWorldInvalidRegionAndLeaveIntact = -1L
kGetSpriteWorldInvalidRegionAndThenSetEmpty = -2L
kOnlyDrawToSpriteWorld = 1L << 0
kSpriteWorldPreflight = 1L << 1
kSpriteWorldDidDraw = 1L << 0
kSpriteWorldNeedsToDraw = 1L << 1
kKeyFrameAndSingleOverride = 1L << 1
kKeyFrameAndAllOverrides = 1L << 2
kScaleSpritesToScaleWorld = 1L << 1
kSpriteWorldHighQuality = 1L << 2
kSpriteWorldDontAutoInvalidate = 1L << 3
kSpriteWorldInvisible = 1L << 4
kSpriteWorldDirtyInsteadOfFlush = 1L << 5
kParentAtomIsContainer = 0
kTweenRecordNoFlags = 0
kTweenRecordIsAtInterruptTime = 0x00000001
kEffectNameAtom = FOUR_CHAR_CODE('name')
kEffectTypeAtom = FOUR_CHAR_CODE('type')
kEffectManufacturerAtom = FOUR_CHAR_CODE('manu')
pdActionConfirmDialog = 1
pdActionSetAppleMenu = 2
pdActionSetEditMenu = 3
pdActionGetDialogValues = 4
pdActionSetPreviewUserItem = 5
pdActionSetPreviewPicture = 6
pdActionSetColorPickerEventProc = 7
pdActionSetDialogTitle = 8
pdActionGetSubPanelMenu = 9
pdActionActivateSubPanel = 10
pdActionConductStopAlert = 11
pdActionModelessCallback = 12
pdActionFetchPreview = 13
pdActionSetDialogSettings = 14
pdActionGetDialogSettings = 15
pdActionGetNextSample = 16
pdActionGetPreviousSample = 17
pdActionCompactSample = 18
pdActionSetEditCallout = 19
pdActionSetSampleTime = 20
pdActionDoEditCommand = 21
pdActionGetSubPanelMenuValue = 22
pdActionCustomNewControl = 23
pdActionCustomDisposeControl = 24
pdActionCustomPositionControl = 25
pdActionCustomShowHideControl = 26
pdActionCustomHandleEvent = 27
pdActionCustomSetFocus = 28
pdActionCustomSetEditMenu = 29
pdActionCustomSetPreviewPicture = 30
pdActionCustomSetEditCallout = 31
pdActionCustomGetEnableValue = 32
pdActionCustomSetSampleTime = 33
pdActionCustomGetValue = 34
pdActionCustomDoEditCommand = 35
pdSampleTimeDisplayOptionsNone = 0x00000000
pdActionFocusOff = 0
pdActionFocusFirst = 1
pdActionFocusLast = 2
pdActionFocusForward = 3
pdActionFocusBackward = 4
elOptionsIncludeNoneInList = 0x00000001
pdOptionsCollectOneValue = 0x00000001
pdOptionsAllowOptionalInterpolations = 0x00000002
pdOptionsModalDialogBox = 0x00000004
pdOptionsEditCurrentEffectOnly = 0x00000008
pdOptionsHidePreview = 0x00000010
effectIsRealtime = 0
kAccessKeyAtomType = FOUR_CHAR_CODE('acky')
kAccessKeySystemFlag = 1L << 0
ConnectionSpeedPrefsType = FOUR_CHAR_CODE('cspd')
BandwidthManagementPrefsType = FOUR_CHAR_CODE('bwmg')
kQTIdlePriority = 10
kQTNonRealTimePriority = 20
kQTRealTimeSharedPriority = 25
kQTRealTimePriority = 30
kQTBandwidthNotifyNeedToStop = 1L << 0
kQTBandwidthNotifyGoodToGo = 1L << 1
kQTBandwidthChangeRequest = 1L << 2
kQTBandwidthQueueRequest = 1L << 3
kQTBandwidthScheduledRequest = 1L << 4
kQTBandwidthVoluntaryRelease = 1L << 5
kITextRemoveEverythingBut = 0 << 1
kITextRemoveLeaveSuggestedAlternate = 1 << 1
kITextAtomType = FOUR_CHAR_CODE('itxt')
kITextStringAtomType = FOUR_CHAR_CODE('text')
kQTParseTextHREFText = FOUR_CHAR_CODE('text')
kQTParseTextHREFBaseURL = FOUR_CHAR_CODE('burl')
kQTParseTextHREFClickPoint = FOUR_CHAR_CODE('clik')
kQTParseTextHREFUseAltDelim = FOUR_CHAR_CODE('altd')
kQTParseTextHREFDelimiter = FOUR_CHAR_CODE('delm')
kQTParseTextHREFRecomposeHREF = FOUR_CHAR_CODE('rhrf')
kQTParseTextHREFURL = FOUR_CHAR_CODE('url ')
kQTParseTextHREFTarget = FOUR_CHAR_CODE('targ')
kQTParseTextHREFChapter = FOUR_CHAR_CODE('chap')
kQTParseTextHREFIsAutoHREF = FOUR_CHAR_CODE('auto')
kQTParseTextHREFIsServerMap = FOUR_CHAR_CODE('smap')
kQTParseTextHREFHREF = FOUR_CHAR_CODE('href')
kQTParseTextHREFEMBEDArgs = FOUR_CHAR_CODE('mbed')
kTrackReferenceChapterList = FOUR_CHAR_CODE('chap')
kTrackReferenceTimeCode = FOUR_CHAR_CODE('tmcd')
kTrackReferenceModifier = FOUR_CHAR_CODE('ssrc')
kTrackModifierInput = 0x696E
kTrackModifierType = 0x7479
kTrackModifierReference = FOUR_CHAR_CODE('ssrc')
kTrackModifierObjectID = FOUR_CHAR_CODE('obid')
kTrackModifierInputName = FOUR_CHAR_CODE('name')
kInputMapSubInputID = FOUR_CHAR_CODE('subi')
kTrackModifierTypeMatrix = 1
kTrackModifierTypeClip = 2
kTrackModifierTypeGraphicsMode = 5
kTrackModifierTypeVolume = 3
kTrackModifierTypeBalance = 4
kTrackModifierTypeImage = FOUR_CHAR_CODE('vide')
kTrackModifierObjectMatrix = 6
kTrackModifierObjectGraphicsMode = 7
kTrackModifierType3d4x4Matrix = 8
kTrackModifierCameraData = 9
kTrackModifierSoundLocalizationData = 10
kTrackModifierObjectImageIndex = 11
kTrackModifierObjectLayer = 12
kTrackModifierObjectVisible = 13
kTrackModifierAngleAspectCamera = 14
kTrackModifierPanAngle = FOUR_CHAR_CODE('pan ')
kTrackModifierTiltAngle = FOUR_CHAR_CODE('tilt')
kTrackModifierVerticalFieldOfViewAngle = FOUR_CHAR_CODE('fov ')
kTrackModifierObjectQTEventSend = FOUR_CHAR_CODE('evnt')
kTrackModifierObjectCanBeHitTested = 15
kTweenTypeShort = 1
kTweenTypeLong = 2
kTweenTypeFixed = 3
kTweenTypePoint = 4
kTweenTypeQDRect = 5
kTweenTypeQDRegion = 6
kTweenTypeMatrix = 7
kTweenTypeRGBColor = 8
kTweenTypeGraphicsModeWithRGBColor = 9
kTweenTypeQTFloatSingle = 10
kTweenTypeQTFloatDouble = 11
kTweenTypeFixedPoint = 12
kTweenType3dScale = FOUR_CHAR_CODE('3sca')
kTweenType3dTranslate = FOUR_CHAR_CODE('3tra')
kTweenType3dRotate = FOUR_CHAR_CODE('3rot')
kTweenType3dRotateAboutPoint = FOUR_CHAR_CODE('3rap')
kTweenType3dRotateAboutAxis = FOUR_CHAR_CODE('3rax')
kTweenType3dRotateAboutVector = FOUR_CHAR_CODE('3rvc')
kTweenType3dQuaternion = FOUR_CHAR_CODE('3qua')
kTweenType3dMatrix = FOUR_CHAR_CODE('3mat')
kTweenType3dCameraData = FOUR_CHAR_CODE('3cam')
kTweenType3dAngleAspectCameraData = FOUR_CHAR_CODE('3caa')
kTweenType3dSoundLocalizationData = FOUR_CHAR_CODE('3slc')
kTweenTypePathToMatrixTranslation = FOUR_CHAR_CODE('gxmt')
kTweenTypePathToMatrixRotation = FOUR_CHAR_CODE('gxpr')
kTweenTypePathToMatrixTranslationAndRotation = FOUR_CHAR_CODE('gxmr')
kTweenTypePathToFixedPoint = FOUR_CHAR_CODE('gxfp')
kTweenTypePathXtoY = FOUR_CHAR_CODE('gxxy')
kTweenTypePathYtoX = FOUR_CHAR_CODE('gxyx')
kTweenTypeAtomList = FOUR_CHAR_CODE('atom')
kTweenTypePolygon = FOUR_CHAR_CODE('poly')
kTweenTypeMultiMatrix = FOUR_CHAR_CODE('mulm')
kTweenTypeSpin = FOUR_CHAR_CODE('spin')
kTweenType3dMatrixNonLinear = FOUR_CHAR_CODE('3nlr')
kTweenType3dVRObject = FOUR_CHAR_CODE('3vro')
kTweenEntry = FOUR_CHAR_CODE('twen')
kTweenData = FOUR_CHAR_CODE('data')
kTweenType = FOUR_CHAR_CODE('twnt')
kTweenStartOffset = FOUR_CHAR_CODE('twst')
kTweenDuration = FOUR_CHAR_CODE('twdu')
kTweenFlags = FOUR_CHAR_CODE('flag')
kTweenOutputMin = FOUR_CHAR_CODE('omin')
kTweenOutputMax = FOUR_CHAR_CODE('omax')
kTweenSequenceElement = FOUR_CHAR_CODE('seqe')
kTween3dInitialCondition = FOUR_CHAR_CODE('icnd')
kTweenInterpolationID = FOUR_CHAR_CODE('intr')
kTweenRegionData = FOUR_CHAR_CODE('qdrg')
kTweenPictureData = FOUR_CHAR_CODE('PICT')
kListElementType = FOUR_CHAR_CODE('type')
kListElementDataType = FOUR_CHAR_CODE('daty')
kNameAtom = FOUR_CHAR_CODE('name')
kInitialRotationAtom = FOUR_CHAR_CODE('inro')
kNonLinearTweenHeader = FOUR_CHAR_CODE('nlth')
kTweenReturnDelta = 1L << 0
kQTRestrictionClassSave = FOUR_CHAR_CODE('save')
kQTRestrictionSaveDontAddMovieResource = (1L << 0)
kQTRestrictionSaveDontFlatten = (1L << 1)
kQTRestrictionSaveDontExport = (1L << 2)
kQTRestrictionSaveDontExtract = (1L << 3)
kQTRestrictionClassEdit = FOUR_CHAR_CODE('edit')
kQTRestrictionEditDontCopy = (1L << 0)
kQTRestrictionEditDontCut = (1L << 1)
kQTRestrictionEditDontPaste = (1L << 2)
kQTRestrictionEditDontClear = (1L << 3)
kQTRestrictionEditDontModify = (1L << 4)
kQTRestrictionEditDontExtract = (1L << 5)
videoFlagDontLeanAhead = 1L << 0
txtProcDefaultDisplay = 0
txtProcDontDisplay = 1
txtProcDoDisplay = 2
findTextEdgeOK = 1 << 0
findTextCaseSensitive = 1 << 1
findTextReverseSearch = 1 << 2
findTextWrapAround = 1 << 3
findTextUseOffset = 1 << 4
dropShadowOffsetType = FOUR_CHAR_CODE('drpo')
dropShadowTranslucencyType = FOUR_CHAR_CODE('drpt')
spriteHitTestBounds = 1L << 0
spriteHitTestImage = 1L << 1
spriteHitTestInvisibleSprites = 1L << 2
spriteHitTestIsClick = 1L << 3
spriteHitTestLocInDisplayCoordinates = 1L << 4
spriteHitTestTreatAllSpritesAsHitTestable = 1L << 5
kSpriteAtomType = FOUR_CHAR_CODE('sprt')
kSpriteImagesContainerAtomType = FOUR_CHAR_CODE('imct')
kSpriteImageAtomType = FOUR_CHAR_CODE('imag')
kSpriteImageDataAtomType = FOUR_CHAR_CODE('imda')
kSpriteImageDataRefAtomType = FOUR_CHAR_CODE('imre')
kSpriteImageDataRefTypeAtomType = FOUR_CHAR_CODE('imrt')
kSpriteImageGroupIDAtomType = FOUR_CHAR_CODE('imgr')
kSpriteImageRegistrationAtomType = FOUR_CHAR_CODE('imrg')
kSpriteImageDefaultImageIndexAtomType = FOUR_CHAR_CODE('defi')
kSpriteSharedDataAtomType = FOUR_CHAR_CODE('dflt')
kSpriteNameAtomType = FOUR_CHAR_CODE('name')
kSpriteImageNameAtomType = FOUR_CHAR_CODE('name')
kSpriteUsesImageIDsAtomType = FOUR_CHAR_CODE('uses')
kSpriteBehaviorsAtomType = FOUR_CHAR_CODE('beha')
kSpriteImageBehaviorAtomType = FOUR_CHAR_CODE('imag')
kSpriteCursorBehaviorAtomType = FOUR_CHAR_CODE('crsr')
kSpriteStatusStringsBehaviorAtomType = FOUR_CHAR_CODE('sstr')
kSpriteVariablesContainerAtomType = FOUR_CHAR_CODE('vars')
kSpriteStringVariableAtomType = FOUR_CHAR_CODE('strv')
kSpriteFloatingPointVariableAtomType = FOUR_CHAR_CODE('flov')
kMovieMediaDataReference = FOUR_CHAR_CODE('mmdr')
kMovieMediaDefaultDataReferenceID = FOUR_CHAR_CODE('ddri')
kMovieMediaSlaveTime = FOUR_CHAR_CODE('slti')
kMovieMediaSlaveAudio = FOUR_CHAR_CODE('slau')
kMovieMediaSlaveGraphicsMode = FOUR_CHAR_CODE('slgr')
kMovieMediaAutoPlay = FOUR_CHAR_CODE('play')
kMovieMediaLoop = FOUR_CHAR_CODE('loop')
kMovieMediaUseMIMEType = FOUR_CHAR_CODE('mime')
kMovieMediaTitle = FOUR_CHAR_CODE('titl')
kMovieMediaAltText = FOUR_CHAR_CODE('altt')
kMovieMediaClipBegin = FOUR_CHAR_CODE('clpb')
kMovieMediaClipDuration = FOUR_CHAR_CODE('clpd')
kMovieMediaRegionAtom = FOUR_CHAR_CODE('regi')
kMovieMediaSlaveTrackDuration = FOUR_CHAR_CODE('sltr')
kMovieMediaEnableFrameStepping = FOUR_CHAR_CODE('enfs')
kMovieMediaBackgroundColor = FOUR_CHAR_CODE('bkcl')
kMovieMediaPrerollTime = FOUR_CHAR_CODE('prer')
kMovieMediaFitNone = 0
kMovieMediaFitScroll = FOUR_CHAR_CODE('scro')
kMovieMediaFitClipIfNecessary = FOUR_CHAR_CODE('hidd')
kMovieMediaFitFill = FOUR_CHAR_CODE('fill')
kMovieMediaFitMeet = FOUR_CHAR_CODE('meet')
kMovieMediaFitSlice = FOUR_CHAR_CODE('slic')
kMovieMediaSpatialAdjustment = FOUR_CHAR_CODE('fit ')
kMovieMediaRectangleAtom = FOUR_CHAR_CODE('rect')
kMovieMediaTop = FOUR_CHAR_CODE('top ')
kMovieMediaLeft = FOUR_CHAR_CODE('left')
kMovieMediaWidth = FOUR_CHAR_CODE('wd ')
kMovieMediaHeight = FOUR_CHAR_CODE('ht ')
kMoviePropertyDuration = FOUR_CHAR_CODE('dura')
kMoviePropertyTimeScale = FOUR_CHAR_CODE('tims')
kMoviePropertyTime = FOUR_CHAR_CODE('timv')
kMoviePropertyNaturalBounds = FOUR_CHAR_CODE('natb')
kMoviePropertyMatrix = FOUR_CHAR_CODE('mtrx')
kMoviePropertyTrackList = FOUR_CHAR_CODE('tlst')
kTrackPropertyMediaType = FOUR_CHAR_CODE('mtyp')
kTrackPropertyInstantiation = FOUR_CHAR_CODE('inst')
MovieControllerComponentType = FOUR_CHAR_CODE('play')
kMovieControllerQTVRFlag = 1 << 0
kMovieControllerDontDisplayToUser = 1 << 1
mcActionIdle = 1
mcActionDraw = 2
mcActionActivate = 3
mcActionDeactivate = 4
mcActionMouseDown = 5
mcActionKey = 6
mcActionPlay = 8
mcActionGoToTime = 12
mcActionSetVolume = 14
mcActionGetVolume = 15
mcActionStep = 18
mcActionSetLooping = 21
mcActionGetLooping = 22
mcActionSetLoopIsPalindrome = 23
mcActionGetLoopIsPalindrome = 24
mcActionSetGrowBoxBounds = 25
mcActionControllerSizeChanged = 26
mcActionSetSelectionBegin = 29
mcActionSetSelectionDuration = 30
mcActionSetKeysEnabled = 32
mcActionGetKeysEnabled = 33
mcActionSetPlaySelection = 34
mcActionGetPlaySelection = 35
mcActionSetUseBadge = 36
mcActionGetUseBadge = 37
mcActionSetFlags = 38
mcActionGetFlags = 39
mcActionSetPlayEveryFrame = 40
mcActionGetPlayEveryFrame = 41
mcActionGetPlayRate = 42
mcActionShowBalloon = 43
mcActionBadgeClick = 44
mcActionMovieClick = 45
mcActionSuspend = 46
mcActionResume = 47
mcActionSetControllerKeysEnabled = 48
mcActionGetTimeSliderRect = 49
mcActionMovieEdited = 50
mcActionGetDragEnabled = 51
mcActionSetDragEnabled = 52
mcActionGetSelectionBegin = 53
mcActionGetSelectionDuration = 54
mcActionPrerollAndPlay = 55
mcActionGetCursorSettingEnabled = 56
mcActionSetCursorSettingEnabled = 57
mcActionSetColorTable = 58
mcActionLinkToURL = 59
mcActionCustomButtonClick = 60
mcActionForceTimeTableUpdate = 61
mcActionSetControllerTimeLimits = 62
mcActionExecuteAllActionsForQTEvent = 63
mcActionExecuteOneActionForQTEvent = 64
mcActionAdjustCursor = 65
mcActionUseTrackForTimeTable = 66
mcActionClickAndHoldPoint = 67
mcActionShowMessageString = 68
mcActionShowStatusString = 69
mcActionGetExternalMovie = 70
mcActionGetChapterTime = 71
mcActionPerformActionList = 72
mcActionEvaluateExpression = 73
mcActionFetchParameterAs = 74
mcActionGetCursorByID = 75
mcActionGetNextURL = 76
mcActionMovieChanged = 77
mcActionDoScript = 78
mcActionRestartAtTime = 79
mcActionGetIndChapter = 80
mcActionLinkToURLExtended = 81
mcActionSetVolumeStep = 82
mcActionAutoPlay = 83
mcActionPauseToBuffer = 84
mcActionAppMessageReceived = 85
mcActionEvaluateExpressionWithType = 89
mcActionGetMovieName = 90
mcActionGetMovieID = 91
mcActionGetMovieActive = 92
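
# --- Illustrative sketch (hypothetical helper, not part of the module) ---
# The mcAction* selectors are delivered to movie-controller action filters
# (installed with MCSetActionFilterWithRefCon).  A filter typically switches
# on the action code and returns whether it handled the action; this sketch
# is simplified -- a real filter also receives the controller, a parameter
# pointer and a refCon:
def _exampleActionFilter(action):
    # Return True to swallow the action, False to let the controller act.
    if action == mcActionMovieClick:
        return True          # e.g. suppress click-to-pause
    if action in (mcActionIdle, mcActionDraw):
        return False         # let the controller do its normal work
    return False
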
mcFlagSuppressMovieFrame = 1 << 0
mcFlagSuppressStepButtons = 1 << 1
mcFlagSuppressSpeakerButton = 1 << 2
mcFlagsUseWindowPalette = 1 << 3
mcFlagsDontInvalidate = 1 << 4
mcFlagsUseCustomButton = 1 << 5
mcPositionDontInvalidate = 1 << 5
kMCIEEnabledButtonPicture = 1
kMCIEDisabledButtonPicture = 2
kMCIEDepressedButtonPicture = 3
kMCIEEnabledSizeBoxPicture = 4
kMCIEDisabledSizeBoxPicture = 5
kMCIEEnabledUnavailableButtonPicture = 6
kMCIEDisabledUnavailableButtonPicture = 7
kMCIESoundSlider = 128
kMCIESoundThumb = 129
kMCIEColorTable = 256
kMCIEIsFlatAppearance = 257
kMCIEDoButtonIconsDropOnDepress = 258
mcInfoUndoAvailable = 1 << 0
mcInfoCutAvailable = 1 << 1
mcInfoCopyAvailable = 1 << 2
mcInfoPasteAvailable = 1 << 3
mcInfoClearAvailable = 1 << 4
mcInfoHasSound = 1 << 5
mcInfoIsPlaying = 1 << 6
mcInfoIsLooping = 1 << 7
mcInfoIsInPalindrome = 1 << 8
mcInfoEditingEnabled = 1 << 9
mcInfoMovieIsInteractive = 1 << 10
mcMenuUndo = 1
mcMenuCut = 3
mcMenuCopy = 4
mcMenuPaste = 5
mcMenuClear = 6
kQTAppMessageSoftwareChanged = 1
kQTAppMessageWindowCloseRequested = 3
kQTAppMessageExitFullScreenRequested = 4
kQTAppMessageDisplayChannels = 5
kQTAppMessageEnterFullScreenRequested = 6
kFetchAsBooleanPtr = 1
kFetchAsShortPtr = 2
kFetchAsLongPtr = 3
kFetchAsMatrixRecordPtr = 4
kFetchAsModifierTrackGraphicsModeRecord = 5
kFetchAsHandle = 6
kFetchAsStr255 = 7
kFetchAsFloatPtr = 8
kFetchAsPointPtr = 9
kFetchAsNewAtomContainer = 10
kFetchAsQTEventRecordPtr = 11
kFetchAsFixedPtr = 12
kFetchAsSetControllerValuePtr = 13
kFetchAsRgnHandle = 14
kFetchAsComponentDescriptionPtr = 15
kFetchAsCString = 16
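
# --- Illustrative sketch ---
# The kFetchAs* codes tell mcActionFetchParameterAs what C type a wired-action
# parameter should be coerced into.  A hypothetical lookup of display names
# for a few of them:
_FETCH_TYPE_NAMES = {
    kFetchAsBooleanPtr: 'Boolean*',
    kFetchAsShortPtr: 'short*',
    kFetchAsLongPtr: 'long*',
    kFetchAsStr255: 'Str255',
    kFetchAsCString: 'C string',
}
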
kQTCursorOpenHand = -19183
kQTCursorClosedHand = -19182
kQTCursorPointingHand = -19181
kQTCursorRightArrow = -19180
kQTCursorLeftArrow = -19179
kQTCursorDownArrow = -19178
kQTCursorUpArrow = -19177
kQTCursorIBeam = -19176
kControllerUnderstandsIdleManagers = 1 << 0
kVideoMediaResetStatisticsSelect = 0x0105
kVideoMediaGetStatisticsSelect = 0x0106
kVideoMediaGetStallCountSelect = 0x010E
kVideoMediaSetCodecParameterSelect = 0x010F
kVideoMediaGetCodecParameterSelect = 0x0110
kTextMediaSetTextProcSelect = 0x0101
kTextMediaAddTextSampleSelect = 0x0102
kTextMediaAddTESampleSelect = 0x0103
kTextMediaAddHiliteSampleSelect = 0x0104
kTextMediaDrawRawSelect = 0x0109
kTextMediaSetTextPropertySelect = 0x010A
kTextMediaRawSetupSelect = 0x010B
kTextMediaRawIdleSelect = 0x010C
kTextMediaGetTextPropertySelect = 0x010D
kTextMediaFindNextTextSelect = 0x0105
kTextMediaHiliteTextSampleSelect = 0x0106
kTextMediaSetTextSampleDataSelect = 0x0107
kSpriteMediaSetPropertySelect = 0x0101
kSpriteMediaGetPropertySelect = 0x0102
kSpriteMediaHitTestSpritesSelect = 0x0103
kSpriteMediaCountSpritesSelect = 0x0104
kSpriteMediaCountImagesSelect = 0x0105
kSpriteMediaGetIndImageDescriptionSelect = 0x0106
kSpriteMediaGetDisplayedSampleNumberSelect = 0x0107
kSpriteMediaGetSpriteNameSelect = 0x0108
kSpriteMediaGetImageNameSelect = 0x0109
kSpriteMediaSetSpritePropertySelect = 0x010A
kSpriteMediaGetSpritePropertySelect = 0x010B
kSpriteMediaHitTestAllSpritesSelect = 0x010C
kSpriteMediaHitTestOneSpriteSelect = 0x010D
kSpriteMediaSpriteIndexToIDSelect = 0x010E
kSpriteMediaSpriteIDToIndexSelect = 0x010F
kSpriteMediaGetSpriteActionsForQTEventSelect = 0x0110
kSpriteMediaSetActionVariableSelect = 0x0111
kSpriteMediaGetActionVariableSelect = 0x0112
kSpriteMediaGetIndImagePropertySelect = 0x0113
kSpriteMediaNewSpriteSelect = 0x0114
kSpriteMediaDisposeSpriteSelect = 0x0115
kSpriteMediaSetActionVariableToStringSelect = 0x0116
kSpriteMediaGetActionVariableAsStringSelect = 0x0117
kSpriteMediaNewImageSelect = 0x011B
kSpriteMediaDisposeImageSelect = 0x011C
kSpriteMediaImageIndexToIDSelect = 0x011D
kSpriteMediaImageIDToIndexSelect = 0x011E
kFlashMediaSetPanSelect = 0x0101
kFlashMediaSetZoomSelect = 0x0102
kFlashMediaSetZoomRectSelect = 0x0103
kFlashMediaGetRefConBoundsSelect = 0x0104
kFlashMediaGetRefConIDSelect = 0x0105
kFlashMediaIDToRefConSelect = 0x0106
kFlashMediaGetDisplayedFrameNumberSelect = 0x0107
kFlashMediaFrameNumberToMovieTimeSelect = 0x0108
kFlashMediaFrameLabelToMovieTimeSelect = 0x0109
kFlashMediaGetFlashVariableSelect = 0x010A
kFlashMediaSetFlashVariableSelect = 0x010B
kFlashMediaDoButtonActionsSelect = 0x010C
kFlashMediaGetSupportedSwfVersionSelect = 0x010D
kMovieMediaGetChildDoMCActionCallbackSelect = 0x0102
kMovieMediaGetDoMCActionCallbackSelect = 0x0103
kMovieMediaGetCurrentMoviePropertySelect = 0x0104
kMovieMediaGetCurrentTrackPropertySelect = 0x0105
kMovieMediaGetChildMovieDataReferenceSelect = 0x0106
kMovieMediaSetChildMovieDataReferenceSelect = 0x0107
kMovieMediaLoadChildMovieFromDataReferenceSelect = 0x0108
kMedia3DGetNamedObjectListSelect = 0x0101
kMedia3DGetRendererListSelect = 0x0102
kMedia3DGetCurrentGroupSelect = 0x0103
kMedia3DTranslateNamedObjectToSelect = 0x0104
kMedia3DScaleNamedObjectToSelect = 0x0105
kMedia3DRotateNamedObjectToSelect = 0x0106
kMedia3DSetCameraDataSelect = 0x0107
kMedia3DGetCameraDataSelect = 0x0108
kMedia3DSetCameraAngleAspectSelect = 0x0109
kMedia3DGetCameraAngleAspectSelect = 0x010A
kMedia3DSetCameraRangeSelect = 0x010D
kMedia3DGetCameraRangeSelect = 0x010E
kMedia3DGetViewObjectSelect = 0x010F
kMCSetMovieSelect = 0x0002
kMCGetIndMovieSelect = 0x0005
kMCRemoveAllMoviesSelect = 0x0006
kMCRemoveAMovieSelect = 0x0003
kMCRemoveMovieSelect = 0x0006
kMCIsPlayerEventSelect = 0x0007
kMCSetActionFilterSelect = 0x0008
kMCDoActionSelect = 0x0009
kMCSetControllerAttachedSelect = 0x000A
kMCIsControllerAttachedSelect = 0x000B
kMCSetControllerPortSelect = 0x000C
kMCGetControllerPortSelect = 0x000D
kMCSetVisibleSelect = 0x000E
kMCGetVisibleSelect = 0x000F
kMCGetControllerBoundsRectSelect = 0x0010
kMCSetControllerBoundsRectSelect = 0x0011
kMCGetControllerBoundsRgnSelect = 0x0012
kMCGetWindowRgnSelect = 0x0013
kMCMovieChangedSelect = 0x0014
kMCSetDurationSelect = 0x0015
kMCGetCurrentTimeSelect = 0x0016
kMCNewAttachedControllerSelect = 0x0017
kMCDrawSelect = 0x0018
kMCActivateSelect = 0x0019
kMCIdleSelect = 0x001A
kMCKeySelect = 0x001B
kMCClickSelect = 0x001C
kMCEnableEditingSelect = 0x001D
kMCIsEditingEnabledSelect = 0x001E
kMCCopySelect = 0x001F
kMCCutSelect = 0x0020
kMCPasteSelect = 0x0021
kMCClearSelect = 0x0022
kMCUndoSelect = 0x0023
kMCPositionControllerSelect = 0x0024
kMCGetControllerInfoSelect = 0x0025
kMCSetClipSelect = 0x0028
kMCGetClipSelect = 0x0029
kMCDrawBadgeSelect = 0x002A
kMCSetUpEditMenuSelect = 0x002B
kMCGetMenuStringSelect = 0x002C
kMCSetActionFilterWithRefConSelect = 0x002D
kMCPtInControllerSelect = 0x002E
kMCInvalidateSelect = 0x002F
kMCAdjustCursorSelect = 0x0030
kMCGetInterfaceElementSelect = 0x0031
kMCGetDoActionsProcSelect = 0x0032
kMCAddMovieSegmentSelect = 0x0033
kMCTrimMovieSegmentSelect = 0x0034
kMCSetIdleManagerSelect = 0x0035
kMCSetControllerCapabilitiesSelect = 0x0036
kMusicMediaGetIndexedTunePlayerSelect = 0x0101
kRawCodecType = FOUR_CHAR_CODE('raw ')
kCinepakCodecType = FOUR_CHAR_CODE('cvid')
kGraphicsCodecType = FOUR_CHAR_CODE('smc ')
kAnimationCodecType = FOUR_CHAR_CODE('rle ')
kVideoCodecType = FOUR_CHAR_CODE('rpza')
kComponentVideoCodecType = FOUR_CHAR_CODE('yuv2')
kJPEGCodecType = FOUR_CHAR_CODE('jpeg')
kMotionJPEGACodecType = FOUR_CHAR_CODE('mjpa')
kMotionJPEGBCodecType = FOUR_CHAR_CODE('mjpb')
kSGICodecType = FOUR_CHAR_CODE('.SGI')
kPlanarRGBCodecType = FOUR_CHAR_CODE('8BPS')
kMacPaintCodecType = FOUR_CHAR_CODE('PNTG')
kGIFCodecType = FOUR_CHAR_CODE('gif ')
kPhotoCDCodecType = FOUR_CHAR_CODE('kpcd')
kQuickDrawGXCodecType = FOUR_CHAR_CODE('qdgx')
kAVRJPEGCodecType = FOUR_CHAR_CODE('avr ')
kOpenDMLJPEGCodecType = FOUR_CHAR_CODE('dmb1')
kBMPCodecType = FOUR_CHAR_CODE('WRLE')
kWindowsRawCodecType = FOUR_CHAR_CODE('WRAW')
kVectorCodecType = FOUR_CHAR_CODE('path')
kQuickDrawCodecType = FOUR_CHAR_CODE('qdrw')
kWaterRippleCodecType = FOUR_CHAR_CODE('ripl')
kFireCodecType = FOUR_CHAR_CODE('fire')
kCloudCodecType = FOUR_CHAR_CODE('clou')
kH261CodecType = FOUR_CHAR_CODE('h261')
kH263CodecType = FOUR_CHAR_CODE('h263')
kDVCNTSCCodecType = FOUR_CHAR_CODE('dvc ')
kDVCPALCodecType = FOUR_CHAR_CODE('dvcp')
kDVCProPALCodecType = FOUR_CHAR_CODE('dvpp')
kBaseCodecType = FOUR_CHAR_CODE('base')
kFLCCodecType = FOUR_CHAR_CODE('flic')
kTargaCodecType = FOUR_CHAR_CODE('tga ')
kPNGCodecType = FOUR_CHAR_CODE('png ')
kTIFFCodecType = FOUR_CHAR_CODE('tiff')
kComponentVideoSigned = FOUR_CHAR_CODE('yuvu')
kComponentVideoUnsigned = FOUR_CHAR_CODE('yuvs')
kCMYKCodecType = FOUR_CHAR_CODE('cmyk')
kMicrosoftVideo1CodecType = FOUR_CHAR_CODE('msvc')
kSorensonCodecType = FOUR_CHAR_CODE('SVQ1')
kSorenson3CodecType = FOUR_CHAR_CODE('SVQ3')
kIndeo4CodecType = FOUR_CHAR_CODE('IV41')
kMPEG4VisualCodecType = FOUR_CHAR_CODE('mp4v')
k64ARGBCodecType = FOUR_CHAR_CODE('b64a')
k48RGBCodecType = FOUR_CHAR_CODE('b48r')
k32AlphaGrayCodecType = FOUR_CHAR_CODE('b32a')
k16GrayCodecType = FOUR_CHAR_CODE('b16g')
kMpegYUV420CodecType = FOUR_CHAR_CODE('myuv')
kYUV420CodecType = FOUR_CHAR_CODE('y420')
kSorensonYUV9CodecType = FOUR_CHAR_CODE('syv9')
k422YpCbCr8CodecType = FOUR_CHAR_CODE('2vuy')
k444YpCbCr8CodecType = FOUR_CHAR_CODE('v308')
k4444YpCbCrA8CodecType = FOUR_CHAR_CODE('v408')
k422YpCbCr16CodecType = FOUR_CHAR_CODE('v216')
k422YpCbCr10CodecType = FOUR_CHAR_CODE('v210')
k444YpCbCr10CodecType = FOUR_CHAR_CODE('v410')
k4444YpCbCrA8RCodecType = FOUR_CHAR_CODE('r408')
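
# --- Illustrative sketch ---
# Each k*CodecType above is a four-character compressor identifier.  For
# illustration only, a small (hypothetical, approximate) table of display
# names for a few common ones:
_ILLUSTRATIVE_CODEC_NAMES = {
    kRawCodecType: 'Uncompressed',
    kCinepakCodecType: 'Cinepak',
    kJPEGCodecType: 'Photo JPEG',
    kPNGCodecType: 'PNG',
    kSorensonCodecType: 'Sorenson Video',
    kMPEG4VisualCodecType: 'MPEG-4 Video',
}
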
kBlurImageFilterType = FOUR_CHAR_CODE('blur')
kSharpenImageFilterType = FOUR_CHAR_CODE('shrp')
kEdgeDetectImageFilterType = FOUR_CHAR_CODE('edge')
kEmbossImageFilterType = FOUR_CHAR_CODE('embs')
kConvolveImageFilterType = FOUR_CHAR_CODE('genk')
kAlphaGainImageFilterType = FOUR_CHAR_CODE('gain')
kRGBColorBalanceImageFilterType = FOUR_CHAR_CODE('rgbb')
kHSLColorBalanceImageFilterType = FOUR_CHAR_CODE('hslb')
kColorSyncImageFilterType = FOUR_CHAR_CODE('sync')
kFilmNoiseImageFilterType = FOUR_CHAR_CODE('fmns')
kSolarizeImageFilterType = FOUR_CHAR_CODE('solr')
kColorTintImageFilterType = FOUR_CHAR_CODE('tint')
kLensFlareImageFilterType = FOUR_CHAR_CODE('lens')
kBrightnessContrastImageFilterType = FOUR_CHAR_CODE('brco')
kAlphaCompositorTransitionType = FOUR_CHAR_CODE('blnd')
kCrossFadeTransitionType = FOUR_CHAR_CODE('dslv')
kChannelCompositeEffectType = FOUR_CHAR_CODE('chan')
kChromaKeyTransitionType = FOUR_CHAR_CODE('ckey')
kImplodeTransitionType = FOUR_CHAR_CODE('mplo')
kExplodeTransitionType = FOUR_CHAR_CODE('xplo')
kGradientTransitionType = FOUR_CHAR_CODE('matt')
kPushTransitionType = FOUR_CHAR_CODE('push')
kSlideTransitionType = FOUR_CHAR_CODE('slid')
kWipeTransitionType = FOUR_CHAR_CODE('smpt')
kIrisTransitionType = FOUR_CHAR_CODE('smp2')
kRadialTransitionType = FOUR_CHAR_CODE('smp3')
kMatrixTransitionType = FOUR_CHAR_CODE('smp4')
kZoomTransitionType = FOUR_CHAR_CODE('zoom')
kTravellingMatteEffectType = FOUR_CHAR_CODE('trav')
kCMYKPixelFormat = FOUR_CHAR_CODE('cmyk')
k64ARGBPixelFormat = FOUR_CHAR_CODE('b64a')
k48RGBPixelFormat = FOUR_CHAR_CODE('b48r')
k32AlphaGrayPixelFormat = FOUR_CHAR_CODE('b32a')
k16GrayPixelFormat = FOUR_CHAR_CODE('b16g')
k422YpCbCr8PixelFormat = FOUR_CHAR_CODE('2vuy')
k4444YpCbCrA8PixelFormat = FOUR_CHAR_CODE('v408')
k4444YpCbCrA8RPixelFormat = FOUR_CHAR_CODE('r408')
kYUV420PixelFormat = FOUR_CHAR_CODE('y420')
codecInfoDoes1 = (1L << 0)
codecInfoDoes2 = (1L << 1)
codecInfoDoes4 = (1L << 2)
codecInfoDoes8 = (1L << 3)
codecInfoDoes16 = (1L << 4)
codecInfoDoes32 = (1L << 5)
codecInfoDoesDither = (1L << 6)
codecInfoDoesStretch = (1L << 7)
codecInfoDoesShrink = (1L << 8)
codecInfoDoesMask = (1L << 9)
codecInfoDoesTemporal = (1L << 10)
codecInfoDoesDouble = (1L << 11)
codecInfoDoesQuad = (1L << 12)
codecInfoDoesHalf = (1L << 13)
codecInfoDoesQuarter = (1L << 14)
codecInfoDoesRotate = (1L << 15)
codecInfoDoesHorizFlip = (1L << 16)
codecInfoDoesVertFlip = (1L << 17)
codecInfoHasEffectParameterList = (1L << 18)
codecInfoDoesBlend = (1L << 19)
codecInfoDoesWarp = (1L << 20)
codecInfoDoesRecompress = (1L << 21)
codecInfoDoesSpool = (1L << 22)
codecInfoDoesRateConstrain = (1L << 23)
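
# --- Illustrative sketch ---
# The codecInfoDoes* bits describe a codec's capabilities (as reported in a
# CodecInfo record) and are tested with plain bit arithmetic.  A hypothetical
# helper:
def _codecCanScale(capabilityFlags):
    # True when the codec can both stretch and shrink while drawing.
    wanted = codecInfoDoesStretch | codecInfoDoesShrink
    return (capabilityFlags & wanted) == wanted
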
codecInfoDepth1 = (1L << 0)
codecInfoDepth2 = (1L << 1)
codecInfoDepth4 = (1L << 2)
codecInfoDepth8 = (1L << 3)
codecInfoDepth16 = (1L << 4)
codecInfoDepth32 = (1L << 5)
codecInfoDepth24 = (1L << 6)
codecInfoDepth33 = (1L << 7)
codecInfoDepth34 = (1L << 8)
codecInfoDepth36 = (1L << 9)
codecInfoDepth40 = (1L << 10)
codecInfoStoresClut = (1L << 11)
codecInfoDoesLossless = (1L << 12)
codecInfoSequenceSensitive = (1L << 13)
codecFlagUseImageBuffer = (1L << 0)
codecFlagUseScreenBuffer = (1L << 1)
codecFlagUpdatePrevious = (1L << 2)
codecFlagNoScreenUpdate = (1L << 3)
codecFlagWasCompressed = (1L << 4)
codecFlagDontOffscreen = (1L << 5)
codecFlagUpdatePreviousComp = (1L << 6)
codecFlagForceKeyFrame = (1L << 7)
codecFlagOnlyScreenUpdate = (1L << 8)
codecFlagLiveGrab = (1L << 9)
codecFlagDiffFrame = (1L << 9)
codecFlagDontUseNewImageBuffer = (1L << 10)
codecFlagInterlaceUpdate = (1L << 11)
codecFlagCatchUpDiff = (1L << 12)
codecFlagSupportDisable = (1L << 13)
codecFlagReenable = (1L << 14)
codecFlagOutUpdateOnNextIdle = (1L << 9)
codecFlagOutUpdateOnDataSourceChange = (1L << 10)
codecFlagSequenceSensitive = (1L << 11)
codecFlagOutUpdateOnTimeChange = (1L << 12)
codecFlagImageBufferNotSourceImage = (1L << 13)
codecFlagUsedNewImageBuffer = (1L << 14)
codecFlagUsedImageBuffer = (1L << 15)
codecMinimumDataSize = 32768L
compressorComponentType = FOUR_CHAR_CODE('imco')
decompressorComponentType = FOUR_CHAR_CODE('imdc')
codecLosslessQuality = 0x00000400
codecMaxQuality = 0x000003FF
codecMinQuality = 0x00000000
codecLowQuality = 0x00000100
codecNormalQuality = 0x00000200
codecHighQuality = 0x00000300
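
# --- Illustrative sketch ---
# Lossy quality runs on the 0x000-0x3FF scale (codecMinQuality through
# codecMaxQuality, with codecNormalQuality in the middle); the special value
# codecLosslessQuality sits just above that range.  A hypothetical clamp for
# lossy settings:
def _clampLossyQuality(q):
    return max(codecMinQuality, min(codecMaxQuality, q))
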
codecLockBitsShieldCursor = (1 << 0)
codecCompletionSource = (1 << 0)
codecCompletionDest = (1 << 1)
codecCompletionDontUnshield = (1 << 2)
codecCompletionWentOffscreen = (1 << 3)
codecCompletionUnlockBits = (1 << 4)
codecCompletionForceChainFlush = (1 << 5)
codecCompletionDropped = (1 << 6)
codecProgressOpen = 0
codecProgressUpdatePercent = 1
codecProgressClose = 2
defaultDither = 0
forceDither = 1
suppressDither = 2
useColorMatching = 4
callStdBits = 1
callOldBits = 2
noDefaultOpcodes = 4
graphicsModeStraightAlpha = 256
graphicsModePreWhiteAlpha = 257
graphicsModePreBlackAlpha = 258
graphicsModeComposition = 259
graphicsModeStraightAlphaBlend = 260
graphicsModePreMulColorAlpha = 261
evenField1ToEvenFieldOut = 1 << 0
evenField1ToOddFieldOut = 1 << 1
oddField1ToEvenFieldOut = 1 << 2
oddField1ToOddFieldOut = 1 << 3
evenField2ToEvenFieldOut = 1 << 4
evenField2ToOddFieldOut = 1 << 5
oddField2ToEvenFieldOut = 1 << 6
oddField2ToOddFieldOut = 1 << 7
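
# --- Illustrative sketch ---
# The *FieldOut bits describe how input fields map to output fields in
# interlaced processing.  A straight passthrough of the first input's two
# fields would be the hypothetical combination:
_EXAMPLE_FIELD_MAPPING = evenField1ToEvenFieldOut | oddField1ToOddFieldOut
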
icmFrameTimeHasVirtualStartTimeAndDuration = 1 << 0
codecDSequenceDisableOverlaySurface = (1L << 5)
codecDSequenceSingleField = (1L << 6)
codecDSequenceBidirectionalPrediction = (1L << 7)
codecDSequenceFlushInsteadOfDirtying = (1L << 8)
codecDSequenceEnableSubPixelPositioning = (1L << 9)
kICMSequenceTaskWeight = FOUR_CHAR_CODE('twei')
kICMSequenceTaskName = FOUR_CHAR_CODE('tnam')
kICMSequenceUserPreferredCodecs = FOUR_CHAR_CODE('punt')
kImageDescriptionSampleFormat = FOUR_CHAR_CODE('idfm')
kImageDescriptionClassicAtomFormat = FOUR_CHAR_CODE('atom')
kImageDescriptionQTAtomFormat = FOUR_CHAR_CODE('qtat')
kImageDescriptionEffectDataFormat = FOUR_CHAR_CODE('fxat')
kImageDescriptionPrivateDataFormat = FOUR_CHAR_CODE('priv')
kImageDescriptionAlternateCodec = FOUR_CHAR_CODE('subs')
kImageDescriptionColorSpace = FOUR_CHAR_CODE('cspc')
sfpItemPreviewAreaUser = 11
sfpItemPreviewStaticText = 12
sfpItemPreviewDividerUser = 13
sfpItemCreatePreviewButton = 14
sfpItemShowPreviewButton = 15
kICMPixelFormatIsPlanarMask = 0x0F
kICMPixelFormatIsIndexed = (1L << 4)
kICMPixelFormatIsSupportedByQD = (1L << 5)
kICMPixelFormatIsMonochrome = (1L << 6)
kICMPixelFormatHasAlphaChannel = (1L << 7)
kICMGetChainUltimateParent = 0
kICMGetChainParent = 1
kICMGetChainChild = 2
kICMGetChainUltimateChild = 3
kDontUseValidateToFindGraphicsImporter = 1L << 0
kICMTempThenAppMemory = 1L << 12
kICMAppThenTempMemory = 1L << 13
kQTUsePlatformDefaultGammaLevel = 0
kQTUseSourceGammaLevel = -1L
kQTCCIR601VideoGammaLevel = 0x00023333
identityMatrixType = 0x00
translateMatrixType = 0x01
scaleMatrixType = 0x02
scaleTranslateMatrixType = 0x03
linearMatrixType = 0x04
linearTranslateMatrixType = 0x05
perspectiveMatrixType = 0x06
GraphicsImporterComponentType = FOUR_CHAR_CODE('grip')
graphicsImporterUsesImageDecompressor = 1L << 23
quickTimeImageFileImageDescriptionAtom = FOUR_CHAR_CODE('idsc')
quickTimeImageFileImageDataAtom = FOUR_CHAR_CODE('idat')
quickTimeImageFileMetaDataAtom = FOUR_CHAR_CODE('meta')
quickTimeImageFileColorSyncProfileAtom = FOUR_CHAR_CODE('iicc')
graphicsImporterDrawsAllPixels = 0
graphicsImporterDoesntDrawAllPixels = 1
graphicsImporterDontKnowIfDrawAllPixels = 2
kGraphicsImporterDontDoGammaCorrection = 1L << 0
kGraphicsImporterTrustResolutionFromFile = 1L << 1
kGraphicsImporterEnableSubPixelPositioning = 1L << 2
kGraphicsExportGroup = FOUR_CHAR_CODE('expo')
kGraphicsExportFileType = FOUR_CHAR_CODE('ftyp')
kGraphicsExportMIMEType = FOUR_CHAR_CODE('mime')
kGraphicsExportExtension = FOUR_CHAR_CODE('ext ')
kGraphicsExportDescription = FOUR_CHAR_CODE('desc')
kQTPhotoshopLayerMode = FOUR_CHAR_CODE('lmod')
kQTPhotoshopLayerOpacity = FOUR_CHAR_CODE('lopa')
kQTPhotoshopLayerClipping = FOUR_CHAR_CODE('lclp')
kQTPhotoshopLayerFlags = FOUR_CHAR_CODE('lflg')
kQTPhotoshopLayerName = FOUR_CHAR_CODE('\xa9lnm')
kQTPhotoshopLayerUnicodeName = FOUR_CHAR_CODE('luni')
kQTIndexedImageType = FOUR_CHAR_CODE('nth?')
kQTIndexedImageIsThumbnail = FOUR_CHAR_CODE('n=th')
kQTIndexedImageIsLayer = FOUR_CHAR_CODE('n=ly')
kQTIndexedImageIsPage = FOUR_CHAR_CODE('n=pg')
kQTIndexedImageIsMultiResolution = FOUR_CHAR_CODE('n=rs')
kQTTIFFUserDataPrefix = 0x74690000
kQTTIFFExifUserDataPrefix = 0x65780000
kQTTIFFExifGPSUserDataPrefix = 0x67700000
kQTAlphaMode = FOUR_CHAR_CODE('almo')
kQTAlphaModePreMulColor = FOUR_CHAR_CODE('almp')
kUserDataIPTC = FOUR_CHAR_CODE('iptc')
kQTTIFFUserDataOrientation = 0x74690112
kQTTIFFUserDataTransferFunction = 0x7469012D
kQTTIFFUserDataWhitePoint = 0x7469013E
kQTTIFFUserDataPrimaryChromaticities = 0x7469013F
kQTTIFFUserDataTransferRange = 0x74690156
kQTTIFFUserDataYCbCrPositioning = 0x74690213
kQTTIFFUserDataReferenceBlackWhite = 0x74690214
kQTTIFFUserDataModelPixelScale = 0x7469830E
kQTTIFFUserDataModelTransformation = 0x746985D8
kQTTIFFUserDataModelTiepoint = 0x74698482
kQTTIFFUserDataGeoKeyDirectory = 0x746987AF
kQTTIFFUserDataGeoDoubleParams = 0x746987B0
kQTTIFFUserDataGeoAsciiParams = 0x746987B1
kQTTIFFUserDataIntergraphMatrix = 0x74698480
kQTExifUserDataExifVersion = 0x65789000
kQTExifUserDataFlashPixVersion = 0x6578A000
kQTExifUserDataColorSpace = 0x6578A001
kQTExifUserDataComponentsConfiguration = 0x65789101
kQTExifUserDataCompressedBitsPerPixel = 0x65789102
kQTExifUserDataPixelXDimension = 0x6578A002
kQTExifUserDataPixelYDimension = 0x6578A003
kQTExifUserDataMakerNote = 0x6578927C
kQTExifUserDataUserComment = 0x6578928C
kQTExifUserDataRelatedSoundFile = 0x6578A004
kQTExifUserDataDateTimeOriginal = 0x65789003
kQTExifUserDataDateTimeDigitized = 0x65789004
kQTExifUserDataSubSecTime = 0x65789290
kQTExifUserDataSubSecTimeOriginal = 0x65789291
kQTExifUserDataSubSecTimeDigitized = 0x65789292
kQTExifUserDataExposureTime = 0x6578829A
kQTExifUserDataFNumber = 0x6578829D
kQTExifUserDataExposureProgram = 0x65788822
kQTExifUserDataSpectralSensitivity = 0x65788824
kQTExifUserDataISOSpeedRatings = 0x65788827
kQTExifUserDataShutterSpeedValue = 0x65789201
kQTExifUserDataApertureValue = 0x65789202
kQTExifUserDataBrightnessValue = 0x65789203
kQTExifUserDataExposureBiasValue = 0x65789204
kQTExifUserDataMaxApertureValue = 0x65789205
kQTExifUserDataSubjectDistance = 0x65789206
kQTExifUserDataMeteringMode = 0x65789207
kQTExifUserDataLightSource = 0x65789208
kQTExifUserDataFlash = 0x65789209
kQTExifUserDataFocalLength = 0x6578920A
kQTExifUserDataFlashEnergy = 0x6578A20B
kQTExifUserDataFocalPlaneXResolution = 0x6578A20E
kQTExifUserDataFocalPlaneYResolution = 0x6578A20F
kQTExifUserDataFocalPlaneResolutionUnit = 0x6578A210
kQTExifUserDataSubjectLocation = 0x6578A214
kQTExifUserDataExposureIndex = 0x6578A215
kQTExifUserDataSensingMethod = 0x6578A217
kQTExifUserDataFileSource = 0x6578A300
kQTExifUserDataSceneType = 0x6578A301
kQTExifUserDataGPSVersionID = 0x06770000
kQTExifUserDataGPSLatitudeRef = 0x06770001
kQTExifUserDataGPSLatitude = 0x06770002
kQTExifUserDataGPSLongitudeRef = 0x06770003
kQTExifUserDataGPSLongitude = 0x06770004
kQTExifUserDataGPSAltitudeRef = 0x06770005
kQTExifUserDataGPSAltitude = 0x06770006
kQTExifUserDataGPSTimeStamp = 0x06770007
kQTExifUserDataGPSSatellites = 0x06770008
kQTExifUserDataGPSStatus = 0x06770009
kQTExifUserDataGPSMeasureMode = 0x0677000A
kQTExifUserDataGPSDOP = 0x0677000B
kQTExifUserDataGPSSpeedRef = 0x0677000C
kQTExifUserDataGPSSpeed = 0x0677000D
kQTExifUserDataGPSTrackRef = 0x0677000E
kQTExifUserDataGPSTrack = 0x0677000F
kQTExifUserDataGPSImgDirectionRef = 0x06770010
kQTExifUserDataGPSImgDirection = 0x06770011
kQTExifUserDataGPSMapDatum = 0x06770012
kQTExifUserDataGPSDestLatitudeRef = 0x06770013
kQTExifUserDataGPSDestLatitude = 0x06770014
kQTExifUserDataGPSDestLongitudeRef = 0x06770015
kQTExifUserDataGPSDestLongitude = 0x06770016
kQTExifUserDataGPSDestBearingRef = 0x06770017
kQTExifUserDataGPSDestBearing = 0x06770018
kQTExifUserDataGPSDestDistanceRef = 0x06770019
kQTExifUserDataGPSDestDistance = 0x0677001A
GraphicsExporterComponentType = FOUR_CHAR_CODE('grex')
kBaseGraphicsExporterSubType = FOUR_CHAR_CODE('base')
graphicsExporterIsBaseExporter = 1L << 0
graphicsExporterCanTranscode = 1L << 1
graphicsExporterUsesImageCompressor = 1L << 2
kQTResolutionSettings = FOUR_CHAR_CODE('reso')
kQTTargetDataSize = FOUR_CHAR_CODE('dasz')
kQTDontRecompress = FOUR_CHAR_CODE('dntr')
kQTInterlaceStyle = FOUR_CHAR_CODE('ilac')
kQTColorSyncProfile = FOUR_CHAR_CODE('iccp')
kQTThumbnailSettings = FOUR_CHAR_CODE('thum')
kQTEnableExif = FOUR_CHAR_CODE('exif')
kQTMetaData = FOUR_CHAR_CODE('meta')
kQTTIFFCompressionMethod = FOUR_CHAR_CODE('tifc')
kQTTIFFCompression_None = 1
kQTTIFFCompression_PackBits = 32773L
kQTTIFFLittleEndian = FOUR_CHAR_CODE('tife')
kQTPNGFilterPreference = FOUR_CHAR_CODE('pngf')
kQTPNGFilterBestForColorType = FOUR_CHAR_CODE('bflt')
kQTPNGFilterNone = 0
kQTPNGFilterSub = 1
kQTPNGFilterUp = 2
kQTPNGFilterAverage = 3
kQTPNGFilterPaeth = 4
kQTPNGFilterAdaptivePerRow = FOUR_CHAR_CODE('aflt')
kQTPNGInterlaceStyle = FOUR_CHAR_CODE('ilac')
kQTPNGInterlaceNone = 0
kQTPNGInterlaceAdam7 = 1
ImageTranscodererComponentType = FOUR_CHAR_CODE('imtc')
kGraphicsImportSetDataReferenceSelect = 0x0001
kGraphicsImportGetDataReferenceSelect = 0x0002
kGraphicsImportSetDataFileSelect = 0x0003
kGraphicsImportGetDataFileSelect = 0x0004
kGraphicsImportSetDataHandleSelect = 0x0005
kGraphicsImportGetDataHandleSelect = 0x0006
kGraphicsImportGetImageDescriptionSelect = 0x0007
kGraphicsImportGetDataOffsetAndSizeSelect = 0x0008
kGraphicsImportReadDataSelect = 0x0009
kGraphicsImportSetClipSelect = 0x000A
kGraphicsImportGetClipSelect = 0x000B
kGraphicsImportSetSourceRectSelect = 0x000C
kGraphicsImportGetSourceRectSelect = 0x000D
kGraphicsImportGetNaturalBoundsSelect = 0x000E
kGraphicsImportDrawSelect = 0x000F
kGraphicsImportSetGWorldSelect = 0x0010
kGraphicsImportGetGWorldSelect = 0x0011
kGraphicsImportSetMatrixSelect = 0x0012
kGraphicsImportGetMatrixSelect = 0x0013
kGraphicsImportSetBoundsRectSelect = 0x0014
kGraphicsImportGetBoundsRectSelect = 0x0015
kGraphicsImportSaveAsPictureSelect = 0x0016
kGraphicsImportSetGraphicsModeSelect = 0x0017
kGraphicsImportGetGraphicsModeSelect = 0x0018
kGraphicsImportSetQualitySelect = 0x0019
kGraphicsImportGetQualitySelect = 0x001A
kGraphicsImportSaveAsQuickTimeImageFileSelect = 0x001B
kGraphicsImportSetDataReferenceOffsetAndLimitSelect = 0x001C
kGraphicsImportGetDataReferenceOffsetAndLimitSelect = 0x001D
kGraphicsImportGetAliasedDataReferenceSelect = 0x001E
kGraphicsImportValidateSelect = 0x001F
kGraphicsImportGetMetaDataSelect = 0x0020
kGraphicsImportGetMIMETypeListSelect = 0x0021
kGraphicsImportDoesDrawAllPixelsSelect = 0x0022
kGraphicsImportGetAsPictureSelect = 0x0023
kGraphicsImportExportImageFileSelect = 0x0024
kGraphicsImportGetExportImageTypeListSelect = 0x0025
kGraphicsImportDoExportImageFileDialogSelect = 0x0026
kGraphicsImportGetExportSettingsAsAtomContainerSelect = 0x0027
kGraphicsImportSetExportSettingsFromAtomContainerSelect = 0x0028
kGraphicsImportSetProgressProcSelect = 0x0029
kGraphicsImportGetProgressProcSelect = 0x002A
kGraphicsImportGetImageCountSelect = 0x002B
kGraphicsImportSetImageIndexSelect = 0x002C
kGraphicsImportGetImageIndexSelect = 0x002D
kGraphicsImportGetDataOffsetAndSize64Select = 0x002E
kGraphicsImportReadData64Select = 0x002F
kGraphicsImportSetDataReferenceOffsetAndLimit64Select = 0x0030
kGraphicsImportGetDataReferenceOffsetAndLimit64Select = 0x0031
kGraphicsImportGetDefaultMatrixSelect = 0x0032
kGraphicsImportGetDefaultClipSelect = 0x0033
kGraphicsImportGetDefaultGraphicsModeSelect = 0x0034
kGraphicsImportGetDefaultSourceRectSelect = 0x0035
kGraphicsImportGetColorSyncProfileSelect = 0x0036
kGraphicsImportSetDestRectSelect = 0x0037
kGraphicsImportGetDestRectSelect = 0x0038
kGraphicsImportSetFlagsSelect = 0x0039
kGraphicsImportGetFlagsSelect = 0x003A
kGraphicsImportGetBaseDataOffsetAndSize64Select = 0x003D
kGraphicsImportSetImageIndexToThumbnailSelect = 0x003E
kGraphicsExportDoExportSelect = 0x0001
kGraphicsExportCanTranscodeSelect = 0x0002
kGraphicsExportDoTranscodeSelect = 0x0003
kGraphicsExportCanUseCompressorSelect = 0x0004
kGraphicsExportDoUseCompressorSelect = 0x0005
kGraphicsExportDoStandaloneExportSelect = 0x0006
kGraphicsExportGetDefaultFileTypeAndCreatorSelect = 0x0007
kGraphicsExportGetDefaultFileNameExtensionSelect = 0x0008
kGraphicsExportGetMIMETypeListSelect = 0x0009
kGraphicsExportRequestSettingsSelect = 0x000B
kGraphicsExportSetSettingsFromAtomContainerSelect = 0x000C
kGraphicsExportGetSettingsAsAtomContainerSelect = 0x000D
kGraphicsExportGetSettingsAsTextSelect = 0x000E
kGraphicsExportSetDontRecompressSelect = 0x000F
kGraphicsExportGetDontRecompressSelect = 0x0010
kGraphicsExportSetInterlaceStyleSelect = 0x0011
kGraphicsExportGetInterlaceStyleSelect = 0x0012
kGraphicsExportSetMetaDataSelect = 0x0013
kGraphicsExportGetMetaDataSelect = 0x0014
kGraphicsExportSetTargetDataSizeSelect = 0x0015
kGraphicsExportGetTargetDataSizeSelect = 0x0016
kGraphicsExportSetCompressionMethodSelect = 0x0017
kGraphicsExportGetCompressionMethodSelect = 0x0018
kGraphicsExportSetCompressionQualitySelect = 0x0019
kGraphicsExportGetCompressionQualitySelect = 0x001A
kGraphicsExportSetResolutionSelect = 0x001B
kGraphicsExportGetResolutionSelect = 0x001C
kGraphicsExportSetDepthSelect = 0x001D
kGraphicsExportGetDepthSelect = 0x001E
kGraphicsExportSetColorSyncProfileSelect = 0x0021
kGraphicsExportGetColorSyncProfileSelect = 0x0022
kGraphicsExportSetProgressProcSelect = 0x0023
kGraphicsExportGetProgressProcSelect = 0x0024
kGraphicsExportSetInputDataReferenceSelect = 0x0025
kGraphicsExportGetInputDataReferenceSelect = 0x0026
kGraphicsExportSetInputFileSelect = 0x0027
kGraphicsExportGetInputFileSelect = 0x0028
kGraphicsExportSetInputHandleSelect = 0x0029
kGraphicsExportGetInputHandleSelect = 0x002A
kGraphicsExportSetInputPtrSelect = 0x002B
kGraphicsExportGetInputPtrSelect = 0x002C
kGraphicsExportSetInputGraphicsImporterSelect = 0x002D
kGraphicsExportGetInputGraphicsImporterSelect = 0x002E
kGraphicsExportSetInputPictureSelect = 0x002F
kGraphicsExportGetInputPictureSelect = 0x0030
kGraphicsExportSetInputGWorldSelect = 0x0031
kGraphicsExportGetInputGWorldSelect = 0x0032
kGraphicsExportSetInputPixmapSelect = 0x0033
kGraphicsExportGetInputPixmapSelect = 0x0034
kGraphicsExportSetInputOffsetAndLimitSelect = 0x0035
kGraphicsExportGetInputOffsetAndLimitSelect = 0x0036
kGraphicsExportMayExporterReadInputDataSelect = 0x0037
kGraphicsExportGetInputDataSizeSelect = 0x0038
kGraphicsExportReadInputDataSelect = 0x0039
kGraphicsExportGetInputImageDescriptionSelect = 0x003A
kGraphicsExportGetInputImageDimensionsSelect = 0x003B
kGraphicsExportGetInputImageDepthSelect = 0x003C
kGraphicsExportDrawInputImageSelect = 0x003D
kGraphicsExportSetOutputDataReferenceSelect = 0x003E
kGraphicsExportGetOutputDataReferenceSelect = 0x003F
kGraphicsExportSetOutputFileSelect = 0x0040
kGraphicsExportGetOutputFileSelect = 0x0041
kGraphicsExportSetOutputHandleSelect = 0x0042
kGraphicsExportGetOutputHandleSelect = 0x0043
kGraphicsExportSetOutputOffsetAndMaxSizeSelect = 0x0044
kGraphicsExportGetOutputOffsetAndMaxSizeSelect = 0x0045
kGraphicsExportSetOutputFileTypeAndCreatorSelect = 0x0046
kGraphicsExportGetOutputFileTypeAndCreatorSelect = 0x0047
kGraphicsExportWriteOutputDataSelect = 0x0048
kGraphicsExportSetOutputMarkSelect = 0x0049
kGraphicsExportGetOutputMarkSelect = 0x004A
kGraphicsExportReadOutputDataSelect = 0x004B
kGraphicsExportSetThumbnailEnabledSelect = 0x004C
kGraphicsExportGetThumbnailEnabledSelect = 0x004D
kGraphicsExportSetExifEnabledSelect = 0x004E
kGraphicsExportGetExifEnabledSelect = 0x004F
kImageTranscoderBeginSequenceSelect = 0x0001
kImageTranscoderConvertSelect = 0x0002
kImageTranscoderDisposeDataSelect = 0x0003
kImageTranscoderEndSequenceSelect = 0x0004
clockComponentType = FOUR_CHAR_CODE('clok')
systemTickClock = FOUR_CHAR_CODE('tick')
systemSecondClock = FOUR_CHAR_CODE('seco')
systemMillisecondClock = FOUR_CHAR_CODE('mill')
systemMicrosecondClock = FOUR_CHAR_CODE('micr')
kClockRateIsLinear = 1
kClockImplementsCallBacks = 2
kClockCanHandleIntermittentSound = 4
StandardCompressionType = FOUR_CHAR_CODE('scdi')
StandardCompressionSubType = FOUR_CHAR_CODE('imag')
StandardCompressionSubTypeSound = FOUR_CHAR_CODE('soun')
scListEveryCodec = 1L << 1
scAllowZeroFrameRate = 1L << 2
scAllowZeroKeyFrameRate = 1L << 3
scShowBestDepth = 1L << 4
scUseMovableModal = 1L << 5
scDisableFrameRateItem = 1L << 6
scShowDataRateAsKilobits = 1L << 7
scPreferCropping = 1 << 0
scPreferScaling = 1 << 1
scPreferScalingAndCropping = scPreferScaling | scPreferCropping
scDontDetermineSettingsFromTestImage = 1 << 2
scTestImageWidth = 80
scTestImageHeight = 80
scOKItem = 1
scCancelItem = 2
scCustomItem = 3
scUserCancelled = 1
scPositionRect = 2
scPositionDialog = 3
scSetTestImagePictHandle = 4
scSetTestImagePictFile = 5
scSetTestImagePixMap = 6
scGetBestDeviceRect = 7
scRequestImageSettings = 10
scCompressImage = 11
scCompressPicture = 12
scCompressPictureFile = 13
scRequestSequenceSettings = 14
scCompressSequenceBegin = 15
scCompressSequenceFrame = 16
scCompressSequenceEnd = 17
scDefaultPictHandleSettings = 18
scDefaultPictFileSettings = 19
scDefaultPixMapSettings = 20
scGetInfo = 21
scSetInfo = 22
scNewGWorld = 23
scSpatialSettingsType = FOUR_CHAR_CODE('sptl')
scTemporalSettingsType = FOUR_CHAR_CODE('tprl')
scDataRateSettingsType = FOUR_CHAR_CODE('drat')
scColorTableType = FOUR_CHAR_CODE('clut')
scProgressProcType = FOUR_CHAR_CODE('prog')
scExtendedProcsType = FOUR_CHAR_CODE('xprc')
scPreferenceFlagsType = FOUR_CHAR_CODE('pref')
scSettingsStateType = FOUR_CHAR_CODE('ssta')
scSequenceIDType = FOUR_CHAR_CODE('sequ')
scWindowPositionType = FOUR_CHAR_CODE('wndw')
scCodecFlagsType = FOUR_CHAR_CODE('cflg')
scCodecSettingsType = FOUR_CHAR_CODE('cdec')
scForceKeyValueType = FOUR_CHAR_CODE('ksim')
scSoundSampleRateType = FOUR_CHAR_CODE('ssrt')
scSoundSampleSizeType = FOUR_CHAR_CODE('ssss')
scSoundChannelCountType = FOUR_CHAR_CODE('sscc')
scSoundCompressionType = FOUR_CHAR_CODE('ssct')
scCompressionListType = FOUR_CHAR_CODE('ctyl')
scCodecManufacturerType = FOUR_CHAR_CODE('cmfr')
scSoundVBRCompressionOK = FOUR_CHAR_CODE('cvbr')
scSoundInputSampleRateType = FOUR_CHAR_CODE('ssir')
scSoundSampleRateChangeOK = FOUR_CHAR_CODE('rcok')
scAvailableCompressionListType = FOUR_CHAR_CODE('avai')
scGetCompression = 1
scShowMotionSettings = 1L << 0
scSettingsChangedItem = -1
scCompressFlagIgnoreIdenticalFrames = 1
kQTSettingsVideo = FOUR_CHAR_CODE('vide')
kQTSettingsSound = FOUR_CHAR_CODE('soun')
kQTSettingsComponentVersion = FOUR_CHAR_CODE('vers')
TweenComponentType = FOUR_CHAR_CODE('twen')
TCSourceRefNameType = FOUR_CHAR_CODE('name')
tcDropFrame = 1 << 0
tc24HourMax = 1 << 1
tcNegTimesOK = 1 << 2
tcCounter = 1 << 3
tctNegFlag = 0x80
tcdfShowTimeCode = 1 << 0
MovieImportType = FOUR_CHAR_CODE('eat ')
MovieExportType = FOUR_CHAR_CODE('spit')
canMovieImportHandles = 1 << 0
canMovieImportFiles = 1 << 1
hasMovieImportUserInterface = 1 << 2
canMovieExportHandles = 1 << 3
canMovieExportFiles = 1 << 4
hasMovieExportUserInterface = 1 << 5
movieImporterIsXMLBased = 1 << 5
dontAutoFileMovieImport = 1 << 6
canMovieExportAuxDataHandle = 1 << 7
canMovieImportValidateHandles = 1 << 8
canMovieImportValidateFile = 1 << 9
dontRegisterWithEasyOpen = 1 << 10
canMovieImportInPlace = 1 << 11
movieImportSubTypeIsFileExtension = 1 << 12
canMovieImportPartial = 1 << 13
hasMovieImportMIMEList = 1 << 14
canMovieImportAvoidBlocking = 1 << 15
canMovieExportFromProcedures = 1 << 15
canMovieExportValidateMovie = 1L << 16
movieImportMustGetDestinationMediaType = 1L << 16
movieExportNeedsResourceFork = 1L << 17
canMovieImportDataReferences = 1L << 18
movieExportMustGetSourceMediaType = 1L << 19
canMovieImportWithIdle = 1L << 20
canMovieImportValidateDataReferences = 1L << 21
reservedForUseByGraphicsImporters = 1L << 23
movieImportCreateTrack = 1
movieImportInParallel = 2
movieImportMustUseTrack = 4
movieImportWithIdle = 16
movieImportResultUsedMultipleTracks = 8
movieImportResultNeedIdles = 32
movieImportResultComplete = 64
kMovieExportTextOnly = 0
kMovieExportAbsoluteTime = 1
kMovieExportRelativeTime = 2
kMIDIImportSilenceBefore = 1 << 0
kMIDIImportSilenceAfter = 1 << 1
kMIDIImport20Playable = 1 << 2
kMIDIImportWantLyrics = 1 << 3
kQTMediaConfigResourceType = FOUR_CHAR_CODE('mcfg')
kQTMediaConfigResourceVersion = 2
kQTMediaGroupResourceType = FOUR_CHAR_CODE('mgrp')
kQTMediaGroupResourceVersion = 1
kQTBrowserInfoResourceType = FOUR_CHAR_CODE('brws')
kQTBrowserInfoResourceVersion = 1
kQTMediaMIMEInfoHasChanged = (1L << 1)
kQTMediaFileInfoHasChanged = (1L << 2)
kQTMediaConfigCanUseApp = (1L << 18)
kQTMediaConfigCanUsePlugin = (1L << 19)
kQTMediaConfigUNUSED = (1L << 20)
kQTMediaConfigBinaryFile = (1L << 23)
kQTMediaConfigTextFile = 0
kQTMediaConfigMacintoshFile = (1L << 24)
kQTMediaConfigAssociateByDefault = (1L << 27)
kQTMediaConfigUseAppByDefault = (1L << 28)
kQTMediaConfigUsePluginByDefault = (1L << 29)
kQTMediaConfigDefaultsMask = (kQTMediaConfigUseAppByDefault | kQTMediaConfigUsePluginByDefault)
kQTMediaConfigDefaultsShift = 12
kQTMediaConfigHasFileHasQTAtoms = (1L << 30)
kQTMediaConfigStreamGroupID = FOUR_CHAR_CODE('strm')
kQTMediaConfigInteractiveGroupID = FOUR_CHAR_CODE('intr')
kQTMediaConfigVideoGroupID = FOUR_CHAR_CODE('eyes')
kQTMediaConfigAudioGroupID = FOUR_CHAR_CODE('ears')
kQTMediaConfigMPEGGroupID = FOUR_CHAR_CODE('mpeg')
kQTMediaConfigMP3GroupID = FOUR_CHAR_CODE('mp3 ')
kQTMediaConfigImageGroupID = FOUR_CHAR_CODE('ogle')
kQTMediaConfigMiscGroupID = FOUR_CHAR_CODE('misc')
kQTMediaInfoNetGroup = FOUR_CHAR_CODE('net ')
kQTMediaInfoWinGroup = FOUR_CHAR_CODE('win ')
kQTMediaInfoMacGroup = FOUR_CHAR_CODE('mac ')
kQTMediaInfoMiscGroup = 0x3F3F3F3F
kMimeInfoMimeTypeTag = FOUR_CHAR_CODE('mime')
kMimeInfoFileExtensionTag = FOUR_CHAR_CODE('ext ')
kMimeInfoDescriptionTag = FOUR_CHAR_CODE('desc')
kMimeInfoGroupTag = FOUR_CHAR_CODE('grop')
kMimeInfoDoNotOverrideExistingFileTypeAssociation = FOUR_CHAR_CODE('nofa')
kQTFileTypeAIFF = FOUR_CHAR_CODE('AIFF')
kQTFileTypeAIFC = FOUR_CHAR_CODE('AIFC')
kQTFileTypeDVC = FOUR_CHAR_CODE('dvc!')
kQTFileTypeMIDI = FOUR_CHAR_CODE('Midi')
kQTFileTypePicture = FOUR_CHAR_CODE('PICT')
kQTFileTypeMovie = FOUR_CHAR_CODE('MooV')
kQTFileTypeText = FOUR_CHAR_CODE('TEXT')
kQTFileTypeWave = FOUR_CHAR_CODE('WAVE')
kQTFileTypeSystemSevenSound = FOUR_CHAR_CODE('sfil')
kQTFileTypeMuLaw = FOUR_CHAR_CODE('ULAW')
kQTFileTypeAVI = FOUR_CHAR_CODE('VfW ')
kQTFileTypeSoundDesignerII = FOUR_CHAR_CODE('Sd2f')
kQTFileTypeAudioCDTrack = FOUR_CHAR_CODE('trak')
kQTFileTypePICS = FOUR_CHAR_CODE('PICS')
kQTFileTypeGIF = FOUR_CHAR_CODE('GIFf')
kQTFileTypePNG = FOUR_CHAR_CODE('PNGf')
kQTFileTypeTIFF = FOUR_CHAR_CODE('TIFF')
kQTFileTypePhotoShop = FOUR_CHAR_CODE('8BPS')
kQTFileTypeSGIImage = FOUR_CHAR_CODE('.SGI')
kQTFileTypeBMP = FOUR_CHAR_CODE('BMPf')
kQTFileTypeJPEG = FOUR_CHAR_CODE('JPEG')
kQTFileTypeJFIF = FOUR_CHAR_CODE('JPEG')
kQTFileTypeMacPaint = FOUR_CHAR_CODE('PNTG')
kQTFileTypeTargaImage = FOUR_CHAR_CODE('TPIC')
kQTFileTypeQuickDrawGXPicture = FOUR_CHAR_CODE('qdgx')
kQTFileTypeQuickTimeImage = FOUR_CHAR_CODE('qtif')
kQTFileType3DMF = FOUR_CHAR_CODE('3DMF')
kQTFileTypeFLC = FOUR_CHAR_CODE('FLC ')
kQTFileTypeFlash = FOUR_CHAR_CODE('SWFL')
kQTFileTypeFlashPix = FOUR_CHAR_CODE('FPix')
kQTFileTypeMP4 = FOUR_CHAR_CODE('mpg4')
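
# --- Illustrative sketch ---
# kQTFileType* values are classic Mac OS file types (OSType codes).  A small,
# hypothetical mapping from common filename extensions to these types:
_ILLUSTRATIVE_EXTENSION_TO_FILE_TYPE = {
    '.aiff': kQTFileTypeAIFF,
    '.mov': kQTFileTypeMovie,
    '.wav': kQTFileTypeWave,
    '.avi': kQTFileTypeAVI,
    '.png': kQTFileTypePNG,
    '.jpg': kQTFileTypeJPEG,
}
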
kQTSettingsDVExportNTSC = FOUR_CHAR_CODE('dvcv')
kQTSettingsDVExportLockedAudio = FOUR_CHAR_CODE('lock')
kQTSettingsEffect = FOUR_CHAR_CODE('effe')
kQTSettingsGraphicsFileImportSequence = FOUR_CHAR_CODE('sequ')
kQTSettingsGraphicsFileImportSequenceEnabled = FOUR_CHAR_CODE('enab')
kQTSettingsMovieExportEnableVideo = FOUR_CHAR_CODE('envi')
kQTSettingsMovieExportEnableSound = FOUR_CHAR_CODE('enso')
kQTSettingsMovieExportSaveOptions = FOUR_CHAR_CODE('save')
kQTSettingsMovieExportSaveForInternet = FOUR_CHAR_CODE('fast')
kQTSettingsMovieExportSaveCompressedMovie = FOUR_CHAR_CODE('cmpm')
kQTSettingsMIDI = FOUR_CHAR_CODE('MIDI')
kQTSettingsMIDISettingFlags = FOUR_CHAR_CODE('sttg')
kQTSettingsText = FOUR_CHAR_CODE('text')
kQTSettingsTextDescription = FOUR_CHAR_CODE('desc')
kQTSettingsTextSize = FOUR_CHAR_CODE('size')
kQTSettingsTextSettingFlags = FOUR_CHAR_CODE('sttg')
kQTSettingsTextTimeFraction = FOUR_CHAR_CODE('timf')
kQTSettingsTime = FOUR_CHAR_CODE('time')
kQTSettingsTimeDuration = FOUR_CHAR_CODE('dura')
kQTSettingsAudioCDTrack = FOUR_CHAR_CODE('trak')
kQTSettingsAudioCDTrackRateShift = FOUR_CHAR_CODE('rshf')
kQTSettingsDVExportDVFormat = FOUR_CHAR_CODE('dvcf')
kQTPresetsListResourceType = FOUR_CHAR_CODE('stg#')
kQTPresetsPlatformListResourceType = FOUR_CHAR_CODE('stgp')
kQTPresetInfoIsDivider = 1
kQTMovieExportSourceInfoResourceType = FOUR_CHAR_CODE('src#')
kQTMovieExportSourceInfoIsMediaType = 1L << 0
kQTMovieExportSourceInfoIsMediaCharacteristic = 1L << 1
kQTMovieExportSourceInfoIsSourceType = 1L << 2
movieExportUseConfiguredSettings = FOUR_CHAR_CODE('ucfg')
movieExportWidth = FOUR_CHAR_CODE('wdth')
movieExportHeight = FOUR_CHAR_CODE('hegt')
movieExportDuration = FOUR_CHAR_CODE('dura')
movieExportVideoFilter = FOUR_CHAR_CODE('iflt')
movieExportTimeScale = FOUR_CHAR_CODE('tmsc')
kQTBrowserInfoCanUseSystemFolderPlugin = (1L << 0)
kQTPreFlightOpenComponent = (1L << 1)
pnotComponentWantsEvents = 1
pnotComponentNeedsNoCache = 2
ShowFilePreviewComponentType = FOUR_CHAR_CODE('pnot')
CreateFilePreviewComponentType = FOUR_CHAR_CODE('pmak')
DataCompressorComponentType = FOUR_CHAR_CODE('dcom')
DataDecompressorComponentType = FOUR_CHAR_CODE('ddec')
AppleDataCompressorSubType = FOUR_CHAR_CODE('adec')
zlibDataCompressorSubType = FOUR_CHAR_CODE('zlib')
kDataHCanRead = 1L << 0
kDataHSpecialRead = 1L << 1
kDataHSpecialReadFile = 1L << 2
kDataHCanWrite = 1L << 3
kDataHSpecialWrite = 1 << 4
kDataHSpecialWriteFile = 1 << 5
kDataHCanStreamingWrite = 1 << 6
kDataHMustCheckDataRef = 1 << 7
kDataRefExtensionChokeSpeed = FOUR_CHAR_CODE('chok')
kDataRefExtensionFileName = FOUR_CHAR_CODE('fnam')
kDataRefExtensionMIMEType = FOUR_CHAR_CODE('mime')
kDataRefExtensionMacOSFileType = FOUR_CHAR_CODE('ftyp')
kDataRefExtensionInitializationData = FOUR_CHAR_CODE('data')
kDataRefExtensionQuickTimeMediaType = FOUR_CHAR_CODE('mtyp')
kDataHChokeToMovieDataRate = 1 << 0
kDataHChokeToParam = 1 << 1
kDataHExtendedSchedule = FOUR_CHAR_CODE('xtnd')
kDataHInfoFlagNeverStreams = 1 << 0
kDataHInfoFlagCanUpdateDataRefs = 1 << 1
kDataHInfoFlagNeedsNetworkBandwidth = 1 << 2
kDataHFileTypeMacOSFileType = FOUR_CHAR_CODE('ftyp')
kDataHFileTypeExtension = FOUR_CHAR_CODE('fext')
kDataHFileTypeMIME = FOUR_CHAR_CODE('mime')
kDataHCreateFileButDontCreateResFile = (1L << 0)
kDataHMovieUsageDoAppendMDAT = 1L << 0
kDataHTempUseSameDirectory = 1L << 0
kDataHTempUseSameVolume = 1L << 1
kDataHTempCreateFile = 1L << 2
kDataHTempOpenFile = 1L << 3
kDataHGetDataRateInfiniteRate = 0x7FFFFFFF
kDataHSetTimeHintsSkipBandwidthRequest = 1 << 0
videoDigitizerComponentType = FOUR_CHAR_CODE('vdig')
vdigInterfaceRev = 2
ntscIn = 0
currentIn = 0
palIn = 1
secamIn = 2
ntscReallyIn = 3
compositeIn = 0
sVideoIn = 1
rgbComponentIn = 2
rgbComponentSyncIn = 3
yuvComponentIn = 4
yuvComponentSyncIn = 5
tvTunerIn = 6
sdiIn = 7
vdPlayThruOff = 0
vdPlayThruOn = 1
vdDigitizerBW = 0
vdDigitizerRGB = 1
vdBroadcastMode = 0
vdVTRMode = 1
vdUseAnyField = 0
vdUseOddField = 1
vdUseEvenField = 2
vdTypeBasic = 0
vdTypeAlpha = 1
vdTypeMask = 2
vdTypeKey = 3
digiInDoesNTSC = 1L << 0
digiInDoesPAL = 1L << 1
digiInDoesSECAM = 1L << 2
digiInDoesGenLock = 1L << 7
digiInDoesComposite = 1L << 8
digiInDoesSVideo = 1L << 9
digiInDoesComponent = 1L << 10
digiInVTR_Broadcast = 1L << 11
digiInDoesColor = 1L << 12
digiInDoesBW = 1L << 13
digiInSignalLock = 1L << 31
digiOutDoes1 = 1L << 0
digiOutDoes2 = 1L << 1
digiOutDoes4 = 1L << 2
digiOutDoes8 = 1L << 3
digiOutDoes16 = 1L << 4
digiOutDoes32 = 1L << 5
digiOutDoesDither = 1L << 6
digiOutDoesStretch = 1L << 7
digiOutDoesShrink = 1L << 8
digiOutDoesMask = 1L << 9
digiOutDoesDouble = 1L << 11
digiOutDoesQuad = 1L << 12
digiOutDoesQuarter = 1L << 13
digiOutDoesSixteenth = 1L << 14
digiOutDoesRotate = 1L << 15
digiOutDoesHorizFlip = 1L << 16
digiOutDoesVertFlip = 1L << 17
digiOutDoesSkew = 1L << 18
digiOutDoesBlend = 1L << 19
digiOutDoesWarp = 1L << 20
digiOutDoesHW_DMA = 1L << 21
digiOutDoesHWPlayThru = 1L << 22
digiOutDoesILUT = 1L << 23
digiOutDoesKeyColor = 1L << 24
digiOutDoesAsyncGrabs = 1L << 25
digiOutDoesUnreadableScreenBits = 1L << 26
digiOutDoesCompress = 1L << 27
digiOutDoesCompressOnly = 1L << 28
digiOutDoesPlayThruDuringCompress = 1L << 29
digiOutDoesCompressPartiallyVisible = 1L << 30
digiOutDoesNotNeedCopyOfCompressData = 1L << 31
dmaDepth1 = 1
dmaDepth2 = 2
dmaDepth4 = 4
dmaDepth8 = 8
dmaDepth16 = 16
dmaDepth32 = 32
dmaDepth2Gray = 64
dmaDepth4Gray = 128
dmaDepth8Gray = 256
kVDIGControlledFrameRate = -1
vdDeviceFlagShowInputsAsDevices = (1 << 0)
vdDeviceFlagHideDevice = (1 << 1)
vdFlagCaptureStarting = (1 << 0)
vdFlagCaptureStopping = (1 << 1)
vdFlagCaptureIsForPreview = (1 << 2)
vdFlagCaptureIsForRecord = (1 << 3)
vdFlagCaptureLowLatency = (1 << 4)
vdFlagCaptureAlwaysUseTimeBase = (1 << 5)
vdFlagCaptureSetSettingsBegin = (1 << 6)
vdFlagCaptureSetSettingsEnd = (1 << 7)
xmlParseComponentType = FOUR_CHAR_CODE('pars')
xmlParseComponentSubType = FOUR_CHAR_CODE('xml ')
xmlIdentifierInvalid = 0
xmlIdentifierUnrecognized = -1  # (long)0xFFFFFFFF in the C header, i.e. -1 as a signed 32-bit value
xmlContentTypeInvalid = 0
xmlContentTypeElement = 1
xmlContentTypeCharData = 2
elementFlagAlwaysSelfContained = 1L << 0
elementFlagPreserveWhiteSpace = 1L << 1
xmlParseFlagAllowUppercase = 1L << 0
xmlParseFlagAllowUnquotedAttributeValues = 1L << 1
xmlParseFlagEventParseOnly = 1L << 2
attributeValueKindCharString = 0
attributeValueKindInteger = 1L << 0
attributeValueKindPercent = 1L << 1
attributeValueKindBoolean = 1L << 2
attributeValueKindOnOff = 1L << 3
attributeValueKindColor = 1L << 4
attributeValueKindEnum = 1L << 5
attributeValueKindCaseSensEnum = 1L << 6
MAX_ATTRIBUTE_VALUE_KIND = attributeValueKindCaseSensEnum
nameSpaceIDNone = 0
element_xml = 1
attr_src = 1
SeqGrabComponentType = FOUR_CHAR_CODE('barg')
SeqGrabChannelType = FOUR_CHAR_CODE('sgch')
SeqGrabPanelType = FOUR_CHAR_CODE('sgpn')
SeqGrabCompressionPanelType = FOUR_CHAR_CODE('cmpr')
SeqGrabSourcePanelType = FOUR_CHAR_CODE('sour')
seqGrabToDisk = 1
seqGrabToMemory = 2
seqGrabDontUseTempMemory = 4
seqGrabAppendToFile = 8
seqGrabDontAddMovieResource = 16
seqGrabDontMakeMovie = 32
seqGrabPreExtendFile = 64
seqGrabDataProcIsInterruptSafe = 128
seqGrabDataProcDoesOverlappingReads = 256
seqGrabRecord = 1
seqGrabPreview = 2
seqGrabPlayDuringRecord = 4
seqGrabLowLatencyCapture = 8
seqGrabAlwaysUseTimeBase = 16
seqGrabHasBounds = 1
seqGrabHasVolume = 2
seqGrabHasDiscreteSamples = 4
seqGrabDoNotBufferizeData = 8
seqGrabCanMoveWindowWhileRecording = 16
grabPictOffScreen = 1
grabPictIgnoreClip = 2
grabPictCurrentImage = 4
sgFlagControlledGrab = (1 << 0)
sgFlagAllowNonRGBPixMaps = (1 << 1)
sgDeviceInputNameFlagInputUnavailable = (1 << 0)
sgDeviceNameFlagDeviceUnavailable = (1 << 0)
sgDeviceNameFlagShowInputsAsDevices = (1 << 1)
sgDeviceListWithIcons = (1 << 0)
sgDeviceListDontCheckAvailability = (1 << 1)
sgDeviceListIncludeInputs = (1 << 2)
seqGrabWriteAppend = 0
seqGrabWriteReserve = 1
seqGrabWriteFill = 2
seqGrabUnpause = 0
seqGrabPause = 1
seqGrabPauseForMenu = 3
channelFlagDontOpenResFile = 2
channelFlagHasDependency = 4
sgPanelFlagForPanel = 1
seqGrabSettingsPreviewOnly = 1
channelPlayNormal = 0
channelPlayFast = 1
channelPlayHighQuality = 2
channelPlayAllData = 4
sgSetSettingsBegin = (1 << 0)
sgSetSettingsEnd = (1 << 1)
kSGSmallestDITLSize = -1
kSGLargestDITLSize = -2
sgChannelAtom = FOUR_CHAR_CODE('chan')
sgChannelSettingsAtom = FOUR_CHAR_CODE('ctom')
sgChannelDescription = FOUR_CHAR_CODE('cdsc')
sgChannelSettings = FOUR_CHAR_CODE('cset')
sgDeviceNameType = FOUR_CHAR_CODE('name')
sgDeviceDisplayNameType = FOUR_CHAR_CODE('dnam')
sgDeviceUIDType = FOUR_CHAR_CODE('duid')
sgInputUIDType = FOUR_CHAR_CODE('iuid')
sgUsageType = FOUR_CHAR_CODE('use ')
sgPlayFlagsType = FOUR_CHAR_CODE('plyf')
sgClipType = FOUR_CHAR_CODE('clip')
sgMatrixType = FOUR_CHAR_CODE('mtrx')
sgVolumeType = FOUR_CHAR_CODE('volu')
sgPanelSettingsAtom = FOUR_CHAR_CODE('ptom')
sgPanelDescription = FOUR_CHAR_CODE('pdsc')
sgPanelSettings = FOUR_CHAR_CODE('pset')
sgcSoundCompressionType = FOUR_CHAR_CODE('scmp')
sgcSoundCodecSettingsType = FOUR_CHAR_CODE('cdec')
sgcSoundSampleRateType = FOUR_CHAR_CODE('srat')
sgcSoundChannelCountType = FOUR_CHAR_CODE('schn')
sgcSoundSampleSizeType = FOUR_CHAR_CODE('ssiz')
sgcSoundInputType = FOUR_CHAR_CODE('sinp')
sgcSoundGainType = FOUR_CHAR_CODE('gain')
sgcVideoHueType = FOUR_CHAR_CODE('hue ')
sgcVideoSaturationType = FOUR_CHAR_CODE('satr')
sgcVideoContrastType = FOUR_CHAR_CODE('trst')
sgcVideoSharpnessType = FOUR_CHAR_CODE('shrp')
sgcVideoBrigtnessType = FOUR_CHAR_CODE('brit')
sgcVideoBlackLevelType = FOUR_CHAR_CODE('blkl')
sgcVideoWhiteLevelType = FOUR_CHAR_CODE('whtl')
sgcVideoInputType = FOUR_CHAR_CODE('vinp')
sgcVideoFormatType = FOUR_CHAR_CODE('vstd')
sgcVideoFilterType = FOUR_CHAR_CODE('vflt')
sgcVideoRectType = FOUR_CHAR_CODE('vrct')
sgcVideoDigitizerType = FOUR_CHAR_CODE('vdig')
QTVideoOutputComponentType = FOUR_CHAR_CODE('vout')
QTVideoOutputComponentBaseSubType = FOUR_CHAR_CODE('base')
kQTVideoOutputDontDisplayToUser = 1L << 0
kQTVODisplayModeItem = FOUR_CHAR_CODE('qdmi')
kQTVODimensions = FOUR_CHAR_CODE('dimn')
kQTVOResolution = FOUR_CHAR_CODE('resl')
kQTVORefreshRate = FOUR_CHAR_CODE('refr')
kQTVOPixelType = FOUR_CHAR_CODE('pixl')
kQTVOName = FOUR_CHAR_CODE('name')
kQTVODecompressors = FOUR_CHAR_CODE('deco')
kQTVODecompressorType = FOUR_CHAR_CODE('dety')
kQTVODecompressorContinuous = FOUR_CHAR_CODE('cont')
kQTVODecompressorComponent = FOUR_CHAR_CODE('cmpt')
kClockGetTimeSelect = 0x0001
kClockNewCallBackSelect = 0x0002
kClockDisposeCallBackSelect = 0x0003
kClockCallMeWhenSelect = 0x0004
kClockCancelCallBackSelect = 0x0005
kClockRateChangedSelect = 0x0006
kClockTimeChangedSelect = 0x0007
kClockSetTimeBaseSelect = 0x0008
kClockStartStopChangedSelect = 0x0009
kClockGetRateSelect = 0x000A
kSCGetCompressionExtendedSelect = 0x0001
kSCPositionRectSelect = 0x0002
kSCPositionDialogSelect = 0x0003
kSCSetTestImagePictHandleSelect = 0x0004
kSCSetTestImagePictFileSelect = 0x0005
kSCSetTestImagePixMapSelect = 0x0006
kSCGetBestDeviceRectSelect = 0x0007
kSCRequestImageSettingsSelect = 0x000A
kSCCompressImageSelect = 0x000B
kSCCompressPictureSelect = 0x000C
kSCCompressPictureFileSelect = 0x000D
kSCRequestSequenceSettingsSelect = 0x000E
kSCCompressSequenceBeginSelect = 0x000F
kSCCompressSequenceFrameSelect = 0x0010
kSCCompressSequenceEndSelect = 0x0011
kSCDefaultPictHandleSettingsSelect = 0x0012
kSCDefaultPictFileSettingsSelect = 0x0013
kSCDefaultPixMapSettingsSelect = 0x0014
kSCGetInfoSelect = 0x0015
kSCSetInfoSelect = 0x0016
kSCNewGWorldSelect = 0x0017
kSCSetCompressFlagsSelect = 0x0018
kSCGetCompressFlagsSelect = 0x0019
kSCGetSettingsAsTextSelect = 0x001A
kSCGetSettingsAsAtomContainerSelect = 0x001B
kSCSetSettingsFromAtomContainerSelect = 0x001C
kSCCompressSequenceFrameAsyncSelect = 0x001D
kSCAsyncIdleSelect = 0x001E
kTweenerInitializeSelect = 0x0001
kTweenerDoTweenSelect = 0x0002
kTweenerResetSelect = 0x0003
kTCGetCurrentTimeCodeSelect = 0x0101
kTCGetTimeCodeAtTimeSelect = 0x0102
kTCTimeCodeToStringSelect = 0x0103
kTCTimeCodeToFrameNumberSelect = 0x0104
kTCFrameNumberToTimeCodeSelect = 0x0105
kTCGetSourceRefSelect = 0x0106
kTCSetSourceRefSelect = 0x0107
kTCSetTimeCodeFlagsSelect = 0x0108
kTCGetTimeCodeFlagsSelect = 0x0109
kTCSetDisplayOptionsSelect = 0x010A
kTCGetDisplayOptionsSelect = 0x010B
kMovieImportHandleSelect = 0x0001
kMovieImportFileSelect = 0x0002
kMovieImportSetSampleDurationSelect = 0x0003
kMovieImportSetSampleDescriptionSelect = 0x0004
kMovieImportSetMediaFileSelect = 0x0005
kMovieImportSetDimensionsSelect = 0x0006
kMovieImportSetChunkSizeSelect = 0x0007
kMovieImportSetProgressProcSelect = 0x0008
kMovieImportSetAuxiliaryDataSelect = 0x0009
kMovieImportSetFromScrapSelect = 0x000A
kMovieImportDoUserDialogSelect = 0x000B
kMovieImportSetDurationSelect = 0x000C
kMovieImportGetAuxiliaryDataTypeSelect = 0x000D
kMovieImportValidateSelect = 0x000E
kMovieImportGetFileTypeSelect = 0x000F
kMovieImportDataRefSelect = 0x0010
kMovieImportGetSampleDescriptionSelect = 0x0011
kMovieImportGetMIMETypeListSelect = 0x0012
kMovieImportSetOffsetAndLimitSelect = 0x0013
kMovieImportGetSettingsAsAtomContainerSelect = 0x0014
kMovieImportSetSettingsFromAtomContainerSelect = 0x0015
kMovieImportSetOffsetAndLimit64Select = 0x0016
kMovieImportIdleSelect = 0x0017
kMovieImportValidateDataRefSelect = 0x0018
kMovieImportGetLoadStateSelect = 0x0019
kMovieImportGetMaxLoadedTimeSelect = 0x001A
kMovieImportEstimateCompletionTimeSelect = 0x001B
kMovieImportSetDontBlockSelect = 0x001C
kMovieImportGetDontBlockSelect = 0x001D
kMovieImportSetIdleManagerSelect = 0x001E
kMovieImportSetNewMovieFlagsSelect = 0x001F
kMovieImportGetDestinationMediaTypeSelect = 0x0020
kMovieExportToHandleSelect = 0x0080
kMovieExportToFileSelect = 0x0081
kMovieExportGetAuxiliaryDataSelect = 0x0083
kMovieExportSetProgressProcSelect = 0x0084
kMovieExportSetSampleDescriptionSelect = 0x0085
kMovieExportDoUserDialogSelect = 0x0086
kMovieExportGetCreatorTypeSelect = 0x0087
kMovieExportToDataRefSelect = 0x0088
kMovieExportFromProceduresToDataRefSelect = 0x0089
kMovieExportAddDataSourceSelect = 0x008A
kMovieExportValidateSelect = 0x008B
kMovieExportGetSettingsAsAtomContainerSelect = 0x008C
kMovieExportSetSettingsFromAtomContainerSelect = 0x008D
kMovieExportGetFileNameExtensionSelect = 0x008E
kMovieExportGetShortFileTypeStringSelect = 0x008F
kMovieExportGetSourceMediaTypeSelect = 0x0090
kMovieExportSetGetMoviePropertyProcSelect = 0x0091
kTextExportGetDisplayDataSelect = 0x0100
kTextExportGetTimeFractionSelect = 0x0101
kTextExportSetTimeFractionSelect = 0x0102
kTextExportGetSettingsSelect = 0x0103
kTextExportSetSettingsSelect = 0x0104
kMIDIImportGetSettingsSelect = 0x0100
kMIDIImportSetSettingsSelect = 0x0101
kMovieExportNewGetDataAndPropertiesProcsSelect = 0x0100
kMovieExportDisposeGetDataAndPropertiesProcsSelect = 0x0101
kGraphicsImageImportSetSequenceEnabledSelect = 0x0100
kGraphicsImageImportGetSequenceEnabledSelect = 0x0101
kPreviewShowDataSelect = 0x0001
kPreviewMakePreviewSelect = 0x0002
kPreviewMakePreviewReferenceSelect = 0x0003
kPreviewEventSelect = 0x0004
kDataCodecDecompressSelect = 0x0001
kDataCodecGetCompressBufferSizeSelect = 0x0002
kDataCodecCompressSelect = 0x0003
kDataCodecBeginInterruptSafeSelect = 0x0004
kDataCodecEndInterruptSafeSelect = 0x0005
kDataCodecDecompressPartialSelect = 0x0006
kDataCodecCompressPartialSelect = 0x0007
kDataHGetDataSelect = 0x0002
kDataHPutDataSelect = 0x0003
kDataHFlushDataSelect = 0x0004
kDataHOpenForWriteSelect = 0x0005
kDataHCloseForWriteSelect = 0x0006
kDataHOpenForReadSelect = 0x0008
kDataHCloseForReadSelect = 0x0009
kDataHSetDataRefSelect = 0x000A
kDataHGetDataRefSelect = 0x000B
kDataHCompareDataRefSelect = 0x000C
kDataHTaskSelect = 0x000D
kDataHScheduleDataSelect = 0x000E
kDataHFinishDataSelect = 0x000F
kDataHFlushCacheSelect = 0x0010
kDataHResolveDataRefSelect = 0x0011
kDataHGetFileSizeSelect = 0x0012
kDataHCanUseDataRefSelect = 0x0013
kDataHGetVolumeListSelect = 0x0014
kDataHWriteSelect = 0x0015
kDataHPreextendSelect = 0x0016
kDataHSetFileSizeSelect = 0x0017
kDataHGetFreeSpaceSelect = 0x0018
kDataHCreateFileSelect = 0x0019
kDataHGetPreferredBlockSizeSelect = 0x001A
kDataHGetDeviceIndexSelect = 0x001B
kDataHIsStreamingDataHandlerSelect = 0x001C
kDataHGetDataInBufferSelect = 0x001D
kDataHGetScheduleAheadTimeSelect = 0x001E
kDataHSetCacheSizeLimitSelect = 0x001F
kDataHGetCacheSizeLimitSelect = 0x0020
kDataHGetMovieSelect = 0x0021
kDataHAddMovieSelect = 0x0022
kDataHUpdateMovieSelect = 0x0023
kDataHDoesBufferSelect = 0x0024
kDataHGetFileNameSelect = 0x0025
kDataHGetAvailableFileSizeSelect = 0x0026
kDataHGetMacOSFileTypeSelect = 0x0027
kDataHGetMIMETypeSelect = 0x0028
kDataHSetDataRefWithAnchorSelect = 0x0029
kDataHGetDataRefWithAnchorSelect = 0x002A
kDataHSetMacOSFileTypeSelect = 0x002B
kDataHSetTimeBaseSelect = 0x002C
kDataHGetInfoFlagsSelect = 0x002D
kDataHScheduleData64Select = 0x002E
kDataHWrite64Select = 0x002F
kDataHGetFileSize64Select = 0x0030
kDataHPreextend64Select = 0x0031
kDataHSetFileSize64Select = 0x0032
kDataHGetFreeSpace64Select = 0x0033
kDataHAppend64Select = 0x0034
kDataHReadAsyncSelect = 0x0035
kDataHPollReadSelect = 0x0036
kDataHGetDataAvailabilitySelect = 0x0037
kDataHGetFileSizeAsyncSelect = 0x003A
kDataHGetDataRefAsTypeSelect = 0x003B
kDataHSetDataRefExtensionSelect = 0x003C
kDataHGetDataRefExtensionSelect = 0x003D
kDataHGetMovieWithFlagsSelect = 0x003E
kDataHGetFileTypeOrderingSelect = 0x0040
kDataHCreateFileWithFlagsSelect = 0x0041
kDataHGetMIMETypeAsyncSelect = 0x0042
kDataHGetInfoSelect = 0x0043
kDataHSetIdleManagerSelect = 0x0044
kDataHDeleteFileSelect = 0x0045
kDataHSetMovieUsageFlagsSelect = 0x0046
kDataHUseTemporaryDataRefSelect = 0x0047
kDataHGetTemporaryDataRefCapabilitiesSelect = 0x0048
kDataHRenameFileSelect = 0x0049
kDataHPlaybackHintsSelect = 0x0103
kDataHPlaybackHints64Select = 0x010E
kDataHGetDataRateSelect = 0x0110
kDataHSetTimeHintsSelect = 0x0111
kVDGetMaxSrcRectSelect = 0x0001
kVDGetActiveSrcRectSelect = 0x0002
kVDSetDigitizerRectSelect = 0x0003
kVDGetDigitizerRectSelect = 0x0004
kVDGetVBlankRectSelect = 0x0005
kVDGetMaskPixMapSelect = 0x0006
kVDGetPlayThruDestinationSelect = 0x0008
kVDUseThisCLUTSelect = 0x0009
kVDSetInputGammaValueSelect = 0x000A
kVDGetInputGammaValueSelect = 0x000B
kVDSetBrightnessSelect = 0x000C
kVDGetBrightnessSelect = 0x000D
kVDSetContrastSelect = 0x000E
kVDSetHueSelect = 0x000F
kVDSetSharpnessSelect = 0x0010
kVDSetSaturationSelect = 0x0011
kVDGetContrastSelect = 0x0012
kVDGetHueSelect = 0x0013
kVDGetSharpnessSelect = 0x0014
kVDGetSaturationSelect = 0x0015
kVDGrabOneFrameSelect = 0x0016
kVDGetMaxAuxBufferSelect = 0x0017
kVDGetDigitizerInfoSelect = 0x0019
kVDGetCurrentFlagsSelect = 0x001A
kVDSetKeyColorSelect = 0x001B
kVDGetKeyColorSelect = 0x001C
kVDAddKeyColorSelect = 0x001D
kVDGetNextKeyColorSelect = 0x001E
kVDSetKeyColorRangeSelect = 0x001F
kVDGetKeyColorRangeSelect = 0x0020
kVDSetDigitizerUserInterruptSelect = 0x0021
kVDSetInputColorSpaceModeSelect = 0x0022
kVDGetInputColorSpaceModeSelect = 0x0023
kVDSetClipStateSelect = 0x0024
kVDGetClipStateSelect = 0x0025
kVDSetClipRgnSelect = 0x0026
kVDClearClipRgnSelect = 0x0027
kVDGetCLUTInUseSelect = 0x0028
kVDSetPLLFilterTypeSelect = 0x0029
kVDGetPLLFilterTypeSelect = 0x002A
kVDGetMaskandValueSelect = 0x002B
kVDSetMasterBlendLevelSelect = 0x002C
kVDSetPlayThruDestinationSelect = 0x002D
kVDSetPlayThruOnOffSelect = 0x002E
kVDSetFieldPreferenceSelect = 0x002F
kVDGetFieldPreferenceSelect = 0x0030
kVDPreflightDestinationSelect = 0x0032
kVDPreflightGlobalRectSelect = 0x0033
kVDSetPlayThruGlobalRectSelect = 0x0034
kVDSetInputGammaRecordSelect = 0x0035
kVDGetInputGammaRecordSelect = 0x0036
kVDSetBlackLevelValueSelect = 0x0037
kVDGetBlackLevelValueSelect = 0x0038
kVDSetWhiteLevelValueSelect = 0x0039
kVDGetWhiteLevelValueSelect = 0x003A
kVDGetVideoDefaultsSelect = 0x003B
kVDGetNumberOfInputsSelect = 0x003C
kVDGetInputFormatSelect = 0x003D
kVDSetInputSelect = 0x003E
kVDGetInputSelect = 0x003F
kVDSetInputStandardSelect = 0x0040
kVDSetupBuffersSelect = 0x0041
kVDGrabOneFrameAsyncSelect = 0x0042
kVDDoneSelect = 0x0043
kVDSetCompressionSelect = 0x0044
kVDCompressOneFrameAsyncSelect = 0x0045
kVDCompressDoneSelect = 0x0046
kVDReleaseCompressBufferSelect = 0x0047
kVDGetImageDescriptionSelect = 0x0048
kVDResetCompressSequenceSelect = 0x0049
kVDSetCompressionOnOffSelect = 0x004A
kVDGetCompressionTypesSelect = 0x004B
kVDSetTimeBaseSelect = 0x004C
kVDSetFrameRateSelect = 0x004D
kVDGetDataRateSelect = 0x004E
kVDGetSoundInputDriverSelect = 0x004F
kVDGetDMADepthsSelect = 0x0050
kVDGetPreferredTimeScaleSelect = 0x0051
kVDReleaseAsyncBuffersSelect = 0x0052
kVDSetDataRateSelect = 0x0054
kVDGetTimeCodeSelect = 0x0055
kVDUseSafeBuffersSelect = 0x0056
kVDGetSoundInputSourceSelect = 0x0057
kVDGetCompressionTimeSelect = 0x0058
kVDSetPreferredPacketSizeSelect = 0x0059
kVDSetPreferredImageDimensionsSelect = 0x005A
kVDGetPreferredImageDimensionsSelect = 0x005B
kVDGetInputNameSelect = 0x005C
kVDSetDestinationPortSelect = 0x005D
kVDGetDeviceNameAndFlagsSelect = 0x005E
kVDCaptureStateChangingSelect = 0x005F
kVDGetUniqueIDsSelect = 0x0060
kVDSelectUniqueIDsSelect = 0x0061
kXMLParseDataRefSelect = 0x0001
kXMLParseFileSelect = 0x0002
kXMLParseDisposeXMLDocSelect = 0x0003
kXMLParseGetDetailedParseErrorSelect = 0x0004
kXMLParseAddElementSelect = 0x0005
kXMLParseAddAttributeSelect = 0x0006
kXMLParseAddMultipleAttributesSelect = 0x0007
kXMLParseAddAttributeAndValueSelect = 0x0008
kXMLParseAddMultipleAttributesAndValuesSelect = 0x0009
kXMLParseAddAttributeValueKindSelect = 0x000A
kXMLParseAddNameSpaceSelect = 0x000B
kXMLParseSetOffsetAndLimitSelect = 0x000C
kXMLParseSetEventParseRefConSelect = 0x000D
kXMLParseSetStartDocumentHandlerSelect = 0x000E
kXMLParseSetEndDocumentHandlerSelect = 0x000F
kXMLParseSetStartElementHandlerSelect = 0x0010
kXMLParseSetEndElementHandlerSelect = 0x0011
kXMLParseSetCharDataHandlerSelect = 0x0012
kXMLParseSetPreprocessInstructionHandlerSelect = 0x0013
kXMLParseSetCommentHandlerSelect = 0x0014
kXMLParseSetCDataHandlerSelect = 0x0015
kSGInitializeSelect = 0x0001
kSGSetDataOutputSelect = 0x0002
kSGGetDataOutputSelect = 0x0003
kSGSetGWorldSelect = 0x0004
kSGGetGWorldSelect = 0x0005
kSGNewChannelSelect = 0x0006
kSGDisposeChannelSelect = 0x0007
kSGStartPreviewSelect = 0x0010
kSGStartRecordSelect = 0x0011
kSGIdleSelect = 0x0012
kSGStopSelect = 0x0013
kSGPauseSelect = 0x0014
kSGPrepareSelect = 0x0015
kSGReleaseSelect = 0x0016
kSGGetMovieSelect = 0x0017
kSGSetMaximumRecordTimeSelect = 0x0018
kSGGetMaximumRecordTimeSelect = 0x0019
kSGGetStorageSpaceRemainingSelect = 0x001A
kSGGetTimeRemainingSelect = 0x001B
kSGGrabPictSelect = 0x001C
kSGGetLastMovieResIDSelect = 0x001D
kSGSetFlagsSelect = 0x001E
kSGGetFlagsSelect = 0x001F
kSGSetDataProcSelect = 0x0020
kSGNewChannelFromComponentSelect = 0x0021
kSGDisposeDeviceListSelect = 0x0022
kSGAppendDeviceListToMenuSelect = 0x0023
kSGSetSettingsSelect = 0x0024
kSGGetSettingsSelect = 0x0025
kSGGetIndChannelSelect = 0x0026
kSGUpdateSelect = 0x0027
kSGGetPauseSelect = 0x0028
kSGSettingsDialogSelect = 0x0029
kSGGetAlignmentProcSelect = 0x002A
kSGSetChannelSettingsSelect = 0x002B
kSGGetChannelSettingsSelect = 0x002C
kSGGetModeSelect = 0x002D
kSGSetDataRefSelect = 0x002E
kSGGetDataRefSelect = 0x002F
kSGNewOutputSelect = 0x0030
kSGDisposeOutputSelect = 0x0031
kSGSetOutputFlagsSelect = 0x0032
kSGSetChannelOutputSelect = 0x0033
kSGGetDataOutputStorageSpaceRemainingSelect = 0x0034
kSGHandleUpdateEventSelect = 0x0035
kSGSetOutputNextOutputSelect = 0x0036
kSGGetOutputNextOutputSelect = 0x0037
kSGSetOutputMaximumOffsetSelect = 0x0038
kSGGetOutputMaximumOffsetSelect = 0x0039
kSGGetOutputDataReferenceSelect = 0x003A
kSGWriteExtendedMovieDataSelect = 0x003B
kSGGetStorageSpaceRemaining64Select = 0x003C
kSGGetDataOutputStorageSpaceRemaining64Select = 0x003D
kSGWriteMovieDataSelect = 0x0100
kSGAddFrameReferenceSelect = 0x0101
kSGGetNextFrameReferenceSelect = 0x0102
kSGGetTimeBaseSelect = 0x0103
kSGSortDeviceListSelect = 0x0104
kSGAddMovieDataSelect = 0x0105
kSGChangedSourceSelect = 0x0106
kSGAddExtendedFrameReferenceSelect = 0x0107
kSGGetNextExtendedFrameReferenceSelect = 0x0108
kSGAddExtendedMovieDataSelect = 0x0109
kSGAddOutputDataRefToMediaSelect = 0x010A
kSGSetSettingsSummarySelect = 0x010B
kSGSetChannelUsageSelect = 0x0080
kSGGetChannelUsageSelect = 0x0081
kSGSetChannelBoundsSelect = 0x0082
kSGGetChannelBoundsSelect = 0x0083
kSGSetChannelVolumeSelect = 0x0084
kSGGetChannelVolumeSelect = 0x0085
kSGGetChannelInfoSelect = 0x0086
kSGSetChannelPlayFlagsSelect = 0x0087
kSGGetChannelPlayFlagsSelect = 0x0088
kSGSetChannelMaxFramesSelect = 0x0089
kSGGetChannelMaxFramesSelect = 0x008A
kSGSetChannelRefConSelect = 0x008B
kSGSetChannelClipSelect = 0x008C
kSGGetChannelClipSelect = 0x008D
kSGGetChannelSampleDescriptionSelect = 0x008E
kSGGetChannelDeviceListSelect = 0x008F
kSGSetChannelDeviceSelect = 0x0090
kSGSetChannelMatrixSelect = 0x0091
kSGGetChannelMatrixSelect = 0x0092
kSGGetChannelTimeScaleSelect = 0x0093
kSGChannelPutPictureSelect = 0x0094
kSGChannelSetRequestedDataRateSelect = 0x0095
kSGChannelGetRequestedDataRateSelect = 0x0096
kSGChannelSetDataSourceNameSelect = 0x0097
kSGChannelGetDataSourceNameSelect = 0x0098
kSGChannelSetCodecSettingsSelect = 0x0099
kSGChannelGetCodecSettingsSelect = 0x009A
kSGGetChannelTimeBaseSelect = 0x009B
kSGGetChannelRefConSelect = 0x009C
kSGGetChannelDeviceAndInputNamesSelect = 0x009D
kSGSetChannelDeviceInputSelect = 0x009E
kSGSetChannelSettingsStateChangingSelect = 0x009F
kSGInitChannelSelect = 0x0180
kSGWriteSamplesSelect = 0x0181
kSGGetDataRateSelect = 0x0182
kSGAlignChannelRectSelect = 0x0183
kSGPanelGetDitlSelect = 0x0200
kSGPanelGetTitleSelect = 0x0201
kSGPanelCanRunSelect = 0x0202
kSGPanelInstallSelect = 0x0203
kSGPanelEventSelect = 0x0204
kSGPanelItemSelect = 0x0205
kSGPanelRemoveSelect = 0x0206
kSGPanelSetGrabberSelect = 0x0207
kSGPanelSetResFileSelect = 0x0208
kSGPanelGetSettingsSelect = 0x0209
kSGPanelSetSettingsSelect = 0x020A
kSGPanelValidateInputSelect = 0x020B
kSGPanelSetEventFilterSelect = 0x020C
kSGPanelGetDITLForSizeSelect = 0x020D
kSGGetSrcVideoBoundsSelect = 0x0100
kSGSetVideoRectSelect = 0x0101
kSGGetVideoRectSelect = 0x0102
kSGGetVideoCompressorTypeSelect = 0x0103
kSGSetVideoCompressorTypeSelect = 0x0104
kSGSetVideoCompressorSelect = 0x0105
kSGGetVideoCompressorSelect = 0x0106
kSGGetVideoDigitizerComponentSelect = 0x0107
kSGSetVideoDigitizerComponentSelect = 0x0108
kSGVideoDigitizerChangedSelect = 0x0109
kSGSetVideoBottlenecksSelect = 0x010A
kSGGetVideoBottlenecksSelect = 0x010B
kSGGrabFrameSelect = 0x010C
kSGGrabFrameCompleteSelect = 0x010D
kSGDisplayFrameSelect = 0x010E
kSGCompressFrameSelect = 0x010F
kSGCompressFrameCompleteSelect = 0x0110
kSGAddFrameSelect = 0x0111
kSGTransferFrameForCompressSelect = 0x0112
kSGSetCompressBufferSelect = 0x0113
kSGGetCompressBufferSelect = 0x0114
kSGGetBufferInfoSelect = 0x0115
kSGSetUseScreenBufferSelect = 0x0116
kSGGetUseScreenBufferSelect = 0x0117
kSGGrabCompressCompleteSelect = 0x0118
kSGDisplayCompressSelect = 0x0119
kSGSetFrameRateSelect = 0x011A
kSGGetFrameRateSelect = 0x011B
kSGSetPreferredPacketSizeSelect = 0x0121
kSGGetPreferredPacketSizeSelect = 0x0122
kSGSetUserVideoCompressorListSelect = 0x0123
kSGGetUserVideoCompressorListSelect = 0x0124
kSGSetSoundInputDriverSelect = 0x0100
kSGGetSoundInputDriverSelect = 0x0101
kSGSoundInputDriverChangedSelect = 0x0102
kSGSetSoundRecordChunkSizeSelect = 0x0103
kSGGetSoundRecordChunkSizeSelect = 0x0104
kSGSetSoundInputRateSelect = 0x0105
kSGGetSoundInputRateSelect = 0x0106
kSGSetSoundInputParametersSelect = 0x0107
kSGGetSoundInputParametersSelect = 0x0108
kSGSetAdditionalSoundRatesSelect = 0x0109
kSGGetAdditionalSoundRatesSelect = 0x010A
kSGSetFontNameSelect = 0x0100
kSGSetFontSizeSelect = 0x0101
kSGSetTextForeColorSelect = 0x0102
kSGSetTextBackColorSelect = 0x0103
kSGSetJustificationSelect = 0x0104
kSGGetTextReturnToSpaceValueSelect = 0x0105
kSGSetTextReturnToSpaceValueSelect = 0x0106
kSGGetInstrumentSelect = 0x0100
kSGSetInstrumentSelect = 0x0101
kQTVideoOutputGetDisplayModeListSelect = 0x0001
kQTVideoOutputGetCurrentClientNameSelect = 0x0002
kQTVideoOutputSetClientNameSelect = 0x0003
kQTVideoOutputGetClientNameSelect = 0x0004
kQTVideoOutputBeginSelect = 0x0005
kQTVideoOutputEndSelect = 0x0006
kQTVideoOutputSetDisplayModeSelect = 0x0007
kQTVideoOutputGetDisplayModeSelect = 0x0008
kQTVideoOutputCustomConfigureDisplaySelect = 0x0009
kQTVideoOutputSaveStateSelect = 0x000A
kQTVideoOutputRestoreStateSelect = 0x000B
kQTVideoOutputGetGWorldSelect = 0x000C
kQTVideoOutputGetGWorldParametersSelect = 0x000D
kQTVideoOutputGetIndSoundOutputSelect = 0x000E
kQTVideoOutputGetClockSelect = 0x000F
kQTVideoOutputSetEchoPortSelect = 0x0010
kQTVideoOutputGetIndImageDecompressorSelect = 0x0011
kQTVideoOutputBaseSetEchoPortSelect = 0x0012
handlerHasSpatial = 1 << 0
handlerCanClip = 1 << 1
handlerCanMatte = 1 << 2
handlerCanTransferMode = 1 << 3
handlerNeedsBuffer = 1 << 4
handlerNoIdle = 1 << 5
handlerNoScheduler = 1 << 6
handlerWantsTime = 1 << 7
handlerCGrafPortOnly = 1 << 8
handlerCanSend = 1 << 9
handlerCanHandleComplexMatrix = 1 << 10
handlerWantsDestinationPixels = 1 << 11
handlerCanSendImageData = 1 << 12
handlerCanPicSave = 1 << 13
mMustDraw = 1 << 3
mAtEnd = 1 << 4
mPreflightDraw = 1 << 5
mSyncDrawing = 1 << 6
mPrecompositeOnly = 1 << 9
mSoundOnly = 1 << 10
mDoIdleActionsBeforeDraws = 1 << 11
mDisableIdleActions = 1 << 12
mDidDraw = 1 << 0
mNeedsToDraw = 1 << 2
mDrawAgain = 1 << 3
mPartialDraw = 1 << 4
mWantIdleActions = 1 << 5
forceUpdateRedraw = 1 << 0
forceUpdateNewBuffer = 1 << 1
mHitTestBounds = 1L << 0
mHitTestImage = 1L << 1
mHitTestInvisible = 1L << 2
mHitTestIsClick = 1L << 3
mOpaque = 1L << 0
mInvisible = 1L << 1
kMediaQTIdleFrequencySelector = FOUR_CHAR_CODE('idfq')
kMediaVideoParamBrightness = 1
kMediaVideoParamContrast = 2
kMediaVideoParamHue = 3
kMediaVideoParamSharpness = 4
kMediaVideoParamSaturation = 5
kMediaVideoParamBlackLevel = 6
kMediaVideoParamWhiteLevel = 7
kMHInfoEncodedFrameRate = FOUR_CHAR_CODE('orat')
kEmptyPurgableChunksOverAllowance = 1
kCallComponentExecuteWiredActionSelect = -9
kMediaSetChunkManagementFlagsSelect = 0x0415
kMediaGetChunkManagementFlagsSelect = 0x0416
kMediaSetPurgeableChunkMemoryAllowanceSelect = 0x0417
kMediaGetPurgeableChunkMemoryAllowanceSelect = 0x0418
kMediaEmptyAllPurgeableChunksSelect = 0x0419
kMediaInitializeSelect = 0x0501
kMediaSetHandlerCapabilitiesSelect = 0x0502
kMediaIdleSelect = 0x0503
kMediaGetMediaInfoSelect = 0x0504
kMediaPutMediaInfoSelect = 0x0505
kMediaSetActiveSelect = 0x0506
kMediaSetRateSelect = 0x0507
kMediaGGetStatusSelect = 0x0508
kMediaTrackEditedSelect = 0x0509
kMediaSetMediaTimeScaleSelect = 0x050A
kMediaSetMovieTimeScaleSelect = 0x050B
kMediaSetGWorldSelect = 0x050C
kMediaSetDimensionsSelect = 0x050D
kMediaSetClipSelect = 0x050E
kMediaSetMatrixSelect = 0x050F
kMediaGetTrackOpaqueSelect = 0x0510
kMediaSetGraphicsModeSelect = 0x0511
kMediaGetGraphicsModeSelect = 0x0512
kMediaGSetVolumeSelect = 0x0513
kMediaSetSoundBalanceSelect = 0x0514
kMediaGetSoundBalanceSelect = 0x0515
kMediaGetNextBoundsChangeSelect = 0x0516
kMediaGetSrcRgnSelect = 0x0517
kMediaPrerollSelect = 0x0518
kMediaSampleDescriptionChangedSelect = 0x0519
kMediaHasCharacteristicSelect = 0x051A
kMediaGetOffscreenBufferSizeSelect = 0x051B
kMediaSetHintsSelect = 0x051C
kMediaGetNameSelect = 0x051D
kMediaForceUpdateSelect = 0x051E
kMediaGetDrawingRgnSelect = 0x051F
kMediaGSetActiveSegmentSelect = 0x0520
kMediaInvalidateRegionSelect = 0x0521
kMediaGetNextStepTimeSelect = 0x0522
kMediaSetNonPrimarySourceDataSelect = 0x0523
kMediaChangedNonPrimarySourceSelect = 0x0524
kMediaTrackReferencesChangedSelect = 0x0525
kMediaGetSampleDataPointerSelect = 0x0526
kMediaReleaseSampleDataPointerSelect = 0x0527
kMediaTrackPropertyAtomChangedSelect = 0x0528
kMediaSetTrackInputMapReferenceSelect = 0x0529
kMediaSetVideoParamSelect = 0x052B
kMediaGetVideoParamSelect = 0x052C
kMediaCompareSelect = 0x052D
kMediaGetClockSelect = 0x052E
kMediaSetSoundOutputComponentSelect = 0x052F
kMediaGetSoundOutputComponentSelect = 0x0530
kMediaSetSoundLocalizationDataSelect = 0x0531
kMediaGetInvalidRegionSelect = 0x053C
kMediaSampleDescriptionB2NSelect = 0x053E
kMediaSampleDescriptionN2BSelect = 0x053F
kMediaQueueNonPrimarySourceDataSelect = 0x0540
kMediaFlushNonPrimarySourceDataSelect = 0x0541
kMediaGetURLLinkSelect = 0x0543
kMediaMakeMediaTimeTableSelect = 0x0545
kMediaHitTestForTargetRefConSelect = 0x0546
kMediaHitTestTargetRefConSelect = 0x0547
kMediaGetActionsForQTEventSelect = 0x0548
kMediaDisposeTargetRefConSelect = 0x0549
kMediaTargetRefConsEqualSelect = 0x054A
kMediaSetActionsCallbackSelect = 0x054B
kMediaPrePrerollBeginSelect = 0x054C
kMediaPrePrerollCancelSelect = 0x054D
kMediaEnterEmptyEditSelect = 0x054F
kMediaCurrentMediaQueuedDataSelect = 0x0550
kMediaGetEffectiveVolumeSelect = 0x0551
kMediaResolveTargetRefConSelect = 0x0552
kMediaGetSoundLevelMeteringEnabledSelect = 0x0553
kMediaSetSoundLevelMeteringEnabledSelect = 0x0554
kMediaGetSoundLevelMeterInfoSelect = 0x0555
kMediaGetEffectiveSoundBalanceSelect = 0x0556
kMediaSetScreenLockSelect = 0x0557
kMediaSetDoMCActionCallbackSelect = 0x0558
kMediaGetErrorStringSelect = 0x0559
kMediaGetSoundEqualizerBandsSelect = 0x055A
kMediaSetSoundEqualizerBandsSelect = 0x055B
kMediaGetSoundEqualizerBandLevelsSelect = 0x055C
kMediaDoIdleActionsSelect = 0x055D
kMediaSetSoundBassAndTrebleSelect = 0x055E
kMediaGetSoundBassAndTrebleSelect = 0x055F
kMediaTimeBaseChangedSelect = 0x0560
kMediaMCIsPlayerEventSelect = 0x0561
kMediaGetMediaLoadStateSelect = 0x0562
kMediaVideoOutputChangedSelect = 0x0563
kMediaEmptySampleCacheSelect = 0x0564
kMediaGetPublicInfoSelect = 0x0565
kMediaSetPublicInfoSelect = 0x0566
kMediaGetUserPreferredCodecsSelect = 0x0567
kMediaSetUserPreferredCodecsSelect = 0x0568
kMediaRefConSetPropertySelect = 0x0569
kMediaRefConGetPropertySelect = 0x056A
kMediaNavigateTargetRefConSelect = 0x056B
kMediaGGetIdleManagerSelect = 0x056C
kMediaGSetIdleManagerSelect = 0x056D
kaiToneDescType = FOUR_CHAR_CODE('tone')
kaiNoteRequestInfoType = FOUR_CHAR_CODE('ntrq')
kaiKnobListType = FOUR_CHAR_CODE('knbl')
kaiKeyRangeInfoType = FOUR_CHAR_CODE('sinf')
kaiSampleDescType = FOUR_CHAR_CODE('sdsc')
kaiSampleInfoType = FOUR_CHAR_CODE('smin')
kaiSampleDataType = FOUR_CHAR_CODE('sdat')
kaiSampleDataQUIDType = FOUR_CHAR_CODE('quid')
kaiInstInfoType = FOUR_CHAR_CODE('iinf')
kaiPictType = FOUR_CHAR_CODE('pict')
kaiWriterType = FOUR_CHAR_CODE('\xa9wrt')
kaiCopyrightType = FOUR_CHAR_CODE('\xa9cpy')
kaiOtherStrType = FOUR_CHAR_CODE('str ')
kaiInstrumentRefType = FOUR_CHAR_CODE('iref')
kaiInstGMQualityType = FOUR_CHAR_CODE('qual')
kaiLibraryInfoType = FOUR_CHAR_CODE('linf')
kaiLibraryDescType = FOUR_CHAR_CODE('ldsc')
kInstKnobMissingUnknown = 0
kInstKnobMissingDefault = (1 << 0)
kMusicLoopTypeNormal = 0
kMusicLoopTypePalindrome = 1
instSamplePreProcessFlag = 1 << 0
kQTMIDIComponentType = FOUR_CHAR_CODE('midi')
kOMSComponentSubType = FOUR_CHAR_CODE('OMS ')
kFMSComponentSubType = FOUR_CHAR_CODE('FMS ')
kMIDIManagerComponentSubType = FOUR_CHAR_CODE('mmgr')
kOSXMIDIComponentSubType = FOUR_CHAR_CODE('osxm')
kMusicPacketPortLost = 1
kMusicPacketPortFound = 2
kMusicPacketTimeGap = 3
kAppleSysexID = 0x11
kAppleSysexCmdSampleSize = 0x0001
kAppleSysexCmdSampleBreak = 0x0002
kAppleSysexCmdAtomicInstrument = 0x0010
kAppleSysexCmdDeveloper = 0x7F00
kSynthesizerConnectionFMS = 1
kSynthesizerConnectionMMgr = 2
kSynthesizerConnectionOMS = 4
kSynthesizerConnectionQT = 8
kSynthesizerConnectionOSXMIDI = 16
kSynthesizerConnectionUnavailable = 256
kMusicComponentType = FOUR_CHAR_CODE('musi')
kInstrumentComponentType = FOUR_CHAR_CODE('inst')
kSoftSynthComponentSubType = FOUR_CHAR_CODE('ss ')
kGMSynthComponentSubType = FOUR_CHAR_CODE('gm ')
kSynthesizerDynamicVoice = 1 << 0
kSynthesizerUsesMIDIPort = 1 << 1
kSynthesizerMicrotone = 1 << 2
kSynthesizerHasSamples = 1 << 3
kSynthesizerMixedDrums = 1 << 4
kSynthesizerSoftware = 1 << 5
kSynthesizerHardware = 1 << 6
kSynthesizerDynamicChannel = 1 << 7
kSynthesizerHogsSystemChannel = 1 << 8
kSynthesizerHasSystemChannel = 1 << 9
kSynthesizerSlowSetPart = 1 << 10
kSynthesizerOffline = 1 << 12
kSynthesizerGM = 1 << 14
kSynthesizerDLS = 1 << 15
kSynthesizerSoundLocalization = 1 << 16
kControllerModulationWheel = 1
kControllerBreath = 2
kControllerFoot = 4
kControllerPortamentoTime = 5
kControllerVolume = 7
kControllerBalance = 8
kControllerPan = 10
kControllerExpression = 11
kControllerLever1 = 16
kControllerLever2 = 17
kControllerLever3 = 18
kControllerLever4 = 19
kControllerLever5 = 80
kControllerLever6 = 81
kControllerLever7 = 82
kControllerLever8 = 83
kControllerPitchBend = 32
kControllerAfterTouch = 33
kControllerPartTranspose = 40
kControllerTuneTranspose = 41
kControllerPartVolume = 42
kControllerTuneVolume = 43
kControllerSustain = 64
kControllerPortamento = 65
kControllerSostenuto = 66
kControllerSoftPedal = 67
kControllerReverb = 91
kControllerTremolo = 92
kControllerChorus = 93
kControllerCeleste = 94
kControllerPhaser = 95
kControllerEditPart = 113
kControllerMasterTune = 114
kControllerMasterTranspose = 114  # shares controller number 114 with kControllerMasterTune
kControllerMasterVolume = 115
kControllerMasterCPULoad = 116
kControllerMasterPolyphony = 117
kControllerMasterFeatures = 118
kQTMSKnobStartID = 0x02000000
kQTMSKnobVolumeAttackTimeID = 0x02000001
kQTMSKnobVolumeDecayTimeID = 0x02000002
kQTMSKnobVolumeSustainLevelID = 0x02000003
kQTMSKnobVolumeRelease1RateID = 0x02000004
kQTMSKnobVolumeDecayKeyScalingID = 0x02000005
kQTMSKnobVolumeReleaseTimeID = 0x02000006
kQTMSKnobVolumeLFODelayID = 0x02000007
kQTMSKnobVolumeLFORampTimeID = 0x02000008
kQTMSKnobVolumeLFOPeriodID = 0x02000009
kQTMSKnobVolumeLFOShapeID = 0x0200000A
kQTMSKnobVolumeLFODepthID = 0x0200000B
kQTMSKnobVolumeOverallID = 0x0200000C
kQTMSKnobVolumeVelocity127ID = 0x0200000D
kQTMSKnobVolumeVelocity96ID = 0x0200000E
kQTMSKnobVolumeVelocity64ID = 0x0200000F
kQTMSKnobVolumeVelocity32ID = 0x02000010
kQTMSKnobVolumeVelocity16ID = 0x02000011
kQTMSKnobPitchTransposeID = 0x02000012
kQTMSKnobPitchLFODelayID = 0x02000013
kQTMSKnobPitchLFORampTimeID = 0x02000014
kQTMSKnobPitchLFOPeriodID = 0x02000015
kQTMSKnobPitchLFOShapeID = 0x02000016
kQTMSKnobPitchLFODepthID = 0x02000017
kQTMSKnobPitchLFOQuantizeID = 0x02000018
kQTMSKnobStereoDefaultPanID = 0x02000019
kQTMSKnobStereoPositionKeyScalingID = 0x0200001A
kQTMSKnobPitchLFOOffsetID = 0x0200001B
kQTMSKnobExclusionGroupID = 0x0200001C
kQTMSKnobSustainTimeID = 0x0200001D
kQTMSKnobSustainInfiniteID = 0x0200001E
kQTMSKnobVolumeLFOStereoID = 0x0200001F
kQTMSKnobVelocityLowID = 0x02000020
kQTMSKnobVelocityHighID = 0x02000021
kQTMSKnobVelocitySensitivityID = 0x02000022
kQTMSKnobPitchSensitivityID = 0x02000023
kQTMSKnobVolumeLFODepthFromWheelID = 0x02000024
kQTMSKnobPitchLFODepthFromWheelID = 0x02000025
kQTMSKnobVolumeExpOptionsID = 0x02000026
kQTMSKnobEnv1AttackTimeID = 0x02000027
kQTMSKnobEnv1DecayTimeID = 0x02000028
kQTMSKnobEnv1SustainLevelID = 0x02000029
kQTMSKnobEnv1SustainTimeID = 0x0200002A
kQTMSKnobEnv1SustainInfiniteID = 0x0200002B
kQTMSKnobEnv1ReleaseTimeID = 0x0200002C
kQTMSKnobEnv1ExpOptionsID = 0x0200002D
kQTMSKnobEnv2AttackTimeID = 0x0200002E
kQTMSKnobEnv2DecayTimeID = 0x0200002F
kQTMSKnobEnv2SustainLevelID = 0x02000030
kQTMSKnobEnv2SustainTimeID = 0x02000031
kQTMSKnobEnv2SustainInfiniteID = 0x02000032
kQTMSKnobEnv2ReleaseTimeID = 0x02000033
kQTMSKnobEnv2ExpOptionsID = 0x02000034
kQTMSKnobPitchEnvelopeID = 0x02000035
kQTMSKnobPitchEnvelopeDepthID = 0x02000036
kQTMSKnobFilterKeyFollowID = 0x02000037
kQTMSKnobFilterTransposeID = 0x02000038
kQTMSKnobFilterQID = 0x02000039
kQTMSKnobFilterFrequencyEnvelopeID = 0x0200003A
kQTMSKnobFilterFrequencyEnvelopeDepthID = 0x0200003B
kQTMSKnobFilterQEnvelopeID = 0x0200003C
kQTMSKnobFilterQEnvelopeDepthID = 0x0200003D
kQTMSKnobReverbThresholdID = 0x0200003E
kQTMSKnobVolumeAttackVelScalingID = 0x0200003F
kQTMSKnobLastIDPlus1 = 0x02000040
kControllerMaximum = 0x00007FFF
kControllerMinimum = -0x8000  # (long)0xFFFF8000 in the C header, i.e. -32768 as a signed 32-bit value
kVoiceCountDynamic = -1
kFirstGMInstrument = 0x00000001
kLastGMInstrument = 0x00000080
kFirstGSInstrument = 0x00000081
kLastGSInstrument = 0x00003FFF
kFirstDrumkit = 0x00004000
kLastDrumkit = 0x00004080
kFirstROMInstrument = 0x00008000
kLastROMInstrument = 0x0000FFFF
kFirstUserInstrument = 0x00010000
kLastUserInstrument = 0x0001FFFF
kInstrumentMatchSynthesizerType = 1
kInstrumentMatchSynthesizerName = 2
kInstrumentMatchName = 4
kInstrumentMatchNumber = 8
kInstrumentMatchGMNumber = 16
kInstrumentMatchGSNumber = 32
kKnobBasic = 8
kKnobReadOnly = 16
kKnobInterruptUnsafe = 32
kKnobKeyrangeOverride = 64
kKnobGroupStart = 128
kKnobFixedPoint8 = 1024
kKnobFixedPoint16 = 2048
kKnobTypeNumber = 0 << 12
kKnobTypeGroupName = 1 << 12
kKnobTypeBoolean = 2 << 12
kKnobTypeNote = 3 << 12
kKnobTypePan = 4 << 12
kKnobTypeInstrument = 5 << 12
kKnobTypeSetting = 6 << 12
kKnobTypeMilliseconds = 7 << 12
kKnobTypePercentage = 8 << 12
kKnobTypeHertz = 9 << 12
kKnobTypeButton = 10 << 12
kUnknownKnobValue = 0x7FFFFFFF
kDefaultKnobValue = 0x7FFFFFFE
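# Each *Err constant below maps the corresponding *OSErr code (defined
# earlier in this module) to a 32-bit error value with the high bit set.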
notImplementedMusicErr = (0x80000000 | (0xFFFF & (notImplementedMusicOSErr)))
cantSendToSynthesizerErr = (0x80000000 | (0xFFFF & (cantSendToSynthesizerOSErr)))
cantReceiveFromSynthesizerErr = (0x80000000 | (0xFFFF & (cantReceiveFromSynthesizerOSErr)))
illegalVoiceAllocationErr = (0x80000000 | (0xFFFF & (illegalVoiceAllocationOSErr)))
illegalPartErr = (0x80000000 | (0xFFFF & (illegalPartOSErr)))
illegalChannelErr = (0x80000000 | (0xFFFF & (illegalChannelOSErr)))
illegalKnobErr = (0x80000000 | (0xFFFF & (illegalKnobOSErr)))
illegalKnobValueErr = (0x80000000 | (0xFFFF & (illegalKnobValueOSErr)))
illegalInstrumentErr = (0x80000000 | (0xFFFF & (illegalInstrumentOSErr)))
illegalControllerErr = (0x80000000 | (0xFFFF & (illegalControllerOSErr)))
midiManagerAbsentErr = (0x80000000 | (0xFFFF & (midiManagerAbsentOSErr)))
synthesizerNotRespondingErr = (0x80000000 | (0xFFFF & (synthesizerNotRespondingOSErr)))
synthesizerErr = (0x80000000 | (0xFFFF & (synthesizerOSErr)))
illegalNoteChannelErr = (0x80000000 | (0xFFFF & (illegalNoteChannelOSErr)))
noteChannelNotAllocatedErr = (0x80000000 | (0xFFFF & (noteChannelNotAllocatedOSErr)))
tunePlayerFullErr = (0x80000000 | (0xFFFF & (tunePlayerFullOSErr)))
tuneParseErr = (0x80000000 | (0xFFFF & (tuneParseOSErr)))
kGetAtomicInstNoExpandedSamples = 1 << 0
kGetAtomicInstNoOriginalSamples = 1 << 1
kGetAtomicInstNoSamples = kGetAtomicInstNoExpandedSamples | kGetAtomicInstNoOriginalSamples
kGetAtomicInstNoKnobList = 1 << 2
kGetAtomicInstNoInstrumentInfo = 1 << 3
kGetAtomicInstOriginalKnobList = 1 << 4
kGetAtomicInstAllKnobs = 1 << 5
kSetAtomicInstKeepOriginalInstrument = 1 << 0
kSetAtomicInstShareAcrossParts = 1 << 1
kSetAtomicInstCallerTosses = 1 << 2
kSetAtomicInstCallerGuarantees = 1 << 3
kSetAtomicInstInterruptSafe = 1 << 4
kSetAtomicInstDontPreprocess = 1 << 7
kInstrumentNamesModifiable = 1
kInstrumentNamesBoth = 2
kGenericMusicComponentSubtype = FOUR_CHAR_CODE('gene')
kGenericMusicKnob = 1
kGenericMusicInstrumentKnob = 2
kGenericMusicDrumKnob = 3
kGenericMusicGlobalController = 4
kGenericMusicResFirst = 0
kGenericMusicResMiscStringList = 1
kGenericMusicResMiscLongList = 2
kGenericMusicResInstrumentList = 3
kGenericMusicResDrumList = 4
kGenericMusicResInstrumentKnobDescriptionList = 5
kGenericMusicResDrumKnobDescriptionList = 6
kGenericMusicResKnobDescriptionList = 7
kGenericMusicResBitsLongList = 8
kGenericMusicResModifiableInstrumentHW = 9
kGenericMusicResGMTranslation = 10
kGenericMusicResROMInstrumentData = 11
kGenericMusicResAboutPICT = 12
kGenericMusicResLast = 13
kGenericMusicMiscLongFirst = 0
kGenericMusicMiscLongVoiceCount = 1
kGenericMusicMiscLongPartCount = 2
kGenericMusicMiscLongModifiableInstrumentCount = 3
kGenericMusicMiscLongChannelMask = 4
kGenericMusicMiscLongDrumPartCount = 5
kGenericMusicMiscLongModifiableDrumCount = 6
kGenericMusicMiscLongDrumChannelMask = 7
kGenericMusicMiscLongOutputCount = 8
kGenericMusicMiscLongLatency = 9
kGenericMusicMiscLongFlags = 10
kGenericMusicMiscLongFirstGMHW = 11
kGenericMusicMiscLongFirstGMDrumHW = 12
kGenericMusicMiscLongFirstUserHW = 13
kGenericMusicMiscLongLast = 14
kMusicGenericRange = 0x0100
kMusicDerivedRange = 0x0200
kGenericMusicDoMIDI = 1 << 0
kGenericMusicBank0 = 1 << 1
kGenericMusicBank32 = 1 << 2
kGenericMusicErsatzMIDI = 1 << 3
kGenericMusicCallKnobs = 1 << 4
kGenericMusicCallParts = 1 << 5
kGenericMusicCallInstrument = 1 << 6
kGenericMusicCallNumber = 1 << 7
kGenericMusicCallROMInstrument = 1 << 8
kGenericMusicAllDefaults = 1 << 9
kGetInstrumentInfoNoBuiltIn = 1 << 0
kGetInstrumentInfoMidiUserInst = 1 << 1
kGetInstrumentInfoNoIText = 1 << 2
kNoteRequestNoGM = 1
kNoteRequestNoSynthType = 2
kNoteRequestSynthMustMatch = 4
kNoteRequestSpecifyMIDIChannel = 0x80
kPickDontMix = 1
kPickSameSynth = 2
kPickUserInsts = 4
kPickEditAllowEdit = 8
kPickEditAllowPick = 16
kPickEditSynthGlobal = 32
kPickEditControllers = 64
kNoteAllocatorComponentType = FOUR_CHAR_CODE('nota')
kNADummyOneSelect = 29
kNADummyTwoSelect = 30
kTuneQueueDepth = 8
kTunePlayerComponentType = FOUR_CHAR_CODE('tune')
kTuneStartNow = 1
kTuneDontClipNotes = 2
kTuneExcludeEdgeNotes = 4
kTuneQuickStart = 8
kTuneLoopUntil = 16
kTunePlayDifference = 32
kTunePlayConcurrent = 64
kTuneStartNewMaster = 16384
kTuneStopFade = 1
kTuneStopSustain = 2
kTuneStopInstant = 4
kTuneStopReleaseChannels = 8
kTuneMixMute = 1
kTuneMixSolo = 2
kRestEventType = 0x00000000
kNoteEventType = 0x00000001
kControlEventType = 0x00000002
kMarkerEventType = 0x00000003
kUndefined1EventType = 0x00000008
kXNoteEventType = 0x00000009
kXControlEventType = 0x0000000A
kKnobEventType = 0x0000000B
kUndefined2EventType = 0x0000000C
kUndefined3EventType = 0x0000000D
kUndefined4EventType = 0x0000000E
kGeneralEventType = 0x0000000F
kXEventLengthBits = 0x00000002
kGeneralEventLengthBits = 0x00000003
kEventLen = 1L
kXEventLen = 2L
kRestEventLen = kEventLen
kNoteEventLen = kEventLen
kControlEventLen = kEventLen
kMarkerEventLen = kEventLen
kXNoteEventLen = kXEventLen
kXControlEventLen = kXEventLen
kGeneralEventLen = kXEventLen
kEventLengthFieldPos = 30
kEventLengthFieldWidth = 2
kEventTypeFieldPos = 29
kEventTypeFieldWidth = 3
kXEventTypeFieldPos = 28
kXEventTypeFieldWidth = 4
kEventPartFieldPos = 24
kEventPartFieldWidth = 5
kXEventPartFieldPos = 16
kXEventPartFieldWidth = 12
kRestEventDurationFieldPos = 0
kRestEventDurationFieldWidth = 24
kRestEventDurationMax = ((1L << kRestEventDurationFieldWidth) - 1)
kNoteEventPitchFieldPos = 18
kNoteEventPitchFieldWidth = 6
kNoteEventPitchOffset = 32
kNoteEventVolumeFieldPos = 11
kNoteEventVolumeFieldWidth = 7
kNoteEventVolumeOffset = 0
kNoteEventDurationFieldPos = 0
kNoteEventDurationFieldWidth = 11
kNoteEventDurationMax = ((1L << kNoteEventDurationFieldWidth) - 1)
kXNoteEventPitchFieldPos = 0
kXNoteEventPitchFieldWidth = 16
kXNoteEventDurationFieldPos = 0
kXNoteEventDurationFieldWidth = 22
kXNoteEventDurationMax = ((1L << kXNoteEventDurationFieldWidth) - 1)
kXNoteEventVolumeFieldPos = 22
kXNoteEventVolumeFieldWidth = 7
kControlEventControllerFieldPos = 16
kControlEventControllerFieldWidth = 8
kControlEventValueFieldPos = 0
kControlEventValueFieldWidth = 16
kXControlEventControllerFieldPos = 0
kXControlEventControllerFieldWidth = 16
kXControlEventValueFieldPos = 0
kXControlEventValueFieldWidth = 16
kKnobEventValueHighFieldPos = 0
kKnobEventValueHighFieldWidth = 16
kKnobEventKnobFieldPos = 16
kKnobEventKnobFieldWidth = 14
kKnobEventValueLowFieldPos = 0
kKnobEventValueLowFieldWidth = 16
kMarkerEventSubtypeFieldPos = 16
kMarkerEventSubtypeFieldWidth = 8
kMarkerEventValueFieldPos = 0
kMarkerEventValueFieldWidth = 16
kGeneralEventSubtypeFieldPos = 16
kGeneralEventSubtypeFieldWidth = 14
kGeneralEventLengthFieldPos = 0
kGeneralEventLengthFieldWidth = 16
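# A hedged sketch (not from the original header) of how these field
# constants combine, mirroring the event-stuffing macro fragments
# preserved below; part, pitch, volume and duration are illustrative
# variables. A standard note event packs its fields into one 32-bit word:
#   x = ((kNoteEventType << kEventTypeFieldPos)
#        | (part << kEventPartFieldPos)
#        | ((pitch - kNoteEventPitchOffset) << kNoteEventPitchFieldPos)
#        | ((volume + kNoteEventVolumeOffset) << kNoteEventVolumeFieldPos)
#        | (duration << kNoteEventDurationFieldPos))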
kEndMarkerValue = 0x00000060  # big-endian form from the C header
kEndMarkerValue = 0x60000000  # little-endian form; this later assignment is the one Python keeps
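# The commented fragments below are remnants of C preprocessor macros
# (qtma_EXT and the event-stuffing macros) that the automated header
# conversion could not translate into Python; they are kept for reference.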
# _ext = qtma_EXT(*lP
# ulen = (_ext < 2) ? 1 : 2
# ulen = (unsigned short)qtma_EXT(*lP
# ulen = lP[1]
# _ext = qtma_EXT(*lP
# ulen = (_ext < 2) ? 1 : 2
# ulen = (unsigned short)qtma_EXT(*lP
# ulen = lP[-1]
# x = (kRestEventType << kEventTypeFieldPos) \
# x = EndianU32_NtoB(x) )
# x = (kNoteEventType << kEventTypeFieldPos) \
# x = EndianU32_NtoB(x) )
# x = (kControlEventType << kEventTypeFieldPos) \
# x = EndianU32_NtoB(x) )
# x = (kMarkerEventType << kEventTypeFieldPos) \
# x = EndianU32_NtoB(x) )
# w1 = (kXNoteEventType << kXEventTypeFieldPos) \
# w1 = EndianU32_NtoB(w1)
# w2 = (kXEventLengthBits << kEventLengthFieldPos) \
# w2 = EndianU32_NtoB(w2) )
# w1 = (kXControlEventType << kXEventTypeFieldPos) \
# w1 = EndianU32_NtoB(w1)
# w2 = (kXEventLengthBits << kEventLengthFieldPos) \
# w2 = EndianU32_NtoB(w2) )
# w1 = (kKnobEventType << kXEventTypeFieldPos) \
# w1 = EndianU32_NtoB(w1)
# w2 = (kXEventLengthBits << kEventLengthFieldPos) \
# w2 = EndianU32_NtoB(w2) )
# w1 = (kGeneralEventType << kXEventTypeFieldPos) \
# w1 = EndianU32_NtoB(w1)
# w2 = (kGeneralEventLengthBits << kEventLengthFieldPos) \
# w2 = EndianU32_NtoB(w2) )
kGeneralEventNoteRequest = 1
kGeneralEventPartKey = 4
kGeneralEventTuneDifference = 5
kGeneralEventAtomicInstrument = 6
kGeneralEventKnob = 7
kGeneralEventMIDIChannel = 8
kGeneralEventPartChange = 9
kGeneralEventNoOp = 10
kGeneralEventUsedNotes = 11
kGeneralEventPartMix = 12
kMarkerEventEnd = 0
kMarkerEventBeat = 1
kMarkerEventTempo = 2
kCurrentlyNativeEndian = 1
kCurrentlyNotNativeEndian = 2
kQTMIDIGetMIDIPortsSelect = 0x0001
kQTMIDIUseSendPortSelect = 0x0002
kQTMIDISendMIDISelect = 0x0003
kMusicGetDescriptionSelect = 0x0001
kMusicGetPartSelect = 0x0002
kMusicSetPartSelect = 0x0003
kMusicSetPartInstrumentNumberSelect = 0x0004
kMusicGetPartInstrumentNumberSelect = 0x0005
kMusicStorePartInstrumentSelect = 0x0006
kMusicGetPartAtomicInstrumentSelect = 0x0009
kMusicSetPartAtomicInstrumentSelect = 0x000A
kMusicGetPartKnobSelect = 0x0010
kMusicSetPartKnobSelect = 0x0011
kMusicGetKnobSelect = 0x0012
kMusicSetKnobSelect = 0x0013
kMusicGetPartNameSelect = 0x0014
kMusicSetPartNameSelect = 0x0015
kMusicFindToneSelect = 0x0016
kMusicPlayNoteSelect = 0x0017
kMusicResetPartSelect = 0x0018
kMusicSetPartControllerSelect = 0x0019
kMusicGetPartControllerSelect = 0x001A
kMusicGetMIDIProcSelect = 0x001B
kMusicSetMIDIProcSelect = 0x001C
kMusicGetInstrumentNamesSelect = 0x001D
kMusicGetDrumNamesSelect = 0x001E
kMusicGetMasterTuneSelect = 0x001F
kMusicSetMasterTuneSelect = 0x0020
kMusicGetInstrumentAboutInfoSelect = 0x0022
kMusicGetDeviceConnectionSelect = 0x0023
kMusicUseDeviceConnectionSelect = 0x0024
kMusicGetKnobSettingStringsSelect = 0x0025
kMusicGetMIDIPortsSelect = 0x0026
kMusicSendMIDISelect = 0x0027
kMusicStartOfflineSelect = 0x0029
kMusicSetOfflineTimeToSelect = 0x002A
kMusicGetInstrumentKnobDescriptionSelect = 0x002B
kMusicGetDrumKnobDescriptionSelect = 0x002C
kMusicGetKnobDescriptionSelect = 0x002D
kMusicGetInfoTextSelect = 0x002E
kMusicGetInstrumentInfoSelect = 0x002F
kMusicTaskSelect = 0x0031
kMusicSetPartInstrumentNumberInterruptSafeSelect = 0x0032
kMusicSetPartSoundLocalizationSelect = 0x0033
kMusicGenericConfigureSelect = 0x0100
kMusicGenericGetPartSelect = 0x0101
kMusicGenericGetKnobListSelect = 0x0102
kMusicGenericSetResourceNumbersSelect = 0x0103
kMusicDerivedMIDISendSelect = 0x0200
kMusicDerivedSetKnobSelect = 0x0201
kMusicDerivedSetPartSelect = 0x0202
kMusicDerivedSetInstrumentSelect = 0x0203
kMusicDerivedSetPartInstrumentNumberSelect = 0x0204
kMusicDerivedSetMIDISelect = 0x0205
kMusicDerivedStorePartInstrumentSelect = 0x0206
kMusicDerivedOpenResFileSelect = 0x0207
kMusicDerivedCloseResFileSelect = 0x0208
kNARegisterMusicDeviceSelect = 0x0000
kNAUnregisterMusicDeviceSelect = 0x0001
kNAGetRegisteredMusicDeviceSelect = 0x0002
kNASaveMusicConfigurationSelect = 0x0003
kNANewNoteChannelSelect = 0x0004
kNADisposeNoteChannelSelect = 0x0005
kNAGetNoteChannelInfoSelect = 0x0006
kNAPrerollNoteChannelSelect = 0x0007
kNAUnrollNoteChannelSelect = 0x0008
kNASetNoteChannelVolumeSelect = 0x000B
kNAResetNoteChannelSelect = 0x000C
kNAPlayNoteSelect = 0x000D
kNASetControllerSelect = 0x000E
kNASetKnobSelect = 0x000F
kNAFindNoteChannelToneSelect = 0x0010
kNASetInstrumentNumberSelect = 0x0011
kNAPickInstrumentSelect = 0x0012
kNAPickArrangementSelect = 0x0013
kNAStuffToneDescriptionSelect = 0x001B
kNACopyrightDialogSelect = 0x001C
kNAGetIndNoteChannelSelect = 0x001F
kNAGetMIDIPortsSelect = 0x0021
kNAGetNoteRequestSelect = 0x0022
kNASendMIDISelect = 0x0023
kNAPickEditInstrumentSelect = 0x0024
kNANewNoteChannelFromAtomicInstrumentSelect = 0x0025
kNASetAtomicInstrumentSelect = 0x0026
kNAGetKnobSelect = 0x0028
kNATaskSelect = 0x0029
kNASetNoteChannelBalanceSelect = 0x002A
kNASetInstrumentNumberInterruptSafeSelect = 0x002B
kNASetNoteChannelSoundLocalizationSelect = 0x002C
kNAGetControllerSelect = 0x002D
kTuneSetHeaderSelect = 0x0004
kTuneGetTimeBaseSelect = 0x0005
kTuneSetTimeScaleSelect = 0x0006
kTuneGetTimeScaleSelect = 0x0007
kTuneGetIndexedNoteChannelSelect = 0x0008
kTuneQueueSelect = 0x000A
kTuneInstantSelect = 0x000B
kTuneGetStatusSelect = 0x000C
kTuneStopSelect = 0x000D
kTuneSetVolumeSelect = 0x0010
kTuneGetVolumeSelect = 0x0011
kTunePrerollSelect = 0x0012
kTuneUnrollSelect = 0x0013
kTuneSetNoteChannelsSelect = 0x0014
kTuneSetPartTransposeSelect = 0x0015
kTuneGetNoteAllocatorSelect = 0x0017
kTuneSetSofterSelect = 0x0018
kTuneTaskSelect = 0x0019
kTuneSetBalanceSelect = 0x001A
kTuneSetSoundLocalizationSelect = 0x001B
kTuneSetHeaderWithSizeSelect = 0x001C
kTuneSetPartMixSelect = 0x001D
kTuneGetPartMixSelect = 0x001E
| lgpl-2.1 | -6,614,324,444,661,268,000 | -7,303,370,163,830,573,000 | 36.223183 | 95 | 0.835773 | false |
dang03/son-cli | src/son/validate/util.py | 5 | 4770 | # Copyright (c) 2015 SONATA-NFV, UBIWHERE
# ALL RIGHTS RESERVED.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Neither the name of the SONATA-NFV, UBIWHERE
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# This work has been performed in the framework of the SONATA project,
# funded by the European Commission under Grant number 671517 through
# the Horizon 2020 and 5G-PPP programmes. The authors would like to
# acknowledge the contributions of their colleagues of the SONATA
# partner consortium (www.sonata-nfv.eu).
import os
import yaml
import logging
from son.validate import event
log = logging.getLogger(__name__)
evtlog = event.get_logger('validator.events')
def read_descriptor_files(files):
"""
    Indexes the VNF descriptors provided in the file list. Builds a
    dictionary that maps each descriptor ID, in the format
    'vendor.name.version', to the file it was read from; unreadable
    files and duplicate IDs are skipped.
    :param files: filename list of descriptors
    :return: dictionary mapping descriptor IDs to file paths (empty if
        none could be read)
"""
descriptors = {}
for file in files:
content = read_descriptor_file(file)
if not content:
continue
did = descriptor_id(content)
if did in descriptors.keys():
log.error("Duplicate descriptor in files: '{0}' <==> '{1}'"
.format(file, descriptors[did]))
continue
descriptors[did] = file
return descriptors
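# Hypothetical usage sketch (filenames and descriptor IDs are illustrative):
#   read_descriptor_files(['fw_vnfd.yml', 'nat_vnfd.yml']) could return
#   {'eu.sonata-nfv.fw-vnf.0.2': 'fw_vnfd.yml',
#    'eu.sonata-nfv.nat-vnf.0.2': 'nat_vnfd.yml'}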
def read_descriptor_file(file):
"""
Reads a SONATA descriptor from a file.
:param file: descriptor filename
    :return: descriptor dictionary, or None if the file is empty,
        malformed or missing the 'vendor', 'name' or 'version' fields
"""
with open(file, 'r') as _file:
try:
            descriptor = yaml.safe_load(_file)  # safe_load avoids constructing arbitrary Python objects
except yaml.YAMLError as exc:
evtlog.log("Invalid descriptor",
"Error parsing descriptor file: {0}".format(exc),
file,
'evt_invalid_descriptor')
return
if not descriptor:
evtlog.log("Invalid descriptor",
"Couldn't read descriptor file: '{0}'".format(file),
file,
'evt_invalid_descriptor')
return
if 'vendor' not in descriptor or \
'name' not in descriptor or \
'version' not in descriptor:
log.warning("Invalid SONATA descriptor file: '{0}'. Missing "
"'vendor', 'name' or 'version'. Ignoring."
.format(file))
return
return descriptor
def descriptor_id(descriptor):
"""
Provides the descriptor id of the specified descriptor content
:param descriptor: descriptor content dict
:return: descriptor id
"""
return build_descriptor_id(descriptor['vendor'],
descriptor['name'],
descriptor['version'])
def build_descriptor_id(vendor, name, version):
"""
Assemble the descriptor id based on its vendor, name and version.
:param vendor: descriptor vendor
:param name: descriptor name
:param version: descriptor version
:return: descriptor id
"""
return vendor + '.' + name + '.' + version
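# e.g. build_descriptor_id('eu.sonata-nfv', 'fw-vnf', '0.2') returns
# 'eu.sonata-nfv.fw-vnf.0.2' (values illustrative)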
def list_files(path, extension):
"""
Retrieves a list of files with the specified extension in a given
directory path.
:param path: directory to search for files
:param extension: extension of files
:return: list of files
"""
file_list = []
for root, dirs, files in os.walk(path):
for file in files:
if file.endswith(extension):
file_list.append(os.path.join(root, file))
return file_list
def strip_root(path):
"""
Remove leading slash of a path
"""
if type(path) is not str:
return path
return path[1:] if path[0] == '/' else path
class CountCalls(object):
"""Decorator to determine number of calls for a method"""
def __init__(self, method):
self.method = method
self.counter = 0
def __call__(self, *args, **kwargs):
self.counter += 1
return self.method(*args, **kwargs)
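# Minimal usage sketch for the decorator (the decorated function is
# hypothetical):
#
#   @CountCalls
#   def ping():
#       return 'pong'
#
#   ping(); ping()
#   assert ping.counter == 2
#
# Note that CountCalls does not implement the descriptor protocol, so it is
# best suited to plain functions; on instance methods the wrapped callable
# is not bound to `self`.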
| apache-2.0 | -9,161,866,600,370,818,000 | -2,774,757,796,645,666,000 | 30.8 | 75 | 0.625996 | false |
tobes/py3status | py3status/modules/gpmdp.py | 2 | 2162 | # -*- coding: utf-8 -*-
"""
Display song currently playing in Google Play Music Desktop Player.
Configuration parameters:
cache_timeout: refresh interval for this module (default 5)
format: specify the items and ordering of the data in the status bar.
        These are a 1:1 match to gpmdp-remote's options
(default '♫ {info}')
Format placeholders:
{info} Print info about now playing song
{title} Print current song title
{artist} Print current song artist
{album} Print current song album
{album_art} Print current song album art URL
{time_current} Print current song time in milliseconds
{time_total} Print total song time in milliseconds
{status} Print whether GPMDP is paused or playing
{current} Print now playing song in "artist - song" format
Requires:
gpmdp: https://www.googleplaymusicdesktopplayer.com/
gpmdp-remote: https://github.com/iandrewt/gpmdp-remote
@author Aaron Fields https://twitter.com/spirotot
@license BSD
SAMPLE OUTPUT
{'full_text': '♫ Now Playing: The Show Goes On by Lupe Fiasco'}
"""
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 5
format = u'♫ {info}'
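    # Illustrative override (not a default shipped with the module); any
    # combination of the placeholders documented above is accepted:
    #   format = u'{artist} - {title} [{status}]'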
def gpmdp(self):
def _run_cmd(cmd):
return self.py3.command_output(['gpmdp-remote', cmd]).strip()
full_text = ''
if _run_cmd('status') == 'Playing':
cmds = ['info', 'title', 'artist', 'album', 'status', 'current',
'time_total', 'time_current', 'album_art']
data = {}
for cmd in cmds:
if self.py3.format_contains(self.format, '{0}'.format(cmd)):
data[cmd] = _run_cmd(cmd)
full_text = self.py3.safe_format(self.format, data)
return {
'cached_until': self.py3.time_in(self.cache_timeout),
'full_text': full_text
}
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
| bsd-3-clause | -6,423,551,058,012,231,000 | -3,512,983,943,067,887,600 | 31.179104 | 81 | 0.590445 | false |
UTDS16/collab-texter | ctxt/client/resources_rc.py | 1 | 19028 | # -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.7.0)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x05\x88\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xff\x61\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x69\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x30\x2d\x63\x30\x36\x30\x20\x36\x31\
\x2e\x31\x33\x34\x37\x37\x37\x2c\x20\x32\x30\x31\x30\x2f\x30\x32\
\x2f\x31\x32\x2d\x31\x37\x3a\x33\x32\x3a\x30\x30\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x52\x69\x67\x68\x74\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\
\x31\x2e\x30\x2f\x72\x69\x67\x68\x74\x73\x2f\x22\x20\x78\x6d\x6c\
\x6e\x73\x3a\x78\x6d\x70\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\
\x70\x2f\x31\x2e\x30\x2f\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\
\x3a\x73\x74\x52\x65\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\
\x31\x2e\x30\x2f\x73\x54\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\
\x63\x65\x52\x65\x66\x23\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\
\x70\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\
\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\
\x20\x78\x6d\x70\x52\x69\x67\x68\x74\x73\x3a\x4d\x61\x72\x6b\x65\
\x64\x3d\x22\x46\x61\x6c\x73\x65\x22\x20\x78\x6d\x70\x4d\x4d\x3a\
\x44\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\
\x64\x69\x64\x3a\x41\x36\x32\x31\x39\x37\x43\x46\x36\x41\x30\x38\
\x31\x31\x44\x46\x42\x35\x33\x44\x41\x46\x41\x32\x44\x31\x41\x33\
\x30\x36\x46\x41\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\
\x61\x6e\x63\x65\x49\x44\x3d\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\
\x41\x36\x32\x31\x39\x37\x43\x45\x36\x41\x30\x38\x31\x31\x44\x46\
\x42\x35\x33\x44\x41\x46\x41\x32\x44\x31\x41\x33\x30\x36\x46\x41\
\x22\x20\x78\x6d\x70\x3a\x43\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\
\x6c\x3d\x22\x41\x64\x6f\x62\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\
\x6f\x70\x20\x43\x53\x33\x20\x57\x69\x6e\x64\x6f\x77\x73\x22\x3e\
\x20\x3c\x78\x6d\x70\x4d\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\
\x72\x6f\x6d\x20\x73\x74\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\
\x63\x65\x49\x44\x3d\x22\x75\x75\x69\x64\x3a\x41\x43\x31\x46\x32\
\x45\x38\x33\x33\x32\x34\x41\x44\x46\x31\x31\x41\x41\x42\x38\x43\
\x35\x33\x39\x30\x44\x38\x35\x42\x35\x42\x33\x22\x20\x73\x74\x52\
\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\x75\
\x75\x69\x64\x3a\x43\x39\x44\x33\x34\x39\x36\x36\x34\x41\x33\x43\
\x44\x44\x31\x31\x42\x30\x38\x41\x42\x42\x42\x43\x46\x46\x31\x37\
\x32\x31\x35\x36\x22\x2f\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x3e\x20\x3c\x2f\x72\x64\x66\
\x3a\x52\x44\x46\x3e\x20\x3c\x2f\x78\x3a\x78\x6d\x70\x6d\x65\x74\
\x61\x3e\x20\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x65\x6e\x64\
\x3d\x22\x72\x22\x3f\x3e\xa6\x19\x69\x4a\x00\x00\x01\xb5\x49\x44\
\x41\x54\x78\xda\x94\xd3\xcf\x4b\x02\x41\x14\x07\xf0\x67\xa6\x46\
\x10\x11\x45\x08\x5d\x0a\x3a\x48\x14\x66\x24\x1d\x3b\xc5\x6a\x3f\
\x50\x0f\x46\x60\x52\x68\x7a\x13\x21\xeb\x10\x75\xe8\x58\xa7\xfe\
\x0b\xa1\x83\x78\xc8\xe8\xec\xc9\x8b\x10\x96\x75\x88\xec\x87\x12\
\x68\x20\x7a\x08\x35\x74\x9d\xbe\x23\x53\x08\x65\x5b\x03\x1f\x66\
\xf6\xed\xee\xdb\xb7\x6f\x67\xd5\xf4\xbf\x61\x81\x53\xc8\xc2\xb8\
\x58\xff\x79\x58\xe1\x32\x18\x0c\x32\x3e\x0b\xd6\x2e\x85\x9b\x16\
\xe1\x4a\x38\x0e\x04\x02\xd3\xc5\x62\x91\xbc\x5e\xef\x34\x8e\xd5\
\xa0\x52\x29\x24\xb8\x09\x85\x42\x13\x95\x4a\x85\xaa\xd5\x6a\x4b\
\xad\x56\x23\x59\x96\x49\xa7\xd3\x51\x24\x12\xb9\x55\x2b\x24\xc8\
\x26\x12\x89\x59\xb3\xd9\x3c\x58\x2a\x95\x88\x27\xaa\xd7\xeb\xa4\
\xd5\x6a\x29\x1a\x8d\xde\xe1\xfc\xae\x52\x02\x7e\xd1\x73\x32\x99\
\x9c\x33\x99\x4c\x03\xbc\x02\x15\x8a\x8e\xc5\x62\x0f\x88\x6f\x43\
\xac\x53\x0f\x26\x61\x5f\xac\xcf\xf8\xc5\xe1\x70\x38\xcb\x9f\xcc\
\x18\xef\x21\x75\x8b\xf8\xf7\x2e\xc3\x16\x1c\x88\x24\xed\xc3\x01\
\x79\x49\x92\x78\x86\xfc\x4f\x4f\x4d\xf9\xfd\x7e\x7e\x32\x05\xbe\
\x0e\x95\xad\x42\x55\xcc\xdf\x46\xda\x66\xb3\x33\xa7\x73\x95\x27\
\x49\xc3\x92\x88\x4f\xc1\x61\xa7\x26\x7d\x36\x71\x19\x24\xbd\x7e\
\x64\xe8\xe5\x25\x4f\x46\xa3\x71\x38\x93\xb9\x9f\x41\x4c\x86\x39\
\x88\xc0\xeb\x4f\x09\x78\x13\x57\xe0\xc4\xed\xde\x34\x34\x1a\x0c\
\x5d\xee\xa6\x72\xf9\x8d\xe6\xe7\x17\x0c\x88\x87\xa0\x00\xd7\x9d\
\x2a\xe0\x1b\xe9\xd1\xe3\xf1\x8d\xe6\x72\x05\x6c\x94\xf7\x56\x50\
\xa3\xe1\x4d\x66\xd4\x6c\x32\x8a\xc7\x2f\x9e\x70\x30\xf6\xdb\xb7\
\xb6\x43\xce\xe5\xf2\x32\x8b\xc5\xc9\x1c\x8e\x75\xde\x83\x5c\x1b\
\xbb\xd2\x4f\xe2\x12\x65\x16\xd6\xd6\x7c\x4c\xac\x37\xa0\x1f\xfa\
\xa0\x17\x7a\x40\x2b\xbe\xbf\x5a\x54\xfe\xf5\x0a\x35\xd8\x6b\xd5\
\x4c\x74\x04\x3b\x70\x0e\x0d\xa1\xde\x36\xcb\x6d\x9a\x3c\xc1\x87\
\x00\x03\x00\xd2\x66\x9b\xc8\xef\x51\x79\x01\x00\x00\x00\x00\x49\
\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x05\x5a\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xff\x61\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x69\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x30\x2d\x63\x30\x36\x30\x20\x36\x31\
\x2e\x31\x33\x34\x37\x37\x37\x2c\x20\x32\x30\x31\x30\x2f\x30\x32\
\x2f\x31\x32\x2d\x31\x37\x3a\x33\x32\x3a\x30\x30\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x52\x69\x67\x68\x74\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\
\x31\x2e\x30\x2f\x72\x69\x67\x68\x74\x73\x2f\x22\x20\x78\x6d\x6c\
\x6e\x73\x3a\x78\x6d\x70\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\
\x70\x2f\x31\x2e\x30\x2f\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\
\x3a\x73\x74\x52\x65\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\
\x31\x2e\x30\x2f\x73\x54\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\
\x63\x65\x52\x65\x66\x23\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\
\x70\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\
\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\
\x20\x78\x6d\x70\x52\x69\x67\x68\x74\x73\x3a\x4d\x61\x72\x6b\x65\
\x64\x3d\x22\x46\x61\x6c\x73\x65\x22\x20\x78\x6d\x70\x4d\x4d\x3a\
\x44\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\
\x64\x69\x64\x3a\x41\x36\x32\x31\x39\x37\x43\x42\x36\x41\x30\x38\
\x31\x31\x44\x46\x42\x35\x33\x44\x41\x46\x41\x32\x44\x31\x41\x33\
\x30\x36\x46\x41\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\
\x61\x6e\x63\x65\x49\x44\x3d\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\
\x41\x36\x32\x31\x39\x37\x43\x41\x36\x41\x30\x38\x31\x31\x44\x46\
\x42\x35\x33\x44\x41\x46\x41\x32\x44\x31\x41\x33\x30\x36\x46\x41\
\x22\x20\x78\x6d\x70\x3a\x43\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\
\x6c\x3d\x22\x41\x64\x6f\x62\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\
\x6f\x70\x20\x43\x53\x33\x20\x57\x69\x6e\x64\x6f\x77\x73\x22\x3e\
\x20\x3c\x78\x6d\x70\x4d\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\
\x72\x6f\x6d\x20\x73\x74\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\
\x63\x65\x49\x44\x3d\x22\x75\x75\x69\x64\x3a\x41\x43\x31\x46\x32\
\x45\x38\x33\x33\x32\x34\x41\x44\x46\x31\x31\x41\x41\x42\x38\x43\
\x35\x33\x39\x30\x44\x38\x35\x42\x35\x42\x33\x22\x20\x73\x74\x52\
\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\x75\
\x75\x69\x64\x3a\x43\x39\x44\x33\x34\x39\x36\x36\x34\x41\x33\x43\
\x44\x44\x31\x31\x42\x30\x38\x41\x42\x42\x42\x43\x46\x46\x31\x37\
\x32\x31\x35\x36\x22\x2f\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x3e\x20\x3c\x2f\x72\x64\x66\
\x3a\x52\x44\x46\x3e\x20\x3c\x2f\x78\x3a\x78\x6d\x70\x6d\x65\x74\
\x61\x3e\x20\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x65\x6e\x64\
\x3d\x22\x72\x22\x3f\x3e\xf9\x57\x69\x37\x00\x00\x01\x87\x49\x44\
\x41\x54\x78\xda\x9c\xd2\x41\x4b\x02\x41\x14\x00\xe0\xb7\x96\x19\
\x4b\x4b\x44\x8a\xd8\x25\xb0\xa4\x43\xae\x7f\xc1\x10\x84\x2d\x74\
\xad\x24\xa5\x6d\x0b\x22\xc8\x50\x7f\x40\x7f\xa1\x5b\xff\xa2\x6e\
\x12\x75\x0a\x84\xa4\x83\xa7\x8a\x08\x11\x6f\x8b\x20\x2c\xd1\x41\
\x51\x56\x49\x73\x7a\x13\x63\xc8\x92\xb6\x38\xf0\xb1\x6f\x67\xdf\
\x1b\x66\xde\x0e\xc0\xe4\x43\x42\x4f\x93\x16\x6f\xa2\x67\xb6\x88\
\xe5\x82\x17\xf6\x1c\x8e\x2d\x8d\x2d\xf4\x9a\xc9\x64\x08\x7d\x32\
\xbf\xc5\xb6\x31\x45\x6f\xcc\x45\x2a\x95\x0a\xe8\xba\x0e\xaa\xaa\
\x06\x58\x0d\x37\x48\xe4\x46\x14\x6b\xd9\x6c\xb6\x64\x18\x06\xb4\
\x5a\x2d\x68\xb7\xdb\xd0\xe9\x74\x80\x10\x02\x82\x20\x40\x2e\x97\
\x2b\x61\x8e\x7f\xd4\x96\xcb\xe9\x74\x9a\x6e\x77\x3d\x91\x48\x10\
\xbf\x5f\x24\x1e\x8f\x87\x38\x9d\x2e\x22\x8a\x22\x9d\x2f\xa3\xc8\
\xb8\x33\x57\x22\x91\x28\x91\x65\x99\x26\x27\x83\xc1\x0d\xe2\xf5\
\xae\x10\x9f\x6f\x8d\xbe\x57\xcc\xc5\xe6\x1e\x44\xe9\x5c\xbd\xde\
\x04\x4d\xab\x41\x38\x2c\x5d\x17\x0a\x0f\xfb\x6e\xf7\x12\xf0\xfc\
\xdc\x20\xff\x6e\xd4\x02\x32\xba\x4c\x26\x0f\x56\xfb\x7d\x0e\x6c\
\x36\x3b\x34\x1a\x06\x84\x42\xd2\x55\xb1\xf8\x28\xf2\xbc\xf0\x67\
\xd3\x87\x9b\xa8\xa9\xea\xf1\xb2\xae\x7f\x60\xc3\x3e\x7f\x26\x1c\
\x0e\x3b\x70\x1c\x87\xcd\x03\xc8\xe7\x6f\xe9\x1f\xf0\xa2\x9b\x51\
\x67\x8f\xa1\xaa\xa2\x9c\x10\x49\xda\x23\xf1\xf8\x21\x3d\x73\x75\
\x48\xec\xbf\x0b\xb3\x83\x6a\x94\xa2\x9c\x12\x16\x6f\x5b\xbd\x6d\
\x71\xf4\x8e\x76\x99\x41\x6c\x69\x1c\xa1\x26\x3a\x43\x2e\xb4\x88\
\x16\xd0\x3c\xa2\x9d\xe3\xd1\x2c\x9a\x41\xd3\x68\xca\x7c\x13\xbb\
\xe8\x1c\xdd\xa3\x2f\xd4\x63\x73\x3d\x53\xdc\x65\xdf\x07\xfa\x74\
\x81\x6f\x01\x06\x00\x5c\x52\x83\xd4\xd9\xb0\x72\xcb\x00\x00\x00\
\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x05\xf0\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xff\x61\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x69\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x30\x2d\x63\x30\x36\x30\x20\x36\x31\
\x2e\x31\x33\x34\x37\x37\x37\x2c\x20\x32\x30\x31\x30\x2f\x30\x32\
\x2f\x31\x32\x2d\x31\x37\x3a\x33\x32\x3a\x30\x30\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x52\x69\x67\x68\x74\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\
\x31\x2e\x30\x2f\x72\x69\x67\x68\x74\x73\x2f\x22\x20\x78\x6d\x6c\
\x6e\x73\x3a\x78\x6d\x70\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\
\x70\x2f\x31\x2e\x30\x2f\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\
\x3a\x73\x74\x52\x65\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\
\x31\x2e\x30\x2f\x73\x54\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\
\x63\x65\x52\x65\x66\x23\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\
\x70\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\
\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\
\x20\x78\x6d\x70\x52\x69\x67\x68\x74\x73\x3a\x4d\x61\x72\x6b\x65\
\x64\x3d\x22\x46\x61\x6c\x73\x65\x22\x20\x78\x6d\x70\x4d\x4d\x3a\
\x44\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\
\x64\x69\x64\x3a\x41\x36\x32\x31\x39\x37\x44\x33\x36\x41\x30\x38\
\x31\x31\x44\x46\x42\x35\x33\x44\x41\x46\x41\x32\x44\x31\x41\x33\
\x30\x36\x46\x41\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\
\x61\x6e\x63\x65\x49\x44\x3d\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\
\x41\x36\x32\x31\x39\x37\x44\x32\x36\x41\x30\x38\x31\x31\x44\x46\
\x42\x35\x33\x44\x41\x46\x41\x32\x44\x31\x41\x33\x30\x36\x46\x41\
\x22\x20\x78\x6d\x70\x3a\x43\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\
\x6c\x3d\x22\x41\x64\x6f\x62\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\
\x6f\x70\x20\x43\x53\x33\x20\x57\x69\x6e\x64\x6f\x77\x73\x22\x3e\
\x20\x3c\x78\x6d\x70\x4d\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\
\x72\x6f\x6d\x20\x73\x74\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\
\x63\x65\x49\x44\x3d\x22\x75\x75\x69\x64\x3a\x41\x43\x31\x46\x32\
\x45\x38\x33\x33\x32\x34\x41\x44\x46\x31\x31\x41\x41\x42\x38\x43\
\x35\x33\x39\x30\x44\x38\x35\x42\x35\x42\x33\x22\x20\x73\x74\x52\
\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\x75\
\x75\x69\x64\x3a\x43\x39\x44\x33\x34\x39\x36\x36\x34\x41\x33\x43\
\x44\x44\x31\x31\x42\x30\x38\x41\x42\x42\x42\x43\x46\x46\x31\x37\
\x32\x31\x35\x36\x22\x2f\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x3e\x20\x3c\x2f\x72\x64\x66\
\x3a\x52\x44\x46\x3e\x20\x3c\x2f\x78\x3a\x78\x6d\x70\x6d\x65\x74\
\x61\x3e\x20\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x65\x6e\x64\
\x3d\x22\x72\x22\x3f\x3e\xa9\x98\xa9\xe8\x00\x00\x02\x1d\x49\x44\
\x41\x54\x78\xda\x94\x92\x4d\x68\x13\x41\x18\x86\xdf\x24\xed\xae\
\x25\x41\x23\x1e\x54\xd0\x5b\x91\xda\x2a\x34\x60\xf0\xd8\x8b\x76\
\x8d\x3f\xd4\x1f\x52\x90\x52\xb0\x8d\x8d\xa7\x10\x6b\xec\x41\x94\
\x22\xf5\xa2\x20\x88\x3f\x97\x22\xf6\x64\x03\x8a\x21\x07\x23\xa5\
\x01\xcf\xb9\x44\xa4\x5a\x3d\x78\x10\x0c\x1e\xd2\xc0\x36\xb1\x6a\
\x12\xdd\x8d\xeb\xfb\x99\x39\x04\xb5\x44\x07\x9e\x9d\xd9\x6f\xe6\
\x7b\xe7\x9b\x77\xc6\x83\xff\x6b\x87\xc8\x23\x52\x20\xdd\x6a\xfc\
\xcf\x2d\x44\x5e\xc6\xe3\x71\x47\x7a\x45\xc8\xdd\x26\xe9\x30\x79\
\xa5\xb8\x11\x8b\xc5\xfa\x4d\xd3\x44\x24\x12\xe9\xe7\xbf\x87\xb8\
\x5c\x6d\x04\xde\x24\x12\x89\xde\x6a\xb5\x8a\x5a\xad\xf6\x8b\x7a\
\xbd\x8e\x46\xa3\x01\x5d\xd7\x91\x4a\xa5\xde\x7a\xda\x08\x14\x72\
\xb9\xdc\xbe\x60\x30\xb8\xa5\x5c\x2e\x43\x84\x2c\xcb\x82\xa6\x69\
\x48\xa7\xd3\xef\x38\x3f\xd5\x4e\x40\x16\x7d\xc8\xe7\xf3\xfb\x03\
\x81\xc0\x66\xa9\xc0\xc5\xa2\x33\x99\xcc\x7b\xc6\x2f\x90\xcc\x7a\
\x1e\xec\x21\x97\xd5\xf8\xa9\x2c\x4e\x26\x93\x05\xd9\xd9\x71\xc4\
\x43\x74\xa8\xf8\x9f\x2e\x93\xb3\xe4\x8a\x12\x69\x6d\x27\x48\xd1\
\x30\x0c\x51\x28\xfe\x6d\xd7\xa5\x68\x34\x2a\x93\x4b\x64\x62\x9d\
\xca\x86\x49\x4d\xfa\x3e\x7e\x4e\xfd\x36\xb9\x3c\x34\x74\xdc\x09\
\x87\x87\x45\x64\x99\x1c\x51\xf1\xbd\xe4\x6a\xaf\x7c\x48\x58\xf5\
\x8f\x49\x52\x9d\x45\xda\x51\xb9\xd7\x4a\xe5\x33\x2a\x95\x4f\x18\
\x1c\x0c\xf5\x65\xb3\x0b\x37\x19\xdb\x49\xb6\x93\x27\xdc\xcd\xb7\
\x1b\x38\x47\x0c\x49\xe0\x2e\x8b\xec\x66\xc5\xc4\x63\xe4\xd6\xe8\
\xe8\x99\x1e\xdb\x76\xe8\x72\x07\x45\xbe\x60\x60\xe0\x60\x0f\xe3\
\x09\xb2\x72\x12\x78\xbd\x8b\xc9\xdf\x81\xd3\xdf\x80\x7b\x16\x30\
\xc6\xf1\xd6\x06\x63\x22\x70\x67\x7c\x7c\xa2\xbb\x58\x5c\x65\x72\
\x27\x7c\xbe\x4d\xf0\x7a\x37\xc2\xed\xee\xa4\x48\x48\xde\xfb\x94\
\xd5\x3c\xb8\x51\x07\x66\xc8\xf4\x1a\xb0\x8d\x42\xb7\x25\x26\x47\
\x98\x9c\x9b\xbb\x7f\x77\x64\x24\xb2\xc3\x34\xd7\xd0\xd5\xa5\xf3\
\x91\x3c\xfc\xd8\xe2\xcd\xa4\xad\x9c\xfb\x01\xbc\x40\x73\xd7\x59\
\x56\x30\x23\xcf\x58\x2a\xf0\x12\x6d\x7e\xfe\x41\xc9\xef\xf7\x4b\
\x72\xa9\xe5\x1a\xc5\xbb\xec\x02\xb0\x61\x05\x78\x5e\x05\xce\xf3\
\xfe\x4a\xab\xc0\x35\x8e\xa7\xbf\xd2\x07\x11\x61\x55\xb8\xd4\xf4\
\x05\xd7\xc9\x45\xf2\x8c\xd8\x0a\x39\x81\x4d\x27\x35\x9a\x35\xa6\
\x03\x07\xd4\xda\x45\x3f\x2b\xf9\x29\xc0\x00\x30\x59\xbc\x44\x5c\
\x68\x4c\x23\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x0a\
\x04\xb7\xe4\xfe\
\x00\x63\
\x00\x6f\x00\x6e\x00\x6e\x00\x65\x00\x63\x00\x74\x00\x69\x00\x6f\x00\x6e\
\x00\x0a\
\x0a\x68\x2a\x84\
\x00\x64\
\x00\x69\x00\x73\x00\x63\x00\x6f\x00\x6e\x00\x6e\x00\x65\x00\x63\x00\x74\
\x00\x07\
\x0a\x65\x4b\x64\
\x00\x63\
\x00\x6f\x00\x6e\x00\x6e\x00\x65\x00\x63\x00\x74\
\x00\x05\
\x00\x6c\x99\x62\
\x00\x65\
\x00\x72\x00\x72\x00\x6f\x00\x72\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\
\x00\x00\x00\x48\x00\x00\x00\x00\x00\x01\x00\x00\x0a\xea\
\x00\x00\x00\x34\x00\x00\x00\x00\x00\x01\x00\x00\x05\x8c\
\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| mit | 4,841,948,417,961,667,000 | 7,152,118,283,942,133,000 | 57.910217 | 96 | 0.725773 | false |
jmcarbo/openerp7 | openerp/addons/mrp_byproduct/mrp_byproduct.py | 29 | 8840 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields
from openerp.osv import osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
class mrp_subproduct(osv.osv):
_name = 'mrp.subproduct'
_description = 'Byproduct'
_columns={
'product_id': fields.many2one('product.product', 'Product', required=True),
'product_qty': fields.float('Product Qty', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
'subproduct_type': fields.selection([('fixed','Fixed'),('variable','Variable')], 'Quantity Type', required=True, help="Define how the quantity of byproducts will be set on the production orders using this BoM.\
'Fixed' depicts a situation where the quantity of created byproduct is always equal to the quantity set on the BoM, regardless of how many are created in the production order.\
 In contrast, 'Variable' means that the quantity will be computed as\
'(quantity of byproduct set on the BoM / quantity of manufactured product set on the BoM * quantity of manufactured product in the production order.)'"),
'bom_id': fields.many2one('mrp.bom', 'BoM'),
}
_defaults={
'subproduct_type': 'variable',
'product_qty': lambda *a: 1.0,
}
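    # Worked example of the two quantity rules described in the help text
    # above (numbers are illustrative): a BoM producing 10 units of the main
    # product with a 2-unit byproduct line yields, for a 25-unit production
    # order, 2 / 10 * 25 = 5 byproduct units under 'variable', but exactly
    # 2 units under 'fixed'.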
def onchange_product_id(self, cr, uid, ids, product_id, context=None):
""" Changes UoM if product_id changes.
@param product_id: Changed product_id
@return: Dictionary of changed values
"""
if product_id:
prod = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
v = {'product_uom': prod.uom_id.id}
return {'value': v}
return {}
def onchange_uom(self, cr, uid, ids, product_id, product_uom, context=None):
res = {'value':{}}
if not product_uom or not product_id:
return res
product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
uom = self.pool.get('product.uom').browse(cr, uid, product_uom, context=context)
if uom.category_id.id != product.uom_id.category_id.id:
res['warning'] = {'title': _('Warning'), 'message': _('The Product Unit of Measure you chose has a different category than in the product form.')}
res['value'].update({'product_uom': product.uom_id.id})
return res
mrp_subproduct()
class mrp_bom(osv.osv):
_name = 'mrp.bom'
_description = 'Bill of Material'
_inherit='mrp.bom'
_columns={
'sub_products':fields.one2many('mrp.subproduct', 'bom_id', 'Byproducts'),
}
mrp_bom()
class mrp_production(osv.osv):
_description = 'Production'
_inherit= 'mrp.production'
def action_confirm(self, cr, uid, ids, context=None):
""" Confirms production order and calculates quantity based on subproduct_type.
@return: Newly generated picking Id.
"""
picking_id = super(mrp_production,self).action_confirm(cr, uid, ids, context=context)
product_uom_obj = self.pool.get('product.uom')
for production in self.browse(cr, uid, ids):
source = production.product_id.property_stock_production.id
if not production.bom_id:
continue
for sub_product in production.bom_id.sub_products:
product_uom_factor = product_uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, production.bom_id.product_uom.id)
qty1 = sub_product.product_qty
qty2 = production.product_uos and production.product_uos_qty or False
product_uos_factor = 0.0
if qty2 and production.bom_id.product_uos.id:
product_uos_factor = product_uom_obj._compute_qty(cr, uid, production.product_uos.id, production.product_uos_qty, production.bom_id.product_uos.id)
if sub_product.subproduct_type == 'variable':
if production.product_qty:
qty1 *= product_uom_factor / (production.bom_id.product_qty or 1.0)
if production.product_uos_qty:
qty2 *= product_uos_factor / (production.bom_id.product_uos_qty or 1.0)
data = {
'name': 'PROD:'+production.name,
'date': production.date_planned,
'product_id': sub_product.product_id.id,
'product_qty': qty1,
'product_uom': sub_product.product_uom.id,
'product_uos_qty': qty2,
'product_uos': production.product_uos and production.product_uos.id or False,
'location_id': source,
'location_dest_id': production.location_dest_id.id,
'move_dest_id': production.move_prod_id.id,
'state': 'waiting',
'production_id': production.id
}
self.pool.get('stock.move').create(cr, uid, data)
return picking_id
def _get_subproduct_factor(self, cr, uid, production_id, move_id=None, context=None):
"""Compute the factor to compute the qty of procucts to produce for the given production_id. By default,
it's always equal to the quantity encoded in the production order or the production wizard, but with
the module mrp_byproduct installed it can differ for byproducts having type 'variable'.
:param production_id: ID of the mrp.order
:param move_id: ID of the stock move that needs to be produced. Identify the product to produce.
:return: The factor to apply to the quantity that we should produce for the given production order and stock move.
"""
sub_obj = self.pool.get('mrp.subproduct')
move_obj = self.pool.get('stock.move')
production_obj = self.pool.get('mrp.production')
production_browse = production_obj.browse(cr, uid, production_id, context=context)
move_browse = move_obj.browse(cr, uid, move_id, context=context)
subproduct_factor = 1
sub_id = sub_obj.search(cr, uid,[('product_id', '=', move_browse.product_id.id),('bom_id', '=', production_browse.bom_id.id), ('subproduct_type', '=', 'variable')], context=context)
if sub_id:
            subproduct_record = sub_obj.browse(cr, uid, sub_id[0], context=context)
if subproduct_record.bom_id.product_qty:
subproduct_factor = subproduct_record.product_qty / subproduct_record.bom_id.product_qty
return subproduct_factor
return super(mrp_production, self)._get_subproduct_factor(cr, uid, production_id, move_id, context=context)
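    # Illustrative factor computation (values made up): with a variable
    # byproduct line of 2 units on a BoM that produces 10 units, this
    # returns 2 / 10 = 0.2; change.production.qty below then scales the
    # byproduct stock move by that factor.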
mrp_production()
class change_production_qty(osv.osv_memory):
_inherit = 'change.production.qty'
def _update_product_to_produce(self, cr, uid, prod, qty, context=None):
bom_obj = self.pool.get('mrp.bom')
move_lines_obj = self.pool.get('stock.move')
prod_obj = self.pool.get('mrp.production')
for m in prod.move_created_ids:
if m.product_id.id == prod.product_id.id:
move_lines_obj.write(cr, uid, [m.id], {'product_qty': qty})
else:
for sub_product_line in prod.bom_id.sub_products:
if sub_product_line.product_id.id == m.product_id.id:
factor = prod_obj._get_subproduct_factor(cr, uid, prod.id, m.id, context=context)
subproduct_qty = sub_product_line.subproduct_type == 'variable' and qty * factor or sub_product_line.product_qty
move_lines_obj.write(cr, uid, [m.id], {'product_qty': subproduct_qty})
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -7,530,728,565,381,712,000 | 4,660,932,565,193,575,000 | 52.575758 | 218 | 0.619118 | false |
google/grr | grr/server/grr_response_server/bin/config_updater_util.py | 1 | 44739 | #!/usr/bin/env python
"""Utilities for modifying the GRR server configuration."""
import argparse
import getpass
import os
import re
import shutil
import socket
import subprocess
import sys
import time
from typing import Optional, Text, Generator
from urllib import parse as urlparse
import MySQLdb
from MySQLdb.constants import CR as mysql_conn_errors
from MySQLdb.constants import ER as general_mysql_errors
import pkg_resources
# pylint: disable=unused-import,g-bad-import-order
from grr_response_server import server_plugins
# pylint: enable=g-bad-import-order,unused-import
from google.protobuf import text_format
from grr_api_client import errors as api_errors
from grr_api_client import root as api_root
from grr_response_client_builder import repacking
from grr_response_core import config as grr_config
from grr_response_core.lib.util import compatibility
from grr_response_server import maintenance_utils
from grr_response_server import server_startup
from grr_response_server.bin import config_updater_keys_util
from fleetspeak.src.config.proto.fleetspeak_config import config_pb2
from fleetspeak.src.server.grpcservice.proto.fleetspeak_grpcservice import grpcservice_pb2
from fleetspeak.src.server.proto.fleetspeak_server import server_pb2
from fleetspeak.src.server.proto.fleetspeak_server import services_pb2
try:
# Importing readline enables the raw_input calls to have history etc.
import readline # pylint: disable=unused-import,g-bad-import-order,g-import-not-at-top
except ImportError:
# readline is not bundled with Python on Windows. Simply ignoring failing
# import then.
pass
# These control retry behavior when checking that GRR can connect to
# MySQL during config initialization.
_MYSQL_MAX_RETRIES = 2
_MYSQL_RETRY_WAIT_SECS = 2
# Python hacks or executables larger than this limit will not be uploaded.
_MAX_SIGNED_BINARY_BYTES = 100 << 20 # 100 MiB
class ConfigInitError(Exception):
"""Exception raised to abort config initialization."""
def __init__(self):
super().__init__(
"Aborting config initialization. Please run 'grr_config_updater "
"initialize' to retry initialization.")
class BinaryTooLargeError(Exception):
"""Exception raised when trying to upload overly large binaries."""
class UserAlreadyExistsError(Exception):
"""Exception raised when trying to create an already-existing user."""
class UserNotFoundError(Exception):
"""Exception raised when trying to fetch a non-existent user."""
def __init__(self, username):
super().__init__("User '%s' does not exist." % username)
def ImportConfig(filename, config):
"""Reads an old config file and imports keys and user accounts."""
sections_to_import = ["PrivateKeys"]
entries_to_import = [
"Client.executable_signing_public_key", "CA.certificate",
"Frontend.certificate"
]
options_imported = 0
old_config = grr_config.CONFIG.MakeNewConfig()
old_config.Initialize(filename)
for entry in old_config.raw_data:
try:
section = entry.split(".")[0]
if section in sections_to_import or entry in entries_to_import:
config.Set(entry, old_config.Get(entry))
print("Imported %s." % entry)
options_imported += 1
except Exception as e: # pylint: disable=broad-except
print("Exception during import of %s: %s" % (entry, e))
return options_imported
def RetryQuestion(question_text, output_re="", default_val=None):
"""Continually ask a question until the output_re is matched."""
while True:
if default_val is not None:
new_text = "%s [%s]: " % (question_text, default_val)
else:
new_text = "%s: " % question_text
# pytype: disable=wrong-arg-count
    # Avoid turning a missing default into the literal string "None".
    default_str = "" if default_val is None else str(default_val)
    output = input(new_text) or default_str
# pytype: enable=wrong-arg-count
output = output.strip()
if not output_re or re.match(output_re, output):
break
else:
print("Invalid input, must match %s" % output_re)
return output
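# Illustrative interaction for RetryQuestion (the typed input is
# hypothetical):
#
#   RetryQuestion("Frontend port", "^[0-9]+$", 8080)
#   # Frontend port [8080]: <Enter>  -> returns "8080"
#   # Frontend port [8080]: abc      -> "Invalid input, must match ^[0-9]+$"
#
# The return value is always a string; helpers such as RetryIntQuestion
# below convert it as needed.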
def RetryBoolQuestion(question_text, default_bool):
if not isinstance(default_bool, bool):
raise ValueError("default_bool should be a boolean, not %s" %
type(default_bool))
default_val = "Y" if default_bool else "N"
prompt_suff = "[Yn]" if default_bool else "[yN]"
  # RetryQuestion appends " [default]: " itself, so avoid doubling the colon.
  return RetryQuestion("%s %s" % (question_text, prompt_suff), "[yY]|[nN]",
                       default_val)[0].upper() == "Y"
def RetryIntQuestion(question_text: str, default_int: int) -> int:
return int(RetryQuestion(question_text, "^[0-9]+$", str(default_int)))
def GetPassword(question_text: str) -> str:
# TODO(hanuszczak): Incorrect type specification for `getpass`.
# pytype: disable=wrong-arg-types
return getpass.getpass(prompt=question_text)
# pytype: enable=wrong-arg-types
def ConfigureHostnames(config, external_hostname: Optional[Text] = None):
"""This configures the hostnames stored in the config."""
if not external_hostname:
try:
external_hostname = socket.gethostname()
except (OSError, IOError):
print("Sorry, we couldn't guess your hostname.\n")
external_hostname = RetryQuestion(
"Please enter your hostname e.g. "
"grr.example.com", "^[\\.A-Za-z0-9-]+$", external_hostname)
print("""\n\n-=Server URL=-
The Server URL specifies the URL that the clients will connect to in order
to communicate with the server. For best results this should be publicly
accessible. By default this will be port 8080 with the URL ending in /control.
""")
frontend_url = RetryQuestion("Frontend URL", "^http://.*/$",
"http://%s:8080/" % external_hostname)
config.Set("Client.server_urls", [frontend_url])
frontend_port = urlparse.urlparse(frontend_url).port or grr_config.CONFIG.Get(
"Frontend.bind_port")
config.Set("Frontend.bind_port", frontend_port)
print("""\n\n-=AdminUI URL=-:
The UI URL specifies where the Administrative Web Interface can be found.
""")
ui_url = RetryQuestion("AdminUI URL", "^http[s]*://.*$",
"http://%s:8000" % external_hostname)
config.Set("AdminUI.url", ui_url)
ui_port = urlparse.urlparse(ui_url).port or grr_config.CONFIG.Get(
"AdminUI.port")
config.Set("AdminUI.port", ui_port)
def CheckMySQLConnection(db_options):
"""Checks whether a connection can be established to MySQL.
Args:
db_options: A dict mapping GRR MySQL config options to their values.
Returns:
A boolean indicating whether a connection could be made to a MySQL server
instance with the given options.
"""
for tries_left in range(_MYSQL_MAX_RETRIES, -1, -1):
try:
connection_options = dict(
host=db_options["Mysql.host"],
db=db_options["Mysql.database_name"],
user=db_options["Mysql.database_username"],
passwd=db_options["Mysql.database_password"],
charset="utf8")
if "Mysql.port" in db_options:
connection_options["port"] = db_options["Mysql.port"]
if "Mysql.unix_socket" in db_options:
connection_options["unix_socket"] = db_options["Mysql.unix_socket"]
ssl_enabled = "Mysql.client_key_path" in db_options
if ssl_enabled:
connection_options["ssl"] = {
"key": db_options["Mysql.client_key_path"],
"cert": db_options["Mysql.client_cert_path"],
"ca": db_options["Mysql.ca_cert_path"],
}
connection = MySQLdb.connect(**connection_options)
if ssl_enabled:
cursor = connection.cursor()
cursor.execute("SHOW VARIABLES LIKE 'have_ssl'")
res = cursor.fetchone()
if res[0] == "have_ssl" and res[1] == "YES":
print("SSL enabled successfully.")
else:
print("Unable to establish SSL connection to MySQL.")
return False
return True
except MySQLdb.OperationalError as mysql_op_error:
if len(mysql_op_error.args) < 2:
# We expect the exception's arguments to be an error-code and
# an error message.
print("Unexpected exception type received from MySQL. %d attempts "
"left: %s" % (tries_left, mysql_op_error))
time.sleep(_MYSQL_RETRY_WAIT_SECS)
continue
if mysql_op_error.args[0] == mysql_conn_errors.CONNECTION_ERROR:
print("Failed to connect to MySQL. Is it running? %d attempts left." %
tries_left)
elif mysql_op_error.args[0] == mysql_conn_errors.UNKNOWN_HOST:
print("Unknown-hostname error encountered while trying to connect to "
"MySQL.")
return False # No need for retry.
elif mysql_op_error.args[0] == general_mysql_errors.BAD_DB_ERROR:
# GRR db doesn't exist yet. That's expected if this is the initial
# setup.
return True
elif mysql_op_error.args[0] in (
general_mysql_errors.ACCESS_DENIED_ERROR,
general_mysql_errors.DBACCESS_DENIED_ERROR):
print("Permission error encountered while trying to connect to "
"MySQL: %s" % mysql_op_error)
return False # No need for retry.
else:
print("Unexpected operational error encountered while trying to "
"connect to MySQL. %d attempts left: %s" %
(tries_left, mysql_op_error))
except MySQLdb.Error as mysql_error:
print("Unexpected error encountered while trying to connect to MySQL. "
"%d attempts left: %s" % (tries_left, mysql_error))
time.sleep(_MYSQL_RETRY_WAIT_SECS)
return False
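# Minimal sketch of the option dict this function expects (credentials are
# placeholders, not real values):
#
#   ok = CheckMySQLConnection({
#       "Mysql.host": "localhost",
#       "Mysql.port": 3306,
#       "Mysql.database_name": "grr",
#       "Mysql.database_username": "grr",
#       "Mysql.database_password": "<password>",
#   })
#
# The optional Mysql.unix_socket and Mysql.client_key_path/client_cert_path/
# ca_cert_path keys switch the connection to a local socket or SSL
# respectively.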
def ConfigureMySQLDatastore(config):
"""Prompts the user for configuration details for a MySQL datastore."""
db_options = {}
db_options["Database.implementation"] = "MysqlDB"
db_options["Blobstore.implementation"] = "DbBlobStore"
print("GRR will use MySQL as its database backend. Enter connection details:")
datastore_init_complete = False
while not datastore_init_complete:
db_options["Mysql.host"] = RetryQuestion("MySQL Host", "^[\\.A-Za-z0-9-]+$",
config["Mysql.host"])
db_options["Mysql.port"] = int(
RetryQuestion("MySQL Port (0 for local socket)", "^[0-9]+$",
config["Mysql.port"]))
db_options["Mysql.database"] = RetryQuestion("MySQL Database",
"^[A-Za-z0-9-]+$",
config["Mysql.database_name"])
db_options["Mysql.database_name"] = db_options["Mysql.database"]
db_options["Mysql.username"] = RetryQuestion(
"MySQL Username", "[A-Za-z0-9-@]+$", config["Mysql.database_username"])
db_options["Mysql.database_username"] = db_options["Mysql.username"]
db_options["Mysql.password"] = GetPassword(
"Please enter password for database user %s: " %
db_options["Mysql.username"])
db_options["Mysql.database_password"] = db_options["Mysql.password"]
use_ssl = RetryBoolQuestion("Configure SSL connections for MySQL?", False)
if use_ssl:
db_options["Mysql.client_key_path"] = RetryQuestion(
"Path to the client private key file",
default_val=config["Mysql.client_key_path"])
db_options["Mysql.client_cert_path"] = RetryQuestion(
"Path to the client certificate file",
default_val=config["Mysql.client_cert_path"])
db_options["Mysql.ca_cert_path"] = RetryQuestion(
"Path to the CA certificate file",
default_val=config["Mysql.ca_cert_path"])
if CheckMySQLConnection(db_options):
print("Successfully connected to MySQL with the provided details.")
datastore_init_complete = True
else:
print("Error: Could not connect to MySQL with the provided details.")
should_retry = RetryBoolQuestion(
"Re-enter MySQL details? Answering 'no' will abort config "
"initialization: ", True)
if should_retry:
db_options.clear()
else:
raise ConfigInitError()
for option, value in db_options.items():
config.Set(option, value)
class FleetspeakConfig:
"""Wraps the bundled fleetspeak configuration."""
def __init__(self):
self.use_fleetspeak: bool = False
self.external_hostname: str = None
self.admin_port = 4444
self.grr_port = 11111
self.https_port = 4443
self.mysql_username: str = None
self.mysql_password: str = None
self.mysql_host: str = None
self.mysql_port = 3306
self.mysql_database: str = None
self.mysql_unix_socket: str = None
self.config_dir = "/etc/fleetspeak-server"
def Prompt(self, config):
"""Sets up the in-memory configuration interactively."""
if self._IsFleetspeakPresent():
self.use_fleetspeak = RetryBoolQuestion(
"Use Fleetspeak (EXPERIMENTAL, next generation communication "
"framework)?", False)
else:
self.use_fleetspeak = False
print("Fleetspeak (EXPERIMENTAL, optional, next generation "
"communication framework) seems to be missing.")
print("Skipping Fleetspeak configuration.\n")
if self.use_fleetspeak:
try:
self.external_hostname = socket.gethostname()
except (OSError, IOError):
self.external_hostname = ""
print("Sorry, we couldn't guess your hostname.\n")
self.external_hostname = RetryQuestion(
"Please enter your hostname e.g. "
"grr.example.com", "^[\\.A-Za-z0-9-]+$", self.external_hostname)
self.https_port = RetryIntQuestion("Fleetspeak public HTTPS port",
self.https_port)
self._PromptMySQL(config)
def Write(self, config):
if self.use_fleetspeak:
self._WriteEnabled(config)
else:
self._WriteDisabled(config)
def _ConfigPath(self, *path_components: str) -> str:
return os.path.join(self.config_dir, *path_components)
def _IsFleetspeakPresent(self) -> bool:
if not os.path.exists(self._ConfigPath()):
return False
if not shutil.which("fleetspeak-config"):
return False
return True
def _PromptMySQLOnce(self, config):
"""Prompt the MySQL configuration once."""
self.mysql_host = RetryQuestion("Fleetspeak MySQL Host",
"^[\\.A-Za-z0-9-]+$", self.mysql_host or
config["Mysql.host"])
self.mysql_port = RetryIntQuestion(
"Fleetspeak MySQL Port (0 for local socket)", self.mysql_port or
0) or None
if self.mysql_port is None:
# golang's mysql connector needs the socket specified explicitly.
self.mysql_unix_socket = RetryQuestion(
"Fleetspeak MySQL local socket path", ".+",
self._FindMysqlUnixSocket() or "")
self.mysql_database = RetryQuestion("Fleetspeak MySQL Database",
"^[A-Za-z0-9-]+$",
self.mysql_database or "fleetspeak")
self.mysql_username = RetryQuestion(
"Fleetspeak MySQL Username", "[A-Za-z0-9-@]+$", self.mysql_username or
config["Mysql.database_username"])
self.mysql_password = GetPassword(
f"Please enter password for database user {self.mysql_username}: ")
def _PromptMySQL(self, config):
"""Prompts the MySQL configuration, retrying if the configuration is invalid."""
while True:
self._PromptMySQLOnce(config)
if self._CheckMySQLConnection():
print("Successfully connected to MySQL with the given configuration.")
return
else:
print("Error: Could not connect to MySQL with the given configuration.")
retry = RetryBoolQuestion("Do you want to retry MySQL configuration?",
True)
if not retry:
raise ConfigInitError()
def _WriteDisabled(self, config):
config.Set("Server.fleetspeak_enabled", False)
config.Set("Client.fleetspeak_enabled", False)
config.Set("ClientBuilder.fleetspeak_bundled", False)
config.Set("Server.fleetspeak_server", "")
if self._IsFleetspeakPresent():
with open(self._ConfigPath("disabled"), "w") as f:
f.write("The existence of this file disables the "
"fleetspeak-server.service systemd unit.\n")
def _WriteEnabled(self, config):
"""Applies the in-memory configuration for the use_fleetspeak case."""
service_config = services_pb2.ServiceConfig(name="GRR", factory="GRPC")
grpc_config = grpcservice_pb2.Config(
target="localhost:{}".format(self.grr_port), insecure=True)
service_config.config.Pack(grpc_config)
server_conf = server_pb2.ServerConfig(services=[service_config])
server_conf.broadcast_poll_time.seconds = 1
with open(self._ConfigPath("server.services.config"), "w") as f:
f.write(text_format.MessageToString(server_conf))
cp = config_pb2.Config()
cp.configuration_name = "Fleetspeak"
if self.mysql_unix_socket:
cp.components_config.mysql_data_source_name = (
"{user}:{password}@unix({socket})/{db}".format(
user=self.mysql_username,
password=self.mysql_password,
socket=self.mysql_unix_socket,
db=self.mysql_database))
else:
cp.components_config.mysql_data_source_name = (
"{user}:{password}@tcp({host}:{port})/{db}".format(
user=self.mysql_username,
password=self.mysql_password,
host=self.mysql_host,
port=self.mysql_port,
db=self.mysql_database))
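    # The resulting data source name looks like, e.g. (placeholder
    # credentials): "fleetspeak:secret@tcp(db.example.com:3306)/fleetspeak",
    # or "fleetspeak:secret@unix(/run/mysqld/mysqld.sock)/fleetspeak" for
    # the socket variant.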
cp.components_config.https_config.listen_address = "{}:{}".format(
self.external_hostname, self.https_port)
cp.components_config.https_config.disable_streaming = False
cp.components_config.admin_config.listen_address = "localhost:{}".format(
self.admin_port)
cp.public_host_port.append(cp.components_config.https_config.listen_address)
cp.server_component_configuration_file = self._ConfigPath(
"server.components.config")
cp.trusted_cert_file = self._ConfigPath("trusted_cert.pem")
cp.trusted_cert_key_file = self._ConfigPath("trusted_cert_key.pem")
cp.server_cert_file = self._ConfigPath("server_cert.pem")
cp.server_cert_key_file = self._ConfigPath("server_cert_key.pem")
cp.linux_client_configuration_file = self._ConfigPath("linux_client.config")
cp.windows_client_configuration_file = self._ConfigPath(
"windows_client.config")
cp.darwin_client_configuration_file = self._ConfigPath(
"darwin_client.config")
p = subprocess.Popen(["fleetspeak-config", "-config", "/dev/stdin"],
stdin=subprocess.PIPE)
p.communicate(input=text_format.MessageToString(cp).encode())
if p.wait() != 0:
raise RuntimeError("fleetspeak-config command failed.")
# These modules don't exist on Windows, so importing locally.
# pylint: disable=g-import-not-at-top
import grp
import pwd
# pylint: enable=g-import-not-at-top
if (os.geteuid() == 0 and pwd.getpwnam("fleetspeak") and
grp.getgrnam("fleetspeak")):
subprocess.check_call(
["chown", "-R", "fleetspeak:fleetspeak",
self._ConfigPath()])
try:
os.unlink(self._ConfigPath("disabled"))
except FileNotFoundError:
pass
config.Set("Server.fleetspeak_enabled", True)
config.Set("Client.fleetspeak_enabled", True)
config.Set("ClientBuilder.fleetspeak_bundled", True)
config.Set(
"Target:Linux", {
"ClientBuilder.fleetspeak_client_config":
cp.linux_client_configuration_file
})
config.Set(
"Target:Windows", {
"ClientBuilder.fleetspeak_client_config":
cp.windows_client_configuration_file
})
config.Set(
"Target:Darwin", {
"ClientBuilder.fleetspeak_client_config":
cp.darwin_client_configuration_file
})
config.Set("Server.fleetspeak_server",
cp.components_config.admin_config.listen_address)
config.Set("FleetspeakFrontend Context",
{"Server.fleetspeak_message_listen_address": grpc_config.target})
def _CheckMySQLConnection(self):
"""Checks the MySQL configuration by attempting a connection."""
db_options = {
"Mysql.host": self.mysql_host,
"Mysql.database_name": self.mysql_database,
"Mysql.database_username": self.mysql_username,
"Mysql.database_password": self.mysql_password,
}
if self.mysql_port is not None:
db_options["Mysql.port"] = self.mysql_port
if self.mysql_unix_socket is not None:
db_options["Mysql.unix_socket"] = self.mysql_unix_socket
# In Python, localhost is automatically mapped to connecting via the UNIX
# domain socket.
# However, for Go we require a TCP connection at the moment.
# So if the host is localhost, try to connect to 127.0.0.1 to force TCP.
if db_options["Mysql.host"] == "localhost" and "Mysql.port" in db_options:
db_options_localhost = dict(db_options)
db_options_localhost["Mysql.host"] = "127.0.0.1"
if CheckMySQLConnection(db_options_localhost):
return True
return CheckMySQLConnection(db_options)
def _ListUnixSockets(self) -> Generator[str, None, None]:
"""Returns paths of all active UNIX sockets."""
# Example /proc/net/unix:
#
# Num RefCount Protocol Flags Type St Inode Path
# [...]
# 0000000000000000: 00000002 00000000 00010000 0001 01 42013 \
# /run/mysqld/mysqld.sock
# [...]
hex_digit = "[0-9a-fA-F]"
regex = re.compile(f"^{hex_digit}+: ({hex_digit}+ +){{6}}(.*)$")
with open("/proc/net/unix") as f:
for line in f:
line = line.strip("\n")
match = regex.match(line)
if match:
yield match.group(2)
def _FindMysqlUnixSocket(self) -> Optional[str]:
for socket_path in self._ListUnixSockets():
if "mysql" in socket_path:
return socket_path
return None
def ConfigureDatastore(config):
"""Guides the user through configuration of the datastore."""
print("\n\n-=GRR Datastore=-\n"
"For GRR to work each GRR server has to be able to communicate with\n"
"the datastore. To do this we need to configure a datastore.\n")
existing_datastore = grr_config.CONFIG.Get("Datastore.implementation")
if not existing_datastore or existing_datastore == "FakeDataStore":
ConfigureMySQLDatastore(config)
return
print("Found existing settings:\n REL_DB MySQL database")
if existing_datastore == "SqliteDataStore":
set_up_mysql = RetryBoolQuestion(
"The SQLite datastore is no longer supported. Would you like to\n"
"set up a MySQL datastore? Answering 'no' will abort config "
"initialization.", True)
if set_up_mysql:
print("\nPlease note that no data will be migrated from SQLite to "
"MySQL.\n")
ConfigureMySQLDatastore(config)
else:
raise ConfigInitError()
elif existing_datastore == "MySQLAdvancedDataStore":
set_up_mysql = RetryBoolQuestion(
"The MySQLAdvancedDataStore is no longer supported. Would you like to\n"
"set up a new MySQL datastore? Answering 'no' will abort config "
"initialization.", True)
if set_up_mysql:
print("\nPlease note that no data will be migrated from the old data "
"store.\n")
ConfigureMySQLDatastore(config)
else:
raise ConfigInitError()
def ConfigureUrls(config, external_hostname: Optional[Text] = None):
"""Guides the user through configuration of various URLs used by GRR."""
print("\n\n-=GRR URLs=-\n"
"For GRR to work each client has to be able to communicate with the\n"
"server. To do this we normally need a public dns name or IP address\n"
"to communicate with. In the standard configuration this will be used\n"
"to host both the client facing server and the admin user interface.\n")
existing_ui_urn = grr_config.CONFIG.Get("AdminUI.url", default=None)
existing_frontend_urns = grr_config.CONFIG.Get("Client.server_urls")
if not existing_frontend_urns:
# Port from older deprecated setting Client.control_urls.
existing_control_urns = grr_config.CONFIG.Get(
"Client.control_urls", default=None)
if existing_control_urns is not None:
existing_frontend_urns = []
for existing_control_urn in existing_control_urns:
if not existing_control_urn.endswith("control"):
raise RuntimeError("Invalid existing control URL: %s" %
existing_control_urn)
existing_frontend_urns.append(
existing_control_urn.rsplit("/", 1)[0] + "/")
config.Set("Client.server_urls", existing_frontend_urns)
config.Set("Client.control_urls", ["deprecated use Client.server_urls"])
if not existing_frontend_urns or not existing_ui_urn:
ConfigureHostnames(config, external_hostname=external_hostname)
else:
print("Found existing settings:\n AdminUI URL: %s\n "
"Frontend URL(s): %s\n" % (existing_ui_urn, existing_frontend_urns))
if not RetryBoolQuestion("Do you want to keep this configuration?", True):
ConfigureHostnames(config, external_hostname=external_hostname)
def ConfigureEmails(config):
"""Guides the user through email setup."""
print("\n\n-=GRR Emails=-\n"
"GRR needs to be able to send emails for various logging and\n"
"alerting functions. The email domain will be appended to GRR\n"
"usernames when sending emails to users.\n")
existing_log_domain = grr_config.CONFIG.Get("Logging.domain", default=None)
existing_al_email = grr_config.CONFIG.Get(
"Monitoring.alert_email", default=None)
existing_em_email = grr_config.CONFIG.Get(
"Monitoring.emergency_access_email", default=None)
if existing_log_domain and existing_al_email and existing_em_email:
print("Found existing settings:\n"
" Email Domain: %s\n Alert Email Address: %s\n"
" Emergency Access Email Address: %s\n" %
(existing_log_domain, existing_al_email, existing_em_email))
if RetryBoolQuestion("Do you want to keep this configuration?", True):
return
print("\n\n-=Monitoring/Email Domain=-\n"
"Emails concerning alerts or updates must be sent to this domain.\n")
domain = RetryQuestion("Email Domain e.g example.com",
"^([\\.A-Za-z0-9-]+)*$",
grr_config.CONFIG.Get("Logging.domain"))
config.Set("Logging.domain", domain)
print("\n\n-=Alert Email Address=-\n"
"Address where monitoring events get sent, e.g. crashed clients, \n"
"broken server, etc.\n")
email = RetryQuestion("Alert Email Address", "", "grr-monitoring@%s" % domain)
config.Set("Monitoring.alert_email", email)
print("\n\n-=Emergency Email Address=-\n"
"Address where high priority events such as an emergency ACL bypass "
"are sent.\n")
emergency_email = RetryQuestion("Emergency Access Email Address", "",
"grr-emergency@%s" % domain)
config.Set("Monitoring.emergency_access_email", emergency_email)
def InstallTemplatePackage():
"""Call pip to install the templates."""
virtualenv_bin = os.path.dirname(sys.executable)
extension = os.path.splitext(sys.executable)[1]
pip = "%s/pip%s" % (virtualenv_bin, extension)
# Install the GRR server component to satisfy the dependency below.
major_minor_version = ".".join(
pkg_resources.get_distribution("grr-response-core").version.split(".")
[0:2])
# Note that this version spec requires a recent version of pip
subprocess.check_call([
sys.executable, pip, "install", "--upgrade", "-f",
"https://storage.googleapis.com/releases.grr-response.com/index.html",
"grr-response-templates==%s.*" % major_minor_version
])
def FinalizeConfigInit(config,
admin_password: Optional[Text] = None,
redownload_templates: bool = False,
repack_templates: bool = True,
prompt: bool = True):
"""Performs the final steps of config initialization."""
config.Set("Server.initialized", True)
print("\nWriting configuration to %s." % config["Config.writeback"])
config.Write()
print("Initializing the datastore.")
# Reload the config and initialize the GRR database.
server_startup.Init()
print("\nStep 3: Adding GRR Admin User")
try:
CreateUser("admin", password=admin_password, is_admin=True)
except UserAlreadyExistsError:
if prompt:
# pytype: disable=wrong-arg-count
if ((input("User 'admin' already exists, do you want to "
"reset the password? [yN]: ").upper() or "N") == "Y"):
UpdateUser("admin", password=admin_password, is_admin=True)
# pytype: enable=wrong-arg-count
else:
UpdateUser("admin", password=admin_password, is_admin=True)
print("\nStep 4: Repackaging clients with new configuration.")
if prompt:
redownload_templates = RetryBoolQuestion(
"Server debs include client templates. Re-download templates?", False)
repack_templates = RetryBoolQuestion("Repack client templates?", True)
if redownload_templates:
InstallTemplatePackage()
# Build debug binaries, then build release binaries.
if repack_templates:
repacking.TemplateRepacker().RepackAllTemplates(upload=True)
print("\nGRR Initialization complete! You can edit the new configuration "
"in %s.\n" % config["Config.writeback"])
if prompt and os.geteuid() == 0:
restart = RetryBoolQuestion(
"Restart service for the new configuration "
"to take effect?", True)
if restart:
for service in ("grr-server", "fleetspeak-server"):
try:
print(f"Restarting service: {service}.")
subprocess.check_call(["service", service, "restart"])
except subprocess.CalledProcessError as e:
print(f"Failed to restart: {service}.")
print(e, file=sys.stderr)
else:
print("Please restart the service for the new configuration to take "
"effect.\n")
def Initialize(config=None,
external_hostname: Optional[Text] = None,
admin_password: Optional[Text] = None,
redownload_templates: bool = False,
repack_templates: bool = True):
"""Initialize or update a GRR configuration."""
print("Checking write access on config %s" % config["Config.writeback"])
if not os.access(config.parser.config_path, os.W_OK):
raise IOError("Config not writeable (need sudo?)")
print("\nStep 0: Importing Configuration from previous installation.")
options_imported = 0
prev_config_file = config.Get("ConfigUpdater.old_config", default=None)
if prev_config_file and os.access(prev_config_file, os.R_OK):
print("Found config file %s." % prev_config_file)
# pytype: disable=wrong-arg-count
if input("Do you want to import this configuration? "
"[yN]: ").upper() == "Y":
options_imported = ImportConfig(prev_config_file, config)
# pytype: enable=wrong-arg-count
else:
print("No old config file found.")
print("\nStep 1: Setting Basic Configuration Parameters")
print("We are now going to configure the server using a bunch of questions.")
fs_config = FleetspeakConfig()
fs_config.Prompt(config)
ConfigureDatastore(config)
ConfigureUrls(config, external_hostname=external_hostname)
ConfigureEmails(config)
print("\nStep 2: Key Generation")
if config.Get("PrivateKeys.server_key", default=None):
if options_imported > 0:
print("Since you have imported keys from another installation in the "
"last step,\nyou probably do not want to generate new keys now.")
# pytype: disable=wrong-arg-count
if (input("You already have keys in your config, do you want to"
" overwrite them? [yN]: ").upper() or "N") == "Y":
config_updater_keys_util.GenerateKeys(config, overwrite_keys=True)
# pytype: enable=wrong-arg-count
else:
config_updater_keys_util.GenerateKeys(config)
fs_config.Write(config)
FinalizeConfigInit(
config,
admin_password=admin_password,
redownload_templates=redownload_templates,
repack_templates=repack_templates,
prompt=True)
def InitializeNoPrompt(
config=None,
external_hostname: Optional[Text] = None,
admin_password: Optional[Text] = None,
mysql_hostname: Optional[Text] = None,
mysql_port: Optional[int] = None,
mysql_username: Optional[Text] = None,
mysql_password: Optional[Text] = None,
mysql_db: Optional[Text] = None,
mysql_client_key_path: Optional[Text] = None,
mysql_client_cert_path: Optional[Text] = None,
mysql_ca_cert_path: Optional[Text] = None,
redownload_templates: bool = False,
repack_templates: bool = True,
use_fleetspeak: bool = False,
mysql_fleetspeak_db: Optional[Text] = None,
):
"""Initialize GRR with no prompts.
Args:
config: config object
external_hostname: A hostname.
admin_password: A password used for the admin user.
mysql_hostname: A hostname used for establishing connection to MySQL.
mysql_port: A port used for establishing connection to MySQL.
mysql_username: A username used for establishing connection to MySQL.
mysql_password: A password used for establishing connection to MySQL.
mysql_db: Name of the MySQL database to use.
mysql_client_key_path: The path name of the client private key file.
mysql_client_cert_path: The path name of the client public key certificate.
mysql_ca_cert_path: The path name of the CA certificate file.
redownload_templates: Indicates whether templates should be re-downloaded.
repack_templates: Indicates whether templates should be re-packed.
use_fleetspeak: Whether to use Fleetspeak.
mysql_fleetspeak_db: Name of the MySQL database to use for Fleetspeak.
Raises:
ValueError: if required flags are not provided, or if the config has
already been initialized.
IOError: if config is not writeable
ConfigInitError: if GRR is unable to connect to a running MySQL instance.
This method does the minimum work necessary to configure GRR without any user
prompting, relying heavily on config default values. User must supply the
external hostname, admin password, and MySQL password; everything else is set
automatically.
"""
if config["Server.initialized"]:
raise ValueError("Config has already been initialized.")
if not external_hostname:
raise ValueError(
"--noprompt set, but --external_hostname was not provided.")
if not admin_password:
raise ValueError("--noprompt set, but --admin_password was not provided.")
if mysql_password is None:
raise ValueError("--noprompt set, but --mysql_password was not provided.")
print("Checking write access on config %s" % config.parser)
if not os.access(config.parser.config_path, os.W_OK):
raise IOError("Config not writeable (need sudo?)")
config_dict = {}
config_dict["Database.implementation"] = "MysqlDB"
config_dict["Blobstore.implementation"] = "DbBlobStore"
config_dict["Mysql.host"] = mysql_hostname or config["Mysql.host"]
config_dict["Mysql.port"] = mysql_port or config["Mysql.port"]
config_dict["Mysql.database_name"] = config_dict[
"Mysql.database"] = mysql_db or config["Mysql.database_name"]
config_dict["Mysql.database_username"] = config_dict["Mysql.username"] = (
mysql_username or config["Mysql.database_username"])
config_dict["Client.server_urls"] = [
"http://%s:%s/" % (external_hostname, config["Frontend.bind_port"])
]
config_dict["AdminUI.url"] = "http://%s:%s" % (external_hostname,
config["AdminUI.port"])
config_dict["Logging.domain"] = external_hostname
config_dict["Monitoring.alert_email"] = ("grr-monitoring@%s" %
external_hostname)
config_dict["Monitoring.emergency_access_email"] = ("grr-emergency@%s" %
external_hostname)
# Print all configuration options, except for the MySQL password.
print("Setting configuration as:\n\n%s" % config_dict)
config_dict["Mysql.database_password"] = config_dict[
"Mysql.password"] = mysql_password
if mysql_client_key_path is not None:
config_dict["Mysql.client_key_path"] = mysql_client_key_path
config_dict["Mysql.client_cert_path"] = mysql_client_cert_path
config_dict["Mysql.ca_cert_path"] = mysql_ca_cert_path
if CheckMySQLConnection(config_dict):
print("Successfully connected to MySQL with the given configuration.")
else:
print("Error: Could not connect to MySQL with the given configuration.")
raise ConfigInitError()
for key, value in config_dict.items():
config.Set(key, value)
config_updater_keys_util.GenerateKeys(config)
fs_config = FleetspeakConfig()
fs_config.use_fleetspeak = use_fleetspeak
fs_config.external_hostname = external_hostname
fs_config.mysql_username = mysql_username
fs_config.mysql_password = mysql_password
fs_config.mysql_host = mysql_hostname
if mysql_port:
fs_config.mysql_port = mysql_port
fs_config.mysql_database = mysql_fleetspeak_db
fs_config.Write(config)
FinalizeConfigInit(
config,
admin_password=admin_password,
redownload_templates=redownload_templates,
repack_templates=repack_templates,
prompt=False)
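# A minimal sketch of a fully non-interactive setup. All values below are
# illustrative placeholders; per the docstring above, only external_hostname,
# admin_password and mysql_password are strictly required, and everything else
# falls back to config defaults:
#
#   InitializeNoPrompt(
#       config=grr_config.CONFIG,
#       external_hostname="grr.example.com",
#       admin_password="<admin-password>",
#       mysql_password="<mysql-password>",
#       use_fleetspeak=True,
#       mysql_fleetspeak_db="fleetspeak")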
def UploadSignedBinary(source_path,
binary_type,
platform,
upload_subdirectory=""):
"""Signs a binary and uploads it to the datastore.
Args:
source_path: Path to the binary to upload.
binary_type: Type of the binary, e.g python-hack or executable.
platform: Client platform where the binary is intended to be run.
upload_subdirectory: Path of a subdirectory to upload the binary to,
relative to the canonical path for binaries of the given type and
platform.
Raises:
BinaryTooLargeError: If the binary to upload is too large.
"""
file_size = os.path.getsize(source_path)
if file_size > _MAX_SIGNED_BINARY_BYTES:
raise BinaryTooLargeError(
"File [%s] is of size %d (bytes), which exceeds the allowed maximum "
"of %d bytes." % (source_path, file_size, _MAX_SIGNED_BINARY_BYTES))
context = ["Platform:%s" % platform.title(), "Client Context"]
signing_key = grr_config.CONFIG.Get(
"PrivateKeys.executable_signing_private_key", context=context)
root_api = maintenance_utils.InitGRRRootAPI()
binary_path = "/".join([
platform.lower(),
upload_subdirectory,
os.path.basename(source_path),
])
binary = root_api.GrrBinary(binary_type, binary_path)
with open(source_path, "rb") as fd:
binary.Upload(
fd,
sign_fn=binary.DefaultUploadSigner(
private_key=signing_key.GetRawPrivateKey()))
print("Uploaded %s to %s" % (binary_type, binary_path))
def CreateUser(username, password=None, is_admin=False):
"""Creates a new GRR user."""
grr_api = maintenance_utils.InitGRRRootAPI()
try:
user_exists = grr_api.GrrUser(username).Get() is not None
except api_errors.ResourceNotFoundError:
user_exists = False
if user_exists:
raise UserAlreadyExistsError("User '%s' already exists." % username)
user_type, password = _GetUserTypeAndPassword(
username, password=password, is_admin=is_admin)
grr_api.CreateGrrUser(
username=username, user_type=user_type, password=password)
def UpdateUser(username, password=None, is_admin=False):
"""Updates the password or privilege-level for a user."""
user_type, password = _GetUserTypeAndPassword(
username, password=password, is_admin=is_admin)
grr_api = maintenance_utils.InitGRRRootAPI()
grr_user = grr_api.GrrUser(username).Get()
grr_user.Modify(user_type=user_type, password=password)
def GetUserSummary(username):
"""Returns a string with summary info for a user."""
grr_api = maintenance_utils.InitGRRRootAPI()
try:
return _Summarize(grr_api.GrrUser(username).Get().data)
except api_errors.ResourceNotFoundError:
raise UserNotFoundError(username)
def GetAllUserSummaries():
"""Returns a string containing summary info for all GRR users."""
grr_api = maintenance_utils.InitGRRRootAPI()
user_wrappers = sorted(grr_api.ListGrrUsers(), key=lambda x: x.username)
summaries = [_Summarize(w.data) for w in user_wrappers]
return "\n\n".join(summaries)
def _Summarize(user_info):
"""Returns a string with summary info for a user."""
return "Username: %s\nIs Admin: %s" % (user_info.username, user_info.user_type
== api_root.GrrUser.USER_TYPE_ADMIN)
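# Sample output of _Summarize for an admin account (illustrative values):
#
#   Username: admin
#   Is Admin: True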
def DeleteUser(username):
"""Deletes a GRR user from the datastore."""
grr_api = maintenance_utils.InitGRRRootAPI()
try:
grr_api.GrrUser(username).Get().Delete()
except api_errors.ResourceNotFoundError:
raise UserNotFoundError(username)
def _GetUserTypeAndPassword(username, password=None, is_admin=False):
"""Returns the user-type and password for a user.
Args:
username: Username for the user.
password: Password for the user. If None, or not provided, we will prompt
for one via the terminal.
is_admin: Indicates whether the user should have admin privileges.
"""
if is_admin:
user_type = api_root.GrrUser.USER_TYPE_ADMIN
else:
user_type = api_root.GrrUser.USER_TYPE_STANDARD
if password is None:
password = GetPassword("Please enter password for user '%s':" % username)
return user_type, password
def SwitchToRelDB(config):
"""Switches a given config from using AFF4 to using REL_DB."""
print("***************************************************************\n"
"Make sure to back up the existing configuration writeback file.\n"
"Writeback file path:\n%s\n"
"***************************************************************\n" %
config["Config.writeback"])
RetryBoolQuestion("Continue?", True)
config.Set("Database.implementation", "MysqlDB")
if (config["Blobstore.implementation"] != "DbBlobStore" or RetryBoolQuestion(
"You have a custom 'Blobstore.implementation' setting. Do you want\n"
"to switch to DbBlobStore (default option for REL_DB, meaning that\n"
"blobs will be stored inside the MySQL database)?", True)):
config.Set("Blobstore.implementation", "DbBlobStore")
if (RetryBoolQuestion(
"Do you want to use a different MySQL database for the REL_DB datastore?",
True)):
db_name = RetryQuestion("MySQL Database", "^[A-Za-z0-9-]+$",
config["Mysql.database_name"])
else:
db_name = config["Mysql.database_name"]
config.Set("Mysql.database", db_name)
if (input("Do you want to use previously set up MySQL username and password\n"
"to connect to MySQL database '%s'? [Yn]: " % db_name).upper() or
"Y") == "Y":
username = config["Mysql.database_username"]
password = config["Mysql.database_password"]
else:
username = RetryQuestion("MySQL Username", "[A-Za-z0-9-@]+$",
config["Mysql.database_username"])
password = GetPassword("Please enter password for database user %s: " %
username)
config.Set("Mysql.username", username)
config.Set("Mysql.password", password)
print("Configuration updated.")
def ArgparseBool(raw_value):
"""Returns the boolean value of a raw argparse value.
When defining an argument with argparse, you would think it natural to
be able to set the type to 'bool' and then proceed to set it to
'True' and 'False' via the command line. Unfortunately, that is not possible.
Argparse will silently cast the raw string value of the argument by
calling 'bool()', meaning 'False' gets converted to True. This function is
meant to be used in place of the 'bool' builtin when defining argparse
arguments.
Args:
raw_value: The raw value of the argument, which is a string passed in via
the command line.
Raises:
ArgumentTypeError: If the raw value passed in is not a string equal to
'True' or 'False'.
"""
if not isinstance(raw_value, str):
raise argparse.ArgumentTypeError("Unexpected type: %s. Expected a string." %
compatibility.GetName(type(raw_value)))
if raw_value.lower() == "true":
return True
elif raw_value.lower() == "false":
return False
else:
raise argparse.ArgumentTypeError(
"Invalid value encountered. Expected 'True' or 'False'.")
| apache-2.0 | -4,523,725,888,638,511,600 | 2,734,826,975,969,197,000 | 38.803381 | 90 | 0.663359 | false |
ondra-novak/chromium.src | tools/telemetry/telemetry/core/platform/profiler/strace_profiler.py | 43 | 7827 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import logging
import re
import signal
import subprocess
import sys
import tempfile
from telemetry.core.platform import profiler
from telemetry.timeline import model
# Parses one line of strace output, for example:
# 6052 1311456063.159722 read(8, "\1\0\0\0\0\0\0\0", 8) = 8 <0.000022>
_STRACE_LINE_RE = re.compile(
    r'^(?P<tid>\d+)\s+'
    r'(?P<ts>\d+)'
    r'(?P<micro>.\d+)\s+'
    r'(?P<func>.*?)'
    r'[(](?P<args>.*?)[)]\s+=\s+'
    r'(?P<ret>.*?)\s+'
    r'<(?P<dur>[\d.]+)>$')
_UNFINISHED_LINE_RE = re.compile(
    r'^(?P<tid>\d+)\s+'
    r'(?P<line>.*?)'
    r'<unfinished ...>$')
_RESUMED_LINE_RE = re.compile(
    r'^(?P<tid>\d+)\s+'
    r'(?P<ts>\d+)'
    r'(?P<micro>.\d+)\s+'
    r'<[.][.][.]\s(?P<func>.*?)\sresumed>'
    r'(?P<line>.*?)$')
_KILLED_LINE_RE = re.compile(
    r'^(?P<tid>\d+)\s+'
    r'(?P<ts>\d+)'
    r'(?P<micro>.\d+)\s+'
    r'[+][+][+] killed by SIGKILL [+][+][+]$')
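# Illustrative self-check (not part of the original file): applies
# _STRACE_LINE_RE to a line shaped like the sample quoted above the pattern
# definitions. The argument string is shortened for readability.
def _ExampleStraceLineMatch():
  m = _STRACE_LINE_RE.match(
      '6052 1311456063.159722 read(8, "abc", 8) = 8 <0.000022>')
  assert m.group('tid') == '6052'
  assert m.group('func') == 'read'
  assert m.group('args') == '8, "abc", 8'
  assert m.group('ret') == '8'
  assert m.group('dur') == '0.000022'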
def _StraceToChromeTrace(pid, infile):
"""Returns chrometrace json format for |infile| strace output."""
# Map of fd:file_name for open file descriptors. Useful for displaying
# file name instead of the descriptor number.
fd_map = {}
# Map of tid:interrupted_call for the interrupted call on each thread. It is
# possible to context switch during a system call. In this case we must
# match up the lines.
interrupted_call_map = {}
out = []
with open(infile, 'r') as f:
for line in f.readlines():
# Ignore kill lines for now.
m = _KILLED_LINE_RE.match(line)
if m:
continue
# If this line is interrupted, then remember it and continue.
m = _UNFINISHED_LINE_RE.match(line)
if m:
assert m.group('tid') not in interrupted_call_map
interrupted_call_map[m.group('tid')] = line
continue
# If this is a resume of a previous line, stitch it together.
interrupted = False
m = _RESUMED_LINE_RE.match(line)
if m:
interrupted = True
assert m.group('tid') in interrupted_call_map
line = interrupted_call_map[m.group('tid')].replace(
'<unfinished ...>', m.group('line'))
del interrupted_call_map[m.group('tid')]
# At this point we can do a normal match.
m = _STRACE_LINE_RE.match(line)
if not m:
if ('exit' not in line and
'Profiling timer expired' not in line and
'<unavailable>' not in line):
          logging.warning('Failed to parse line: %s', line)
continue
ts_begin = int(1000000 * (int(m.group('ts')) + float(m.group('micro'))))
ts_end = ts_begin + int(1000000 * float(m.group('dur')))
tid = int(m.group('tid'))
function_name = unicode(m.group('func'), errors='ignore')
function_args = unicode(m.group('args'), errors='ignore')
ret = unicode(m.group('ret'), errors='ignore')
cat = 'strace'
possible_fd_arg = None
first_arg = function_args.split(',')[0]
if first_arg and first_arg.strip().isdigit():
possible_fd_arg = first_arg.strip()
if function_name == 'open' and ret.isdigit():
# 1918 1311606151.649379 open("/foo/bar.so", O_RDONLY) = 7 <0.000088>
fd_map[ret] = first_arg
args = {
'args': function_args,
'ret': ret,
}
if interrupted:
args['interrupted'] = True
if possible_fd_arg and possible_fd_arg in fd_map:
args['fd%s' % first_arg] = fd_map[possible_fd_arg]
out.append({
'cat': cat,
'pid': pid,
'tid': tid,
'ts': ts_begin,
'ph': 'B', # Begin
'name': function_name,
})
out.append({
'cat': cat,
'pid': pid,
'tid': tid,
'ts': ts_end,
'ph': 'E', # End
'name': function_name,
'args': args,
})
return out
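# For the sample 'read' line above, the loop emits a Begin/End pair in Trace
# Event format, roughly as follows (pid abridged, timestamps in microseconds):
#
#   {'cat': 'strace', 'pid': ..., 'tid': 6052, 'ts': 1311456063159722,
#    'ph': 'B', 'name': 'read'}
#   {'cat': 'strace', 'pid': ..., 'tid': 6052, 'ts': 1311456063159744,
#    'ph': 'E', 'name': 'read', 'args': {'args': '...', 'ret': '8'}}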
def _GenerateTraceMetadata(timeline_model):
out = []
for process in timeline_model.processes:
out.append({
'name': 'process_name',
'ph': 'M', # Metadata
'pid': process,
'args': {
'name': timeline_model.processes[process].name
}
})
for thread in timeline_model.processes[process].threads:
out.append({
'name': 'thread_name',
'ph': 'M', # Metadata
'pid': process,
'tid': thread,
'args': {
'name': timeline_model.processes[process].threads[thread].name
}
})
return out
class _SingleProcessStraceProfiler(object):
"""An internal class for using perf for a given process."""
def __init__(self, pid, output_file, platform_backend):
self._pid = pid
self._platform_backend = platform_backend
self._output_file = output_file
self._tmp_output_file = tempfile.NamedTemporaryFile('w', 0)
self._proc = subprocess.Popen(
['strace', '-ttt', '-f', '-T', '-p', str(pid), '-o', output_file],
stdout=self._tmp_output_file, stderr=subprocess.STDOUT)
def CollectProfile(self):
if ('renderer' in self._output_file and
not self._platform_backend.GetCommandLine(self._pid)):
logging.warning('Renderer was swapped out during profiling. '
'To collect a full profile rerun with '
'"--extra-browser-args=--single-process"')
self._proc.send_signal(signal.SIGINT)
exit_code = self._proc.wait()
try:
if exit_code:
raise Exception('strace failed with exit code %d. Output:\n%s' % (
exit_code, self._GetStdOut()))
finally:
self._tmp_output_file.close()
return _StraceToChromeTrace(self._pid, self._output_file)
def _GetStdOut(self):
self._tmp_output_file.flush()
try:
with open(self._tmp_output_file.name) as f:
return f.read()
except IOError:
return ''
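# Note on the strace flags used above: -ttt prints absolute timestamps with
# microsecond precision, -f follows forked children/threads, -T appends the
# time spent in each syscall (the <...> suffix parsed by _STRACE_LINE_RE),
# -p attaches to an existing pid, and -o writes the trace to a file.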
class StraceProfiler(profiler.Profiler):
def __init__(self, browser_backend, platform_backend, output_path, state):
super(StraceProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
assert self._browser_backend.supports_tracing
self._browser_backend.StartTracing(None, 10)
process_output_file_map = self._GetProcessOutputFileMap()
self._process_profilers = []
self._output_file = output_path + '.json'
for pid, output_file in process_output_file_map.iteritems():
if 'zygote' in output_file:
continue
self._process_profilers.append(
_SingleProcessStraceProfiler(pid, output_file, platform_backend))
@classmethod
def name(cls):
return 'strace'
@classmethod
def is_supported(cls, browser_type):
if sys.platform != 'linux2':
return False
# TODO(tonyg): This should be supported on android and cros.
if (browser_type.startswith('android') or
browser_type.startswith('cros')):
return False
return True
@classmethod
def CustomizeBrowserOptions(cls, browser_type, options):
options.AppendExtraBrowserArgs([
'--no-sandbox',
'--allow-sandbox-debugging'
])
def CollectProfile(self):
print 'Processing trace...'
out_json = []
for single_process in self._process_profilers:
out_json.extend(single_process.CollectProfile())
timeline_data = self._browser_backend.StopTracing()
timeline_model = model.TimelineModel(timeline_data)
out_json.extend(_GenerateTraceMetadata(timeline_model))
with open(self._output_file, 'w') as f:
f.write(json.dumps(out_json, separators=(',', ':')))
print 'Trace saved as %s' % self._output_file
print 'To view, open in chrome://tracing'
return [self._output_file]
| bsd-3-clause | -5,406,753,634,190,455,000 | -1,756,470,943,348,480,500 | 29.936759 | 78 | 0.58937 | false |