repo_name | path | copies | size | content | license
---|---|---|---|---|---|
marczellm/algorimp | weimar/_parser.py | 1 | 3033 | """ A parser for chord progressions in the Weimar Jazzomat CSV format """
import re
from itertools import chain
from typing import Tuple, Optional
from music import ABCNote, ChordType, Chord, ChordProgression, Note
_chordtype_mapping = {
'': ChordType.maj,
'6': ChordType.maj,
'j7': ChordType.maj,
'-7': ChordType.m7,
'-': ChordType.m7,
'-6': ChordType.mmaj,
'-j': ChordType.mmaj,
'+': ChordType.aug7,
'+7': ChordType.aug7,
'+j': ChordType.augmaj,
'sus7': ChordType(7),
'o': ChordType.dim,
'o7': ChordType.dim,
}
def _capitalize(s: str):
return s[0].upper() + s[1:]
# Reverse ordering of the items inside the big OR is necessary to match longer ones first
_sre_roots = '|'.join(sorted(map(_capitalize, ABCNote.mapping().keys()), reverse=True))
_sre_types = '|'.join(sorted(chain(ChordType.__members__, _chordtype_mapping.keys()), reverse=True)).replace('||', '|')
_sre_chord = "({})({})?[913b#]*(/({}))?".format(_sre_roots, _sre_types, _sre_roots).replace('+', r'\+')
_sre_optional_chord = r'({}|NC| )'.format(_sre_chord)
_sre_measure = r'\|{}{{4}}(?=\|)'.format(_sre_optional_chord)
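# _sre_measure is intended to match one measure: a '|' followed by exactly four
# chord slots, using a lookahead for the closing '|' so that consecutive
# measures can share their bar lines.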
_re_roots = re.compile(_sre_roots)
_re_chord = re.compile(_sre_chord)
_re_optional_chord = re.compile(_sre_optional_chord)
_re_measure = re.compile(_sre_measure)
re_invalid_measure = re.compile(r'\|(NC| )+\|')
def parse_key(s: str) -> ABCNote:
""" Parse a key signature. The Jazzomat format includes maj and min but we discard that. """
return ABCNote.from_string(_re_roots.match(s).group(0))
def parse_chordtype(s: str) -> ChordType:
""" Parse a chord type in the Weimar Jazzomat format """
if s in ChordType.__members__:
return ChordType[s]
elif s in _chordtype_mapping:
return _chordtype_mapping[s]
else:
raise KeyError(s + " chord unknown")
def parse_chord(s: str) -> Optional[Chord]:
"""
:return: None if the chord is invalid
"""
match = re.match(_re_chord, s)
if match:
return Chord(root=ABCNote.from_string(match.group(1)), typ=parse_chordtype(match.group(2)))
else:
return None
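# A rough usage sketch (illustrative only; the exact results depend on how the
# music module defines ABCNote, ChordType and Chord):
#
#   parse_chord('C-7')   # -> Chord with root C and typ ChordType.m7
#   parse_chord('Fj7')   # -> Chord with root F and typ ChordType.maj
#   parse_chord('xyz')   # -> None, since no chord root matches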
def parse_measure(s: str) -> Tuple[(Chord,) * Note.meter]:
""" Parse a measure.
:return: four chords. Spaces translate to the chord before the space. """
ret = []
for match in re.finditer(_re_optional_chord, s):
if match.group(0) in [' ', 'NC']:
ret.append(ret[-1])
else:
ret.append(Chord(root=ABCNote.from_string(match.group(2)), typ=parse_chordtype(match.group(3))))
assert len(ret) == Note.meter
return tuple(ret)
def parse_changes(changes: str, key: str) -> ChordProgression:
ret = ChordProgression(parse_key(key))
for m in re.finditer(_re_measure, changes):
ret += parse_measure(m.group(0))
return ret
class SongMetadata:
def __init__(self, name: str, chord_changes: str, changes: ChordProgression=None, **_):
self.name = name
self.changes_str = chord_changes
self.changes = changes
| gpl-3.0 |
wbc2010/django1.2.5 | django1.2.5/django/contrib/gis/utils/srs.py | 311 | 3157 | from django.contrib.gis.gdal import SpatialReference
from django.db import connections, DEFAULT_DB_ALIAS
def add_srs_entry(srs, auth_name='EPSG', auth_srid=None, ref_sys_name=None,
database=DEFAULT_DB_ALIAS):
"""
This function takes a GDAL SpatialReference system and adds its information
to the `spatial_ref_sys` table of the spatial backend. Doing this enables
database-level spatial transformations for the backend. Thus, this utility
is useful for adding spatial reference systems not included by default with
the backend -- for example, the so-called "Google Maps Mercator Projection"
is excluded in PostGIS 1.3 and below, and the following adds it to the
`spatial_ref_sys` table:
>>> from django.contrib.gis.utils import add_srs_entry
>>> add_srs_entry(900913)
Keyword Arguments:
auth_name:
This keyword may be customized with the value of the `auth_name` field.
Defaults to 'EPSG'.
auth_srid:
This keyword may be customized with the value of the `auth_srid` field.
Defaults to the SRID determined by GDAL.
ref_sys_name:
For SpatiaLite users only, sets the value of the `ref_sys_name` field.
Defaults to the name determined by GDAL.
database:
The name of the database connection to use; the default is the value
of `django.db.DEFAULT_DB_ALIAS` (at the time of this writing, its value
is 'default').
"""
connection = connections[database]
if not hasattr(connection.ops, 'spatial_version'):
raise Exception('The `add_srs_entry` utility only works '
'with spatial backends.')
if connection.ops.oracle or connection.ops.mysql:
raise Exception('This utility does not support the '
'Oracle or MySQL spatial backends.')
SpatialRefSys = connection.ops.spatial_ref_sys()
# If argument is not a `SpatialReference` instance, use it as parameter
# to construct a `SpatialReference` instance.
if not isinstance(srs, SpatialReference):
srs = SpatialReference(srs)
if srs.srid is None:
raise Exception('Spatial reference requires an SRID to be '
'compatible with the spatial backend.')
# Initializing the keyword arguments dictionary for both PostGIS
# and SpatiaLite.
kwargs = {'srid' : srs.srid,
'auth_name' : auth_name,
'auth_srid' : auth_srid or srs.srid,
'proj4text' : srs.proj4,
}
# Backend-specific fields for the SpatialRefSys model.
if connection.ops.postgis:
kwargs['srtext'] = srs.wkt
if connection.ops.spatialite:
kwargs['ref_sys_name'] = ref_sys_name or srs.name
# Creating the spatial_ref_sys model.
try:
# Try getting via SRID only, because using all kwargs may
# differ from exact wkt/proj in database.
sr = SpatialRefSys.objects.get(srid=srs.srid)
except SpatialRefSys.DoesNotExist:
sr = SpatialRefSys.objects.create(**kwargs)
# Alias is for backwards-compatibility purposes.
add_postgis_srs = add_srs_entry
| bsd-3-clause |
borisroman/vdsm | tests/blockVolumeTests.py | 1 | 2058 | #
# Copyright 2015 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from vdsm.config import config
from storage.blockVolume import BlockVolume
from storage import storage_exception as se
from storage import volume
from testlib import permutations, expandPermutations
from testlib import VdsmTestCase as TestCaseBase
@expandPermutations
class BlockVolumeSizeTests(TestCaseBase):
@permutations([
# (preallocate, capacity, initial_size), result
[(volume.PREALLOCATED_VOL, 2048, None), 1],
[(volume.PREALLOCATED_VOL, 2049, None), 2],
[(volume.PREALLOCATED_VOL, 2097152, None), 1024],
[(volume.SPARSE_VOL, 9999, None),
config.getint("irs", "volume_utilization_chunk_mb")],
[(volume.SPARSE_VOL, 8388608, 1860), 1],
[(volume.SPARSE_VOL, 8388608, 1870), 2],
])
def test_block_volume_size(self, args, result):
size = BlockVolume._calculate_volume_alloc_size(*args)
self.assertEqual(size, result)
@permutations([
# preallocate
[volume.PREALLOCATED_VOL],
[volume.SPARSE_VOL],
])
def test_fail_invalid_block_volume_size(self, preallocate):
with self.assertRaises(se.InvalidParameterException):
BlockVolume._calculate_volume_alloc_size(preallocate, 2048, 2049)
| gpl-2.0 |
moypray/flocker | flocker/node/agents/functional/test_cinder.py | 5 | 5116 | # Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Functional tests for ``flocker.node.agents.cinder`` using a real OpenStack
cluster.
Ideally, there'd be some in-memory tests too. Some ideas:
* Maybe start a `mimic` server and use it to test just the authentication
step.
* Mimic doesn't currently fake the cinder APIs but perhaps we could contribute
that feature.
See https://github.com/rackerlabs/mimic/issues/218
"""
from uuid import uuid4
from bitmath import Byte
from keystoneclient.openstack.common.apiclient.exceptions import Unauthorized
from twisted.python.filepath import FilePath
from twisted.trial.unittest import SkipTest, SynchronousTestCase
from flocker.ca import RootCredential, AUTHORITY_CERTIFICATE_FILENAME
# make_iblockdeviceapi_tests should really be in flocker.node.agents.testtools,
# but I want to keep the branch size down
from ..test.test_blockdevice import (
make_iblockdeviceapi_tests,
)
from ..test.blockdevicefactory import (
InvalidConfig, ProviderType, get_blockdeviceapi_args,
get_blockdeviceapi_with_cleanup, get_device_allocation_unit,
get_minimum_allocatable_size,
)
from ..cinder import wait_for_volume
def cinderblockdeviceapi_for_test(test_case):
"""
Create a ``CinderBlockDeviceAPI`` instance for use in tests.
:param TestCase test_case: The test being run.
:returns: A ``CinderBlockDeviceAPI`` instance. Any volumes it creates will
be cleaned up at the end of the test (using ``test_case``\ 's cleanup
features).
"""
return get_blockdeviceapi_with_cleanup(test_case, ProviderType.openstack)
# ``CinderBlockDeviceAPI`` only implements the ``create`` and ``list`` parts of
# ``IBlockDeviceAPI``. Skip the rest of the tests for now.
class CinderBlockDeviceAPIInterfaceTests(
make_iblockdeviceapi_tests(
blockdevice_api_factory=(
lambda test_case: cinderblockdeviceapi_for_test(
test_case=test_case,
)
),
minimum_allocatable_size=get_minimum_allocatable_size(),
device_allocation_unit=get_device_allocation_unit(),
unknown_blockdevice_id_factory=lambda test: unicode(uuid4()),
)
):
"""
Interface adherence Tests for ``CinderBlockDeviceAPI``.
"""
def test_foreign_volume(self):
"""
Non-Flocker Volumes are not listed.
"""
try:
cls, kwargs = get_blockdeviceapi_args(ProviderType.openstack)
except InvalidConfig as e:
raise SkipTest(str(e))
cinder_client = kwargs["cinder_client"]
requested_volume = cinder_client.volumes.create(
size=int(Byte(self.minimum_allocatable_size).to_GiB().value)
)
self.addCleanup(
cinder_client.volumes.delete,
requested_volume.id,
)
wait_for_volume(
volume_manager=cinder_client.volumes,
expected_volume=requested_volume
)
self.assertEqual([], self.api.list_volumes())
def test_foreign_cluster_volume(self):
"""
Test that list_volumes() excludes volumes belonging to
other Flocker clusters.
"""
blockdevice_api2 = cinderblockdeviceapi_for_test(
test_case=self,
)
flocker_volume = blockdevice_api2.create_volume(
dataset_id=uuid4(),
size=self.minimum_allocatable_size,
)
self.assert_foreign_volume(flocker_volume)
class CinderHttpsTests(SynchronousTestCase):
"""
Test connections to HTTPS-enabled OpenStack.
"""
@staticmethod
def _authenticates_ok(cinder_client):
"""
Check connection is authorized.
:return: True if client connected OK, False otherwise.
"""
try:
cinder_client.authenticate()
return True
except Unauthorized:
return False
def test_verify_false(self):
"""
With the peer_verify field set to False, connection to the
OpenStack servers always succeeds.
"""
try:
cls, kwargs = get_blockdeviceapi_args(
ProviderType.openstack, peer_verify=False)
except InvalidConfig as e:
raise SkipTest(str(e))
self.assertTrue(self._authenticates_ok(kwargs['cinder_client']))
def test_verify_ca_path_no_match_fails(self):
"""
With a CA file that does not match any CA, connection to the
OpenStack servers fails.
"""
path = FilePath(self.mktemp())
path.makedirs()
RootCredential.initialize(path, b"mycluster")
try:
cls, kwargs = get_blockdeviceapi_args(
ProviderType.openstack, backend='openstack',
auth_plugin='password', password='password', peer_verify=True,
peer_ca_path=path.child(AUTHORITY_CERTIFICATE_FILENAME).path)
except InvalidConfig as e:
raise SkipTest(str(e))
self.assertFalse(self._authenticates_ok(kwargs['cinder_client']))
| apache-2.0 |
Alphadelta14/ansible | lib/ansible/new_inventory/host.py | 236 | 1551 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
class Host:
def __init__(self, name):
self._name = name
self._connection = None
self._ipv4_address = ''
self._ipv6_address = ''
self._port = 22
self._vars = dict()
def __repr__(self):
return self.get_name()
def get_name(self):
return self._name
def get_groups(self):
return []
def set_variable(self, name, value):
''' sets a variable for this host '''
self._vars[name] = value
def get_vars(self):
''' returns all variables for this host '''
all_vars = self._vars.copy()
all_vars.update(dict(inventory_hostname=self._name))
return all_vars
| gpl-3.0 |
kwagyeman/openmv | scripts/examples/OpenMV/02-Board-Control/cpufreq_scaling.py | 4 | 1117 | # CPU frequency scaling example.
#
# This example shows how to use the cpufreq module to change the CPU frequency on the fly.
import sensor, image, time, cpufreq
sensor.reset() # Reset and initialize the sensor.
sensor.set_pixformat(sensor.GRAYSCALE) # Set pixel format to GRAYSCALE (or RGB565)
sensor.set_framesize(sensor.QVGA) # Set frame size to QVGA (320x240)
clock = time.clock() # Create a clock object to track the FPS.
def test_image_processing():
for i in range(0, 50):
clock.tick() # Update the FPS clock.
img = sensor.snapshot() # Take a picture and return the image.
img.find_edges(image.EDGE_CANNY, threshold=(50, 80))
print("\nFrequency Scaling Test...")
for f in cpufreq.get_supported_frequencies():
print("Testing CPU Freq: %dMHz..." %(f))
cpufreq.set_frequency(f)
clock.reset()
test_image_processing()
freqs = cpufreq.get_current_frequencies()
print("CPU Freq:%dMHz HCLK:%dMhz PCLK1:%dMhz PCLK2:%dMhz FPS:%.2f" %(freqs[0], freqs[1], freqs[2], freqs[3], clock.fps()))
| mit |
feroda/odoo | addons/base_report_designer/base_report_designer.py | 314 | 3433 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
from StringIO import StringIO
from openerp.modules.module import get_module_resource
import openerp.modules.registry
from openerp.osv import osv
from openerp_sxw2rml import sxw2rml
class report_xml(osv.osv):
_inherit = 'ir.actions.report.xml'
def sxwtorml(self, cr, uid, file_sxw, file_type):
'''
Convert an sxw (or odt) file to an rml file.
'''
sxwval = StringIO(base64.decodestring(file_sxw))
if file_type=='sxw':
fp = open(get_module_resource('base_report_designer','openerp_sxw2rml', 'normalized_oo2rml.xsl'),'rb')
if file_type=='odt':
fp = open(get_module_resource('base_report_designer','openerp_sxw2rml', 'normalized_odt2rml.xsl'),'rb')
return {'report_rml_content': str(sxw2rml(sxwval, xsl=fp.read()))}
def upload_report(self, cr, uid, report_id, file_sxw, file_type, context=None):
'''
Untested function
'''
sxwval = StringIO(base64.decodestring(file_sxw))
if file_type=='sxw':
fp = open(get_module_resource('base_report_designer','openerp_sxw2rml', 'normalized_oo2rml.xsl'),'rb')
if file_type=='odt':
fp = open(get_module_resource('base_report_designer','openerp_sxw2rml', 'normalized_odt2rml.xsl'),'rb')
report = self.pool['ir.actions.report.xml'].write(cr, uid, [report_id], {
'report_sxw_content': base64.decodestring(file_sxw),
'report_rml_content': str(sxw2rml(sxwval, xsl=fp.read())),
})
return True
def report_get(self, cr, uid, report_id, context=None):
# skip osv.fields.sanitize_binary_value() because we want the raw bytes in all cases
context = dict(context or {}, bin_raw=True)
report = self.browse(cr, uid, report_id, context=context)
sxw_data = report.report_sxw_content
rml_data = report.report_rml_content
if isinstance(sxw_data, unicode):
sxw_data = sxw_data.encode("iso-8859-1", "replace")
if isinstance(rml_data, unicode):
rml_data = rml_data.encode("iso-8859-1", "replace")
return {
'file_type' : report.report_type,
'report_sxw_content': sxw_data and base64.encodestring(sxw_data) or False,
'report_rml_content': rml_data and base64.encodestring(rml_data) or False
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
dhesse/tree-level-improve | tli.py | 1 | 8135 | # -*- coding: utf-8 -*-
r"""
:mod:`tli` -- Tree level improvement.
=========================================
Calculate tree level improvements for lattice QCD observables in the
spirit of [1]_. Given an observable :math:`{\mathcal O}(a/L)`, one
defines the improved observable
.. math::
{\mathcal O}_\mathrm{I}(a/L) = \frac {{\mathcal O}(a/L)} { 1 +
\delta(a/L) }, \quad \delta(a/L) = \frac {{\mathcal O}(a/L) -
{\mathcal O}(0)} {{\mathcal O}(0)} = \delta^{(0)}(a/L) + g_0^2
\delta^{(1)}(a/L) + \ldots\,.
This package contains a function to calculate :math:`\delta(a/L)` for
given observables. In practically all cases, only the tree-level
approximation :math:`\mathcal O^{(0)}(a/L)` is known analytically, so
one will only be able to compute :math:`\delta^{(0)}`.
**References**
.. [1] G. de Divitiis *et al.*, *Universality and the approach to the
continuum limit in lattice gauge theory*, Nucl.Phys. B *437*,
447–470, 1995, ``[doi: 10.1016/0550-3213(94)00019-B]``.
"""
from math import exp, pi, sqrt, log
import scipy.optimize as sopt
import numpy as np
from observables import R1
ERR = 1e-10 # estimate of the round-off error for correlation fns.
############################################################
# Helper functions to calculate m_bare
############################################################
def min_x(z, LL = 0.195, NF = 0):
r"""Function to minimize for finding :math:`x(z)` according to the
formula given in append D of ``[arXiv:1001.4783]``. If you use
e.g. scipy's newton method to minimize the resulting funciton, be
careful to choose a 'good' starting value::
>>> import scipy.optimize as sopt
>>> import scipy.optimize as sopt
>>> sopt.newton(min_x(z = 10), .7, tol=1e-15)
0.5876680790030352
>>> sopt.newton(min_x(z = 12), .7, tol=1e-15)
0.5787751108231223
>>> sopt.newton(min_x(z = 12), .1, tol=1e-15)
RuntimeWarning: Tolerance of 0.00011 reached
warnings.warn(msg, RuntimeWarning)
:param z: Input z value.
:param LL: :math:`\Lambda L` as in ``[arXiv:1001.4783]``.
:param NF: Number of quark flavors.
:returns: Function. The value that minimizes that function is the
sought-after value for :math:`x`.
"""
B0 = 1. / (4 * pi)**2 * (11 - 2./3 * NF)
B1 = 1. / (4 * pi)**4 * (102 - 38./3 * NF)
D0 = 8. / (4 * pi)**2
return lambda x : abs(LL/z - 2**(B1 / 2 / B0**2) *
x**(1 - B1 / B0 / D0) *
exp(-x**(-2. * B0 / D0)))
def pretty_print(val, err, extra_err_digits = 1):
"""Given a vaule with an error, generate a string of the form
error(value)::
>>> from tli import pretty_print
>>> pretty_print(0.123, 0.011)
'0.123(11)'
>>> pretty_print(0.123, 0.011, 0)
'0.12(1)'
:param val: Float.
:param err: Error on val.
:param extra_err_digits: Display extra digits of the error.
:returns: String.
"""
if err == 0.0:
assert val == 0.0
return "0(0)"
digits = 1 + int(abs(log(err, 10))) + extra_err_digits
err = int(err * 10 ** digits + 0.5)
if err == 10 and extra_err_digits != 1:
err = 1
digits -= 1
return "{0:.{1}f}({2})".format(val, digits, err)
############################################################
# Tree level improvement
############################################################
def tli(obs, args, Lrange, n_cut, lrange, x_val, z):
"""Calculate the tree level improvment of an observable. This is
accomplished in two steps.
1. Estimate the continuum limit of obs by performing two fits of
the form :math:`a_0 + a_2 (a/L)^2 + a_3 (a/L)^3 + a_4\,
(a/L)^4` (i.e. we assume obs to be :math:`O(a)`-improved) to
data generated by applying obs to the values in Lrange, one
fit using all values in Lrange, and one omitting the first
n_cut points. The difference is then used as an estimate for
the systematic error.
2. Calculate :math:`\delta (a/L)` for the values of :math:`L/a`
provided in lrange.
A minimalistic example::
>>> from tli import tli
>>> tli, dtli, cl = tli(lambda L, x, z : 1. + 2./L**2, (),
... range(50,100,2), 10, (10,14), 0, 0)
>>> tli
[0.019999999999999792, 0.010204081632652738]
>>> dtli
[1.0200135891298212e-10, 1.0102175402546249e-10]
>>> cl
1.0000000000000002
:param obs: The observable (function).
:param args: The observable is expected to have a call signature
like ``obs(L, x_val, z, *args)``, where ``x_val`` is the x-value
obtained using :func:`min_x`, ``L`` the number of lattice
points in each direction and ``z`` the mass parameter.
:param Lrange: A iterable giving the lattice resoultions that
should be used in the fit to extract the continuum limit.
:param n_cut: Number of data points to be omitted in a second fit
to estimate the systematic error for the continuum limit.
:param lrange: Values for :math:`(L/a)` for which to calculate the
tree level improvement.
:param x_val: Value for :math:`x` as in ``[arXiv:1001.4783]``.
:param z: Mass parameter.
:returns: Tuple ``(tli, d_tli, cl)`` with tree level improvement
coefficients tli, the errors d_tli on those coefficients, and the
continuum limit cl.
"""
# make a list of the observable for various values of L
# to extract the continuum limit
f_list = [obs(L, x_val, z, *args) for L in Lrange]
# error function for the fit
efn = lambda p, x, y : y - p[0] - p[1]/x/x \
- p[2]/x/x/x - p[3]/x/x/x/x
# perform two fits to be able to estimate the error
(cl1, p1, p2, p3), success = \
sopt.leastsq(efn, [1,0,0,0], args = (Lrange, f_list))
(cl2, p1, p2, p3), success = \
sopt.leastsq(efn, [1,0,0,0], args = (Lrange[n_cut:],
f_list[n_cut:]))
cl, dcl = cl1, abs(cl1-cl2)
if abs(cl) < 10*abs(dcl):
print " ** WARNING,", obs.__name__, "seems to vanish as a/L -> 0"
print " ** My estimate: {0} --> {1} as a/L --> 0".format(
obs.__name__, pretty_print(cl, dcl))
print " ** using delta = O(a/L) - O(0) for", obs.__name__
print " ** for argument", args
delta_fun = lambda x : x - cl
d_delta_fun = lambda dO, O, de: dO + dcl
else:
delta_fun = lambda x : (x - cl) / cl
d_delta_fun = lambda dO, O, de : \
((dO + dcl)/abs(O-cl) + dcl/abs(cl))*abs(de)\
if O != cl else 0.0
# the observable at lattice sizes given in lrange
Obs = [obs(L, x_val, z, *args) for L in lrange]
# the estimated error on the observables
d_Obs = [abs(O * ERR) for O in Obs]
# the tree level cut-off effects at those lattice sizes
delta = [ delta_fun(O) for O in Obs]
# the error on the cut-off effects
d_delta = [ d_delta_fun(dO, O, de)
for (dO, O, de) in zip(d_Obs, Obs, delta) ]
return delta, d_delta, cl
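# A minimal sketch (not part of the original interface) of how the values
# returned by tli() would be used, following the formula in the module
# docstring, O_I(a/L) = O(a/L) / (1 + delta(a/L)):
#
#   delta, d_delta, cl = tli(R1, (0.0, 0.5), range(86, 257, 2), 17, (20,), x, z)
#   O_measured = ...                          # lattice measurement of R1 at L/a = 20
#   O_improved = O_measured / (1. + delta[0])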
if __name__ == "__main__":
# Ls used for continuum limit
Lrange = range(86,257,2)
# Ls to produce TLI data for
lrange = (20,24,32)
# zs to be used
zrange = (10.4,12.1,13.3)
# these are the values for x by michele as reference
xmichele = {10.4 : 0.585712, 12.1: 0.578382, 13.3: 0.573977}
for z in zrange:
print " * z =", z
# this is my value for x
myx = sopt.newton(min_x(z), .7, tol=1e-15)
# here, we compare, including a "goodness of fit"
# the latter comes form checking how good
print " my x:", myx, min_x(z)(myx)
print " michele:", xmichele[z], min_x(z)(xmichele[z])
# choose here which one to use
x = myx
# x = xmichele[z]
# get the tree level improvement
delta, d_delta, cl = tli(R1, (0.0, 0.5),
Lrange, 17, lrange, x, z)
# print continuum limit
print " -> c.l. =", cl
# print tli
for L, d, dd in zip(lrange, delta, d_delta):
print " ", L, pretty_print(d,dd)
| mit |
lokeshh/stem | stem/connection.py | 4 | 48655 | # Copyright 2011-2014, Damian Johnson and The Tor Project
# See LICENSE for licensing information
"""
Functions for connecting and authenticating to the tor process.
The :func:`~stem.connection.connect` function gives an easy, one-line
method for getting an authenticated control connection. This is handy for CLI
applications and the python interactive interpreter, but does several things
that make it undesirable for applications (uses stdin/stdout, suppresses
exceptions, etc).
::
import sys
from stem.connection import connect
if __name__ == '__main__':
controller = connect()
if not controller:
sys.exit(1) # unable to get a connection
print 'Tor is running version %s' % controller.get_version()
controller.close()
::
% python example.py
Tor is running version 0.2.4.10-alpha-dev (git-8be6058d8f31e578)
... or if Tor isn't running...
::
% python example.py
[Errno 111] Connection refused
The :func:`~stem.connection.authenticate` function, however, gives easy but
fine-grained control over the authentication process. For instance...
::
import sys
import getpass
import stem.connection
import stem.socket
try:
control_socket = stem.socket.ControlPort(port = 9051)
except stem.SocketError as exc:
print 'Unable to connect to port 9051 (%s)' % exc
sys.exit(1)
try:
stem.connection.authenticate(control_socket)
except stem.connection.IncorrectSocketType:
print 'Please check in your torrc that 9051 is the ControlPort.'
print 'Maybe you configured it to be the ORPort or SocksPort instead?'
sys.exit(1)
except stem.connection.MissingPassword:
controller_password = getpass.getpass('Controller password: ')
try:
stem.connection.authenticate_password(control_socket, controller_password)
except stem.connection.PasswordAuthFailed:
print 'Unable to authenticate, password is incorrect'
sys.exit(1)
except stem.connection.AuthenticationFailure as exc:
print 'Unable to authenticate: %s' % exc
sys.exit(1)
**Module Overview:**
::
connect - Simple method for getting authenticated control connection
authenticate - Main method for authenticating to a control socket
authenticate_none - Authenticates to an open control socket
authenticate_password - Authenticates to a socket supporting password auth
authenticate_cookie - Authenticates to a socket supporting cookie auth
authenticate_safecookie - Authenticates to a socket supporting safecookie auth
get_protocolinfo - Issues a PROTOCOLINFO query
AuthenticationFailure - Base exception raised for authentication failures
|- UnrecognizedAuthMethods - Authentication methods are unsupported
|- IncorrectSocketType - Socket does not speak the tor control protocol
|
|- OpenAuthFailed - Failure when authenticating by an open socket
| +- OpenAuthRejected - Tor rejected this method of authentication
|
|- PasswordAuthFailed - Failure when authenticating by a password
| |- PasswordAuthRejected - Tor rejected this method of authentication
| |- IncorrectPassword - Password was rejected
| +- MissingPassword - Socket supports password auth but wasn't attempted
|
|- CookieAuthFailed - Failure when authenticating by a cookie
| |- CookieAuthRejected - Tor rejected this method of authentication
| |- IncorrectCookieValue - Authentication cookie was rejected
| |- IncorrectCookieSize - Size of the cookie file is incorrect
| |- UnreadableCookieFile - Unable to read the contents of the auth cookie
| +- AuthChallengeFailed - Failure completing the authchallenge request
| |- AuthChallengeUnsupported - Tor doesn't recognize the AUTHCHALLENGE command
| |- AuthSecurityFailure - Server provided the wrong nonce credentials
| |- InvalidClientNonce - The client nonce is invalid
| +- UnrecognizedAuthChallengeMethod - AUTHCHALLENGE does not support the given methods.
|
+- MissingAuthInfo - Unexpected PROTOCOLINFO response, missing auth info
|- NoAuthMethods - Missing any methods for authenticating
+- NoAuthCookie - Supports cookie auth but doesn't have its path
.. data:: AuthMethod (enum)
Enumeration of PROTOCOLINFO responses for supported authentication methods.
============== ===========
AuthMethod Description
============== ===========
**NONE** No authentication required.
**PASSWORD** Password required, see tor's HashedControlPassword option.
**COOKIE** Contents of the cookie file required, see tor's CookieAuthentication option.
**SAFECOOKIE** Need to reply to a hmac challenge using the contents of the cookie file.
**UNKNOWN** Tor provided one or more authentication methods that we don't recognize, probably something new.
============== ===========
"""
import binascii
import getpass
import os
import stem.control
import stem.response
import stem.socket
import stem.util.connection
import stem.util.enum
import stem.util.str_tools
import stem.util.system
import stem.version
from stem.util import log
AuthMethod = stem.util.enum.Enum('NONE', 'PASSWORD', 'COOKIE', 'SAFECOOKIE', 'UNKNOWN')
CLIENT_HASH_CONSTANT = b'Tor safe cookie authentication controller-to-server hash'
SERVER_HASH_CONSTANT = b'Tor safe cookie authentication server-to-controller hash'
MISSING_PASSWORD_BUG_MSG = """
BUG: You provided a password but despite this stem reported that it was
missing. This shouldn't happen - please let us know about it!
http://bugs.torproject.org
"""
UNRECOGNIZED_AUTH_TYPE_MSG = """
Tor is using a type of authentication we do not recognize...
{auth_methods}
Please check that arm is up to date and if there is an existing issue on
'http://bugs.torproject.org'. If there isn't one then let us know!
"""
UNREADABLE_COOKIE_FILE_MSG = """
We were unable to read tor's authentication cookie...
Path: {path}
Issue: {issue}
"""
WRONG_PORT_TYPE_MSG = """
Please check in your torrc that {port} is the ControlPort. Maybe you
configured it to be the ORPort or SocksPort instead?
"""
WRONG_SOCKET_TYPE_MSG = """
Unable to connect to tor. Are you sure the interface you specified belongs to
tor?
"""
CONNECT_MESSAGES = {
'general_auth_failure': 'Unable to authenticate: {error}',
'incorrect_password': 'Incorrect password',
'no_control_port': "Unable to connect to tor. Maybe it's running without a ControlPort?",
'password_prompt': 'Tor controller password:',
'needs_password': 'Tor requires a password to authenticate',
'socket_doesnt_exist': "The socket file you specified ({path}) doesn't exist",
'tor_isnt_running': "Unable to connect to tor. Are you sure it's running?",
'unable_to_use_port': 'Unable to connect to {address}:{port}: {error}',
'unable_to_use_socket': "Unable to connect to '{path}': {error}",
'missing_password_bug': MISSING_PASSWORD_BUG_MSG.strip(),
'uncrcognized_auth_type': UNRECOGNIZED_AUTH_TYPE_MSG.strip(),
'unreadable_cookie_file': UNREADABLE_COOKIE_FILE_MSG.strip(),
'wrong_port_type': WRONG_PORT_TYPE_MSG.strip(),
'wrong_socket_type': WRONG_SOCKET_TYPE_MSG.strip(),
}
def connect(control_port = ('127.0.0.1', 9051), control_socket = '/var/run/tor/control', password = None, password_prompt = False, chroot_path = None, controller = stem.control.Controller):
"""
Convenience function for quickly getting a control connection. This is very
handy for debugging or CLI setup, handling setup and prompting for a password
if necessary (and none is provided). If any issues arise this prints a
description of the problem and returns **None**.
If both a **control_port** and **control_socket** are provided then the
**control_socket** is tried first, and this provides a generic error message
if they're both unavailable.
In much the same vein as git porcelain commands, users should not rely on
details of how this works. Messages and details of this function's behavior
could change in the future.
.. versionadded:: 1.2.0
:param tuple control_port: address and port tuple, for instance **('127.0.0.1', 9051)**
:param str control_socket: path where the control socket is located
:param str password: passphrase to authenticate to the socket
:param bool password_prompt: prompt for the controller password if it wasn't
supplied
:param str chroot_path: path prefix if in a chroot environment
:param Class controller: :class:`~stem.control.BaseController` subclass to be
returned, this provides a :class:`~stem.socket.ControlSocket` if **None**
:returns: authenticated control connection, the type based on the controller argument
:raises: **ValueError** if given an invalid control_port, or both
**control_port** and **control_socket** are **None**
"""
if control_port is None and control_socket is None:
raise ValueError('Neither a control port nor control socket were provided. Nothing to connect to.')
elif control_port:
if len(control_port) != 2:
raise ValueError('The control_port argument for connect() should be an (address, port) tuple.')
elif not stem.util.connection.is_valid_ipv4_address(control_port[0]):
raise ValueError("'%s' isn't a vaid IPv4 address" % control_port[0])
elif not stem.util.connection.is_valid_port(control_port[1]):
raise ValueError("'%s' isn't a valid port" % control_port[1])
control_connection, error_msg = None, ''
if control_socket:
if os.path.exists(control_socket):
try:
control_connection = stem.socket.ControlSocketFile(control_socket)
except stem.SocketError as exc:
error_msg = CONNECT_MESSAGES['unable_to_use_socket'].format(path = control_socket, error = exc)
else:
error_msg = CONNECT_MESSAGES['socket_doesnt_exist'].format(path = control_socket)
if control_port and not control_connection:
address, port = control_port
try:
control_connection = stem.socket.ControlPort(address, port)
except stem.SocketError as exc:
error_msg = CONNECT_MESSAGES['unable_to_use_port'].format(address = address, port = port, error = exc)
# If unable to connect to either a control socket or port then finally fail
# out. If we only attempted to connect to one of them then provide the error
# output from that. Otherwise we provide a more generic error message.
#
# We check for a 'tor.real' process name because that's what TBB uses.
if not control_connection:
if control_socket and control_port:
is_tor_running = stem.util.system.is_running('tor') or stem.util.system.is_running('tor.real')
error_msg = CONNECT_MESSAGES['no_control_port'] if is_tor_running else CONNECT_MESSAGES['tor_isnt_running']
print(error_msg)
return None
return _connect_auth(control_connection, password, password_prompt, chroot_path, controller)
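# Illustrative call patterns for connect() (not part of the module; the socket
# path and port below are simply the defaults from the signature above):
#
#   controller = connect()                                         # socket first, then port
#   controller = connect(control_port = None,
#                        control_socket = '/var/run/tor/control')  # socket only
#   controller = connect(control_port = ('127.0.0.1', 9051),
#                        control_socket = None,
#                        password_prompt = True)                   # port, prompt if a password is needed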
def connect_port(address = '127.0.0.1', port = 9051, password = None, chroot_path = None, controller = stem.control.Controller):
"""
Convenience function for quickly getting a control connection. This is very
handy for debugging or CLI setup, handling setup and prompting for a password
if necessary (and none is provided). If any issues arise this prints a
description of the problem and returns **None**.
.. deprecated:: 1.2.0
Use :func:`~stem.connection.connect` instead.
:param str address: ip address of the controller
:param int port: port number of the controller
:param str password: passphrase to authenticate to the socket
:param str chroot_path: path prefix if in a chroot environment
:param Class controller: :class:`~stem.control.BaseController` subclass to be
returned, this provides a :class:`~stem.socket.ControlSocket` if **None**
:returns: authenticated control connection, the type based on the controller argument
"""
try:
control_port = stem.socket.ControlPort(address, port)
except stem.SocketError as exc:
print(exc)
return None
return _connect_auth(control_port, password, True, chroot_path, controller)
def connect_socket_file(path = '/var/run/tor/control', password = None, chroot_path = None, controller = stem.control.Controller):
"""
Convenience function for quickly getting a control connection. For more
information see the :func:`~stem.connection.connect_port` function.
In much the same vein as git porcelain commands, users should not rely on
details of how this works. Messages or details of this function's behavior
might change in the future.
.. deprecated:: 1.2.0
Use :func:`~stem.connection.connect` instead.
:param str path: path where the control socket is located
:param str password: passphrase to authenticate to the socket
:param str chroot_path: path prefix if in a chroot environment
:param Class controller: :class:`~stem.control.BaseController` subclass to be
returned, this provides a :class:`~stem.socket.ControlSocket` if **None**
:returns: authenticated control connection, the type based on the controller argument
"""
try:
control_socket = stem.socket.ControlSocketFile(path)
except stem.SocketError as exc:
print(exc)
return None
return _connect_auth(control_socket, password, True, chroot_path, controller)
def _connect_auth(control_socket, password, password_prompt, chroot_path, controller):
"""
Helper for the connect_* functions that authenticates the socket and
constructs the controller.
:param stem.socket.ControlSocket control_socket: socket being authenticated to
:param str password: passphrase to authenticate to the socket
:param bool password_prompt: prompt for the controller password if it wasn't
supplied
:param str chroot_path: path prefix if in a chroot environment
:param Class controller: :class:`~stem.control.BaseController` subclass to be
returned, this provides a :class:`~stem.socket.ControlSocket` if **None**
:returns: authenticated control connection, the type based on the controller argument
"""
try:
authenticate(control_socket, password, chroot_path)
if controller is None:
return control_socket
else:
return controller(control_socket, is_authenticated = True)
except IncorrectSocketType:
if isinstance(control_socket, stem.socket.ControlPort):
print(CONNECT_MESSAGES['wrong_port_type'].format(port = control_socket.get_port()))
else:
print(CONNECT_MESSAGES['wrong_socket_type'])
control_socket.close()
return None
except UnrecognizedAuthMethods as exc:
print(CONNECT_MESSAGES['uncrcognized_auth_type'].format(auth_methods = ', '.join(exc.unknown_auth_methods)))
control_socket.close()
return None
except IncorrectPassword:
print(CONNECT_MESSAGES['incorrect_password'])
control_socket.close()
return None
except MissingPassword:
if password is not None:
control_socket.close()
raise ValueError(CONNECT_MESSAGES['missing_password_bug'])
if password_prompt:
try:
password = getpass.getpass(CONNECT_MESSAGES['password_prompt'] + ' ')
except KeyboardInterrupt:
control_socket.close()
return None
return _connect_auth(control_socket, password, password_prompt, chroot_path, controller)
else:
print(CONNECT_MESSAGES['needs_password'])
control_socket.close()
return None
except UnreadableCookieFile as exc:
print(CONNECT_MESSAGES['unreadable_cookie_file'].format(path = exc.cookie_path, issue = str(exc)))
control_socket.close()
return None
except AuthenticationFailure as exc:
print(CONNECT_MESSAGES['general_auth_failure'].format(error = exc))
control_socket.close()
return None
def authenticate(controller, password = None, chroot_path = None, protocolinfo_response = None):
"""
Authenticates to a control socket using the information provided by a
PROTOCOLINFO response. In practice this will often be all we need to
authenticate, raising an exception if all attempts to authenticate fail.
All exceptions are subclasses of AuthenticationFailure so, in practice,
callers should catch the types of authentication failure that they care
about, then have a :class:`~stem.connection.AuthenticationFailure` catch-all
at the end.
This can authenticate to either a :class:`~stem.control.BaseController` or
:class:`~stem.socket.ControlSocket`.
:param controller: tor controller or socket to be authenticated
:param str password: passphrase to present to the socket if it uses password
authentication (skips password auth if **None**)
:param str chroot_path: path prefix if in a chroot environment
:param stem.response.protocolinfo.ProtocolInfoResponse protocolinfo_response:
tor protocolinfo response, this is retrieved on our own if **None**
:raises: If all attempts to authenticate fails then this will raise a
:class:`~stem.connection.AuthenticationFailure` subclass. Since this may
try multiple authentication methods it may encounter multiple exceptions.
If so then the exception this raises is prioritized as follows...
* :class:`stem.connection.IncorrectSocketType`
The controller does not speak the tor control protocol. Most often this
happened because the user confused the SocksPort or ORPort with the
ControlPort.
* :class:`stem.connection.UnrecognizedAuthMethods`
All of the authentication methods tor will accept are new and
unrecognized. Please upgrade stem and, if that doesn't work, file a
ticket on 'trac.torproject.org' and I'd be happy to add support.
* :class:`stem.connection.MissingPassword`
We were unable to authenticate but didn't attempt password authentication
because none was provided. You should prompt the user for a password and
try again via 'authenticate_password'.
* :class:`stem.connection.IncorrectPassword`
We were provided with a password but it was incorrect.
* :class:`stem.connection.IncorrectCookieSize`
Tor allows for authentication by reading a cookie file, but that file
is the wrong size to be an authentication cookie.
* :class:`stem.connection.UnreadableCookieFile`
Tor allows for authentication by reading a cookie file, but we can't
read that file (probably due to permissions).
* **\***:class:`stem.connection.IncorrectCookieValue`
Tor allows for authentication by reading a cookie file, but rejected
the contents of that file.
* **\***:class:`stem.connection.AuthChallengeUnsupported`
Tor doesn't recognize the AUTHCHALLENGE command. This is probably a Tor
version prior to SAFECOOKIE being implement, but this exception shouldn't
arise because we won't attempt SAFECOOKIE auth unless Tor claims to
support it.
* **\***:class:`stem.connection.UnrecognizedAuthChallengeMethod`
Tor couldn't recognize the AUTHCHALLENGE method Stem sent to it. This
shouldn't happen at all.
* **\***:class:`stem.connection.InvalidClientNonce`
Tor says that the client nonce provided by Stem during the AUTHCHALLENGE
process is invalid.
* **\***:class:`stem.connection.AuthSecurityFailure`
Nonce value provided by the server was invalid.
* **\***:class:`stem.connection.OpenAuthRejected`
Tor says that it allows for authentication without any credentials, but
then rejected our authentication attempt.
* **\***:class:`stem.connection.MissingAuthInfo`
Tor provided us with a PROTOCOLINFO reply that is technically valid, but
missing the information we need to authenticate.
* **\***:class:`stem.connection.AuthenticationFailure`
There are numerous other ways that authentication could have failed
including socket failures, malformed controller responses, etc. These
mostly constitute transient failures or bugs.
**\*** In practice it is highly unusual for this to occur, being more of a
theoretical possibility rather than something you should expect. It's fine
to treat these as errors. If you have a use case where this commonly
happens, please file a ticket on 'trac.torproject.org'.
In the future new :class:`~stem.connection.AuthenticationFailure`
subclasses may be added to allow for better error handling.
"""
if not protocolinfo_response:
try:
protocolinfo_response = get_protocolinfo(controller)
except stem.ProtocolError:
raise IncorrectSocketType('unable to use the control socket')
except stem.SocketError as exc:
raise AuthenticationFailure('socket connection failed (%s)' % exc)
auth_methods = list(protocolinfo_response.auth_methods)
auth_exceptions = []
if len(auth_methods) == 0:
raise NoAuthMethods('our PROTOCOLINFO response did not have any methods for authenticating')
# remove authentication methods that are either unknown or for which we don't
# have an input
if AuthMethod.UNKNOWN in auth_methods:
auth_methods.remove(AuthMethod.UNKNOWN)
unknown_methods = protocolinfo_response.unknown_auth_methods
plural_label = 's' if len(unknown_methods) > 1 else ''
methods_label = ', '.join(unknown_methods)
# we... er, can't do anything with only unrecognized auth types
if not auth_methods:
exc_msg = 'unrecognized authentication method%s (%s)' % (plural_label, methods_label)
auth_exceptions.append(UnrecognizedAuthMethods(exc_msg, unknown_methods))
else:
log.debug('Authenticating to a socket with unrecognized auth method%s, ignoring them: %s' % (plural_label, methods_label))
if protocolinfo_response.cookie_path is None:
for cookie_auth_method in (AuthMethod.COOKIE, AuthMethod.SAFECOOKIE):
if cookie_auth_method in auth_methods:
auth_methods.remove(cookie_auth_method)
exc_msg = 'our PROTOCOLINFO response did not have the location of our authentication cookie'
auth_exceptions.append(NoAuthCookie(exc_msg, cookie_auth_method == AuthMethod.SAFECOOKIE))
if AuthMethod.PASSWORD in auth_methods and password is None:
auth_methods.remove(AuthMethod.PASSWORD)
auth_exceptions.append(MissingPassword('no passphrase provided'))
# iterating over AuthMethods so we can try them in this order
for auth_type in (AuthMethod.NONE, AuthMethod.PASSWORD, AuthMethod.SAFECOOKIE, AuthMethod.COOKIE):
if auth_type not in auth_methods:
continue
try:
if auth_type == AuthMethod.NONE:
authenticate_none(controller, False)
elif auth_type == AuthMethod.PASSWORD:
authenticate_password(controller, password, False)
elif auth_type in (AuthMethod.COOKIE, AuthMethod.SAFECOOKIE):
cookie_path = protocolinfo_response.cookie_path
if chroot_path:
cookie_path = os.path.join(chroot_path, cookie_path.lstrip(os.path.sep))
if auth_type == AuthMethod.SAFECOOKIE:
authenticate_safecookie(controller, cookie_path, False)
else:
authenticate_cookie(controller, cookie_path, False)
return # success!
except OpenAuthRejected as exc:
auth_exceptions.append(exc)
except IncorrectPassword as exc:
auth_exceptions.append(exc)
except PasswordAuthRejected as exc:
# Since the PROTOCOLINFO says password auth is available we can assume
# that if PasswordAuthRejected is raised it's being raised in error.
log.debug('The authenticate_password method raised a PasswordAuthRejected when password auth should be available. Stem may need to be corrected to recognize this response: %s' % exc)
auth_exceptions.append(IncorrectPassword(str(exc)))
except AuthSecurityFailure as exc:
log.info('Tor failed to provide the nonce expected for safecookie authentication. (%s)' % exc)
auth_exceptions.append(exc)
except (InvalidClientNonce, UnrecognizedAuthChallengeMethod, AuthChallengeFailed) as exc:
auth_exceptions.append(exc)
except (IncorrectCookieSize, UnreadableCookieFile, IncorrectCookieValue) as exc:
auth_exceptions.append(exc)
except CookieAuthRejected as exc:
auth_func = 'authenticate_safecookie' if exc.is_safecookie else 'authenticate_cookie'
log.debug('The %s method raised a CookieAuthRejected when cookie auth should be available. Stem may need to be corrected to recognize this response: %s' % (auth_func, exc))
auth_exceptions.append(IncorrectCookieValue(str(exc), exc.cookie_path, exc.is_safecookie))
except stem.ControllerError as exc:
auth_exceptions.append(AuthenticationFailure(str(exc)))
# All authentication attempts failed. Raise the exception that takes priority
# according to our pydocs.
for exc_type in AUTHENTICATE_EXCEPTIONS:
for auth_exc in auth_exceptions:
if isinstance(auth_exc, exc_type):
raise auth_exc
# We really, really shouldn't get here. It means that auth_exceptions is
# either empty or contains something that isn't an AuthenticationFailure.
raise AssertionError('BUG: Authentication failed without providing a recognized exception: %s' % str(auth_exceptions))
def authenticate_none(controller, suppress_ctl_errors = True):
"""
Authenticates to an open control socket. All control connections need to
authenticate before they can be used, even if tor hasn't been configured to
use any authentication.
If authentication fails tor will disconnect and we'll make a best effort
attempt to re-establish the connection. This may not succeed, so check
:func:`~stem.socket.ControlSocket.is_alive` before using the socket further.
This can authenticate to either a :class:`~stem.control.BaseController` or
:class:`~stem.socket.ControlSocket`.
For general usage use the :func:`~stem.connection.authenticate` function
instead.
:param controller: tor controller or socket to be authenticated
:param bool suppress_ctl_errors: reports raised
:class:`~stem.ControllerError` as authentication rejection if
**True**, otherwise they're re-raised
:raises: :class:`stem.connection.OpenAuthRejected` if the empty authentication credentials aren't accepted
"""
try:
auth_response = _msg(controller, 'AUTHENTICATE')
# if we got anything but an OK response then error
if str(auth_response) != 'OK':
try:
controller.connect()
except:
pass
raise OpenAuthRejected(str(auth_response), auth_response)
except stem.ControllerError as exc:
try:
controller.connect()
except:
pass
if not suppress_ctl_errors:
raise exc
else:
raise OpenAuthRejected('Socket failed (%s)' % exc)
def authenticate_password(controller, password, suppress_ctl_errors = True):
"""
Authenticates to a control socket that uses a password (via the
HashedControlPassword torrc option). Quotes in the password are escaped.
If authentication fails tor will disconnect and we'll make a best effort
attempt to re-establish the connection. This may not succeed, so check
:func:`~stem.socket.ControlSocket.is_alive` before using the socket further.
If you use this function directly, rather than
:func:`~stem.connection.authenticate`, we may mistakenly raise a
PasswordAuthRejected rather than IncorrectPassword. This is because we rely
on tor's error messaging which is liable to change in future versions
(:trac:`4817`).
This can authenticate to either a :class:`~stem.control.BaseController` or
:class:`~stem.socket.ControlSocket`.
For general usage use the :func:`~stem.connection.authenticate` function
instead.
:param controller: tor controller or socket to be authenticated
:param str password: passphrase to present to the socket
:param bool suppress_ctl_errors: reports raised
:class:`~stem.ControllerError` as authentication rejection if
**True**, otherwise they're re-raised
:raises:
* :class:`stem.connection.PasswordAuthRejected` if the socket doesn't
accept password authentication
* :class:`stem.connection.IncorrectPassword` if the authentication
credentials aren't accepted
"""
# Escapes quotes. Tor can include those in the password hash, in which case
# it expects escaped quotes from the controller. For more information see...
# https://trac.torproject.org/projects/tor/ticket/4600
password = password.replace('"', '\\"')
try:
auth_response = _msg(controller, 'AUTHENTICATE "%s"' % password)
# if we got anything but an OK response then error
if str(auth_response) != 'OK':
try:
controller.connect()
except:
pass
# all we have to go on is the error message from tor...
# Password did not match HashedControlPassword value from configuration...
# Password did not match HashedControlPassword *or*...
if 'Password did not match HashedControlPassword' in str(auth_response):
raise IncorrectPassword(str(auth_response), auth_response)
else:
raise PasswordAuthRejected(str(auth_response), auth_response)
except stem.ControllerError as exc:
try:
controller.connect()
except:
pass
if not suppress_ctl_errors:
raise exc
else:
raise PasswordAuthRejected('Socket failed (%s)' % exc)
def authenticate_cookie(controller, cookie_path, suppress_ctl_errors = True):
"""
Authenticates to a control socket that uses the contents of an authentication
cookie (generated via the CookieAuthentication torrc option). This does basic
validation that this is a cookie before presenting the contents to the
socket.
The :class:`~stem.connection.IncorrectCookieSize` and
:class:`~stem.connection.UnreadableCookieFile` exceptions take precedence
over the other types.
If authentication fails tor will disconnect and we'll make a best effort
attempt to re-establish the connection. This may not succeed, so check
:func:`~stem.socket.ControlSocket.is_alive` before using the socket further.
If you use this function directly, rather than
:func:`~stem.connection.authenticate`, we may mistakenly raise a
:class:`~stem.connection.CookieAuthRejected` rather than
:class:`~stem.connection.IncorrectCookieValue`. This is because we rely on
tor's error messaging which is liable to change in future versions
(:trac:`4817`).
This can authenticate to either a :class:`~stem.control.BaseController` or
:class:`~stem.socket.ControlSocket`.
For general usage use the :func:`~stem.connection.authenticate` function
instead.
:param controller: tor controller or socket to be authenticated
:param str cookie_path: path of the authentication cookie to send to tor
:param bool suppress_ctl_errors: reports raised
:class:`~stem.ControllerError` as authentication rejection if
**True**, otherwise they're re-raised
:raises:
* :class:`stem.connection.IncorrectCookieSize` if the cookie file's size
is wrong
* :class:`stem.connection.UnreadableCookieFile` if the cookie file doesn't
exist or we're unable to read it
* :class:`stem.connection.CookieAuthRejected` if cookie authentication is
attempted but the socket doesn't accept it
* :class:`stem.connection.IncorrectCookieValue` if the cookie file's value
is rejected
"""
cookie_data = _read_cookie(cookie_path, False)
try:
# binascii.b2a_hex() takes a byte string and returns one too. With python 3
# this is a problem because string formatting for byte strings includes the
# b'' wrapper...
#
# >>> "AUTHENTICATE %s" % b'content'
# "AUTHENTICATE b'content'"
#
# This seems dumb but oh well. Converting the result to unicode so it won't
# misbehave.
auth_token_hex = binascii.b2a_hex(stem.util.str_tools._to_bytes(cookie_data))
msg = 'AUTHENTICATE %s' % stem.util.str_tools._to_unicode(auth_token_hex)
auth_response = _msg(controller, msg)
# if we got anything but an OK response then error
if str(auth_response) != 'OK':
try:
controller.connect()
except:
pass
# all we have to go on is the error message from tor...
# ... Authentication cookie did not match expected value.
# ... *or* authentication cookie.
if '*or* authentication cookie.' in str(auth_response) or \
'Authentication cookie did not match expected value.' in str(auth_response):
raise IncorrectCookieValue(str(auth_response), cookie_path, False, auth_response)
else:
raise CookieAuthRejected(str(auth_response), cookie_path, False, auth_response)
except stem.ControllerError as exc:
try:
controller.connect()
except:
pass
if not suppress_ctl_errors:
raise exc
else:
raise CookieAuthRejected('Socket failed (%s)' % exc, cookie_path, False)
def authenticate_safecookie(controller, cookie_path, suppress_ctl_errors = True):
"""
Authenticates to a control socket using the safe cookie method, which is
enabled by setting the CookieAuthentication torrc option on Tor clients which
support it.
Authentication with this is a two-step process...
1. send a nonce to the server and receives a challenge from the server for
the cookie's contents
2. generate a hash digest using the challenge received in the first step, and
use it to authenticate the controller
The :class:`~stem.connection.IncorrectCookieSize` and
:class:`~stem.connection.UnreadableCookieFile` exceptions take precedence
over the other exception types.
The :class:`~stem.connection.AuthChallengeUnsupported`,
:class:`~stem.connection.UnrecognizedAuthChallengeMethod`,
:class:`~stem.connection.InvalidClientNonce` and
:class:`~stem.connection.CookieAuthRejected` exceptions are next in the order
of precedence. Depending on the reason, one of these is raised if the first
(AUTHCHALLENGE) step fails.
In the second (AUTHENTICATE) step,
:class:`~stem.connection.IncorrectCookieValue` or
:class:`~stem.connection.CookieAuthRejected` maybe raised.
If authentication fails tor will disconnect and we'll make a best effort
attempt to re-establish the connection. This may not succeed, so check
:func:`~stem.socket.ControlSocket.is_alive` before using the socket further.
For general usage use the :func:`~stem.connection.authenticate` function
instead.
:param controller: tor controller or socket to be authenticated
:param str cookie_path: path of the authentication cookie to send to tor
  :param bool suppress_ctl_errors: reports a raised
    :class:`~stem.ControllerError` as an authentication rejection if
    **True**, otherwise it is re-raised
:raises:
* :class:`stem.connection.IncorrectCookieSize` if the cookie file's size
is wrong
* :class:`stem.connection.UnreadableCookieFile` if the cookie file doesn't
exist or we're unable to read it
* :class:`stem.connection.CookieAuthRejected` if cookie authentication is
attempted but the socket doesn't accept it
* :class:`stem.connection.IncorrectCookieValue` if the cookie file's value
is rejected
* :class:`stem.connection.UnrecognizedAuthChallengeMethod` if the Tor
client fails to recognize the AuthChallenge method
* :class:`stem.connection.AuthChallengeUnsupported` if AUTHCHALLENGE is
unimplemented, or if unable to parse AUTHCHALLENGE response
* :class:`stem.connection.AuthSecurityFailure` if AUTHCHALLENGE's response
looks like a security attack
* :class:`stem.connection.InvalidClientNonce` if stem's AUTHCHALLENGE
client nonce is rejected for being invalid
"""
cookie_data = _read_cookie(cookie_path, True)
client_nonce = os.urandom(32)
try:
client_nonce_hex = stem.util.str_tools._to_unicode(binascii.b2a_hex(client_nonce))
authchallenge_response = _msg(controller, 'AUTHCHALLENGE SAFECOOKIE %s' % client_nonce_hex)
if not authchallenge_response.is_ok():
try:
controller.connect()
except:
pass
authchallenge_response_str = str(authchallenge_response)
if 'Authentication required.' in authchallenge_response_str:
raise AuthChallengeUnsupported("SAFECOOKIE authentication isn't supported", cookie_path)
elif 'AUTHCHALLENGE only supports' in authchallenge_response_str:
raise UnrecognizedAuthChallengeMethod(authchallenge_response_str, cookie_path)
elif 'Invalid base16 client nonce' in authchallenge_response_str:
raise InvalidClientNonce(authchallenge_response_str, cookie_path)
elif "Cookie authentication is disabled" in authchallenge_response_str:
raise CookieAuthRejected(authchallenge_response_str, cookie_path, True)
else:
raise AuthChallengeFailed(authchallenge_response, cookie_path)
except stem.ControllerError as exc:
try:
controller.connect()
except:
pass
if not suppress_ctl_errors:
raise exc
else:
raise AuthChallengeFailed('Socket failed (%s)' % exc, cookie_path, True)
try:
stem.response.convert('AUTHCHALLENGE', authchallenge_response)
except stem.ProtocolError as exc:
if not suppress_ctl_errors:
raise exc
else:
raise AuthChallengeFailed('Unable to parse AUTHCHALLENGE response: %s' % exc, cookie_path)
expected_server_hash = stem.util.connection._hmac_sha256(
SERVER_HASH_CONSTANT,
cookie_data + client_nonce + authchallenge_response.server_nonce)
if not stem.util.connection._cryptovariables_equal(authchallenge_response.server_hash, expected_server_hash):
raise AuthSecurityFailure('Tor provided the wrong server nonce', cookie_path)
try:
client_hash = stem.util.connection._hmac_sha256(
CLIENT_HASH_CONSTANT,
cookie_data + client_nonce + authchallenge_response.server_nonce)
auth_response = _msg(controller, 'AUTHENTICATE %s' % stem.util.str_tools._to_unicode(binascii.b2a_hex(client_hash)))
except stem.ControllerError as exc:
try:
controller.connect()
except:
pass
if not suppress_ctl_errors:
raise exc
else:
raise CookieAuthRejected('Socket failed (%s)' % exc, cookie_path, True, auth_response)
# if we got anything but an OK response then err
if not auth_response.is_ok():
try:
controller.connect()
except:
pass
# all we have to go on is the error message from tor...
# ... Safe cookie response did not match expected value
# ... *or* authentication cookie.
if '*or* authentication cookie.' in str(auth_response) or \
'Safe cookie response did not match expected value' in str(auth_response):
raise IncorrectCookieValue(str(auth_response), cookie_path, True, auth_response)
else:
raise CookieAuthRejected(str(auth_response), cookie_path, True, auth_response)
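def _example_safecookie_usage():
  # Illustrative usage sketch for authenticate_safecookie(). The local control
  # port (9051) and the assumption that tor has cookie authentication enabled
  # are examples only; the cookie path is discovered via get_protocolinfo()
  # below rather than hardcoded.
  import stem.socket
  control_socket = stem.socket.ControlPort(port = 9051)
  protocolinfo_response = get_protocolinfo(control_socket)
  authenticate_safecookie(control_socket, protocolinfo_response.cookie_path)
  return control_socket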
def get_protocolinfo(controller):
"""
Issues a PROTOCOLINFO query to a control socket, getting information about
the tor process running on it. If the socket is already closed then it is
first reconnected.
According to the control spec the cookie_file is an absolute path. However,
this often is not the case (especially for the Tor Browser Bundle). If the
path is relative then we'll make an attempt (which may not work) to correct
this (:trac:`1101`).
  This can be used with either a :class:`~stem.control.BaseController` or
:class:`~stem.socket.ControlSocket`.
:param controller: tor controller or socket to be queried
:returns: :class:`~stem.response.protocolinfo.ProtocolInfoResponse` provided by tor
:raises:
* :class:`stem.ProtocolError` if the PROTOCOLINFO response is
malformed
* :class:`stem.SocketError` if problems arise in establishing or
using the socket
"""
try:
protocolinfo_response = _msg(controller, 'PROTOCOLINFO 1')
except:
protocolinfo_response = None
# Tor hangs up on sockets after receiving a PROTOCOLINFO query if it isn't
# next followed by authentication. Transparently reconnect if that happens.
if not protocolinfo_response or str(protocolinfo_response) == 'Authentication required.':
controller.connect()
try:
protocolinfo_response = _msg(controller, 'PROTOCOLINFO 1')
except stem.SocketClosed as exc:
raise stem.SocketError(exc)
stem.response.convert('PROTOCOLINFO', protocolinfo_response)
# attempt to expand relative cookie paths
if protocolinfo_response.cookie_path:
_expand_cookie_path(protocolinfo_response, stem.util.system.pid_by_name, 'tor')
# attempt to expand relative cookie paths via the control port or socket file
if isinstance(controller, stem.socket.ControlSocket):
control_socket = controller
else:
control_socket = controller.get_socket()
if isinstance(control_socket, stem.socket.ControlPort):
if control_socket.get_address() == '127.0.0.1':
pid_method = stem.util.system.pid_by_port
_expand_cookie_path(protocolinfo_response, pid_method, control_socket.get_port())
elif isinstance(control_socket, stem.socket.ControlSocketFile):
pid_method = stem.util.system.pid_by_open_file
_expand_cookie_path(protocolinfo_response, pid_method, control_socket.get_socket_path())
return protocolinfo_response
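def _example_protocolinfo_usage():
  # Illustrative usage sketch for get_protocolinfo(). The local control port
  # (9051) is an example only. The response's auth_methods and cookie_path
  # attributes indicate which of the authenticate_* helpers above applies.
  import stem.socket
  control_socket = stem.socket.ControlPort(port = 9051)
  protocolinfo_response = get_protocolinfo(control_socket)
  return protocolinfo_response.auth_methods, protocolinfo_response.cookie_path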
def _msg(controller, message):
"""
Sends and receives a message with either a
:class:`~stem.socket.ControlSocket` or :class:`~stem.control.BaseController`.
"""
if isinstance(controller, stem.socket.ControlSocket):
controller.send(message)
return controller.recv()
else:
return controller.msg(message)
def _read_cookie(cookie_path, is_safecookie):
"""
Provides the contents of a given cookie file.
:param str cookie_path: absolute path of the cookie file
:param bool is_safecookie: **True** if this was for SAFECOOKIE
authentication, **False** if for COOKIE
:raises:
* :class:`stem.connection.UnreadableCookieFile` if the cookie file is
unreadable
* :class:`stem.connection.IncorrectCookieSize` if the cookie size is
incorrect (not 32 bytes)
"""
if not os.path.exists(cookie_path):
exc_msg = "Authentication failed: '%s' doesn't exist" % cookie_path
raise UnreadableCookieFile(exc_msg, cookie_path, is_safecookie)
# Abort if the file isn't 32 bytes long. This is to avoid exposing arbitrary
# file content to the port.
#
# Without this a malicious socket could, for instance, claim that
# '~/.bash_history' or '~/.ssh/id_rsa' was its authentication cookie to trick
# us into reading it for them with our current permissions.
#
# https://trac.torproject.org/projects/tor/ticket/4303
auth_cookie_size = os.path.getsize(cookie_path)
if auth_cookie_size != 32:
exc_msg = "Authentication failed: authentication cookie '%s' is the wrong size (%i bytes instead of 32)" % (cookie_path, auth_cookie_size)
raise IncorrectCookieSize(exc_msg, cookie_path, is_safecookie)
try:
with open(cookie_path, 'rb', 0) as f:
return f.read()
except IOError as exc:
exc_msg = "Authentication failed: unable to read '%s' (%s)" % (cookie_path, exc)
raise UnreadableCookieFile(exc_msg, cookie_path, is_safecookie)
def _expand_cookie_path(protocolinfo_response, pid_resolver, pid_resolution_arg):
"""
Attempts to expand a relative cookie path with the given pid resolver. This
leaves the cookie_path alone if it's already absolute, **None**, or the
system calls fail.
"""
cookie_path = protocolinfo_response.cookie_path
if cookie_path and not os.path.isabs(cookie_path):
try:
tor_pid = pid_resolver(pid_resolution_arg)
if not tor_pid:
raise IOError('pid lookup failed')
tor_cwd = stem.util.system.cwd(tor_pid)
if not tor_cwd:
raise IOError('cwd lookup failed')
cookie_path = stem.util.system.expand_path(cookie_path, tor_cwd)
except IOError as exc:
resolver_labels = {
stem.util.system.pid_by_name: ' by name',
stem.util.system.pid_by_port: ' by port',
stem.util.system.pid_by_open_file: ' by socket file',
}
pid_resolver_label = resolver_labels.get(pid_resolver, '')
log.debug('unable to expand relative tor cookie path%s: %s' % (pid_resolver_label, exc))
protocolinfo_response.cookie_path = cookie_path
class AuthenticationFailure(Exception):
"""
Base error for authentication failures.
:var stem.socket.ControlMessage auth_response: AUTHENTICATE response from the
control socket, **None** if one wasn't received
"""
def __init__(self, message, auth_response = None):
super(AuthenticationFailure, self).__init__(message)
self.auth_response = auth_response
class UnrecognizedAuthMethods(AuthenticationFailure):
"""
  None of the methods tor will accept for authenticating are recognized.
:var list unknown_auth_methods: authentication methods that weren't recognized
"""
def __init__(self, message, unknown_auth_methods):
super(UnrecognizedAuthMethods, self).__init__(message)
self.unknown_auth_methods = unknown_auth_methods
class IncorrectSocketType(AuthenticationFailure):
'Socket does not speak the control protocol.'
class OpenAuthFailed(AuthenticationFailure):
'Failure to authenticate to an open socket.'
class OpenAuthRejected(OpenAuthFailed):
'Attempt to connect to an open control socket was rejected.'
class PasswordAuthFailed(AuthenticationFailure):
'Failure to authenticate with a password.'
class PasswordAuthRejected(PasswordAuthFailed):
'Socket does not support password authentication.'
class IncorrectPassword(PasswordAuthFailed):
'Authentication password incorrect.'
class MissingPassword(PasswordAuthFailed):
"Password authentication is supported but we weren't provided with one."
class CookieAuthFailed(AuthenticationFailure):
"""
Failure to authenticate with an authentication cookie.
:param str cookie_path: location of the authentication cookie we attempted
:param bool is_safecookie: **True** if this was for SAFECOOKIE
authentication, **False** if for COOKIE
:param stem.response.ControlMessage auth_response: reply to our
authentication attempt
"""
def __init__(self, message, cookie_path, is_safecookie, auth_response = None):
super(CookieAuthFailed, self).__init__(message, auth_response)
self.is_safecookie = is_safecookie
self.cookie_path = cookie_path
class CookieAuthRejected(CookieAuthFailed):
  'Socket does not support cookie authentication.'
class IncorrectCookieValue(CookieAuthFailed):
'Authentication cookie value was rejected.'
class IncorrectCookieSize(CookieAuthFailed):
'Aborted because the cookie file is the wrong size.'
class UnreadableCookieFile(CookieAuthFailed):
'Error arose in reading the authentication cookie.'
class AuthChallengeFailed(CookieAuthFailed):
"""
AUTHCHALLENGE command has failed.
"""
def __init__(self, message, cookie_path):
super(AuthChallengeFailed, self).__init__(message, cookie_path, True)
class AuthChallengeUnsupported(AuthChallengeFailed):
"""
AUTHCHALLENGE isn't implemented.
"""
class UnrecognizedAuthChallengeMethod(AuthChallengeFailed):
"""
Tor couldn't recognize our AUTHCHALLENGE method.
:var str authchallenge_method: AUTHCHALLENGE method that Tor couldn't recognize
"""
def __init__(self, message, cookie_path, authchallenge_method):
super(UnrecognizedAuthChallengeMethod, self).__init__(message, cookie_path)
self.authchallenge_method = authchallenge_method
class AuthSecurityFailure(AuthChallengeFailed):
'AUTHCHALLENGE response is invalid.'
class InvalidClientNonce(AuthChallengeFailed):
'AUTHCHALLENGE request contains an invalid client nonce.'
class MissingAuthInfo(AuthenticationFailure):
"""
The PROTOCOLINFO response didn't have enough information to authenticate.
These are valid control responses but really shouldn't happen in practice.
"""
class NoAuthMethods(MissingAuthInfo):
"PROTOCOLINFO response didn't have any methods for authenticating."
class NoAuthCookie(MissingAuthInfo):
"""
PROTOCOLINFO response supports cookie auth but doesn't have its path.
:param bool is_safecookie: **True** if this was for SAFECOOKIE
authentication, **False** if for COOKIE
"""
def __init__(self, message, is_safecookie):
super(NoAuthCookie, self).__init__(message)
self.is_safecookie = is_safecookie
# authentication exceptions ordered as per the authenticate function's pydocs
AUTHENTICATE_EXCEPTIONS = (
IncorrectSocketType,
UnrecognizedAuthMethods,
MissingPassword,
IncorrectPassword,
IncorrectCookieSize,
UnreadableCookieFile,
IncorrectCookieValue,
AuthChallengeUnsupported,
UnrecognizedAuthChallengeMethod,
InvalidClientNonce,
AuthSecurityFailure,
OpenAuthRejected,
MissingAuthInfo,
AuthenticationFailure
)
| lgpl-3.0 |
TomBaxter/waterbutler | waterbutler/server/api/v0/move.py | 6 | 1852 | import time
from waterbutler import tasks
from waterbutler.server.api.v0 import core
from waterbutler.core import remote_logging
class MoveHandler(core.BaseCrossProviderHandler):
JSON_REQUIRED = True
ACTION_MAP = {
'POST': 'move'
}
async def post(self):
if not self.source_provider.can_intra_move(self.destination_provider, self.json['source']['path']):
resp = await tasks.move.adelay({
'nid': self.json['source']['nid'],
'path': self.json['source']['path'],
'provider': self.source_provider.serialized()
}, {
'nid': self.json['destination']['nid'],
'path': self.json['destination']['path'],
'provider': self.destination_provider.serialized()
},
rename=self.json.get('rename'),
conflict=self.json.get('conflict', 'replace'),
start_time=time.time(),
request=remote_logging._serialize_request(self.request),
)
metadata, created = await tasks.wait_on_celery(resp)
else:
metadata, created = (
await tasks.backgrounded(
self.source_provider.move,
self.destination_provider,
self.json['source']['path'],
self.json['destination']['path'],
rename=self.json.get('rename'),
conflict=self.json.get('conflict', 'replace'),
)
)
if created:
self.set_status(201)
else:
self.set_status(200)
self.write(metadata.serialized())
if self.source_provider.can_intra_move(self.destination_provider, self.json['source']['path']):
self._send_hook('move', metadata)
| apache-2.0 |
Edraak/edx-platform | common/djangoapps/dark_lang/tests.py | 46 | 10165 | """
Tests of DarkLangMiddleware
"""
from django.contrib.auth.models import User
from django.http import HttpRequest
import ddt
from django.test import TestCase
from mock import Mock
import unittest
from dark_lang.middleware import DarkLangMiddleware
from dark_lang.models import DarkLangConfig
from django.utils.translation import LANGUAGE_SESSION_KEY
from student.tests.factories import UserFactory
UNSET = object()
def set_if_set(dct, key, value):
"""
Sets ``key`` in ``dct`` to ``value``
unless ``value`` is ``UNSET``
"""
if value is not UNSET:
dct[key] = value
@ddt.ddt
class DarkLangMiddlewareTests(TestCase):
"""
Tests of DarkLangMiddleware
"""
def setUp(self):
super(DarkLangMiddlewareTests, self).setUp()
self.user = User()
self.user.save()
DarkLangConfig(
released_languages='rel',
changed_by=self.user,
enabled=True
).save()
def process_request(self, language_session_key=UNSET, accept=UNSET, preview_lang=UNSET, clear_lang=UNSET):
"""
Build a request and then process it using the ``DarkLangMiddleware``.
Args:
            language_session_key (str): The language code to set in request.session[LANGUAGE_SESSION_KEY]
accept (str): The accept header to set in request.META['HTTP_ACCEPT_LANGUAGE']
preview_lang (str): The value to set in request.GET['preview_lang']
clear_lang (str): The value to set in request.GET['clear_lang']
"""
session = {}
set_if_set(session, LANGUAGE_SESSION_KEY, language_session_key)
meta = {}
set_if_set(meta, 'HTTP_ACCEPT_LANGUAGE', accept)
get = {}
set_if_set(get, 'preview-lang', preview_lang)
set_if_set(get, 'clear-lang', clear_lang)
request = Mock(
spec=HttpRequest,
session=session,
META=meta,
GET=get,
user=UserFactory()
)
self.assertIsNone(DarkLangMiddleware().process_request(request))
return request
def assertAcceptEquals(self, value, request):
"""
Assert that the HTML_ACCEPT_LANGUAGE header in request
is equal to value
"""
self.assertEquals(
value,
request.META.get('HTTP_ACCEPT_LANGUAGE', UNSET)
)
def test_empty_accept(self):
self.assertAcceptEquals(UNSET, self.process_request())
def test_wildcard_accept(self):
self.assertAcceptEquals('*', self.process_request(accept='*'))
def test_malformed_accept(self):
self.assertAcceptEquals('', self.process_request(accept='xxxxxxxxxxxx'))
self.assertAcceptEquals('', self.process_request(accept='en;q=1.0, es-419:q-0.8'))
def test_released_accept(self):
self.assertAcceptEquals(
'rel;q=1.0',
self.process_request(accept='rel;q=1.0')
)
def test_unreleased_accept(self):
self.assertAcceptEquals(
'rel;q=1.0',
self.process_request(accept='rel;q=1.0, unrel;q=0.5')
)
def test_accept_with_syslang(self):
self.assertAcceptEquals(
'en;q=1.0, rel;q=0.8',
self.process_request(accept='en;q=1.0, rel;q=0.8, unrel;q=0.5')
)
def test_accept_multiple_released_langs(self):
DarkLangConfig(
released_languages=('rel, unrel'),
changed_by=self.user,
enabled=True
).save()
self.assertAcceptEquals(
'rel;q=1.0, unrel;q=0.5',
self.process_request(accept='rel;q=1.0, unrel;q=0.5')
)
self.assertAcceptEquals(
'rel;q=1.0, unrel;q=0.5',
self.process_request(accept='rel;q=1.0, notrel;q=0.3, unrel;q=0.5')
)
self.assertAcceptEquals(
'rel;q=1.0, unrel;q=0.5',
self.process_request(accept='notrel;q=0.3, rel;q=1.0, unrel;q=0.5')
)
def test_accept_released_territory(self):
# We will munge 'rel-ter' to be 'rel', so the 'rel-ter'
# user will actually receive the released language 'rel'
# (Otherwise, the user will actually end up getting the server default)
self.assertAcceptEquals(
'rel;q=1.0, rel;q=0.5',
self.process_request(accept='rel-ter;q=1.0, rel;q=0.5')
)
def test_accept_mixed_case(self):
self.assertAcceptEquals(
'rel;q=1.0, rel;q=0.5',
self.process_request(accept='rel-TER;q=1.0, REL;q=0.5')
)
DarkLangConfig(
released_languages=('REL-TER'),
changed_by=self.user,
enabled=True
).save()
# Since we have only released "rel-ter", the requested code "rel" will
# fuzzy match to "rel-ter", in addition to "rel-ter" exact matching "rel-ter"
self.assertAcceptEquals(
'rel-ter;q=1.0, rel-ter;q=0.5',
self.process_request(accept='rel-ter;q=1.0, rel;q=0.5')
)
@ddt.data(
('es;q=1.0, pt;q=0.5', 'es-419;q=1.0'), # 'es' should get 'es-419', not English
('es-AR;q=1.0, pt;q=0.5', 'es-419;q=1.0'), # 'es-AR' should get 'es-419', not English
)
@ddt.unpack
def test_partial_match_es419(self, accept_header, expected):
# Release es-419
DarkLangConfig(
released_languages=('es-419, en'),
changed_by=self.user,
enabled=True
).save()
self.assertAcceptEquals(
expected,
self.process_request(accept=accept_header)
)
def test_partial_match_esar_es(self):
# If I release 'es', 'es-AR' should get 'es', not English
DarkLangConfig(
released_languages=('es, en'),
changed_by=self.user,
enabled=True
).save()
self.assertAcceptEquals(
'es;q=1.0',
self.process_request(accept='es-AR;q=1.0, pt;q=0.5')
)
@ddt.data(
# Test condition: If I release 'es-419, es, es-es'...
('es;q=1.0, pt;q=0.5', 'es;q=1.0'), # 1. es should get es
('es-419;q=1.0, pt;q=0.5', 'es-419;q=1.0'), # 2. es-419 should get es-419
('es-es;q=1.0, pt;q=0.5', 'es-es;q=1.0'), # 3. es-es should get es-es
)
@ddt.unpack
def test_exact_match_gets_priority(self, accept_header, expected):
# Release 'es-419, es, es-es'
DarkLangConfig(
released_languages=('es-419, es, es-es'),
changed_by=self.user,
enabled=True
).save()
self.assertAcceptEquals(
expected,
self.process_request(accept=accept_header)
)
@unittest.skip("This won't work until fallback is implemented for LA country codes. See LOC-86")
@ddt.data(
'es-AR', # Argentina
'es-PY', # Paraguay
)
def test_partial_match_es_la(self, latin_america_code):
# We need to figure out the best way to implement this. There are a ton of LA country
# codes that ought to fall back to 'es-419' rather than 'es-es'.
# http://unstats.un.org/unsd/methods/m49/m49regin.htm#americas
# If I release 'es, es-419'
# Latin American codes should get es-419
DarkLangConfig(
released_languages=('es, es-419'),
changed_by=self.user,
enabled=True
).save()
self.assertAcceptEquals(
'es-419;q=1.0',
self.process_request(accept='{};q=1.0, pt;q=0.5'.format(latin_america_code))
)
def assertSessionLangEquals(self, value, request):
"""
Assert that the LANGUAGE_SESSION_KEY set in request.session is equal to value
"""
self.assertEquals(
value,
request.session.get(LANGUAGE_SESSION_KEY, UNSET)
)
def test_preview_lang_with_released_language(self):
# Preview lang should always override selection.
self.assertSessionLangEquals(
'rel',
self.process_request(preview_lang='rel')
)
self.assertSessionLangEquals(
'rel',
self.process_request(preview_lang='rel', language_session_key='notrel')
)
def test_preview_lang_with_dark_language(self):
self.assertSessionLangEquals(
'unrel',
self.process_request(preview_lang='unrel')
)
self.assertSessionLangEquals(
'unrel',
self.process_request(preview_lang='unrel', language_session_key='notrel')
)
def test_clear_lang(self):
self.assertSessionLangEquals(
UNSET,
self.process_request(clear_lang=True)
)
self.assertSessionLangEquals(
UNSET,
self.process_request(clear_lang=True, language_session_key='rel')
)
self.assertSessionLangEquals(
UNSET,
self.process_request(clear_lang=True, language_session_key='unrel')
)
def test_disabled(self):
DarkLangConfig(enabled=False, changed_by=self.user).save()
self.assertAcceptEquals(
'notrel;q=0.3, rel;q=1.0, unrel;q=0.5',
self.process_request(accept='notrel;q=0.3, rel;q=1.0, unrel;q=0.5')
)
self.assertSessionLangEquals(
'rel',
self.process_request(clear_lang=True, language_session_key='rel')
)
self.assertSessionLangEquals(
'unrel',
self.process_request(clear_lang=True, language_session_key='unrel')
)
self.assertSessionLangEquals(
'rel',
self.process_request(preview_lang='unrel', language_session_key='rel')
)
def test_accept_chinese_language_codes(self):
DarkLangConfig(
released_languages=('zh-cn, zh-hk, zh-tw'),
changed_by=self.user,
enabled=True
).save()
self.assertAcceptEquals(
'zh-cn;q=1.0, zh-tw;q=0.5, zh-hk;q=0.3',
self.process_request(accept='zh-Hans;q=1.0, zh-Hant-TW;q=0.5, zh-HK;q=0.3')
)
| agpl-3.0 |
NL66278/odoo | addons/portal_project/tests/__init__.py | 170 | 1124 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_access_rights
checks = [
test_access_rights,
]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
stevekuznetsov/ansible | lib/ansible/module_utils/vmware.py | 51 | 16732 | # -*- coding: utf-8 -*-
# (c) 2015, Joseph Callen <jcallen () csc.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from ansible.module_utils.six import iteritems
import atexit
import os
import ssl
import time
from ansible.module_utils.urls import fetch_url
try:
# requests is required for exception handling of the ConnectionError
import requests
from pyVim import connect
from pyVmomi import vim
HAS_PYVMOMI = True
except ImportError:
HAS_PYVMOMI = False
class TaskError(Exception):
pass
def wait_for_task(task):
while True:
if task.info.state == vim.TaskInfo.State.success:
return True, task.info.result
if task.info.state == vim.TaskInfo.State.error:
try:
raise TaskError(task.info.error)
except AttributeError:
raise TaskError("An unknown error has occurred")
if task.info.state == vim.TaskInfo.State.running:
time.sleep(15)
if task.info.state == vim.TaskInfo.State.queued:
time.sleep(15)
def find_dvspg_by_name(dv_switch, portgroup_name):
portgroups = dv_switch.portgroup
for pg in portgroups:
if pg.name == portgroup_name:
return pg
return None
def find_entity_child_by_path(content, entityRootFolder, path):
entity = entityRootFolder
searchIndex = content.searchIndex
paths = path.split("/")
try:
for path in paths:
            entity = searchIndex.FindChild(entity, path)
if entity.name == paths[-1]:
return entity
except:
pass
return None
# Maintain for legacy, or remove with 2.1 ?
# Should be replaced with find_cluster_by_name
def find_cluster_by_name_datacenter(datacenter, cluster_name):
host_folder = datacenter.hostFolder
for folder in host_folder.childEntity:
if folder.name == cluster_name:
return folder
return None
def find_cluster_by_name(content, cluster_name, datacenter=None):
if datacenter:
folder = datacenter.hostFolder
else:
folder = content.rootFolder
clusters = get_all_objs(content, [vim.ClusterComputeResource], folder)
for cluster in clusters:
if cluster.name == cluster_name:
return cluster
return None
def find_datacenter_by_name(content, datacenter_name):
datacenters = get_all_objs(content, [vim.Datacenter])
for dc in datacenters:
if dc.name == datacenter_name:
return dc
return None
def find_datastore_by_name(content, datastore_name):
datastores = get_all_objs(content, [vim.Datastore])
for ds in datastores:
if ds.name == datastore_name:
return ds
return None
def find_dvs_by_name(content, switch_name):
vmware_distributed_switches = get_all_objs(content, [vim.dvs.VmwareDistributedVirtualSwitch])
for dvs in vmware_distributed_switches:
if dvs.name == switch_name:
return dvs
return None
def find_hostsystem_by_name(content, hostname):
host_system = get_all_objs(content, [vim.HostSystem])
for host in host_system:
if host.name == hostname:
return host
return None
def find_vm_by_id(content, vm_id, vm_id_type="vm_name", datacenter=None, cluster=None):
""" UUID is unique to a VM, every other id returns the first match. """
si = content.searchIndex
vm = None
if vm_id_type == 'dns_name':
vm = si.FindByDnsName(datacenter=datacenter, dnsName=vm_id, vmSearch=True)
elif vm_id_type == 'inventory_path':
vm = si.FindByInventoryPath(inventoryPath=vm_id)
if isinstance(vm, vim.VirtualMachine):
vm = None
elif vm_id_type == 'uuid':
vm = si.FindByUuid(datacenter=datacenter, instanceUuid=vm_id, vmSearch=True)
elif vm_id_type == 'ip':
vm = si.FindByIp(datacenter=datacenter, ip=vm_id, vmSearch=True)
elif vm_id_type == 'vm_name':
folder = None
if cluster:
folder = cluster
elif datacenter:
folder = datacenter.hostFolder
vm = find_vm_by_name(content, vm_id, folder)
return vm
def find_vm_by_name(content, vm_name, folder=None, recurse=True):
vms = get_all_objs(content, [vim.VirtualMachine], folder, recurse=recurse)
for vm in vms:
if vm.name == vm_name:
return vm
return None
def find_host_portgroup_by_name(host, portgroup_name):
for portgroup in host.config.network.portgroup:
if portgroup.spec.name == portgroup_name:
return portgroup
return None
def gather_vm_facts(content, vm):
""" Gather facts from vim.VirtualMachine object. """
facts = {
'module_hw': True,
'hw_name': vm.config.name,
'hw_power_status': vm.summary.runtime.powerState,
'hw_guest_full_name': vm.summary.guest.guestFullName,
'hw_guest_id': vm.summary.guest.guestId,
'hw_product_uuid': vm.config.uuid,
'hw_processor_count': vm.config.hardware.numCPU,
'hw_memtotal_mb': vm.config.hardware.memoryMB,
'hw_interfaces': [],
'guest_tools_status': vm.guest.toolsRunningStatus,
'guest_tools_version': vm.guest.toolsVersion,
'ipv4': None,
'ipv6': None,
'annotation': vm.config.annotation,
'customvalues': {},
'snapshots': [],
'current_snapshot': None,
}
cfm = content.customFieldsManager
# Resolve custom values
for value_obj in vm.summary.customValue:
kn = value_obj.key
if cfm is not None and cfm.field:
for f in cfm.field:
if f.key == value_obj.key:
kn = f.name
# Exit the loop immediately, we found it
break
facts['customvalues'][kn] = value_obj.value
net_dict = {}
for device in vm.guest.net:
net_dict[device.macAddress] = list(device.ipAddress)
for k, v in iteritems(net_dict):
for ipaddress in v:
if ipaddress:
if '::' in ipaddress:
facts['ipv6'] = ipaddress
else:
facts['ipv4'] = ipaddress
ethernet_idx = 0
for idx, entry in enumerate(vm.config.hardware.device):
if not hasattr(entry, 'macAddress'):
continue
factname = 'hw_eth' + str(ethernet_idx)
facts[factname] = {
'addresstype': entry.addressType,
'label': entry.deviceInfo.label,
'macaddress': entry.macAddress,
'ipaddresses': net_dict.get(entry.macAddress, None),
'macaddress_dash': entry.macAddress.replace(':', '-'),
'summary': entry.deviceInfo.summary,
}
facts['hw_interfaces'].append('eth' + str(ethernet_idx))
ethernet_idx += 1
snapshot_facts = list_snapshots(vm)
if 'snapshots' in snapshot_facts:
facts['snapshots'] = snapshot_facts['snapshots']
facts['current_snapshot'] = snapshot_facts['current_snapshot']
return facts
def deserialize_snapshot_obj(obj):
return {'id': obj.id,
'name': obj.name,
'description': obj.description,
'creation_time': obj.createTime,
'state': obj.state}
def list_snapshots_recursively(snapshots):
snapshot_data = []
for snapshot in snapshots:
snapshot_data.append(deserialize_snapshot_obj(snapshot))
snapshot_data = snapshot_data + list_snapshots_recursively(snapshot.childSnapshotList)
return snapshot_data
def get_current_snap_obj(snapshots, snapob):
snap_obj = []
for snapshot in snapshots:
if snapshot.snapshot == snapob:
snap_obj.append(snapshot)
snap_obj = snap_obj + get_current_snap_obj(snapshot.childSnapshotList, snapob)
return snap_obj
def list_snapshots(vm):
result = {}
if vm.snapshot is None:
return result
result['snapshots'] = list_snapshots_recursively(vm.snapshot.rootSnapshotList)
current_snapref = vm.snapshot.currentSnapshot
current_snap_obj = get_current_snap_obj(vm.snapshot.rootSnapshotList, current_snapref)
result['current_snapshot'] = deserialize_snapshot_obj(current_snap_obj[0])
return result
def vmware_argument_spec():
return dict(
hostname=dict(type='str', required=True),
username=dict(type='str', aliases=['user', 'admin'], required=True),
password=dict(type='str', aliases=['pass', 'pwd'], required=True, no_log=True),
validate_certs=dict(type='bool', required=False, default=True),
)
def connect_to_api(module, disconnect_atexit=True):
hostname = module.params['hostname']
username = module.params['username']
password = module.params['password']
validate_certs = module.params['validate_certs']
if validate_certs and not hasattr(ssl, 'SSLContext'):
module.fail_json(msg='pyVim does not support changing verification mode with python < 2.7.9. Either update '
                         'python or use validate_certs=false')
try:
service_instance = connect.SmartConnect(host=hostname, user=username, pwd=password)
except vim.fault.InvalidLogin as invalid_login:
module.fail_json(msg=invalid_login.msg, apierror=str(invalid_login))
except (requests.ConnectionError, ssl.SSLError) as connection_error:
if '[SSL: CERTIFICATE_VERIFY_FAILED]' in str(connection_error) and not validate_certs:
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_NONE
service_instance = connect.SmartConnect(host=hostname, user=username, pwd=password, sslContext=context)
else:
module.fail_json(msg="Unable to connect to vCenter or ESXi API on TCP/443.", apierror=str(connection_error))
except Exception as e:
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_NONE
service_instance = connect.SmartConnect(host=hostname, user=username, pwd=password, sslContext=context)
# Disabling atexit should be used in special cases only.
# Such as IP change of the ESXi host which removes the connection anyway.
# Also removal significantly speeds up the return of the module
if disconnect_atexit:
atexit.register(connect.Disconnect, service_instance)
return service_instance.RetrieveContent()
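def _example_module_usage():
    # Illustrative sketch only, not an actual Ansible module: shows how the
    # helpers in this file are typically combined. The 'vm_name' parameter and
    # the failure message are made up for the example.
    from ansible.module_utils.basic import AnsibleModule
    argument_spec = vmware_argument_spec()
    argument_spec.update(dict(vm_name=dict(type='str', required=True)))
    module = AnsibleModule(argument_spec=argument_spec)
    content = connect_to_api(module)
    vm = find_vm_by_id(content, module.params['vm_name'], vm_id_type='vm_name')
    if vm is None:
        module.fail_json(msg='VM %s not found' % module.params['vm_name'])
    module.exit_json(changed=False, instance=gather_vm_facts(content, vm))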
def get_all_objs(content, vimtype, folder=None, recurse=True):
if not folder:
folder = content.rootFolder
obj = {}
container = content.viewManager.CreateContainerView(folder, vimtype, recurse)
for managed_object_ref in container.view:
obj.update({managed_object_ref: managed_object_ref.name})
return obj
def fetch_file_from_guest(module, content, vm, username, password, src, dest):
""" Use VMWare's filemanager api to fetch a file over http """
result = {'failed': False}
tools_status = vm.guest.toolsStatus
if tools_status == 'toolsNotInstalled' or tools_status == 'toolsNotRunning':
result['failed'] = True
result['msg'] = "VMwareTools is not installed or is not running in the guest"
return result
# https://github.com/vmware/pyvmomi/blob/master/docs/vim/vm/guest/NamePasswordAuthentication.rst
creds = vim.vm.guest.NamePasswordAuthentication(
username=username, password=password
)
# https://github.com/vmware/pyvmomi/blob/master/docs/vim/vm/guest/FileManager/FileTransferInformation.rst
fti = content.guestOperationsManager.fileManager. \
InitiateFileTransferFromGuest(vm, creds, src)
result['size'] = fti.size
result['url'] = fti.url
# Use module_utils to fetch the remote url returned from the api
    rsp, info = fetch_url(module, fti.url, use_proxy=False,
force=True, last_mod_time=None,
timeout=10, headers=None)
# save all of the transfer data
for k, v in iteritems(info):
result[k] = v
# exit early if xfer failed
if info['status'] != 200:
result['failed'] = True
return result
# attempt to read the content and write it
try:
with open(dest, 'wb') as f:
f.write(rsp.read())
except Exception as e:
result['failed'] = True
result['msg'] = str(e)
return result
def push_file_to_guest(module, content, vm, username, password, src, dest, overwrite=True):
""" Use VMWare's filemanager api to fetch a file over http """
result = {'failed': False}
tools_status = vm.guest.toolsStatus
if tools_status == 'toolsNotInstalled' or tools_status == 'toolsNotRunning':
result['failed'] = True
result['msg'] = "VMwareTools is not installed or is not running in the guest"
return result
# https://github.com/vmware/pyvmomi/blob/master/docs/vim/vm/guest/NamePasswordAuthentication.rst
creds = vim.vm.guest.NamePasswordAuthentication(
username=username, password=password
)
# the api requires a filesize in bytes
fdata = None
try:
# filesize = os.path.getsize(src)
filesize = os.stat(src).st_size
with open(src, 'rb') as f:
fdata = f.read()
result['local_filesize'] = filesize
except Exception as e:
result['failed'] = True
result['msg'] = "Unable to read src file: %s" % str(e)
return result
# https://www.vmware.com/support/developer/converter-sdk/conv60_apireference/vim.vm.guest.FileManager.html#initiateFileTransferToGuest
file_attribute = vim.vm.guest.FileManager.FileAttributes()
url = content.guestOperationsManager.fileManager. \
InitiateFileTransferToGuest(vm, creds, dest, file_attribute,
filesize, overwrite)
# PUT the filedata to the url ...
    rsp, info = fetch_url(module, url, method="put", data=fdata,
use_proxy=False, force=True, last_mod_time=None,
timeout=10, headers=None)
result['msg'] = str(rsp.read())
# save all of the transfer data
for k, v in iteritems(info):
result[k] = v
return result
def run_command_in_guest(content, vm, username, password, program_path, program_args, program_cwd, program_env):
result = {'failed': False}
tools_status = vm.guest.toolsStatus
if (tools_status == 'toolsNotInstalled' or
tools_status == 'toolsNotRunning'):
result['failed'] = True
result['msg'] = "VMwareTools is not installed or is not running in the guest"
return result
# https://github.com/vmware/pyvmomi/blob/master/docs/vim/vm/guest/NamePasswordAuthentication.rst
creds = vim.vm.guest.NamePasswordAuthentication(
username=username, password=password
)
try:
# https://github.com/vmware/pyvmomi/blob/master/docs/vim/vm/guest/ProcessManager.rst
pm = content.guestOperationsManager.processManager
# https://www.vmware.com/support/developer/converter-sdk/conv51_apireference/vim.vm.guest.ProcessManager.ProgramSpec.html
ps = vim.vm.guest.ProcessManager.ProgramSpec(
# programPath=program,
# arguments=args
programPath=program_path,
arguments=program_args,
workingDirectory=program_cwd,
)
res = pm.StartProgramInGuest(vm, creds, ps)
result['pid'] = res
pdata = pm.ListProcessesInGuest(vm, creds, [res])
# wait for pid to finish
while not pdata[0].endTime:
time.sleep(1)
pdata = pm.ListProcessesInGuest(vm, creds, [res])
result['owner'] = pdata[0].owner
result['startTime'] = pdata[0].startTime.isoformat()
result['endTime'] = pdata[0].endTime.isoformat()
result['exitCode'] = pdata[0].exitCode
if result['exitCode'] != 0:
result['failed'] = True
result['msg'] = "program exited non-zero"
else:
result['msg'] = "program completed successfully"
except Exception as e:
result['msg'] = str(e)
result['failed'] = True
return result
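def _example_run_command_in_guest(module):
    # Illustrative sketch only: assumes an already constructed AnsibleModule
    # (see _example_module_usage above) and a Linux guest with VMware Tools
    # running. The UUID, credentials and command are made up for the example.
    content = connect_to_api(module)
    vm = find_vm_by_id(content, 'example-instance-uuid', vm_id_type='uuid')
    return run_command_in_guest(content, vm, 'root', 'guest-password',
                                '/bin/touch', '/tmp/example', '/tmp', None)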
| gpl-3.0 |
tangyiyong/odoo | addons/analytic_user_function/__init__.py | 441 | 1086 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import analytic_user_function
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
zzzombat/precise-python-django-social-auth | debian/python-django-social-auth/usr/share/pyshared/social_auth/backends/contrib/weibo.py | 2 | 2383 | #coding:utf8
#author:[email protected] https://github.com/hepochen
"""
Weibo OAuth2 support.
This script adds support for Weibo OAuth service. An application must
be registered first on http://open.weibo.com.
WEIBO_CLIENT_KEY and WEIBO_CLIENT_SECRET must be defined in the settings.py
correctly.
By default account id,profile_image_url,gender are stored in extra_data field,
check OAuthBackend class for details on how to extend it.
"""
from urllib import urlencode
from django.utils import simplejson
from social_auth.backends import OAuthBackend, USERNAME, BaseOAuth2
from social_auth.utils import dsa_urlopen
WEIBO_SERVER = 'api.weibo.com'
WEIBO_REQUEST_TOKEN_URL = 'https://%s/oauth2/request_token' % WEIBO_SERVER
WEIBO_ACCESS_TOKEN_URL = 'https://%s/oauth2/access_token' % WEIBO_SERVER
WEIBO_AUTHORIZATION_URL = 'https://%s/oauth2/authorize' % WEIBO_SERVER
class WeiboBackend(OAuthBackend):
"""Weibo (of sina) OAuth authentication backend"""
name = 'weibo'
# Default extra data to store
EXTRA_DATA = [
('id', 'id'),
('name', 'username'),
('profile_image_url', 'profile_image_url'),
('gender', 'gender')
]
def get_user_id(self, details, response):
return response['uid']
def get_user_details(self, response):
"""Return user details from Weibo. API URL is:
https://api.weibo.com/2/users/show.json/?uid=<UID>&access_token=<TOKEN>
"""
return {USERNAME: response.get("name", ""),
'first_name': response.get('screen_name', '')}
class WeiboAuth(BaseOAuth2):
"""Weibo OAuth authentication mechanism"""
AUTHORIZATION_URL = WEIBO_AUTHORIZATION_URL
REQUEST_TOKEN_URL = WEIBO_REQUEST_TOKEN_URL
ACCESS_TOKEN_URL = WEIBO_ACCESS_TOKEN_URL
SERVER_URL = WEIBO_SERVER
AUTH_BACKEND = WeiboBackend
SETTINGS_KEY_NAME = 'WEIBO_CLIENT_KEY'
SETTINGS_SECRET_NAME = 'WEIBO_CLIENT_SECRET'
REDIRECT_STATE = False
def user_data(self, access_token, *args, **kwargs):
uid = args[0]['uid']
data = {'access_token': access_token, 'uid': uid}
url = 'https://api.weibo.com/2/users/show.json?' + urlencode(data)
try:
return simplejson.loads(dsa_urlopen(url).read())
except (ValueError, KeyError, IOError):
return None
# Backend definition
BACKENDS = {
'weibo': WeiboAuth
}
| bsd-3-clause |
ghandiosm/Test | addons/google_spreadsheet/google_spreadsheet.py | 41 | 4717 | # Part of Odoo. See LICENSE file for full copyright and licensing details.
import cgi
import json
import logging
from lxml import etree
import re
import werkzeug.urls
import urllib2
from openerp.osv import osv
from openerp.addons.google_account import TIMEOUT
_logger = logging.getLogger(__name__)
class config(osv.osv):
_inherit = 'google.drive.config'
def get_google_scope(self):
scope = super(config, self).get_google_scope()
return '%s https://spreadsheets.google.com/feeds' % scope
def write_config_formula(self, cr, uid, attachment_id, spreadsheet_key, model, domain, groupbys, view_id, context=None):
access_token = self.get_access_token(cr, uid, scope='https://spreadsheets.google.com/feeds', context=context)
fields = self.pool.get(model).fields_view_get(cr, uid, view_id=view_id, view_type='tree')
doc = etree.XML(fields.get('arch'))
display_fields = []
for node in doc.xpath("//field"):
if node.get('modifiers'):
modifiers = json.loads(node.get('modifiers'))
if not modifiers.get('invisible') and not modifiers.get('tree_invisible'):
display_fields.append(node.get('name'))
fields = " ".join(display_fields)
domain = domain.replace("'", r"\'").replace('"', "'")
if groupbys:
fields = "%s %s" % (groupbys, fields)
formula = '=oe_read_group("%s";"%s";"%s";"%s")' % (model, fields, groupbys, domain)
else:
formula = '=oe_browse("%s";"%s";"%s")' % (model, fields, domain)
url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
dbname = cr.dbname
user = self.pool['res.users'].read(cr, uid, [uid], ['login', 'password'], context=context)[0]
username = user['login']
password = user['password']
if not password:
config_formula = '=oe_settings("%s";"%s")' % (url, dbname)
else:
config_formula = '=oe_settings("%s";"%s";"%s";"%s")' % (url, dbname, username, password)
request = '''<feed xmlns="http://www.w3.org/2005/Atom"
xmlns:batch="http://schemas.google.com/gdata/batch"
xmlns:gs="http://schemas.google.com/spreadsheets/2006">
<id>https://spreadsheets.google.com/feeds/cells/{key}/od6/private/full</id>
<entry>
<batch:id>A1</batch:id>
<batch:operation type="update"/>
<id>https://spreadsheets.google.com/feeds/cells/{key}/od6/private/full/R1C1</id>
<link rel="edit" type="application/atom+xml"
href="https://spreadsheets.google.com/feeds/cells/{key}/od6/private/full/R1C1"/>
<gs:cell row="1" col="1" inputValue="{formula}"/>
</entry>
<entry>
<batch:id>A2</batch:id>
<batch:operation type="update"/>
<id>https://spreadsheets.google.com/feeds/cells/{key}/od6/private/full/R60C15</id>
<link rel="edit" type="application/atom+xml"
href="https://spreadsheets.google.com/feeds/cells/{key}/od6/private/full/R60C15"/>
<gs:cell row="60" col="15" inputValue="{config}"/>
</entry>
</feed>''' .format(key=spreadsheet_key, formula=cgi.escape(formula, quote=True), config=cgi.escape(config_formula, quote=True))
try:
req = urllib2.Request(
'https://spreadsheets.google.com/feeds/cells/%s/od6/private/full/batch?%s' % (spreadsheet_key, werkzeug.url_encode({'v': 3, 'access_token': access_token})),
data=request,
headers={'content-type': 'application/atom+xml', 'If-Match': '*'})
urllib2.urlopen(req, timeout=TIMEOUT)
except (urllib2.HTTPError, urllib2.URLError):
_logger.warning("An error occured while writting the formula on the Google Spreadsheet.")
description = '''
formula: %s
''' % formula
if attachment_id:
self.pool['ir.attachment'].write(cr, uid, attachment_id, {'description': description}, context=context)
return True
def set_spreadsheet(self, cr, uid, model, domain, groupbys, view_id, context=None):
try:
config_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'google_spreadsheet', 'google_spreadsheet_template')[1]
except ValueError:
raise
config = self.browse(cr, uid, config_id, context=context)
title = 'Spreadsheet %s' % model
res = self.copy_doc(cr, uid, False, config.google_drive_resource_id, title, model, context=context)
mo = re.search("(key=|/d/)([A-Za-z0-9-_]+)", res['url'])
if mo:
key = mo.group(2)
self.write_config_formula(cr, uid, res.get('id'), key, model, domain, groupbys, view_id, context=context)
return res
| gpl-3.0 |
yiakwy/numpy | numpy/doc/misc.py | 124 | 6164 | """
=============
Miscellaneous
=============
IEEE 754 Floating Point Special Values
--------------------------------------
Special values defined in numpy: nan, inf.
NaNs can be used as a poor-man's mask (if you don't care what the
original value was)
Note: cannot use equality to test NaNs. E.g.: ::
>>> myarr = np.array([1., 0., np.nan, 3.])
>>> np.where(myarr == np.nan)
>>> np.nan == np.nan # is always False! Use special numpy functions instead.
False
>>> myarr[myarr == np.nan] = 0. # doesn't work
>>> myarr
array([ 1., 0., NaN, 3.])
>>> myarr[np.isnan(myarr)] = 0. # use this instead
>>> myarr
array([ 1., 0., 0., 3.])
Other related special value functions: ::
isinf(): True if value is inf
isfinite(): True if not nan or inf
nan_to_num(): Map nan to 0, inf to max float, -inf to min float
The following corresponds to the usual functions except that nans are excluded
from the results: ::
nansum()
nanmax()
nanmin()
nanargmax()
nanargmin()
>>> x = np.arange(10.)
>>> x[3] = np.nan
>>> x.sum()
nan
>>> np.nansum(x)
42.0
How numpy handles numerical exceptions
--------------------------------------
The default is to ``'warn'`` for ``invalid``, ``divide``, and ``overflow``
and ``'ignore'`` for ``underflow``. But this can be changed, and it can be
set individually for different kinds of exceptions. The different behaviors
are:
- 'ignore' : Take no action when the exception occurs.
- 'warn' : Print a `RuntimeWarning` (via the Python `warnings` module).
- 'raise' : Raise a `FloatingPointError`.
- 'call' : Call a function specified using the `seterrcall` function.
- 'print' : Print a warning directly to ``stdout``.
- 'log' : Record error in a Log object specified by `seterrcall`.
These behaviors can be set for all kinds of errors or specific ones:
- all : apply to all numeric exceptions
- invalid : when NaNs are generated
- divide : divide by zero (for integers as well!)
- overflow : floating point overflows
- underflow : floating point underflows
Note that integer divide-by-zero is handled by the same machinery.
These behaviors are set on a per-thread basis.
Examples
--------
::
>>> oldsettings = np.seterr(all='warn')
>>> np.zeros(5,dtype=np.float32)/0.
invalid value encountered in divide
>>> j = np.seterr(under='ignore')
>>> np.array([1.e-100])**10
>>> j = np.seterr(invalid='raise')
>>> np.sqrt(np.array([-1.]))
FloatingPointError: invalid value encountered in sqrt
>>> def errorhandler(errstr, errflag):
... print "saw stupid error!"
>>> np.seterrcall(errorhandler)
<function err_handler at 0x...>
>>> j = np.seterr(all='call')
>>> np.zeros(5, dtype=np.int32)/0
FloatingPointError: invalid value encountered in divide
saw stupid error!
>>> j = np.seterr(**oldsettings) # restore previous
... # error-handling settings
Interfacing to C
----------------
Only a survey of the choices. Little detail on how each works.
1) Bare metal, wrap your own C-code manually.
- Plusses:
- Efficient
- No dependencies on other tools
- Minuses:
- Lots of learning overhead:
- need to learn basics of Python C API
- need to learn basics of numpy C API
- need to learn how to handle reference counting and love it.
- Reference counting often difficult to get right.
- getting it wrong leads to memory leaks, and worse, segfaults
- API will change for Python 3.0!
2) Cython
- Plusses:
- avoid learning C API's
- no dealing with reference counting
- can code in pseudo python and generate C code
- can also interface to existing C code
- should shield you from changes to Python C api
- has become the de-facto standard within the scientific Python community
- fast indexing support for arrays
- Minuses:
- Can write code in non-standard form which may become obsolete
- Not as flexible as manual wrapping
3) ctypes
- Plusses:
- part of Python standard library
- good for interfacing to existing sharable libraries, particularly
Windows DLLs
- avoids API/reference counting issues
- good numpy support: arrays have all these in their ctypes
attribute: ::
a.ctypes.data a.ctypes.get_strides
a.ctypes.data_as a.ctypes.shape
a.ctypes.get_as_parameter a.ctypes.shape_as
a.ctypes.get_data a.ctypes.strides
a.ctypes.get_shape a.ctypes.strides_as
- Minuses:
- can't use for writing code to be turned into C extensions, only a wrapper
tool.
4) SWIG (automatic wrapper generator)
- Plusses:
- around a long time
- multiple scripting language support
- C++ support
- Good for wrapping large (many functions) existing C libraries
- Minuses:
- generates lots of code between Python and the C code
- can cause performance problems that are nearly impossible to optimize
out
- interface files can be hard to write
- doesn't necessarily avoid reference counting issues or needing to know
API's
5) scipy.weave
- Plusses:
- can turn many numpy expressions into C code
- dynamic compiling and loading of generated C code
- can embed pure C code in Python module and have weave extract, generate
interfaces and compile, etc.
- Minuses:
- Future very uncertain: it's the only part of Scipy not ported to Python 3
and is effectively deprecated in favor of Cython.
6) Psyco
- Plusses:
- Turns pure python into efficient machine code through jit-like
optimizations
- very fast when it optimizes well
- Minuses:
- Only on intel (windows?)
- Doesn't do much for numpy?
Interfacing to Fortran:
-----------------------
The clear choice to wrap Fortran code is
`f2py <http://docs.scipy.org/doc/numpy-dev/f2py/>`_.
Pyfort is an older alternative, but not supported any longer.
Fwrap is a newer project that looked promising but isn't being developed any
longer.
Interfacing to C++:
-------------------
1) Cython
2) CXX
3) Boost.python
4) SWIG
5) SIP (used mainly in PyQT)
"""
from __future__ import division, absolute_import, print_function
| bsd-3-clause |
mnahm5/django-estore | Lib/site-packages/crispy_forms/base.py | 19 | 1042 | def from_iterable(iterables):
"""
Backport of `itertools.chain.from_iterable` compatible with Python 2.5
"""
for it in iterables:
for element in it:
if isinstance(element, dict):
for key in element:
yield key
else:
yield element
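def _example_from_iterable():
    # Small illustrative check of the dict-flattening behaviour above; the
    # layout-like field names are made up.
    flattened = list(from_iterable([['field_a', {'field_b': 'css-class'}], ['field_c']]))
    assert flattened == ['field_a', 'field_b', 'field_c']
    return flattened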
class KeepContext(object):
"""
Context manager that receives a `django.template.Context` instance and a list of keys
Once the context manager is exited, it removes `keys` from the context, to avoid
side effects in later layout objects that may use the same context variables.
Layout objects should use `extra_context` to introduce context variables, never
touch context object themselves, that could introduce side effects.
"""
def __init__(self, context, keys):
self.context = context
self.keys = keys
def __enter__(self):
pass
def __exit__(self, type, value, traceback):
for key in list(self.keys):
del self.context[key]
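def _example_keep_context_usage():
    # Illustrative sketch only: the variable names are made up. 'extra' is
    # available inside the block and removed once it exits, so later layout
    # objects sharing this context are unaffected.
    from django.template import Context
    context = Context({'form_style': 'default', 'extra': 'temporary'})
    with KeepContext(context, ['extra']):
        assert context['extra'] == 'temporary'
    return context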
| mit |
beswarm/django-allauth | allauth/socialaccount/providers/flickr/provider.py | 50 | 2011 | from allauth.socialaccount import providers
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth.provider import OAuthProvider
class FlickrAccount(ProviderAccount):
def get_profile_url(self):
return self.account.extra_data \
.get('person').get('profileurl').get('_content')
def get_avatar_url(self):
return self.account.extra_data.get('picture-url')
def to_str(self):
dflt = super(FlickrAccount, self).to_str()
name = self.account.extra_data \
.get('person').get('realname').get('_content', dflt)
return name
class FlickrProvider(OAuthProvider):
id = 'flickr'
name = 'Flickr'
package = 'allauth.socialaccount.providers.flickr'
account_class = FlickrAccount
def get_default_scope(self):
scope = []
return scope
def get_auth_params(self, request, action):
ret = super(FlickrProvider, self).get_auth_params(request,
action)
if 'perms' not in ret:
ret['perms'] = 'read'
return ret
def get_profile_fields(self):
default_fields = ['id',
'first-name',
'last-name',
'email-address',
'picture-url',
'public-profile-url']
fields = self.get_settings().get('PROFILE_FIELDS',
default_fields)
return fields
def extract_uid(self, data):
return data['person']['nsid']
def extract_common_fields(self, data):
person = data.get('person', {})
name = person.get('realname', {}).get('_content')
username = person.get('username', {}).get('_content')
return dict(email=data.get('email-address'),
name=name,
username=username)
providers.registry.register(FlickrProvider)
| mit |
fluxw42/youtube-dl | youtube_dl/extractor/urort.py | 64 | 2249 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
)
from ..utils import (
unified_strdate,
)
class UrortIE(InfoExtractor):
IE_DESC = 'NRK P3 Urørt'
_VALID_URL = r'https?://(?:www\.)?urort\.p3\.no/#!/Band/(?P<id>[^/]+)$'
_TEST = {
'url': 'https://urort.p3.no/#!/Band/Gerilja',
'md5': '5ed31a924be8a05e47812678a86e127b',
'info_dict': {
'id': '33124-24',
'ext': 'mp3',
'title': 'The Bomb',
'thumbnail': r're:^https?://.+\.jpg',
'uploader': 'Gerilja',
'uploader_id': 'Gerilja',
'upload_date': '20100323',
},
'params': {
'matchtitle': '^The Bomb$', # To test, we want just one video
}
}
def _real_extract(self, url):
playlist_id = self._match_id(url)
fstr = compat_urllib_parse.quote("InternalBandUrl eq '%s'" % playlist_id)
json_url = 'http://urort.p3.no/breeze/urort/TrackDTOViews?$filter=%s&$orderby=Released%%20desc&$expand=Tags%%2CFiles' % fstr
songs = self._download_json(json_url, playlist_id)
entries = []
for s in songs:
formats = [{
'tbr': f.get('Quality'),
'ext': f['FileType'],
'format_id': '%s-%s' % (f['FileType'], f.get('Quality', '')),
'url': 'http://p3urort.blob.core.windows.net/tracks/%s' % f['FileRef'],
'preference': 3 if f['FileType'] == 'mp3' else 2,
} for f in s['Files']]
self._sort_formats(formats)
e = {
'id': '%d-%s' % (s['BandId'], s['$id']),
'title': s['Title'],
'uploader_id': playlist_id,
'uploader': s.get('BandName', playlist_id),
'thumbnail': 'http://urort.p3.no/cloud/images/%s' % s['Image'],
'upload_date': unified_strdate(s.get('Released')),
'formats': formats,
}
entries.append(e)
return {
'_type': 'playlist',
'id': playlist_id,
'title': playlist_id,
'entries': entries,
}
| unlicense |
yw374cornell/e-mission-server | emission/analysis/result/carbon.py | 2 | 12278 | # Standard imports
import logging
from datetime import datetime, timedelta
from uuid import UUID
# Our imports
import emission.net.api.distance as distance
# from get_database import get_user_db
from emission.core.wrapper.user import User
from emission.core.common import getDistinctUserCount, getAllModes, getDisplayModes, getQuerySpec, addFilterToSpec, getTripCountForMode, getModeShare, getDistanceForMode,\
getModeShareDistance, convertToAvg
# Although air is a motorized mode, we don't include it here because there is
# not much point in finding < 5 km air trips to convert to non motorized trips
# Instead, we handle air separately
motorizedModeList = ["bus", "train", "car"]
longMotorizedModeList = ["air"]
carbonFootprintForMode = {'walking' : 0,
'running' : 0,
'cycling' : 0,
'mixed' : 0,
'bus_short' : 267.0/1609,
'bus_long' : 267.0/1609,
'train_short' : 92.0/1609,
'train_long' : 92.0/1609,
'car_short' : 278.0/1609,
'car_long' : 278.0/1609,
'air_short' : 217.0/1609,
'air_long' : 217.0/1609
}
# TODO: What should the optimal carbon footprint for air travel be? One option
# is to say that it should be replaced by a train ride just like all motorized
# transport. But that is not really practical for overseas trips. Punt it for
# now and just assume it is optimal? But some car trips might not be
# replaceable by car trips anyway. Ok so punt it and replace with train
optimalCarbonFootprintForMode = {'walking' : 0,
'running' : 0,
'cycling' : 0,
'mixed' : 0,
'bus_short' : 0,
'bus_long' : 92.0/1609,
'train_short' : 0,
'train_long' : 92.0/1609,
'car_short' : 0,
'car_long' : 92.0/1609,
'air_short' : 92.0/1609,
'air_long' : 217.0/1609
}
# TODO: We need to figure out whether to pass in mode or modeID
def getModeCarbonFootprint(user, carbonFootprintMap,start,end):
modeDistanceMap = getShortLongModeShareDistance(user,start,end)
logging.debug("getModeCarbonFootprint, modeDistanceMap = %s" % modeDistanceMap)
logging.debug("footprintMap = %s" % carbonFootprintMap)
return getCarbonFootprintsForMap(modeDistanceMap, carbonFootprintMap)
def getCarbonFootprintsForMap(modeDistanceMap, carbonFootprintMap):
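    """
    Return a map from mode name to carbon footprint, computed as
    footprint * distance / 1000 (presumably converting grams to kilograms).
    """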
logging.debug("In getCarbonFootprintsForMap, modeDistanceMap = %s" % modeDistanceMap)
modeFootprintMap = {}
for modeName in modeDistanceMap:
# logging.debug("Consider mode with name %s" % modeName)
carbonForMode = float(carbonFootprintMap[modeName] * modeDistanceMap[modeName])/1000
# logging.debug("carbonForMode %s = %s from %s * %s" %
# (modeName, carbonForMode, carbonFootprintMap[modeName], modeDistanceMap[modeName]))
modeFootprintMap[modeName] = carbonForMode
return modeFootprintMap
def getShortLongModeShareDistance(user,start,end):
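    """
    Return a map from display mode name to total distance travelled, splitting
    motorized modes into <mode>_short / <mode>_long at 5 km (600 km for air).
    """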
displayModes = getDisplayModes()
modeDistanceMap = {}
for mode in displayModes:
modeId = mode['mode_id']
if mode['mode_name'] in (motorizedModeList + longMotorizedModeList):
# We need to split it into short and long
if mode['mode_name'] in motorizedModeList:
specShort = appendDistanceFilter(getQuerySpec(user, modeId,start,end), {"$lte": 5000})
specLong = appendDistanceFilter(getQuerySpec(user, modeId,start,end), {"$gte": 5000})
else:
assert(mode['mode_name'] in longMotorizedModeList)
threshold = 600 * 1000 # 600km in meters
specShort = appendDistanceFilter(getQuerySpec(user, modeId,start,end), {"$lte": threshold})
specLong = appendDistanceFilter(getQuerySpec(user, modeId,start,end), {"$gte": threshold})
shortDistanceForMode = getDistanceForMode(specShort)
modeDistanceMap[mode['mode_name']+"_short"] = shortDistanceForMode
longDistanceForMode = getDistanceForMode(specLong)
modeDistanceMap[mode['mode_name']+"_long"] = longDistanceForMode
else:
spec = getQuerySpec(user, mode['mode_id'],start,end)
distanceForMode = getDistanceForMode(spec)
modeDistanceMap[mode['mode_name']] = distanceForMode
return modeDistanceMap
def appendDistanceFilter(spec, distFilter):
distanceFilter = {'distance': distFilter}
return addFilterToSpec(spec, distanceFilter)
def delModeNameWithSuffix(modeName, suffix, modeDistanceMap):
    modeNameWithSuffix = '%s%s' % (modeName, suffix)
logging.debug("In delModeNameWithSuffix.modeNameWithSuffix = %s" % modeNameWithSuffix)
if modeNameWithSuffix in modeDistanceMap:
del modeDistanceMap[modeNameWithSuffix]
# Doesn't return anything, deletes entries from the distance map as a side effect
def delLongMotorizedModes(modeDistanceMap):
logging.debug("At the beginning of delLongMotorizedModes, the distance map was %s" % modeDistanceMap)
for mode in longMotorizedModeList:
logging.debug("Deleting entries for mode %s from the distance map" % mode)
delModeNameWithSuffix(mode, "", modeDistanceMap)
delModeNameWithSuffix(mode, "_short", modeDistanceMap)
delModeNameWithSuffix(mode, "_long", modeDistanceMap)
logging.debug("At the end of delLongMotorizedModes, the distance map was %s" % modeDistanceMap)
def getFootprintCompare(user_uuid):
"""
The user is assumed to be a UUID, not a User object
"""
assert(not isinstance(user_uuid, User))
now = datetime.now()
weekago = now - timedelta(days=7)
return getFootprintCompareForRange(user_uuid, weekago, now)
def getFootprintCompareForRange(user_uuid, start, end):
"""
The input userObj is assumed to be a UUID, not a User object
"""
assert(not isinstance(user_uuid, User))
userObj = User.fromUUID(user_uuid)
myCarbonFootprintForMode = userObj.getCarbonFootprintForMode()
myModeShareCount = getModeShare(user_uuid, start,end)
totalModeShareCount = getModeShare(None, start,end)
logging.debug("myModeShareCount = %s totalModeShareCount = %s" %
(myModeShareCount, totalModeShareCount))
myModeShareDistance = getModeShareDistance(user_uuid,start,end)
totalModeShareDistance = getModeShareDistance(None, start,end)
logging.debug("myModeShareDistance = %s totalModeShareDistance = %s" %
(myModeShareDistance, totalModeShareDistance))
myShortLongModeShareDistance = getShortLongModeShareDistance(user_uuid, start, end)
totalShortLongModeShareDistance = getShortLongModeShareDistance(None, start, end)
myModeCarbonFootprint = getCarbonFootprintsForMap(myShortLongModeShareDistance, myCarbonFootprintForMode)
totalModeCarbonFootprint = getCarbonFootprintsForMap(totalShortLongModeShareDistance, myCarbonFootprintForMode)
logging.debug("myModeCarbonFootprint = %s, totalModeCarbonFootprint = %s" %
(myModeCarbonFootprint, totalModeCarbonFootprint))
myOptimalCarbonFootprint = getCarbonFootprintsForMap(myShortLongModeShareDistance, optimalCarbonFootprintForMode)
totalOptimalCarbonFootprint = getCarbonFootprintsForMap(totalShortLongModeShareDistance, optimalCarbonFootprintForMode)
logging.debug("myOptimalCarbonFootprint = %s, totalOptimalCarbonFootprint = %s" %
(myOptimalCarbonFootprint, totalOptimalCarbonFootprint))
delLongMotorizedModes(myShortLongModeShareDistance)
delLongMotorizedModes(totalShortLongModeShareDistance)
logging.debug("After deleting long motorized mode, map is %s", myShortLongModeShareDistance)
myModeCarbonFootprintNoLongMotorized = getCarbonFootprintsForMap(myShortLongModeShareDistance, myCarbonFootprintForMode)
totalModeCarbonFootprintNoLongMotorized = getCarbonFootprintsForMap(totalShortLongModeShareDistance, myCarbonFootprintForMode)
myOptimalCarbonFootprintNoLongMotorized = getCarbonFootprintsForMap(myShortLongModeShareDistance, optimalCarbonFootprintForMode)
totalOptimalCarbonFootprintNoLongMotorized = getCarbonFootprintsForMap(totalShortLongModeShareDistance, optimalCarbonFootprintForMode)
nUsers = getDistinctUserCount(getQuerySpec(None, None, start, end))
# Hack to prevent divide by zero on an empty DB.
# We will never really have an empty DB in the real production world,
# but shouldn't crash in that case.
# This is pretty safe because if we have no user_uuids, we won't have any modeCarbonFootprint either
if nUsers == 0:
nUsers = 1
avgModeShareCount = convertToAvg(totalModeShareCount, nUsers)
avgModeShareDistance = convertToAvg(totalModeShareDistance, nUsers)
avgModeCarbonFootprint = convertToAvg(totalModeCarbonFootprint, nUsers)
avgModeCarbonFootprintNoLongMotorized = convertToAvg(totalModeCarbonFootprintNoLongMotorized, nUsers)
    avgOptimalCarbonFootprint = convertToAvg(totalOptimalCarbonFootprint, nUsers)
    avgOptimalCarbonFootprintNoLongMotorized = convertToAvg(totalOptimalCarbonFootprintNoLongMotorized, nUsers)
# avgCarbonFootprint = totalCarbonFootprint/nUsers
#
# carbonFootprint = {"mine": myCarbonFootprint,
# "mean": avgCarbonFootprint,
# "2005 avg": 47173.568,
# "2020 target": 43771.628,
# "2035 target": 40142.892}
return (myModeShareCount, avgModeShareCount,
myModeShareDistance, avgModeShareDistance,
myModeCarbonFootprint, avgModeCarbonFootprint,
myModeCarbonFootprintNoLongMotorized, avgModeCarbonFootprintNoLongMotorized,
myOptimalCarbonFootprint, avgOptimalCarbonFootprint,
myOptimalCarbonFootprintNoLongMotorized, avgOptimalCarbonFootprintNoLongMotorized)
def getSummaryAllTrips(start,end):
# totalModeShareDistance = getModeShareDistance(None, start, end)
totalShortLongModeShareDistance = getShortLongModeShareDistance(None, start, end)
totalModeCarbonFootprint = getCarbonFootprintsForMap(totalShortLongModeShareDistance,
carbonFootprintForMode)
totalOptimalCarbonFootprint = getCarbonFootprintsForMap(totalShortLongModeShareDistance,
optimalCarbonFootprintForMode)
# Hack to prevent divide by zero on an empty DB.
# We will never really have an empty DB in the real production world,
# but shouldn't crash in that case.
# This is pretty safe because if we have no users, we won't have any modeCarbonFootprint either
nUsers = getDistinctUserCount(getQuerySpec(None, None, start, end))
if nUsers == 0:
nUsers = 1
sumModeCarbonFootprint = sum(totalModeCarbonFootprint.values())
sumOptimalCarbonFootprint = sum(totalOptimalCarbonFootprint.values())
sumModeShareDistance = sum(totalShortLongModeShareDistance.values())/1000
# We need to calculate the sums before we delete certain modes from the mode share dict
delLongMotorizedModes(totalShortLongModeShareDistance)
logging.debug("After deleting long motorized mode, map is %s", totalShortLongModeShareDistance)
totalModeCarbonFootprintNoLongMotorized = getCarbonFootprintsForMap(
totalShortLongModeShareDistance,
carbonFootprintForMode)
totalOptimalCarbonFootprintNoLongMotorized = getCarbonFootprintsForMap(
totalShortLongModeShareDistance,
optimalCarbonFootprintForMode)
return {
"current": float(sumModeCarbonFootprint)/nUsers,
"optimal": float(sumOptimalCarbonFootprint)/nUsers,
"current no air": float(sum(totalModeCarbonFootprintNoLongMotorized.values()))/nUsers,
"optimal no air": float(sum(totalOptimalCarbonFootprintNoLongMotorized.values()))/nUsers,
"all drive": float((sumModeShareDistance * carbonFootprintForMode['car_short']))/nUsers,
"SB375 mandate for 2035": 40.142892,
"EO 2050 goal (80% below 1990)": 8.28565
}
def getAllDrive(user_uuid, modeDistanceMap):
assert(not isinstance(user_uuid, User))
user = User.fromUUID(user_uuid)
myCarbonFootprintForMode = user.getCarbonFootprintForMode()
totalDistance = sum(modeDistanceMap.values()) / 1000
return totalDistance * myCarbonFootprintForMode['car_short']
| bsd-3-clause |
botswana-harvard/getresults-distribute | getresults_dst/actions.py | 2 | 2794 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Erik van Widenfelt
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
import os
import pytz
from builtins import FileNotFoundError
from datetime import datetime
from django.conf import settings
from django.core.exceptions import MultipleObjectsReturned
from django.utils import timezone
from .models import History, Pending
tz = pytz.timezone(settings.TIME_ZONE)
def update_on_sent_action(modeladmin, request, uploads):
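    """Sync each upload's sent flag from the History collection; when several
    History rows share a filename, use the earliest sent_datetime."""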
for upload in uploads:
try:
filename = os.path.split(upload.filename)[1]
history = History.objects.get(filename=filename)
upload.sent = True
upload.sent_datetime = history.sent_datetime
except MultipleObjectsReturned:
history = History.objects.filter(filename=filename).order_by('sent_datetime')
upload.sent = True
upload.sent_datetime = history[0].sent_datetime
except History.DoesNotExist:
upload.sent = False
upload.sent_datetime = None
try:
upload.save()
except FileNotFoundError:
upload.file = None
upload.save()
update_on_sent_action.short_description = "Check sent history"
def upload_audit_action(modeladmin, request, queryset):
for obj in queryset:
if obj.sent:
obj.audited = True
obj.audited_datetime = timezone.now()
obj.auditer = request.user
obj.save()
upload_audit_action.short_description = "Audit sent (flag uploads as audited if sent)"
def upload_unaudit_action(modeladmin, request, queryset):
for obj in queryset:
obj.audited = False
obj.audited_datetime = None
obj.auditer = None
obj.save()
upload_unaudit_action.short_description = "Undo audit (flag uploads as not audited)"
def update_pending_files(modeladmin, request, queryset):
upload_path = os.path.join(settings.MEDIA_ROOT, settings.GRTX_UPLOAD_FOLDER)
Pending.objects.all().delete()
for filename in os.listdir(upload_path):
fileinfo = os.stat(os.path.join(upload_path, filename))
Pending.objects.create(
filename=filename,
filesize=fileinfo.st_size,
filetimestamp=tz.localize(datetime.fromtimestamp(fileinfo.st_mtime))
)
update_pending_files.short_description = "Update the list of uploaded files pending delivery."
def unacknowledge_action(modeladmin, request, queryset):
for obj in queryset:
obj.ack_datetime = None
obj.ack_user = None
obj.acknowledged = False
obj.save()
unacknowledge_action.short_description = "Undo an acknowledgement."
| gpl-2.0 |
opentechinstitute/piecewise | piecewise/piecewise/__main__.py | 2 | 6058 | import argparse
from sqlalchemy import create_engine, MetaData
import piecewise.aggregate
import piecewise.config
import piecewise.ingest
import piecewise.query
def refine(config, args):
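    """Return a copy of the configuration restricted to the aggregations,
    bins and statistics selected by the --only-* command line options."""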
modified_aggregations = []
for agg in config.aggregations:
if args.only_compute is not None and not agg.name in args.only_compute:
continue
modified_bins = []
for b in agg.bins:
if args.only_bins is not None and not b.label in args.only_bins:
continue
modified_bins.append(b)
modified_stats = []
for s in agg.statistics:
if args.only_statistics is not None and not s.label in args.only_statistics:
continue
modified_stats.append(s)
modified_agg = piecewise.aggregate.Aggregation(
name = agg.name,
statistics_table_name = agg.statistics_table_name,
bins = modified_bins,
statistics = modified_stats)
modified_aggregations.append(modified_agg)
return piecewise.aggregate.Aggregator(
database_uri = config.database_uri,
cache_table_name = config.cache_table_name,
filters = config.filters,
aggregations = modified_aggregations)
def do_ingest(args):
config = piecewise.config.read_system_config()
config = refine(config, args)
if not args.debug:
piecewise.ingest.ingest(config)
else:
print "Displaying bigquery SQL instead of performing query"
print config.ingest_bigquery_query()
def do_aggregate(args):
config = piecewise.config.read_system_config()
config = refine(config, args)
if not args.debug:
piecewise.aggregate.aggregate(config)
else:
print "Displaying Postgres SQL instead of performing query"
piecewise.aggregate.aggregate(config, args.debug)
def do_query(args):
from piecewise.aggregate import AverageRTT
config = piecewise.config.read_system_config()
config = refine(config, args)
aggregation = None
for agg in config.aggregations:
if agg.name == args.aggregation:
aggregation = agg
if args.stats is not None:
statistics = [piecewise.config.known_statistics[s] for s in args.stats]
else:
statistics = aggregation.statistics
if args.bins is not None:
bins = args.bins
else:
bins = dict()
if args.filters is not None:
filters = args.filters
else:
filters = dict()
if not args.debug:
        results = piecewise.query.query(config, args.aggregation, statistics, bins, filters)
for row in results:
print row
else:
engine = create_engine(config.database_uri)
metadata = MetaData()
engine.metadata = metadata
selection = aggregation.selection(engine, metadata, bins, filters, statistics)
print selection.compile(engine)
def do_load(args):
do_ingest(args)
do_aggregate(args)
def do_display_config(args):
config = piecewise.config.read_system_config()
config = refine(config, args)
print 'Postgres connection: {}'.format(config.database_uri)
print 'Results cache table: {}'.format(config.cache_table_name)
print 'Filters:'
for filt in config.filters:
print '\t{}'.format(filt)
print
print 'Aggregations:'
for agg in config.aggregations:
print '\t{}'.format(agg.name)
print '\t* Bin dimensions'
for b in agg.bins:
print '\t\t{}: {}'.format(b.label, b)
print '\t* Aggregate statistics'
for s in agg.statistics:
print '\t\t{}'.format(s)
def add_ingest_args(parser):
pass
def add_aggregate_args(parser):
pass
def split_string(string):
return string.split(',')
def colon_dict(string):
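    """Parse 'k1:v1,k2:v2' into {'k1': 'v1', 'k2': 'v2'}; keys without a colon map to ''."""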
pairs = string.split(',')
def as_pair(s):
if ':' in s:
return tuple(s.split(':', 1))
else:
return (s, '')
return dict(as_pair(p) for p in pairs)
if __name__ == '__main__':
parser = argparse.ArgumentParser(prog="piecewise", description="Download and aggregate m-lab internet performance data")
parser.add_argument("--debug", action='store_true', help = 'Display rather than execute queries')
parser.add_argument("--only-compute", type=split_string, help='Use only the named aggregations for this run')
parser.add_argument("--only-bins", type=split_string, help='Use only the named bin dimensions for this run')
parser.add_argument("--only-statistics", type=split_string, help='Use only the named statistics for this run')
subparsers = parser.add_subparsers(help="Operation")
ingest_parser = subparsers.add_parser('ingest', help='Pull data from BigQuery into postgres database')
add_ingest_args(ingest_parser)
ingest_parser.set_defaults(func=do_ingest)
aggregate_parser = subparsers.add_parser('aggregate', help='Compute statistics from ingested internet performance data')
add_aggregate_args(aggregate_parser)
aggregate_parser.set_defaults(func=do_aggregate)
display_config_parser = subparsers.add_parser("display-config", help='Display parsed configuration')
display_config_parser.set_defaults(func=do_display_config)
query_parser = subparsers.add_parser("query", help='Query statistics tables')
query_parser.add_argument("-b", "--bins", help="Select and configure bins for query", type=colon_dict)
query_parser.add_argument("-s", "--stats", help="Select statistics for query", type=split_string)
query_parser.add_argument("-f", "--filters", help="Select and configure filters for query", type=colon_dict)
query_parser.add_argument("aggregation", help="Select aggregation for query")
query_parser.set_defaults(func=do_query)
load_parser = subparsers.add_parser('load', help='Ingest and aggregate data in one run')
add_ingest_args(load_parser)
add_aggregate_args(load_parser)
load_parser.set_defaults(func=do_load)
args = parser.parse_args()
args.func(args)
| apache-2.0 |
qgis/QGIS | python/plugins/grassprovider/ext/r_li_padcv_ascii.py | 45 | 1440 | # -*- coding: utf-8 -*-
"""
***************************************************************************
r_li_padcv_ascii.py
-------------------
Date : February 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
from .r_li import checkMovingWindow, configFile, moveOutputTxtFile
def checkParameterValuesBeforeExecuting(alg, parameters, context):
return checkMovingWindow(alg, parameters, context, True)
def processCommand(alg, parameters, context, feedback):
configFile(alg, parameters, context, feedback, True)
def processOutputs(alg, parameters, context, feedback):
moveOutputTxtFile(alg, parameters, context)
| gpl-2.0 |
xianggong/m2c_unit_test | test/operator/unary_minus_uint8/compile.py | 1861 | 4430 | #!/usr/bin/python
import os
import subprocess
import re
def runCommand(command):
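    # Run the command, wait for it to finish and return an iterator over its
    # combined stdout/stderr lines.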
p = subprocess.Popen(command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
p.wait()
return iter(p.stdout.readline, b'')
def dumpRunCommand(command, dump_file_name, postfix):
dumpFile = open(dump_file_name + postfix, "w+")
dumpFile.write(command + "\n")
for line in runCommand(command.split()):
dumpFile.write(line)
def rmFile(file_name):
cmd = "rm -rf " + file_name
runCommand(cmd.split())
def rnm_ir(file_name):
# Append all unnamed variable with prefix 'tmp_'
ir_file_name = file_name + ".ll"
if os.path.isfile(ir_file_name):
        fo = open(ir_file_name, "r+")
lines = fo.readlines()
fo.seek(0)
fo.truncate()
for line in lines:
# Add entry block identifier
if "define" in line:
line += "entry:\n"
# Rename all unnamed variables
line = re.sub('\%([0-9]+)',
r'%tmp_\1',
line.rstrip())
# Also rename branch name
line = re.sub('(\;\ \<label\>\:)([0-9]+)',
r'tmp_\2:',
line.rstrip())
fo.write(line + '\n')
def gen_ir(file_name):
# Directories
root_dir = '../../../'
header_dir = root_dir + "inc/"
# Headers
header = " -I " + header_dir
header += " -include " + header_dir + "m2c_buildin_fix.h "
header += " -include " + header_dir + "clc/clc.h "
header += " -D cl_clang_storage_class_specifiers "
gen_ir = "clang -S -emit-llvm -O0 -target r600-- -mcpu=verde "
cmd_gen_ir = gen_ir + header + file_name + ".cl"
dumpRunCommand(cmd_gen_ir, file_name, ".clang.log")
def asm_ir(file_name):
if os.path.isfile(file_name + ".ll"):
# Command to assemble IR to bitcode
gen_bc = "llvm-as "
gen_bc_src = file_name + ".ll"
gen_bc_dst = file_name + ".bc"
cmd_gen_bc = gen_bc + gen_bc_src + " -o " + gen_bc_dst
runCommand(cmd_gen_bc.split())
def opt_bc(file_name):
if os.path.isfile(file_name + ".bc"):
# Command to optmize bitcode
opt_bc = "opt --mem2reg "
opt_ir_src = file_name + ".bc"
opt_ir_dst = file_name + ".opt.bc"
cmd_opt_bc = opt_bc + opt_ir_src + " -o " + opt_ir_dst
runCommand(cmd_opt_bc.split())
def dis_bc(file_name):
if os.path.isfile(file_name + ".bc"):
# Command to disassemble bitcode
dis_bc = "llvm-dis "
dis_ir_src = file_name + ".opt.bc"
dis_ir_dst = file_name + ".opt.ll"
cmd_dis_bc = dis_bc + dis_ir_src + " -o " + dis_ir_dst
runCommand(cmd_dis_bc.split())
def m2c_gen(file_name):
if os.path.isfile(file_name + ".opt.bc"):
# Command to disassemble bitcode
m2c_gen = "m2c --llvm2si "
m2c_gen_src = file_name + ".opt.bc"
cmd_m2c_gen = m2c_gen + m2c_gen_src
dumpRunCommand(cmd_m2c_gen, file_name, ".m2c.llvm2si.log")
# Remove file if size is 0
if os.path.isfile(file_name + ".opt.s"):
if os.path.getsize(file_name + ".opt.s") == 0:
rmFile(file_name + ".opt.s")
def m2c_bin(file_name):
if os.path.isfile(file_name + ".opt.s"):
# Command to disassemble bitcode
m2c_bin = "m2c --si2bin "
m2c_bin_src = file_name + ".opt.s"
cmd_m2c_bin = m2c_bin + m2c_bin_src
dumpRunCommand(cmd_m2c_bin, file_name, ".m2c.si2bin.log")
def main():
# Commands
for file in os.listdir("./"):
if file.endswith(".cl"):
file_name = os.path.splitext(file)[0]
# Execute commands
gen_ir(file_name)
rnm_ir(file_name)
asm_ir(file_name)
opt_bc(file_name)
dis_bc(file_name)
m2c_gen(file_name)
m2c_bin(file_name)
if __name__ == "__main__":
main()
| gpl-2.0 |
credativ/pulp | server/test/unit/server/db/migrations/test_0005_unit_last_updated.py | 4 | 1940 | from ....base import PulpServerTests
from pulp.plugins.types.database import TYPE_COLLECTION_PREFIX
from pulp.server.db import connection
from pulp.server.db.migrate.models import MigrationModule
ID = '_id'
LAST_UPDATED = '_last_updated'
MIGRATION = 'pulp.server.db.migrations.0005_unit_last_updated'
def test_collections(n=3):
names = []
for suffix in range(0, n):
name = TYPE_COLLECTION_PREFIX + str(suffix)
names.append(name)
return names
def test_units(n=10):
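    # Units with even ids already carry _last_updated; the migration should
    # only touch the odd ones.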
units = []
for unit_id in range(0, n):
unit = {ID: unit_id}
if unit_id % 2 == 0:
unit[LAST_UPDATED] = 1
units.append(unit)
return units
TEST_COLLECTIONS = test_collections()
TEST_UNITS = test_units()
class TestMigration_0005(PulpServerTests):
def setUp(self):
self.clean()
super(TestMigration_0005, self).setUp()
for collection in [connection.get_collection(n, True) for n in TEST_COLLECTIONS]:
for unit in TEST_UNITS:
collection.save(unit, safe=True)
def tearDown(self):
super(TestMigration_0005, self).tearDown()
self.clean()
def clean(self):
database = connection.get_database()
for name in [n for n in database.collection_names() if n in TEST_COLLECTIONS]:
database.drop_collection(name)
def test(self):
# migrate
module = MigrationModule(MIGRATION)._module
module.migrate()
# validation
for collection in [connection.get_collection(n) for n in TEST_COLLECTIONS]:
for unit in collection.find({}):
self.assertTrue(LAST_UPDATED in unit)
unit_id = unit[ID]
last_updated = unit[LAST_UPDATED]
if unit_id % 2 == 0:
self.assertEqual(last_updated, 1)
else:
self.assertTrue(isinstance(last_updated, float))
| gpl-2.0 |
shaanlan/youtube-dl | youtube_dl/extractor/liveleak.py | 110 | 4940 | from __future__ import unicode_literals
import json
import re
from .common import InfoExtractor
from ..utils import int_or_none
class LiveLeakIE(InfoExtractor):
_VALID_URL = r'https?://(?:\w+\.)?liveleak\.com/view\?(?:.*?)i=(?P<id>[\w_]+)(?:.*)'
_TESTS = [{
'url': 'http://www.liveleak.com/view?i=757_1364311680',
'md5': '50f79e05ba149149c1b4ea961223d5b3',
'info_dict': {
'id': '757_1364311680',
'ext': 'flv',
'description': 'extremely bad day for this guy..!',
'uploader': 'ljfriel2',
'title': 'Most unlucky car accident'
}
}, {
'url': 'http://www.liveleak.com/view?i=f93_1390833151',
'md5': 'b13a29626183c9d33944e6a04f41aafc',
'info_dict': {
'id': 'f93_1390833151',
'ext': 'mp4',
'description': 'German Television Channel NDR does an exclusive interview with Edward Snowden.\r\nUploaded on LiveLeak cause German Television thinks the rest of the world isn\'t intereseted in Edward Snowden.',
'uploader': 'ARD_Stinkt',
'title': 'German Television does first Edward Snowden Interview (ENGLISH)',
}
}, {
'url': 'http://www.liveleak.com/view?i=4f7_1392687779',
'md5': '42c6d97d54f1db107958760788c5f48f',
'info_dict': {
'id': '4f7_1392687779',
'ext': 'mp4',
'description': "The guy with the cigarette seems amazingly nonchalant about the whole thing... I really hope my friends' reactions would be a bit stronger.\r\n\r\nAction-go to 0:55.",
'uploader': 'CapObveus',
'title': 'Man is Fatally Struck by Reckless Car While Packing up a Moving Truck',
'age_limit': 18,
}
}, {
# Covers https://github.com/rg3/youtube-dl/pull/5983
'url': 'http://www.liveleak.com/view?i=801_1409392012',
'md5': '0b3bec2d888c20728ca2ad3642f0ef15',
'info_dict': {
'id': '801_1409392012',
'ext': 'mp4',
'description': "Happened on 27.7.2014. \r\nAt 0:53 you can see people still swimming at near beach.",
'uploader': 'bony333',
'title': 'Crazy Hungarian tourist films close call waterspout in Croatia'
}
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video_title = self._og_search_title(webpage).replace('LiveLeak.com -', '').strip()
video_description = self._og_search_description(webpage)
video_uploader = self._html_search_regex(
r'By:.*?(\w+)</a>', webpage, 'uploader', fatal=False)
age_limit = int_or_none(self._search_regex(
r'you confirm that you are ([0-9]+) years and over.',
webpage, 'age limit', default=None))
sources_raw = self._search_regex(
r'(?s)sources:\s*(\[.*?\]),', webpage, 'video URLs', default=None)
if sources_raw is None:
alt_source = self._search_regex(
r'(file: ".*?"),', webpage, 'video URL', default=None)
if alt_source:
sources_raw = '[{ %s}]' % alt_source
else:
# Maybe an embed?
embed_url = self._search_regex(
r'<iframe[^>]+src="(http://www.prochan.com/embed\?[^"]+)"',
webpage, 'embed URL')
return {
'_type': 'url_transparent',
'url': embed_url,
'id': video_id,
'title': video_title,
'description': video_description,
'uploader': video_uploader,
'age_limit': age_limit,
}
sources_json = re.sub(r'\s([a-z]+):\s', r'"\1": ', sources_raw)
sources = json.loads(sources_json)
formats = [{
'format_id': '%s' % i,
'format_note': s.get('label'),
'url': s['file'],
} for i, s in enumerate(sources)]
for i, s in enumerate(sources):
# Removing '.h264_*.mp4' gives the raw video, which is essentially
# the same video without the LiveLeak logo at the top (see
# https://github.com/rg3/youtube-dl/pull/4768)
orig_url = re.sub(r'\.h264_.+?\.mp4', '', s['file'])
if s['file'] != orig_url:
formats.append({
'format_id': 'original-%s' % i,
'format_note': s.get('label'),
'url': orig_url,
'preference': 1,
})
self._sort_formats(formats)
return {
'id': video_id,
'title': video_title,
'description': video_description,
'uploader': video_uploader,
'formats': formats,
'age_limit': age_limit,
}
| unlicense |
Hellrungj/CSC-412-Networking | Protocol_Buffers/venv/lib/python2.7/site-packages/google/protobuf/service.py | 243 | 9144 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""DEPRECATED: Declares the RPC service interfaces.
This module declares the abstract interfaces underlying proto2 RPC
services. These are intended to be independent of any particular RPC
implementation, so that proto2 services can be used on top of a variety
of implementations. Starting with version 2.3.0, RPC implementations should
not try to build on these, but should instead provide code generator plugins
which generate code specific to the particular RPC implementation. This way
the generated code can be more appropriate for the implementation in use
and can avoid unnecessary layers of indirection.
"""
__author__ = '[email protected] (Petar Petrov)'
class RpcException(Exception):
"""Exception raised on failed blocking RPC method call."""
pass
class Service(object):
"""Abstract base interface for protocol-buffer-based RPC services.
Services themselves are abstract classes (implemented either by servers or as
stubs), but they subclass this base interface. The methods of this
interface can be used to call the methods of the service without knowing
its exact type at compile time (analogous to the Message interface).
"""
def GetDescriptor():
"""Retrieves this service's descriptor."""
raise NotImplementedError
def CallMethod(self, method_descriptor, rpc_controller,
request, done):
"""Calls a method of the service specified by method_descriptor.
If "done" is None then the call is blocking and the response
message will be returned directly. Otherwise the call is asynchronous
and "done" will later be called with the response value.
In the blocking case, RpcException will be raised on error.
Preconditions:
* method_descriptor.service == GetDescriptor
* request is of the exact same classes as returned by
GetRequestClass(method).
* After the call has started, the request must not be modified.
* "rpc_controller" is of the correct type for the RPC implementation being
used by this Service. For stubs, the "correct type" depends on the
RpcChannel which the stub is using.
Postconditions:
* "done" will be called when the method is complete. This may be
before CallMethod() returns or it may be at some point in the future.
* If the RPC failed, the response value passed to "done" will be None.
Further details about the failure can be found by querying the
RpcController.
"""
raise NotImplementedError
def GetRequestClass(self, method_descriptor):
"""Returns the class of the request message for the specified method.
CallMethod() requires that the request is of a particular subclass of
Message. GetRequestClass() gets the default instance of this required
type.
Example:
method = service.GetDescriptor().FindMethodByName("Foo")
request = stub.GetRequestClass(method)()
request.ParseFromString(input)
service.CallMethod(method, request, callback)
"""
raise NotImplementedError
def GetResponseClass(self, method_descriptor):
"""Returns the class of the response message for the specified method.
This method isn't really needed, as the RpcChannel's CallMethod constructs
the response protocol message. It's provided anyway in case it is useful
for the caller to know the response type in advance.
"""
raise NotImplementedError
class RpcController(object):
"""An RpcController mediates a single method call.
The primary purpose of the controller is to provide a way to manipulate
settings specific to the RPC implementation and to find out about RPC-level
errors. The methods provided by the RpcController interface are intended
to be a "least common denominator" set of features which we expect all
implementations to support. Specific implementations may provide more
advanced features (e.g. deadline propagation).
"""
# Client-side methods below
def Reset(self):
"""Resets the RpcController to its initial state.
After the RpcController has been reset, it may be reused in
a new call. Must not be called while an RPC is in progress.
"""
raise NotImplementedError
def Failed(self):
"""Returns true if the call failed.
After a call has finished, returns true if the call failed. The possible
reasons for failure depend on the RPC implementation. Failed() must not
be called before a call has finished. If Failed() returns true, the
contents of the response message are undefined.
"""
raise NotImplementedError
def ErrorText(self):
"""If Failed is true, returns a human-readable description of the error."""
raise NotImplementedError
def StartCancel(self):
"""Initiate cancellation.
Advises the RPC system that the caller desires that the RPC call be
canceled. The RPC system may cancel it immediately, may wait awhile and
then cancel it, or may not even cancel the call at all. If the call is
canceled, the "done" callback will still be called and the RpcController
will indicate that the call failed at that time.
"""
raise NotImplementedError
# Server-side methods below
def SetFailed(self, reason):
"""Sets a failure reason.
Causes Failed() to return true on the client side. "reason" will be
incorporated into the message returned by ErrorText(). If you find
you need to return machine-readable information about failures, you
should incorporate it into your response protocol buffer and should
NOT call SetFailed().
"""
raise NotImplementedError
def IsCanceled(self):
"""Checks if the client cancelled the RPC.
If true, indicates that the client canceled the RPC, so the server may
as well give up on replying to it. The server should still call the
final "done" callback.
"""
raise NotImplementedError
def NotifyOnCancel(self, callback):
"""Sets a callback to invoke on cancel.
Asks that the given callback be called when the RPC is canceled. The
callback will always be called exactly once. If the RPC completes without
being canceled, the callback will be called after completion. If the RPC
has already been canceled when NotifyOnCancel() is called, the callback
will be called immediately.
NotifyOnCancel() must be called no more than once per request.
"""
raise NotImplementedError
class RpcChannel(object):
"""Abstract interface for an RPC channel.
An RpcChannel represents a communication line to a service which can be used
to call that service's methods. The service may be running on another
machine. Normally, you should not use an RpcChannel directly, but instead
construct a stub {@link Service} wrapping it. Example:
Example:
RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234")
RpcController controller = rpcImpl.Controller()
MyService service = MyService_Stub(channel)
service.MyMethod(controller, request, callback)
"""
def CallMethod(self, method_descriptor, rpc_controller,
request, response_class, done):
"""Calls the method identified by the descriptor.
Call the given method of the remote service. The signature of this
procedure looks the same as Service.CallMethod(), but the requirements
are less strict in one important way: the request object doesn't have to
be of any specific class as long as its descriptor is method.input_type.
"""
raise NotImplementedError
| gpl-3.0 |
wickman/compactor | tests/test_protobuf_process.py | 1 | 1546 | import threading
from compactor.context import Context
from compactor.process import ProtobufProcess
import pytest
try:
from google.protobuf import descriptor_pb2
HAS_PROTOBUF = True
except ImportError:
HAS_PROTOBUF = False
import logging
logging.basicConfig()
# Send from one to another, swap out contexts to test local vs remote dispatch.
def ping_pong(context1, context2):
ping_calls = []
event = threading.Event()
class Pinger(ProtobufProcess):
@ProtobufProcess.install(descriptor_pb2.DescriptorProto)
def ping(self, from_pid, message):
ping_calls.append((from_pid, message))
event.set()
class Ponger(ProtobufProcess):
pass
pinger = Pinger('pinger')
ponger = Ponger('ponger')
ping_pid = context1.spawn(pinger)
pong_pid = context2.spawn(ponger)
send_msg = descriptor_pb2.DescriptorProto()
send_msg.name = 'ping'
ponger.send(ping_pid, send_msg)
event.wait(timeout=1)
assert event.is_set()
assert len(ping_calls) == 1
from_pid, message = ping_calls[0]
assert from_pid == pong_pid
assert message == send_msg
@pytest.mark.skipif('not HAS_PROTOBUF')
def test_protobuf_process_remote_dispatch():
context1 = Context()
context1.start()
context2 = Context()
context2.start()
try:
ping_pong(context1, context2)
finally:
context1.stop()
context2.stop()
@pytest.mark.skipif('not HAS_PROTOBUF')
def test_protobuf_process_local_dispatch():
context = Context()
context.start()
try:
ping_pong(context, context)
finally:
context.stop()
| apache-2.0 |
realincubus/pluto_clang | orio-0.1.0/src/main/ann_parser.py | 5 | 9643 | #
# Parser to extract annotations from the source code
#
import re, sys
import code_frag
#----------------------------------------
class AnnParser:
'''The parser used for annotations extraction'''
# regular expressions
__vname_re = r'[A-Za-z_]\w*'
__any_re = r'(.|\n)'
__ann_re = r'/\*@' + __any_re + r'*?@\*/'
__leader_ann_re = (r'/\*@\s*begin\s+(' + __vname_re + r')\s*\(\s*(' + __any_re +
r'*?)\s*\)\s*@\*/')
__trailer_ann_re = r'/\*@\s*end\s*@\*/'
__non_indent_char_re = r'[^ \t]'
#----------------------------------------
def __init__(self):
'''To instantiate the annotation parser'''
pass
#----------------------------------------
def __getIndentSizeFrom(self, code):
'''
Compute the indentation size based on the given code (i.e. count the number of spaces from
the end of the given code, until a non-space character or a newline is found)
'''
indent_size = 0
for i in range(len(code)-1, -1, -1):
if re.match(self.__non_indent_char_re, code[i]):
break
else:
indent_size += 1
return indent_size
#----------------------------------------
def __markAnnCodeRegions(self, code_seq):
'''Mark all annotation code regions by encapsulating each code region with a list'''
# initialize the code sequence with annotation code regions
marked_code_seq = []
# iterate over all codes
for i in range(0, len(code_seq)):
code, code_line_no, indent_size, is_ann = code_seq[i]
# if an annotation
if is_ann:
# if a leader annotation
if re.match(self.__leader_ann_re, code):
# find the index position of a matching trailer annotation
trailer_ipos = -1
leaders_seen = 1
for j in range(i+1, len(code_seq)):
t_code, t_code_line_no, t_indent_size, t_is_ann = code_seq[j]
if t_is_ann:
if re.match(self.__leader_ann_re, t_code):
leaders_seen += 1
else:
leaders_seen -= 1
if leaders_seen == 0:
trailer_ipos = j
break
# if no matching trailer annotations
if trailer_ipos == -1:
print 'error:%s: no matching trailer annotation exists' % code_line_no
sys.exit(1)
# apply recursions on the annotation body and the trailing code sequence
body_code_seq = self.__markAnnCodeRegions(code_seq[i+1:trailer_ipos])
trailing_code_seq = self.__markAnnCodeRegions(code_seq[trailer_ipos+1:])
# return the code sequence with annotation code regions
marked_code_seq.append([code_seq[i], body_code_seq, code_seq[trailer_ipos]])
return marked_code_seq + trailing_code_seq
# if a trailer annotation
else:
print 'error:%s: no matching leader annotation exists' % code_line_no
sys.exit(1)
# if a non-annotation
else:
marked_code_seq.append(code_seq[i])
# return the code sequence with annotation code regions
return marked_code_seq
#----------------------------------------
def __getCodeSeq(self, code, line_no):
'''
Parse the code and return a code sequence that consists of non-annotations and
annotation code regions.
        A code region is denoted as a list whose first element is a leader annotation and
        whose last element is a trailer annotation; anything in between the two annotations
        is another code sequence.
'''
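        # The returned sequence mixes plain tuples (code, line_no, indent_size, is_ann)
        # with nested lists [leader, body_code_seq, trailer] for annotated regions.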
# initialize the code seq
code_seq = []
# divide the code into a code sequence of non-annotations and annotations
while True:
# find the next annotation in the code
match_obj = re.search(self.__ann_re, code)
# if nothing matches
if not match_obj:
indent_size = 0
if len(code_seq) > 0:
indent_size = self.__getIndentSizeFrom(code_seq[-1][0])
if code != '':
code_seq.append((code, line_no, indent_size, False))
break
# get the leading non-annotation
non_ann = code[:match_obj.start()]
non_ann_line_no = line_no
non_ann_indent_size = 0
if len(code_seq) > 0:
non_ann_indent_size = self.__getIndentSizeFrom(code_seq[-1][0])
# insert the non-annotation into the code sequence
if non_ann != '':
code_seq.append((non_ann, non_ann_line_no, non_ann_indent_size, False))
# get the matching annotation
ann = code[match_obj.start():match_obj.end()]
ann_line_no = line_no + code[:match_obj.start()].count('\n')
ann_indent_size = 0
if len(code_seq) > 0:
ann_indent_size = self.__getIndentSizeFrom(code_seq[-1][0])
# insert the matching annotation into the code sequence
code_seq.append((ann, ann_line_no, ann_indent_size, True))
# an unrecognized form of annotation
if not re.match(self.__leader_ann_re, ann) and not re.match(self.__trailer_ann_re, ann):
print 'error:%s: unrecognized form of annotation' % ann_line_no
sys.exit(1)
# update the code and line number
line_no += code[:match_obj.end()].count('\n')
code = code[match_obj.end():]
# mark all annotation code regions
code_seq = self.__markAnnCodeRegions(code_seq)
# return the code sequence
return code_seq
#----------------------------------------
def __getModuleInfo(self, code, line_no):
'''
Given the leader annotation code, return the module name, the module code,
and their corresponding starting line number.
'''
# parse the given code
match_obj = re.match(self.__leader_ann_re, code)
# if not a match
if not match_obj:
print 'error:%s: not a leader annotation code' % line_no
sys.exit(1)
# create the module info
mname = match_obj.group(1)
mname_line_no = line_no + code[:match_obj.start(1)].count('\n')
mcode = match_obj.group(2)
mcode_line_no = line_no + code[:match_obj.start(2)].count('\n')
mod_info = (mname, mname_line_no, mcode, mcode_line_no)
# return the module info
return mod_info
#----------------------------------------
def __convertToCodeFragment(self, code):
'''Convert the given code into a code fragment object'''
# if a code region (indicated by a list)
if isinstance(code, list):
# assert that the code list has exactly three elements
if len(code) != 3:
print 'internal error: the code list must have a length of three '
sys.exit(1)
# get all three elements
leader, leader_line_no, leader_indent_size, leader_is_ann = code[0]
body_code_seq = code[1]
trailer, trailer_line_no, trailer_indent_size, trailer_is_ann = code[2]
# create a leader-annotation code-fragment
mname, mname_line_no, mcode, mcode_line_no = self.__getModuleInfo(leader, leader_line_no)
leader_cfrag = code_frag.LeaderAnn(leader, leader_line_no, leader_indent_size,
mname, mname_line_no, mcode, mcode_line_no)
# apply recursions on the annotation's body code sequence
cfrags = map(self.__convertToCodeFragment, body_code_seq)
# create a trailer-annotation code-fragment
trailer_cfrag = code_frag.TrailerAnn(trailer, trailer_line_no, trailer_indent_size)
# create a code-region code-fragment
return code_frag.AnnCodeRegion(leader_cfrag, cfrags, trailer_cfrag)
# a non-annotation
else:
# create a non-annotation code fragment
code, line_no, indent_size, is_ann = code
cfrag = code_frag.NonAnn(code, line_no, indent_size)
# check if the given code is an annotation
if is_ann:
print 'internal error:%s: unexpected annotation' % line_no
sys.exit(1)
# return the code fragment
return cfrag
#----------------------------------------
def removeAnns(self, code):
'''Remove all annotations from the given code'''
return re.sub(self.__ann_re, '', code)
#----------------------------------------
def parse(self, code, line_no = 1):
'''Parse the code and return a sequence of code fragments'''
# parse the code to obtain the code sequence
code_seq = self.__getCodeSeq(code, line_no)
# convert the code sequence to a sequence of code fragments
cfrags = map(self.__convertToCodeFragment, code_seq)
# return the sequence of code fragments
return cfrags
| gpl-3.0 |
LeartS/odoo | addons/payment_adyen/models/adyen.py | 136 | 7759 | # -*- coding: utf-'8' "-*-"
import base64
try:
import simplejson as json
except ImportError:
import json
from hashlib import sha1
import hmac
import logging
import urlparse
from openerp.addons.payment.models.payment_acquirer import ValidationError
from openerp.addons.payment_adyen.controllers.main import AdyenController
from openerp.osv import osv, fields
from openerp.tools import float_round
_logger = logging.getLogger(__name__)
class AcquirerAdyen(osv.Model):
_inherit = 'payment.acquirer'
def _get_adyen_urls(self, cr, uid, environment, context=None):
""" Adyen URLs
- yhpp: hosted payment page: pay.shtml for single, select.shtml for multiple
"""
return {
'adyen_form_url': 'https://%s.adyen.com/hpp/pay.shtml' % environment,
}
def _get_providers(self, cr, uid, context=None):
providers = super(AcquirerAdyen, self)._get_providers(cr, uid, context=context)
providers.append(['adyen', 'Adyen'])
return providers
_columns = {
'adyen_merchant_account': fields.char('Merchant Account', required_if_provider='adyen'),
'adyen_skin_code': fields.char('Skin Code', required_if_provider='adyen'),
'adyen_skin_hmac_key': fields.char('Skin HMAC Key', required_if_provider='adyen'),
}
def _adyen_generate_merchant_sig(self, acquirer, inout, values):
""" Generate the shasign for incoming or outgoing communications.
:param browse acquirer: the payment.acquirer browse record. It should
have a shakey in shaky out
:param string inout: 'in' (openerp contacting ogone) or 'out' (adyen
contacting openerp). In this last case only some
fields should be contained (see e-Commerce basic)
:param dict values: transaction values
:return string: shasign
"""
assert inout in ('in', 'out')
assert acquirer.provider == 'adyen'
if inout == 'in':
keys = "paymentAmount currencyCode shipBeforeDate merchantReference skinCode merchantAccount sessionValidity shopperEmail shopperReference recurringContract allowedMethods blockedMethods shopperStatement merchantReturnData billingAddressType deliveryAddressType offset".split()
else:
keys = "authResult pspReference merchantReference skinCode paymentMethod shopperLocale merchantReturnData".split()
def get_value(key):
if values.get(key):
return values[key]
return ''
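        # The signature is the base64-encoded HMAC-SHA1 of the field values
        # concatenated in the fixed order above, keyed with the skin's HMAC key.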
sign = ''.join('%s' % get_value(k) for k in keys).encode('ascii')
key = acquirer.adyen_skin_hmac_key.encode('ascii')
return base64.b64encode(hmac.new(key, sign, sha1).digest())
def adyen_form_generate_values(self, cr, uid, id, partner_values, tx_values, context=None):
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
acquirer = self.browse(cr, uid, id, context=context)
# tmp
import datetime
from dateutil import relativedelta
tmp_date = datetime.date.today() + relativedelta.relativedelta(days=1)
adyen_tx_values = dict(tx_values)
adyen_tx_values.update({
'merchantReference': tx_values['reference'],
'paymentAmount': '%d' % int(float_round(tx_values['amount'], 2) * 100),
'currencyCode': tx_values['currency'] and tx_values['currency'].name or '',
'shipBeforeDate': tmp_date,
'skinCode': acquirer.adyen_skin_code,
'merchantAccount': acquirer.adyen_merchant_account,
'shopperLocale': partner_values['lang'],
'sessionValidity': tmp_date,
'resURL': '%s' % urlparse.urljoin(base_url, AdyenController._return_url),
})
if adyen_tx_values.get('return_url'):
adyen_tx_values['merchantReturnData'] = json.dumps({'return_url': '%s' % adyen_tx_values.pop('return_url')})
adyen_tx_values['merchantSig'] = self._adyen_generate_merchant_sig(acquirer, 'in', adyen_tx_values)
return partner_values, adyen_tx_values
def adyen_get_form_action_url(self, cr, uid, id, context=None):
acquirer = self.browse(cr, uid, id, context=context)
return self._get_adyen_urls(cr, uid, acquirer.environment, context=context)['adyen_form_url']
class TxAdyen(osv.Model):
_inherit = 'payment.transaction'
_columns = {
'adyen_psp_reference': fields.char('Adyen PSP Reference'),
}
# --------------------------------------------------
# FORM RELATED METHODS
# --------------------------------------------------
def _adyen_form_get_tx_from_data(self, cr, uid, data, context=None):
reference, pspReference = data.get('merchantReference'), data.get('pspReference')
if not reference or not pspReference:
error_msg = 'Adyen: received data with missing reference (%s) or missing pspReference (%s)' % (reference, pspReference)
_logger.error(error_msg)
raise ValidationError(error_msg)
# find tx -> @TDENOTE use pspReference ?
tx_ids = self.pool['payment.transaction'].search(cr, uid, [('reference', '=', reference)], context=context)
if not tx_ids or len(tx_ids) > 1:
error_msg = 'Adyen: received data for reference %s' % (reference)
if not tx_ids:
error_msg += '; no order found'
else:
error_msg += '; multiple order found'
_logger.error(error_msg)
raise ValidationError(error_msg)
tx = self.pool['payment.transaction'].browse(cr, uid, tx_ids[0], context=context)
# verify shasign
shasign_check = self.pool['payment.acquirer']._adyen_generate_merchant_sig(tx.acquirer_id, 'out', data)
if shasign_check != data.get('merchantSig'):
error_msg = 'Adyen: invalid merchantSig, received %s, computed %s' % (data.get('merchantSig'), shasign_check)
_logger.warning(error_msg)
# raise ValidationError(error_msg)
return tx
def _adyen_form_get_invalid_parameters(self, cr, uid, tx, data, context=None):
invalid_parameters = []
# reference at acquirer: pspReference
if tx.acquirer_reference and data.get('pspReference') != tx.acquirer_reference:
invalid_parameters.append(('pspReference', data.get('pspReference'), tx.acquirer_reference))
# seller
if data.get('skinCode') != tx.acquirer_id.adyen_skin_code:
invalid_parameters.append(('skinCode', data.get('skinCode'), tx.acquirer_id.adyen_skin_code))
# result
if not data.get('authResult'):
invalid_parameters.append(('authResult', data.get('authResult'), 'something'))
return invalid_parameters
def _adyen_form_validate(self, cr, uid, tx, data, context=None):
status = data.get('authResult', 'PENDING')
if status == 'AUTHORISED':
tx.write({
'state': 'done',
'adyen_psp_reference': data.get('pspReference'),
# 'date_validate': data.get('payment_date', fields.datetime.now()),
# 'paypal_txn_type': data.get('express_checkout')
})
return True
elif status == 'PENDING':
tx.write({
'state': 'pending',
'adyen_psp_reference': data.get('pspReference'),
})
return True
else:
            error = 'Adyen: feedback error'
_logger.info(error)
tx.write({
'state': 'error',
'state_message': error
})
return False
| agpl-3.0 |
louislam/android-ocr-precompiled-tesstwo | android/jni/com_googlecode_tesseract_android/src/contrib/tesseract-c_api-demo.py | 21 | 2184 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2012 Zdenko Podobný
# Author: Zdenko Podobný
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Simple python demo script of tesseract-ocr 3.02 c-api
"""
import os
import sys
import ctypes
# Demo variables
lang = "eng"
filename = "../phototest.tif"
libpath = "/usr/local/lib64/"
libpath_w = "../vs2008/DLL_Release/"
TESSDATA_PREFIX = os.environ.get('TESSDATA_PREFIX')
if not TESSDATA_PREFIX:
TESSDATA_PREFIX = "../"
if sys.platform == "win32":
libname = libpath_w + "libtesseract302.dll"
libname_alt = "libtesseract302.dll"
os.environ["PATH"] += os.pathsep + libpath_w
else:
libname = libpath + "libtesseract.so.3.0.2"
libname_alt = "libtesseract.so.3"
try:
tesseract = ctypes.cdll.LoadLibrary(libname)
except:
try:
tesseract = ctypes.cdll.LoadLibrary(libname_alt)
except WindowsError, err:
print("Trying to load '%s'..." % libname)
print("Trying to load '%s'..." % libname_alt)
print(err)
exit(1)
tesseract.TessVersion.restype = ctypes.c_char_p
tesseract_version = tesseract.TessVersion()[:4]
# We need to check library version because libtesseract.so.3 is symlink
# and can point to other version than 3.02
if float(tesseract_version) < 3.02:
print("Found tesseract-ocr library version %s." % tesseract_version)
print("C-API is present only in version 3.02!")
exit(2)
api = tesseract.TessBaseAPICreate()
rc = tesseract.TessBaseAPIInit3(api, TESSDATA_PREFIX, lang);
if (rc):
tesseract.TessBaseAPIDelete(api)
print("Could not initialize tesseract.\n")
exit(3)
text_out = tesseract.TessBaseAPIProcessPages(api, filename, None , 0);
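# The C API returns the recognized text as a char*; ctypes.string_at copies it into a Python string.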
result_text = ctypes.string_at(text_out)
print result_text
| apache-2.0 |
ROAND/dota2herovoices | .buildozer/venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/charade/mbcssm.py | 982 | 19608 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .constants import eStart, eError, eItsMe
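# Each model below pairs a byte-class table (classTable: one class per byte
# value, 0x00-0xff) with a state-transition table; the next state is looked up
# at stateTable[current_state * classFactor + byte_class], and charLenTable
# gives the expected character length for each starting byte class.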
# BIG5
BIG5_cls = (
1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,1, # 78 - 7f
4,4,4,4,4,4,4,4, # 80 - 87
4,4,4,4,4,4,4,4, # 88 - 8f
4,4,4,4,4,4,4,4, # 90 - 97
4,4,4,4,4,4,4,4, # 98 - 9f
4,3,3,3,3,3,3,3, # a0 - a7
3,3,3,3,3,3,3,3, # a8 - af
3,3,3,3,3,3,3,3, # b0 - b7
3,3,3,3,3,3,3,3, # b8 - bf
3,3,3,3,3,3,3,3, # c0 - c7
3,3,3,3,3,3,3,3, # c8 - cf
3,3,3,3,3,3,3,3, # d0 - d7
3,3,3,3,3,3,3,3, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,3,3,3, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,3,3,0 # f8 - ff
)
BIG5_st = (
eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,#08-0f
eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart#10-17
)
Big5CharLenTable = (0, 1, 1, 2, 0)
Big5SMModel = {'classTable': BIG5_cls,
'classFactor': 5,
'stateTable': BIG5_st,
'charLenTable': Big5CharLenTable,
'name': 'Big5'}
# CP949
CP949_cls = (
1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f
1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f
1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f
1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f
1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f
4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f
1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f
5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f
0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f
6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f
6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af
7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf
7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf
2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df
2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef
2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff
)
CP949_st = (
#cls= 0 1 2 3 4 5 6 7 8 9 # previous state =
eError,eStart, 3,eError,eStart,eStart, 4, 5,eError, 6, # eStart
eError,eError,eError,eError,eError,eError,eError,eError,eError,eError, # eError
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe, # eItsMe
eError,eError,eStart,eStart,eError,eError,eError,eStart,eStart,eStart, # 3
eError,eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 4
eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 5
eError,eStart,eStart,eStart,eStart,eError,eError,eStart,eStart,eStart, # 6
)
CP949CharLenTable = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2)
CP949SMModel = {'classTable': CP949_cls,
'classFactor': 10,
'stateTable': CP949_st,
'charLenTable': CP949CharLenTable,
'name': 'CP949'}
# EUC-JP
EUCJP_cls = (
4,4,4,4,4,4,4,4, # 00 - 07
4,4,4,4,4,4,5,5, # 08 - 0f
4,4,4,4,4,4,4,4, # 10 - 17
4,4,4,5,4,4,4,4, # 18 - 1f
4,4,4,4,4,4,4,4, # 20 - 27
4,4,4,4,4,4,4,4, # 28 - 2f
4,4,4,4,4,4,4,4, # 30 - 37
4,4,4,4,4,4,4,4, # 38 - 3f
4,4,4,4,4,4,4,4, # 40 - 47
4,4,4,4,4,4,4,4, # 48 - 4f
4,4,4,4,4,4,4,4, # 50 - 57
4,4,4,4,4,4,4,4, # 58 - 5f
4,4,4,4,4,4,4,4, # 60 - 67
4,4,4,4,4,4,4,4, # 68 - 6f
4,4,4,4,4,4,4,4, # 70 - 77
4,4,4,4,4,4,4,4, # 78 - 7f
5,5,5,5,5,5,5,5, # 80 - 87
5,5,5,5,5,5,1,3, # 88 - 8f
5,5,5,5,5,5,5,5, # 90 - 97
5,5,5,5,5,5,5,5, # 98 - 9f
5,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,0,5 # f8 - ff
)
EUCJP_st = (
3, 4, 3, 5,eStart,eError,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eStart,eError,eStart,eError,eError,eError,#10-17
eError,eError,eStart,eError,eError,eError, 3,eError,#18-1f
3,eError,eError,eError,eStart,eStart,eStart,eStart#20-27
)
EUCJPCharLenTable = (2, 2, 2, 3, 1, 0)
EUCJPSMModel = {'classTable': EUCJP_cls,
'classFactor': 6,
'stateTable': EUCJP_st,
'charLenTable': EUCJPCharLenTable,
'name': 'EUC-JP'}
# EUC-KR
EUCKR_cls = (
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
1,1,1,1,1,1,1,1, # 40 - 47
1,1,1,1,1,1,1,1, # 48 - 4f
1,1,1,1,1,1,1,1, # 50 - 57
1,1,1,1,1,1,1,1, # 58 - 5f
1,1,1,1,1,1,1,1, # 60 - 67
1,1,1,1,1,1,1,1, # 68 - 6f
1,1,1,1,1,1,1,1, # 70 - 77
1,1,1,1,1,1,1,1, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,3,3,3, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,3,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,0 # f8 - ff
)
EUCKR_st = (
eError,eStart, 3,eError,eError,eError,eError,eError,#00-07
eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,eStart #08-0f
)
EUCKRCharLenTable = (0, 1, 2, 0)
EUCKRSMModel = {'classTable': EUCKR_cls,
'classFactor': 4,
'stateTable': EUCKR_st,
'charLenTable': EUCKRCharLenTable,
'name': 'EUC-KR'}
# EUC-TW
EUCTW_cls = (
2,2,2,2,2,2,2,2, # 00 - 07
2,2,2,2,2,2,0,0, # 08 - 0f
2,2,2,2,2,2,2,2, # 10 - 17
2,2,2,0,2,2,2,2, # 18 - 1f
2,2,2,2,2,2,2,2, # 20 - 27
2,2,2,2,2,2,2,2, # 28 - 2f
2,2,2,2,2,2,2,2, # 30 - 37
2,2,2,2,2,2,2,2, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,2, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,6,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,3,4,4,4,4,4,4, # a0 - a7
5,5,1,1,1,1,1,1, # a8 - af
1,1,1,1,1,1,1,1, # b0 - b7
1,1,1,1,1,1,1,1, # b8 - bf
1,1,3,1,3,3,3,3, # c0 - c7
3,3,3,3,3,3,3,3, # c8 - cf
3,3,3,3,3,3,3,3, # d0 - d7
3,3,3,3,3,3,3,3, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,3,3,3, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,3,3,0 # f8 - ff
)
EUCTW_st = (
eError,eError,eStart, 3, 3, 3, 4,eError,#00-07
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eStart,eError,#10-17
eStart,eStart,eStart,eError,eError,eError,eError,eError,#18-1f
5,eError,eError,eError,eStart,eError,eStart,eStart,#20-27
eStart,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
)
EUCTWCharLenTable = (0, 0, 1, 2, 2, 2, 3)
EUCTWSMModel = {'classTable': EUCTW_cls,
'classFactor': 7,
'stateTable': EUCTW_st,
'charLenTable': EUCTWCharLenTable,
'name': 'x-euc-tw'}
# GB2312
GB2312_cls = (
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
3,3,3,3,3,3,3,3, # 30 - 37
3,3,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,4, # 78 - 7f
5,6,6,6,6,6,6,6, # 80 - 87
6,6,6,6,6,6,6,6, # 88 - 8f
6,6,6,6,6,6,6,6, # 90 - 97
6,6,6,6,6,6,6,6, # 98 - 9f
6,6,6,6,6,6,6,6, # a0 - a7
6,6,6,6,6,6,6,6, # a8 - af
6,6,6,6,6,6,6,6, # b0 - b7
6,6,6,6,6,6,6,6, # b8 - bf
6,6,6,6,6,6,6,6, # c0 - c7
6,6,6,6,6,6,6,6, # c8 - cf
6,6,6,6,6,6,6,6, # d0 - d7
6,6,6,6,6,6,6,6, # d8 - df
6,6,6,6,6,6,6,6, # e0 - e7
6,6,6,6,6,6,6,6, # e8 - ef
6,6,6,6,6,6,6,6, # f0 - f7
6,6,6,6,6,6,6,0 # f8 - ff
)
GB2312_st = (
eError,eStart,eStart,eStart,eStart,eStart, 3,eError,#00-07
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,#10-17
4,eError,eStart,eStart,eError,eError,eError,eError,#18-1f
eError,eError, 5,eError,eError,eError,eItsMe,eError,#20-27
eError,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
)
# To be accurate, the length of class 6 can be either 2 or 4.
# But it is not necessary to discriminate between the two since
# it is used for frequency analysis only, and we are validating
# each code range there as well. So it is safe to set it to be
# 2 here.
GB2312CharLenTable = (0, 1, 1, 1, 1, 1, 2)
GB2312SMModel = {'classTable': GB2312_cls,
'classFactor': 7,
'stateTable': GB2312_st,
'charLenTable': GB2312CharLenTable,
'name': 'GB2312'}
# Shift_JIS
SJIS_cls = (
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,1, # 78 - 7f
3,3,3,3,3,3,3,3, # 80 - 87
3,3,3,3,3,3,3,3, # 88 - 8f
3,3,3,3,3,3,3,3, # 90 - 97
3,3,3,3,3,3,3,3, # 98 - 9f
#0xa0 is illegal in sjis encoding, but some pages do
#contain such a byte, so we need to be more forgiving about errors.
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,4,4,4, # e8 - ef
4,4,4,4,4,4,4,4, # f0 - f7
4,4,4,4,4,0,0,0 # f8 - ff
)
SJIS_st = (
eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eError,eError,eStart,eStart,eStart,eStart #10-17
)
SJISCharLenTable = (0, 1, 1, 2, 0, 0)
SJISSMModel = {'classTable': SJIS_cls,
'classFactor': 6,
'stateTable': SJIS_st,
'charLenTable': SJISCharLenTable,
'name': 'Shift_JIS'}
# UCS2-BE
UCS2BE_cls = (
0,0,0,0,0,0,0,0, # 00 - 07
0,0,1,0,0,2,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,3,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,3,3,3,3,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,0,0,0,0,0,0,0, # a0 - a7
0,0,0,0,0,0,0,0, # a8 - af
0,0,0,0,0,0,0,0, # b0 - b7
0,0,0,0,0,0,0,0, # b8 - bf
0,0,0,0,0,0,0,0, # c0 - c7
0,0,0,0,0,0,0,0, # c8 - cf
0,0,0,0,0,0,0,0, # d0 - d7
0,0,0,0,0,0,0,0, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,4,5 # f8 - ff
)
UCS2BE_st = (
5, 7, 7,eError, 4, 3,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe, 6, 6, 6, 6,eError,eError,#10-17
6, 6, 6, 6, 6,eItsMe, 6, 6,#18-1f
6, 6, 6, 6, 5, 7, 7,eError,#20-27
5, 8, 6, 6,eError, 6, 6, 6,#28-2f
6, 6, 6, 6,eError,eError,eStart,eStart #30-37
)
UCS2BECharLenTable = (2, 2, 2, 0, 2, 2)
UCS2BESMModel = {'classTable': UCS2BE_cls,
'classFactor': 6,
'stateTable': UCS2BE_st,
'charLenTable': UCS2BECharLenTable,
'name': 'UTF-16BE'}
# UCS2-LE
UCS2LE_cls = (
0,0,0,0,0,0,0,0, # 00 - 07
0,0,1,0,0,2,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,3,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,3,3,3,3,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,0,0,0,0,0,0,0, # a0 - a7
0,0,0,0,0,0,0,0, # a8 - af
0,0,0,0,0,0,0,0, # b0 - b7
0,0,0,0,0,0,0,0, # b8 - bf
0,0,0,0,0,0,0,0, # c0 - c7
0,0,0,0,0,0,0,0, # c8 - cf
0,0,0,0,0,0,0,0, # d0 - d7
0,0,0,0,0,0,0,0, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,4,5 # f8 - ff
)
UCS2LE_st = (
6, 6, 7, 6, 4, 3,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe, 5, 5, 5,eError,eItsMe,eError,#10-17
5, 5, 5,eError, 5,eError, 6, 6,#18-1f
7, 6, 8, 8, 5, 5, 5,eError,#20-27
5, 5, 5,eError,eError,eError, 5, 5,#28-2f
5, 5, 5,eError, 5,eError,eStart,eStart #30-37
)
UCS2LECharLenTable = (2, 2, 2, 2, 2, 2)
UCS2LESMModel = {'classTable': UCS2LE_cls,
'classFactor': 6,
'stateTable': UCS2LE_st,
'charLenTable': UCS2LECharLenTable,
'name': 'UTF-16LE'}
# UTF-8
UTF8_cls = (
1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
1,1,1,1,1,1,1,1, # 40 - 47
1,1,1,1,1,1,1,1, # 48 - 4f
1,1,1,1,1,1,1,1, # 50 - 57
1,1,1,1,1,1,1,1, # 58 - 5f
1,1,1,1,1,1,1,1, # 60 - 67
1,1,1,1,1,1,1,1, # 68 - 6f
1,1,1,1,1,1,1,1, # 70 - 77
1,1,1,1,1,1,1,1, # 78 - 7f
2,2,2,2,3,3,3,3, # 80 - 87
4,4,4,4,4,4,4,4, # 88 - 8f
4,4,4,4,4,4,4,4, # 90 - 97
4,4,4,4,4,4,4,4, # 98 - 9f
5,5,5,5,5,5,5,5, # a0 - a7
5,5,5,5,5,5,5,5, # a8 - af
5,5,5,5,5,5,5,5, # b0 - b7
5,5,5,5,5,5,5,5, # b8 - bf
0,0,6,6,6,6,6,6, # c0 - c7
6,6,6,6,6,6,6,6, # c8 - cf
6,6,6,6,6,6,6,6, # d0 - d7
6,6,6,6,6,6,6,6, # d8 - df
7,8,8,8,8,8,8,8, # e0 - e7
8,8,8,8,8,9,8,8, # e8 - ef
10,11,11,11,11,11,11,11, # f0 - f7
12,13,13,13,14,15,0,0 # f8 - ff
)
UTF8_st = (
eError,eStart,eError,eError,eError,eError, 12, 10,#00-07
9, 11, 8, 7, 6, 5, 4, 3,#08-0f
eError,eError,eError,eError,eError,eError,eError,eError,#10-17
eError,eError,eError,eError,eError,eError,eError,eError,#18-1f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#20-27
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#28-2f
eError,eError, 5, 5, 5, 5,eError,eError,#30-37
eError,eError,eError,eError,eError,eError,eError,eError,#38-3f
eError,eError,eError, 5, 5, 5,eError,eError,#40-47
eError,eError,eError,eError,eError,eError,eError,eError,#48-4f
eError,eError, 7, 7, 7, 7,eError,eError,#50-57
eError,eError,eError,eError,eError,eError,eError,eError,#58-5f
eError,eError,eError,eError, 7, 7,eError,eError,#60-67
eError,eError,eError,eError,eError,eError,eError,eError,#68-6f
eError,eError, 9, 9, 9, 9,eError,eError,#70-77
eError,eError,eError,eError,eError,eError,eError,eError,#78-7f
eError,eError,eError,eError,eError, 9,eError,eError,#80-87
eError,eError,eError,eError,eError,eError,eError,eError,#88-8f
eError,eError, 12, 12, 12, 12,eError,eError,#90-97
eError,eError,eError,eError,eError,eError,eError,eError,#98-9f
eError,eError,eError,eError,eError, 12,eError,eError,#a0-a7
eError,eError,eError,eError,eError,eError,eError,eError,#a8-af
eError,eError, 12, 12, 12,eError,eError,eError,#b0-b7
eError,eError,eError,eError,eError,eError,eError,eError,#b8-bf
eError,eError,eStart,eStart,eStart,eStart,eError,eError,#c0-c7
eError,eError,eError,eError,eError,eError,eError,eError #c8-cf
)
UTF8CharLenTable = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)
UTF8SMModel = {'classTable': UTF8_cls,
'classFactor': 16,
'stateTable': UTF8_st,
'charLenTable': UTF8CharLenTable,
'name': 'UTF-8'}
# flake8: noqa
| gpl-2.0 |
ltilve/chromium | tools/chrome_proxy/integration_tests/chrome_proxy_metrics_unittest.py | 4 | 15208 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import base64
import unittest
from integration_tests import chrome_proxy_metrics as metrics
from integration_tests import network_metrics_unittest as network_unittest
from telemetry.unittest_util import test_page_test_results
# Timeline events used in tests.
# An HTML not via proxy.
EVENT_HTML_DIRECT = network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
url='http://test.html1',
response_headers={
'Content-Type': 'text/html',
'Content-Length': str(len(network_unittest.HTML_BODY)),
},
body=network_unittest.HTML_BODY)
# An HTML via proxy.
EVENT_HTML_PROXY_VIA = (
network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
url='http://test.html2',
response_headers={
'Content-Type': 'text/html',
'Content-Encoding': 'gzip',
'X-Original-Content-Length': str(len(network_unittest.HTML_BODY)),
'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
},
body=network_unittest.HTML_BODY,
remote_port=443))
# An HTML via the HTTP fallback proxy.
EVENT_HTML_PROXY_VIA_HTTP_FALLBACK = (
network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
url='http://test.html2',
response_headers={
'Content-Type': 'text/html',
'Content-Encoding': 'gzip',
'X-Original-Content-Length': str(len(network_unittest.HTML_BODY)),
'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
},
body=network_unittest.HTML_BODY,
remote_port=80))
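# Note (added for clarity): this event differs from EVENT_HTML_PROXY_VIA above
# only in remote_port (80 instead of 443), which is presumably what the metrics
# code uses to tell the HTTP fallback proxy apart from the primary proxy.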
# An image via proxy with Via header.
EVENT_IMAGE_PROXY_VIA = (
network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
url='http://test.image',
response_headers={
'Content-Type': 'image/jpeg',
'Content-Encoding': 'gzip',
'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
},
body=base64.b64encode(network_unittest.IMAGE_BODY),
base64_encoded_body=True,
remote_port=443))
# An image via the HTTP fallback proxy.
EVENT_IMAGE_PROXY_VIA_HTTP_FALLBACK = (
network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
url='http://test.image',
response_headers={
'Content-Type': 'image/jpeg',
'Content-Encoding': 'gzip',
'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
},
body=base64.b64encode(network_unittest.IMAGE_BODY),
base64_encoded_body=True,
remote_port=80))
# An image via proxy with Via header and it is cached.
EVENT_IMAGE_PROXY_CACHED = (
network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
url='http://test.image',
response_headers={
'Content-Type': 'image/jpeg',
'Content-Encoding': 'gzip',
'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
},
body=base64.b64encode(network_unittest.IMAGE_BODY),
base64_encoded_body=True,
served_from_cache=True))
# An image fetched directly.
EVENT_IMAGE_DIRECT = (
network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
url='http://test.image',
response_headers={
'Content-Type': 'image/jpeg',
'Content-Encoding': 'gzip',
},
body=base64.b64encode(network_unittest.IMAGE_BODY),
base64_encoded_body=True))
# A safe-browsing malware response.
EVENT_MALWARE_PROXY = (
network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
url='http://test.malware',
response_headers={
'X-Malware-Url': '1',
'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
'Location': 'http://test.malware',
},
status=307))
# An image response via the proxy that has a Via header and requests a bypass
# (Chrome-Proxy: bypass=1, status 502).
EVENT_IMAGE_BYPASS = (
network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
url='http://test.image',
response_headers={
'Chrome-Proxy': 'bypass=1',
'Content-Type': 'text/html',
'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
},
status=502))
# An image fetched directly.
EVENT_IMAGE_DIRECT = (
network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
url='http://test.image',
response_headers={
'Content-Type': 'image/jpeg',
'Content-Encoding': 'gzip',
},
body=base64.b64encode(network_unittest.IMAGE_BODY),
base64_encoded_body=True))
class ChromeProxyMetricTest(unittest.TestCase):
_test_proxy_info = {}
def _StubGetProxyInfo(self, info):
def stub(unused_tab, unused_url=''): # pylint: disable=W0613
return ChromeProxyMetricTest._test_proxy_info
metrics.GetProxyInfoFromNetworkInternals = stub
ChromeProxyMetricTest._test_proxy_info = info
def testChromeProxyResponse(self):
# An https non-proxy response.
resp = metrics.ChromeProxyResponse(
network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
url='https://test.url',
response_headers={
'Content-Type': 'text/html',
'Content-Length': str(len(network_unittest.HTML_BODY)),
'Via': 'some other via',
},
body=network_unittest.HTML_BODY))
self.assertFalse(resp.ShouldHaveChromeProxyViaHeader())
self.assertFalse(resp.HasChromeProxyViaHeader())
self.assertTrue(resp.IsValidByViaHeader())
# A proxied JPEG image response
resp = metrics.ChromeProxyResponse(
network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
url='http://test.image',
response_headers={
'Content-Type': 'image/jpeg',
'Content-Encoding': 'gzip',
'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
},
body=base64.b64encode(network_unittest.IMAGE_BODY),
base64_encoded_body=True))
self.assertTrue(resp.ShouldHaveChromeProxyViaHeader())
self.assertTrue(resp.HasChromeProxyViaHeader())
self.assertTrue(resp.IsValidByViaHeader())
def testChromeProxyMetricForDataSaving(self):
metric = metrics.ChromeProxyMetric()
events = [
EVENT_HTML_DIRECT,
EVENT_HTML_PROXY_VIA,
EVENT_IMAGE_PROXY_CACHED,
EVENT_IMAGE_DIRECT]
metric.SetEvents(events)
self.assertTrue(len(events), len(list(metric.IterResponses(None))))
results = test_page_test_results.TestPageTestResults(self)
metric.AddResultsForDataSaving(None, results)
results.AssertHasPageSpecificScalarValue('resources_via_proxy', 'count', 2)
results.AssertHasPageSpecificScalarValue('resources_from_cache', 'count', 1)
results.AssertHasPageSpecificScalarValue('resources_direct', 'count', 2)
# Passing in zero responses should cause a failure.
metric.SetEvents([])
no_responses_exception = False
try:
metric.AddResultsForDataSaving(None, results)
except metrics.ChromeProxyMetricException:
no_responses_exception = True
self.assertTrue(no_responses_exception)
def testChromeProxyMetricForHeaderValidation(self):
metric = metrics.ChromeProxyMetric()
metric.SetEvents([
EVENT_HTML_DIRECT,
EVENT_HTML_PROXY_VIA,
EVENT_IMAGE_PROXY_CACHED,
EVENT_IMAGE_DIRECT])
results = test_page_test_results.TestPageTestResults(self)
missing_via_exception = False
try:
metric.AddResultsForHeaderValidation(None, results)
except metrics.ChromeProxyMetricException:
missing_via_exception = True
# Only the HTTP image response does not have a valid Via header.
self.assertTrue(missing_via_exception)
# Two events with valid Via headers.
metric.SetEvents([
EVENT_HTML_PROXY_VIA,
EVENT_IMAGE_PROXY_CACHED])
metric.AddResultsForHeaderValidation(None, results)
results.AssertHasPageSpecificScalarValue('checked_via_header', 'count', 2)
# Passing in zero responses should cause a failure.
metric.SetEvents([])
no_responses_exception = False
try:
metric.AddResultsForHeaderValidation(None, results)
except metrics.ChromeProxyMetricException:
no_responses_exception = True
self.assertTrue(no_responses_exception)
def testChromeProxyMetricForBypass(self):
metric = metrics.ChromeProxyMetric()
metric.SetEvents([
EVENT_HTML_DIRECT,
EVENT_HTML_PROXY_VIA,
EVENT_IMAGE_PROXY_CACHED,
EVENT_IMAGE_DIRECT])
results = test_page_test_results.TestPageTestResults(self)
bypass_exception = False
try:
metric.AddResultsForBypass(None, results)
except metrics.ChromeProxyMetricException:
bypass_exception = True
# Two of the first three events have Via headers.
self.assertTrue(bypass_exception)
# Use directly fetched image only. It is treated as bypassed.
metric.SetEvents([EVENT_IMAGE_DIRECT])
metric.AddResultsForBypass(None, results)
results.AssertHasPageSpecificScalarValue('bypass', 'count', 1)
# Passing in zero responses should cause a failure.
metric.SetEvents([])
no_responses_exception = False
try:
metric.AddResultsForBypass(None, results)
except metrics.ChromeProxyMetricException:
no_responses_exception = True
self.assertTrue(no_responses_exception)
def testChromeProxyMetricForCorsBypass(self):
metric = metrics.ChromeProxyMetric()
metric.SetEvents([EVENT_HTML_PROXY_VIA,
EVENT_IMAGE_BYPASS,
EVENT_IMAGE_DIRECT])
results = test_page_test_results.TestPageTestResults(self)
metric.AddResultsForCorsBypass(None, results)
results.AssertHasPageSpecificScalarValue('cors_bypass', 'count', 1)
# Passing in zero responses should cause a failure.
metric.SetEvents([])
no_responses_exception = False
try:
metric.AddResultsForCorsBypass(None, results)
except metrics.ChromeProxyMetricException:
no_responses_exception = True
self.assertTrue(no_responses_exception)
def testChromeProxyMetricForBlockOnce(self):
metric = metrics.ChromeProxyMetric()
metric.SetEvents([EVENT_HTML_DIRECT,
EVENT_IMAGE_PROXY_VIA])
results = test_page_test_results.TestPageTestResults(self)
metric.AddResultsForBlockOnce(None, results)
results.AssertHasPageSpecificScalarValue('eligible_responses', 'count', 2)
results.AssertHasPageSpecificScalarValue('bypass', 'count', 1)
metric.SetEvents([EVENT_HTML_DIRECT,
EVENT_IMAGE_DIRECT])
exception_occurred = False
try:
metric.AddResultsForBlockOnce(None, results)
except metrics.ChromeProxyMetricException:
exception_occurred = True
# The second response was over direct, but was expected via proxy.
self.assertTrue(exception_occurred)
# Passing in zero responses should cause a failure.
metric.SetEvents([])
no_responses_exception = False
try:
metric.AddResultsForBlockOnce(None, results)
except metrics.ChromeProxyMetricException:
no_responses_exception = True
self.assertTrue(no_responses_exception)
def testChromeProxyMetricForSafebrowsingOn(self):
metric = metrics.ChromeProxyMetric()
metric.SetEvents([EVENT_MALWARE_PROXY])
results = test_page_test_results.TestPageTestResults(self)
metric.AddResultsForSafebrowsingOn(None, results)
results.AssertHasPageSpecificScalarValue(
'safebrowsing', 'timeout responses', 1)
# Clear results and metrics to test no response for safebrowsing
results = test_page_test_results.TestPageTestResults(self)
metric.SetEvents([])
metric.AddResultsForSafebrowsingOn(None, results)
results.AssertHasPageSpecificScalarValue(
'safebrowsing', 'timeout responses', 1)
def testChromeProxyMetricForHTTPFallback(self):
metric = metrics.ChromeProxyMetric()
metric.SetEvents([EVENT_HTML_PROXY_VIA_HTTP_FALLBACK,
EVENT_IMAGE_PROXY_VIA_HTTP_FALLBACK])
results = test_page_test_results.TestPageTestResults(self)
metric.AddResultsForHTTPFallback(None, results)
results.AssertHasPageSpecificScalarValue('via_fallback', 'count', 2)
metric.SetEvents([EVENT_HTML_PROXY_VIA,
EVENT_IMAGE_PROXY_VIA])
exception_occurred = False
try:
metric.AddResultsForHTTPFallback(None, results)
except metrics.ChromeProxyMetricException:
exception_occurred = True
# The responses came through the SPDY proxy, but were expected through the
# HTTP fallback proxy.
self.assertTrue(exception_occurred)
# Passing in zero responses should cause a failure.
metric.SetEvents([])
no_responses_exception = False
try:
metric.AddResultsForHTTPFallback(None, results)
except metrics.ChromeProxyMetricException:
no_responses_exception = True
self.assertTrue(no_responses_exception)
def testChromeProxyMetricForHTTPToDirectFallback(self):
metric = metrics.ChromeProxyMetric()
metric.SetEvents([EVENT_HTML_PROXY_VIA_HTTP_FALLBACK,
EVENT_HTML_DIRECT,
EVENT_IMAGE_DIRECT])
results = test_page_test_results.TestPageTestResults(self)
metric.AddResultsForHTTPToDirectFallback(None, results, 'test.html2')
results.AssertHasPageSpecificScalarValue('via_fallback', 'count', 1)
results.AssertHasPageSpecificScalarValue('bypass', 'count', 2)
metric.SetEvents([EVENT_HTML_PROXY_VIA,
EVENT_HTML_DIRECT])
exception_occurred = False
try:
metric.AddResultsForHTTPToDirectFallback(None, results, 'test.html2')
except metrics.ChromeProxyMetricException:
exception_occurred = True
# The first response was expected through the HTTP fallback proxy.
self.assertTrue(exception_occurred)
metric.SetEvents([EVENT_HTML_PROXY_VIA_HTTP_FALLBACK,
EVENT_HTML_PROXY_VIA_HTTP_FALLBACK,
EVENT_IMAGE_PROXY_VIA_HTTP_FALLBACK])
exception_occurred = False
try:
metric.AddResultsForHTTPToDirectFallback(None, results, 'test.html2')
except metrics.ChromeProxyMetricException:
exception_occurred = True
# All but the first response were expected to be over direct.
self.assertTrue(exception_occurred)
metric.SetEvents([EVENT_HTML_DIRECT,
EVENT_HTML_DIRECT,
EVENT_IMAGE_DIRECT])
exception_occurred = False
try:
metric.AddResultsForHTTPToDirectFallback(None, results, 'test.html2')
except metrics.ChromeProxyMetricException:
exception_occurred = True
# The first response was expected through the HTTP fallback proxy.
self.assertTrue(exception_occurred)
# Passing in zero responses should cause a failure.
metric.SetEvents([])
no_responses_exception = False
try:
metric.AddResultsForHTTPToDirectFallback(None, results, 'test.html2')
except metrics.ChromeProxyMetricException:
no_responses_exception = True
self.assertTrue(no_responses_exception)
| bsd-3-clause |
HiSPARC/sapphire | scripts/simulations/analyze_shower_front.py | 1 | 5153 | import numpy as np
import tables
from scipy.optimize import curve_fit
from scipy.stats import scoreatpercentile
from artist import GraphArtist
from pylab import *
import matplotlib.pyplot as plt
import utils
USE_TEX = False
# For matplotlib plots
if USE_TEX:
rcParams['font.serif'] = 'Computer Modern'
rcParams['font.sans-serif'] = 'Computer Modern'
rcParams['font.family'] = 'sans-serif'
rcParams['figure.figsize'] = [4 * x for x in (1, 2. / 3)]
rcParams['figure.subplot.left'] = 0.175
rcParams['figure.subplot.bottom'] = 0.175
rcParams['font.size'] = 10
rcParams['legend.fontsize'] = 'small'
rcParams['text.usetex'] = True
def main():
global data
data = tables.open_file('master-ch4v2.h5', 'r')
#utils.set_suffix('E_1PeV')
#scatterplot_core_distance_vs_time()
#median_core_distance_vs_time()
boxplot_core_distance_vs_time()
#hists_core_distance_vs_time()
plot_front_passage()
def scatterplot_core_distance_vs_time():
plt.figure()
sim = data.root.showers.E_1PeV.zenith_0
electrons = sim.electrons
plt.loglog(electrons[:]['core_distance'], electrons[:]['arrival_time'], ',')
plt.xlim(1e0, 1e2)
plt.ylim(1e-3, 1e3)
plt.xlabel("Core distance [m]")
plt.ylabel("Arrival time [ns]")
utils.title("Shower front timing structure")
utils.saveplot()
def median_core_distance_vs_time():
plt.figure()
plot_and_fit_statistic(lambda a: scoreatpercentile(a, 25))
plot_and_fit_statistic(lambda a: scoreatpercentile(a, 75))
    utils.title("Shower front timing structure (25, 75 %)")
    plt.xlabel("Core distance [m]")
    plt.ylabel("Median arrival time [ns]")
    legend(loc='lower right')
    # Save after the labels and legend are set so they appear in the saved figure.
    utils.saveplot()
def plot_and_fit_statistic(func):
sim = data.root.showers.E_1PeV.zenith_0
electrons = sim.electrons
bins = np.logspace(0, 2, 25)
x, y = [], []
for low, high in zip(bins[:-1], bins[1:]):
sel = electrons.read_where('(low < core_distance) & (core_distance <= high)')
statistic = func(sel[:]['arrival_time'])
x.append(np.mean([low, high]))
y.append(statistic)
plt.loglog(x, y)
logx = log10(x)
logy = log10(y)
logf = lambda x, a, b: a * x + b
g = lambda x, a, b: 10 ** logf(log10(x), a, b)
popt, pcov = curve_fit(logf, logx, logy)
plot(x, g(x, *popt), label="f(x) = %.2e * x ^ %.2e" % (10 ** popt[1],
popt[0]))
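    # Note on the fit above (added for clarity): fitting a straight line in
    # log-log space, log10(y) = a * log10(x) + b, is equivalent to the power
    # law y = (10 ** b) * x ** a, which is why the label reports 10 ** popt[1]
    # as the prefactor and popt[0] as the exponent.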
def boxplot_core_distance_vs_time():
plt.figure()
sim = data.root.showers.E_1PeV.zenith_0.shower_0
leptons = sim.leptons
#bins = np.logspace(0, 2, 25)
bins = np.linspace(0, 100, 15)
x, arrival_time, widths = [], [], []
t25, t50, t75 = [], [], []
for low, high in zip(bins[:-1], bins[1:]):
sel = leptons.read_where('(low < core_distance) & (core_distance <= high)')
x.append(np.mean([low, high]))
arrival_time.append(sel[:]['arrival_time'])
widths.append((high - low) / 2)
ts = sel[:]['arrival_time']
t25.append(scoreatpercentile(ts, 25))
t50.append(scoreatpercentile(ts, 50))
t75.append(scoreatpercentile(ts, 75))
fill_between(x, t25, t75, color='0.75')
plot(x, t50, 'o-', color='black')
plt.xlabel("Core distance [m]")
plt.ylabel("Arrival time [ns]")
#utils.title("Shower front timing structure")
utils.saveplot()
graph = GraphArtist()
graph.plot(x, t50, linestyle=None)
graph.shade_region(x, t25, t75)
graph.set_xlabel(r"Core distance [\si{\meter}]")
graph.set_ylabel(r"Arrival time [\si{\nano\second}]")
graph.set_ylimits(0, 30)
graph.set_xlimits(0, 100)
graph.save('plots/front-passage-vs-R')
def hists_core_distance_vs_time():
plt.figure()
sim = data.root.showers.E_1PeV.zenith_0
electrons = sim.electrons
bins = np.logspace(0, 2, 5)
for low, high in zip(bins[:-1], bins[1:]):
sel = electrons.read_where('(low < core_distance) & (core_distance <= high)')
arrival_time = sel[:]['arrival_time']
plt.hist(arrival_time, bins=np.logspace(-2, 3, 50), histtype='step',
label="%.2f <= log10(R) < %.2f" % (np.log10(low),
np.log10(high)))
plt.xscale('log')
plt.xlabel("Arrival Time [ns]")
plt.ylabel("Count")
plt.legend(loc='upper left')
utils.title("Shower front timing structure")
utils.saveplot()
def plot_front_passage():
sim = data.root.showers.E_1PeV.zenith_0.shower_0
leptons = sim.leptons
R = 40
dR = 2
low = R - dR
high = R + dR
global t
t = leptons.read_where('(low < core_distance) & (core_distance <= high)',
field='arrival_time')
n, bins, patches = hist(t, bins=linspace(0, 30, 31), histtype='step')
graph = GraphArtist()
graph.histogram(n, bins)
graph.set_xlabel(r"Arrival time [\si{\nano\second}]")
graph.set_ylabel("Number of leptons")
graph.set_ylimits(min=0)
graph.set_xlimits(0, 30)
graph.save('plots/front-passage')
if __name__ == '__main__':
main()
| gpl-3.0 |
spottradingllc/zoom | server/zoom/www/handlers/reload_cache_handler.py | 1 | 1371 | import logging
import tornado.web
from httplib import INTERNAL_SERVER_ERROR
from zoom.common.decorators import TimeThis
class ReloadCacheHandler(tornado.web.RequestHandler):
@property
def data_store(self):
"""
:rtype: zoom.www.cache.data_store.DataStore
"""
return self.application.data_store
@TimeThis(__file__)
def post(self):
"""
@api {post} /api/v1/cache/reload/ Reload data from Zookeeper
@apiParam {String} user The user that submitted the task
@apiParam {String} command Can be anything...currently only used for logging
@apiVersion 1.0.0
@apiName Reload
@apiGroup Cache
"""
try:
user = self.get_argument("user")
command = self.get_argument("command")
logging.info("Received reload cache command for target '{0}' from "
"user {1}:{2}"
.format(command, user, self.request.remote_ip))
logging.info("Clearing and reloading all server side caches")
self.data_store.reload()
self.write('Cache Reloaded')
self.set_header('Content-Type', 'text/html')
except Exception as e:
self.set_status(INTERNAL_SERVER_ERROR)
self.write({'errorText': str(e)})
logging.exception(e)
| gpl-2.0 |
mhnatiuk/phd_sociology_of_religion | scrapper/lib/python2.7/site-packages/twisted/conch/test/test_agent.py | 42 | 13082 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.conch.ssh.agent}.
"""
import struct
from twisted.trial import unittest
try:
import OpenSSL
except ImportError:
iosim = None
else:
from twisted.test import iosim
try:
import Crypto.Cipher.DES3
except ImportError:
Crypto = None
try:
import pyasn1
except ImportError:
pyasn1 = None
if Crypto and pyasn1:
from twisted.conch.ssh import keys, agent
else:
keys = agent = None
from twisted.conch.test import keydata
from twisted.conch.error import ConchError, MissingKeyStoreError
class StubFactory(object):
"""
Mock factory that provides the keys attribute required by the
SSHAgentServerProtocol
"""
def __init__(self):
self.keys = {}
class AgentTestBase(unittest.TestCase):
"""
Tests for SSHAgentServer/Client.
"""
if iosim is None:
skip = "iosim requires SSL, but SSL is not available"
elif agent is None or keys is None:
skip = "Cannot run without PyCrypto or PyASN1"
def setUp(self):
# wire up our client <-> server
self.client, self.server, self.pump = iosim.connectedServerAndClient(
agent.SSHAgentServer, agent.SSHAgentClient)
# the server's end of the protocol is stateful and we store it on the
# factory, for which we only need a mock
self.server.factory = StubFactory()
# pub/priv keys of each kind
self.rsaPrivate = keys.Key.fromString(keydata.privateRSA_openssh)
self.dsaPrivate = keys.Key.fromString(keydata.privateDSA_openssh)
self.rsaPublic = keys.Key.fromString(keydata.publicRSA_openssh)
self.dsaPublic = keys.Key.fromString(keydata.publicDSA_openssh)
class TestServerProtocolContractWithFactory(AgentTestBase):
"""
The server protocol is stateful and so uses its factory to track state
across requests. This test asserts that the protocol raises if its factory
doesn't provide the necessary storage for that state.
"""
def test_factorySuppliesKeyStorageForServerProtocol(self):
# need a message to send into the server
msg = struct.pack('!LB',1, agent.AGENTC_REQUEST_IDENTITIES)
del self.server.factory.__dict__['keys']
self.assertRaises(MissingKeyStoreError,
self.server.dataReceived, msg)
class TestUnimplementedVersionOneServer(AgentTestBase):
"""
Tests for methods with no-op implementations on the server. We need these
for clients, such as openssh, that try v1 methods before going to v2.
Because the client doesn't expose these operations with nice method names,
we invoke sendRequest directly with an op code.
"""
def test_agentc_REQUEST_RSA_IDENTITIES(self):
"""
assert that we get the correct op code for an RSA identities request
"""
d = self.client.sendRequest(agent.AGENTC_REQUEST_RSA_IDENTITIES, '')
self.pump.flush()
def _cb(packet):
self.assertEqual(
agent.AGENT_RSA_IDENTITIES_ANSWER, ord(packet[0]))
return d.addCallback(_cb)
def test_agentc_REMOVE_RSA_IDENTITY(self):
"""
assert that we get the correct op code for an RSA remove identity request
"""
d = self.client.sendRequest(agent.AGENTC_REMOVE_RSA_IDENTITY, '')
self.pump.flush()
return d.addCallback(self.assertEqual, '')
def test_agentc_REMOVE_ALL_RSA_IDENTITIES(self):
"""
assert that we get the correct op code for an RSA remove all identities
request.
"""
d = self.client.sendRequest(agent.AGENTC_REMOVE_ALL_RSA_IDENTITIES, '')
self.pump.flush()
return d.addCallback(self.assertEqual, '')
if agent is not None:
class CorruptServer(agent.SSHAgentServer):
"""
A misbehaving server that returns bogus response op codes so that we can
verify that our callbacks that deal with these op codes handle such
miscreants.
"""
def agentc_REQUEST_IDENTITIES(self, data):
self.sendResponse(254, '')
def agentc_SIGN_REQUEST(self, data):
self.sendResponse(254, '')
class TestClientWithBrokenServer(AgentTestBase):
"""
verify error handling code in the client using a misbehaving server
"""
def setUp(self):
AgentTestBase.setUp(self)
self.client, self.server, self.pump = iosim.connectedServerAndClient(
CorruptServer, agent.SSHAgentClient)
# the server's end of the protocol is stateful and we store it on the
# factory, for which we only need a mock
self.server.factory = StubFactory()
def test_signDataCallbackErrorHandling(self):
"""
Assert that L{SSHAgentClient.signData} raises a ConchError
if we get a response from the server whose opcode doesn't match
the protocol for data signing requests.
"""
d = self.client.signData(self.rsaPublic.blob(), "John Hancock")
self.pump.flush()
return self.assertFailure(d, ConchError)
def test_requestIdentitiesCallbackErrorHandling(self):
"""
Assert that L{SSHAgentClient.requestIdentities} raises a ConchError
if we get a response from the server whose opcode doesn't match
the protocol for identity requests.
"""
d = self.client.requestIdentities()
self.pump.flush()
return self.assertFailure(d, ConchError)
class TestAgentKeyAddition(AgentTestBase):
"""
Test adding different flavors of keys to an agent.
"""
def test_addRSAIdentityNoComment(self):
"""
L{SSHAgentClient.addIdentity} adds the private key it is called
with to the SSH agent server to which it is connected, associating
it with the comment it is called with.
        This test asserts that omitting the comment produces an
empty string for the comment on the server.
"""
d = self.client.addIdentity(self.rsaPrivate.privateBlob())
self.pump.flush()
def _check(ignored):
serverKey = self.server.factory.keys[self.rsaPrivate.blob()]
self.assertEqual(self.rsaPrivate, serverKey[0])
self.assertEqual('', serverKey[1])
return d.addCallback(_check)
def test_addDSAIdentityNoComment(self):
"""
L{SSHAgentClient.addIdentity} adds the private key it is called
with to the SSH agent server to which it is connected, associating
it with the comment it is called with.
        This test asserts that omitting the comment produces an
empty string for the comment on the server.
"""
d = self.client.addIdentity(self.dsaPrivate.privateBlob())
self.pump.flush()
def _check(ignored):
serverKey = self.server.factory.keys[self.dsaPrivate.blob()]
self.assertEqual(self.dsaPrivate, serverKey[0])
self.assertEqual('', serverKey[1])
return d.addCallback(_check)
def test_addRSAIdentityWithComment(self):
"""
L{SSHAgentClient.addIdentity} adds the private key it is called
with to the SSH agent server to which it is connected, associating
it with the comment it is called with.
This test asserts that the server receives/stores the comment
as sent by the client.
"""
d = self.client.addIdentity(
self.rsaPrivate.privateBlob(), comment='My special key')
self.pump.flush()
def _check(ignored):
serverKey = self.server.factory.keys[self.rsaPrivate.blob()]
self.assertEqual(self.rsaPrivate, serverKey[0])
self.assertEqual('My special key', serverKey[1])
return d.addCallback(_check)
def test_addDSAIdentityWithComment(self):
"""
L{SSHAgentClient.addIdentity} adds the private key it is called
with to the SSH agent server to which it is connected, associating
it with the comment it is called with.
This test asserts that the server receives/stores the comment
as sent by the client.
"""
d = self.client.addIdentity(
self.dsaPrivate.privateBlob(), comment='My special key')
self.pump.flush()
def _check(ignored):
serverKey = self.server.factory.keys[self.dsaPrivate.blob()]
self.assertEqual(self.dsaPrivate, serverKey[0])
self.assertEqual('My special key', serverKey[1])
return d.addCallback(_check)
class TestAgentClientFailure(AgentTestBase):
def test_agentFailure(self):
"""
verify that the client raises ConchError on AGENT_FAILURE
"""
d = self.client.sendRequest(254, '')
self.pump.flush()
return self.assertFailure(d, ConchError)
class TestAgentIdentityRequests(AgentTestBase):
"""
Test operations against a server with identities already loaded.
"""
def setUp(self):
AgentTestBase.setUp(self)
self.server.factory.keys[self.dsaPrivate.blob()] = (
self.dsaPrivate, 'a comment')
self.server.factory.keys[self.rsaPrivate.blob()] = (
self.rsaPrivate, 'another comment')
def test_signDataRSA(self):
"""
Sign data with an RSA private key and then verify it with the public
key.
"""
d = self.client.signData(self.rsaPublic.blob(), "John Hancock")
self.pump.flush()
def _check(sig):
expected = self.rsaPrivate.sign("John Hancock")
self.assertEqual(expected, sig)
self.assertTrue(self.rsaPublic.verify(sig, "John Hancock"))
return d.addCallback(_check)
def test_signDataDSA(self):
"""
Sign data with a DSA private key and then verify it with the public
key.
"""
d = self.client.signData(self.dsaPublic.blob(), "John Hancock")
self.pump.flush()
def _check(sig):
# Cannot do this b/c DSA uses random numbers when signing
# expected = self.dsaPrivate.sign("John Hancock")
# self.assertEqual(expected, sig)
self.assertTrue(self.dsaPublic.verify(sig, "John Hancock"))
return d.addCallback(_check)
def test_signDataRSAErrbackOnUnknownBlob(self):
"""
Assert that we get an errback if we try to sign data using a key that
wasn't added.
"""
del self.server.factory.keys[self.rsaPublic.blob()]
d = self.client.signData(self.rsaPublic.blob(), "John Hancock")
self.pump.flush()
return self.assertFailure(d, ConchError)
def test_requestIdentities(self):
"""
Assert that we get all of the keys/comments that we add when we issue a
request for all identities.
"""
d = self.client.requestIdentities()
self.pump.flush()
def _check(keyt):
expected = {}
expected[self.dsaPublic.blob()] = 'a comment'
expected[self.rsaPublic.blob()] = 'another comment'
received = {}
for k in keyt:
received[keys.Key.fromString(k[0], type='blob').blob()] = k[1]
self.assertEqual(expected, received)
return d.addCallback(_check)
class TestAgentKeyRemoval(AgentTestBase):
"""
Test support for removing keys in a remote server.
"""
def setUp(self):
AgentTestBase.setUp(self)
self.server.factory.keys[self.dsaPrivate.blob()] = (
self.dsaPrivate, 'a comment')
self.server.factory.keys[self.rsaPrivate.blob()] = (
self.rsaPrivate, 'another comment')
def test_removeRSAIdentity(self):
"""
Assert that we can remove an RSA identity.
"""
# only need public key for this
d = self.client.removeIdentity(self.rsaPrivate.blob())
self.pump.flush()
def _check(ignored):
self.assertEqual(1, len(self.server.factory.keys))
self.assertIn(self.dsaPrivate.blob(), self.server.factory.keys)
self.assertNotIn(self.rsaPrivate.blob(), self.server.factory.keys)
return d.addCallback(_check)
def test_removeDSAIdentity(self):
"""
Assert that we can remove a DSA identity.
"""
# only need public key for this
d = self.client.removeIdentity(self.dsaPrivate.blob())
self.pump.flush()
def _check(ignored):
self.assertEqual(1, len(self.server.factory.keys))
self.assertIn(self.rsaPrivate.blob(), self.server.factory.keys)
return d.addCallback(_check)
def test_removeAllIdentities(self):
"""
Assert that we can remove all identities.
"""
d = self.client.removeAllIdentities()
self.pump.flush()
def _check(ignored):
self.assertEqual(0, len(self.server.factory.keys))
return d.addCallback(_check)
| gpl-2.0 |
thetimpotter/bitcoin_1.0 | qa/rpc-tests/decodescript.py | 26 | 13617 | #!/usr/bin/env python2
# Copyright (c) 2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import *
from binascii import hexlify, unhexlify
from cStringIO import StringIO
class DecodeScriptTest(BitcoinTestFramework):
"""Tests decoding scripts via RPC command "decodescript"."""
def setup_chain(self):
print('Initializing test directory ' + self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 1)
def setup_network(self, split=False):
self.nodes = start_nodes(1, self.options.tmpdir)
self.is_network_split = False
def decodescript_script_sig(self):
signature = '304502207fa7a6d1e0ee81132a269ad84e68d695483745cde8b541e3bf630749894e342a022100c1f7ab20e13e22fb95281a870f3dcf38d782e53023ee313d741ad0cfbc0c509001'
push_signature = '48' + signature
public_key = '03b0da749730dc9b4b1f4a14d6902877a92541f5368778853d9c4a0cb7802dcfb2'
push_public_key = '21' + public_key
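        # Note (added for clarity): the prepended bytes are Bitcoin script push
        # opcodes: 0x48 pushes the next 72 bytes (the DER signature plus the
        # sighash byte) and 0x21 pushes the next 33 bytes (a compressed public
        # key).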
# below are test cases for all of the standard transaction types
# 1) P2PK scriptSig
# the scriptSig of a public key scriptPubKey simply pushes a signature onto the stack
rpc_result = self.nodes[0].decodescript(push_signature)
assert_equal(signature, rpc_result['asm'])
# 2) P2PKH scriptSig
rpc_result = self.nodes[0].decodescript(push_signature + push_public_key)
assert_equal(signature + ' ' + public_key, rpc_result['asm'])
# 3) multisig scriptSig
# this also tests the leading portion of a P2SH multisig scriptSig
# OP_0 <A sig> <B sig>
rpc_result = self.nodes[0].decodescript('00' + push_signature + push_signature)
assert_equal('0 ' + signature + ' ' + signature, rpc_result['asm'])
# 4) P2SH scriptSig
# an empty P2SH redeemScript is valid and makes for a very simple test case.
# thus, such a spending scriptSig would just need to pass the outer redeemScript
# hash test and leave true on the top of the stack.
rpc_result = self.nodes[0].decodescript('5100')
assert_equal('1 0', rpc_result['asm'])
# 5) null data scriptSig - no such thing because null data scripts can not be spent.
# thus, no test case for that standard transaction type is here.
def decodescript_script_pub_key(self):
public_key = '03b0da749730dc9b4b1f4a14d6902877a92541f5368778853d9c4a0cb7802dcfb2'
push_public_key = '21' + public_key
public_key_hash = '11695b6cd891484c2d49ec5aa738ec2b2f897777'
push_public_key_hash = '14' + public_key_hash
# below are test cases for all of the standard transaction types
# 1) P2PK scriptPubKey
# <pubkey> OP_CHECKSIG
rpc_result = self.nodes[0].decodescript(push_public_key + 'ac')
assert_equal(public_key + ' OP_CHECKSIG', rpc_result['asm'])
# 2) P2PKH scriptPubKey
# OP_DUP OP_HASH160 <PubKeyHash> OP_EQUALVERIFY OP_CHECKSIG
rpc_result = self.nodes[0].decodescript('76a9' + push_public_key_hash + '88ac')
assert_equal('OP_DUP OP_HASH160 ' + public_key_hash + ' OP_EQUALVERIFY OP_CHECKSIG', rpc_result['asm'])
# 3) multisig scriptPubKey
# <m> <A pubkey> <B pubkey> <C pubkey> <n> OP_CHECKMULTISIG
# just imagine that the pub keys used below are different.
# for our purposes here it does not matter that they are the same even though it is unrealistic.
rpc_result = self.nodes[0].decodescript('52' + push_public_key + push_public_key + push_public_key + '53ae')
assert_equal('2 ' + public_key + ' ' + public_key + ' ' + public_key + ' 3 OP_CHECKMULTISIG', rpc_result['asm'])
# 4) P2SH scriptPubKey
# OP_HASH160 <Hash160(redeemScript)> OP_EQUAL.
# push_public_key_hash here should actually be the hash of a redeem script.
# but this works the same for purposes of this test.
rpc_result = self.nodes[0].decodescript('a9' + push_public_key_hash + '87')
assert_equal('OP_HASH160 ' + public_key_hash + ' OP_EQUAL', rpc_result['asm'])
# 5) null data scriptPubKey
# use a signature look-alike here to make sure that we do not decode random data as a signature.
# this matters if/when signature sighash decoding comes along.
# would want to make sure that no such decoding takes place in this case.
signature_imposter = '48304502207fa7a6d1e0ee81132a269ad84e68d695483745cde8b541e3bf630749894e342a022100c1f7ab20e13e22fb95281a870f3dcf38d782e53023ee313d741ad0cfbc0c509001'
# OP_RETURN <data>
rpc_result = self.nodes[0].decodescript('6a' + signature_imposter)
assert_equal('OP_RETURN ' + signature_imposter[2:], rpc_result['asm'])
# 6) a CLTV redeem script. redeem scripts are in-effect scriptPubKey scripts, so adding a test here.
# OP_NOP2 is also known as OP_CHECKLOCKTIMEVERIFY.
# just imagine that the pub keys used below are different.
# for our purposes here it does not matter that they are the same even though it is unrealistic.
#
# OP_IF
# <receiver-pubkey> OP_CHECKSIGVERIFY
# OP_ELSE
# <lock-until> OP_NOP2 OP_DROP
# OP_ENDIF
# <sender-pubkey> OP_CHECKSIG
#
# lock until block 500,000
rpc_result = self.nodes[0].decodescript('63' + push_public_key + 'ad670320a107b17568' + push_public_key + 'ac')
assert_equal('OP_IF ' + public_key + ' OP_CHECKSIGVERIFY OP_ELSE 500000 OP_NOP2 OP_DROP OP_ENDIF ' + public_key + ' OP_CHECKSIG', rpc_result['asm'])
def decoderawtransaction_asm_sighashtype(self):
"""Tests decoding scripts via RPC command "decoderawtransaction".
This test is in with the "decodescript" tests because they are testing the same "asm" script decodes.
"""
# this test case uses a random plain vanilla mainnet transaction with a single P2PKH input and output
tx = '0100000001696a20784a2c70143f634e95227dbdfdf0ecd51647052e70854512235f5986ca010000008a47304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb014104d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536ffffffff0100e1f505000000001976a914eb6c6e0cdb2d256a32d97b8df1fc75d1920d9bca88ac00000000'
rpc_result = self.nodes[0].decoderawtransaction(tx)
assert_equal('304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb[ALL] 04d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536', rpc_result['vin'][0]['scriptSig']['asm'])
# this test case uses a mainnet transaction that has a P2SH input and both P2PKH and P2SH outputs.
# it's from James D'Angelo's awesome introductory videos about multisig: https://www.youtube.com/watch?v=zIbUSaZBJgU and https://www.youtube.com/watch?v=OSA1pwlaypc
# verify that we have not altered scriptPubKey decoding.
tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914dc863734a218bfe83ef770ee9d41a27f824a6e5688acee2a02000000000017a9142a5edea39971049a540474c6a99edf0aa4074c588700000000'
rpc_result = self.nodes[0].decoderawtransaction(tx)
assert_equal('8e3730608c3b0bb5df54f09076e196bc292a8e39a78e73b44b6ba08c78f5cbb0', rpc_result['txid'])
assert_equal('0 3045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea[ALL] 3045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75[ALL] 5221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53ae', rpc_result['vin'][0]['scriptSig']['asm'])
assert_equal('OP_DUP OP_HASH160 dc863734a218bfe83ef770ee9d41a27f824a6e56 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm'])
assert_equal('OP_HASH160 2a5edea39971049a540474c6a99edf0aa4074c58 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm'])
txSave = CTransaction()
txSave.deserialize(StringIO(unhexlify(tx)))
# make sure that a specifically crafted op_return value will not pass all the IsDERSignature checks and then get decoded as a sighash type
tx = '01000000015ded05872fdbda629c7d3d02b194763ce3b9b1535ea884e3c8e765d42e316724020000006b48304502204c10d4064885c42638cbff3585915b322de33762598321145ba033fc796971e2022100bb153ad3baa8b757e30a2175bd32852d2e1cb9080f84d7e32fcdfd667934ef1b012103163c0ff73511ea1743fb5b98384a2ff09dd06949488028fd819f4d83f56264efffffffff0200000000000000000b6a0930060201000201000180380100000000001976a9141cabd296e753837c086da7a45a6c2fe0d49d7b7b88ac00000000'
rpc_result = self.nodes[0].decoderawtransaction(tx)
assert_equal('OP_RETURN 300602010002010001', rpc_result['vout'][0]['scriptPubKey']['asm'])
# verify that we have not altered scriptPubKey processing even of a specially crafted P2PKH pubkeyhash and P2SH redeem script hash that is made to pass the der signature checks
tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914301102070101010101010102060101010101010188acee2a02000000000017a91430110207010101010101010206010101010101018700000000'
rpc_result = self.nodes[0].decoderawtransaction(tx)
assert_equal('OP_DUP OP_HASH160 3011020701010101010101020601010101010101 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm'])
assert_equal('OP_HASH160 3011020701010101010101020601010101010101 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm'])
# some more full transaction tests of varying specific scriptSigs. used instead of
# tests in decodescript_script_sig because the decodescript RPC is specifically
# for working on scriptPubKeys (argh!).
push_signature = hexlify(txSave.vin[0].scriptSig)[2:(0x48*2+4)]
signature = push_signature[2:]
der_signature = signature[:-2]
signature_sighash_decoded = der_signature + '[ALL]'
signature_2 = der_signature + '82'
push_signature_2 = '48' + signature_2
signature_2_sighash_decoded = der_signature + '[NONE|ANYONECANPAY]'
# 1) P2PK scriptSig
txSave.vin[0].scriptSig = unhexlify(push_signature)
rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
assert_equal(signature_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])
# make sure that the sighash decodes come out correctly for a more complex / lesser used case.
txSave.vin[0].scriptSig = unhexlify(push_signature_2)
rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
assert_equal(signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])
# 2) multisig scriptSig
txSave.vin[0].scriptSig = unhexlify('00' + push_signature + push_signature_2)
rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
assert_equal('0 ' + signature_sighash_decoded + ' ' + signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])
# 3) test a scriptSig that contains more than push operations.
# in fact, it contains an OP_RETURN with data specially crafted to cause improper decode if the code does not catch it.
txSave.vin[0].scriptSig = unhexlify('6a143011020701010101010101020601010101010101')
rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
print(hexlify('636174'))
assert_equal('OP_RETURN 3011020701010101010101020601010101010101', rpc_result['vin'][0]['scriptSig']['asm'])
def run_test(self):
self.decodescript_script_sig()
self.decodescript_script_pub_key()
self.decoderawtransaction_asm_sighashtype()
if __name__ == '__main__':
DecodeScriptTest().main()
| mit |
mseaborn/switch | sandbox_dev/param_v_expression.py | 5 | 4430 | #!/usr/local/bin/python
# Copyright 2015 The Switch Authors. All rights reserved.
# Licensed under the Apache License, Version 2, which is in the LICENSE file.
"""
I wrote this script to explore the usage and limitations of parameters
and expression objects in Pyomo. I thought they were interchangeable, but
when I converted a parameter to an expression I got an error where it
was being used later in an if statement as part of a test for data
validity.
The punchline is that expressions are not a drop-in replacement for
parameters and don't offer any clear and obvious benefits. If I were to
use expressions to replace derived parameters and I don't want to keep
track of which component is which object, then I should wrap all if
statements in value() expressions to force the expressions to resolve.
The best use of expressions is probably as a substitute for derived
variables. Expressions and derived variables can enable more readable
and easier-to-maintain models by replacing repeated portions of
equations with a single term. Derived variables increase the size of the
optimization problem, and effective preprocessing is required to remove
them. Expressions will not increase the size of the optimization problem
and will be resolved during compilation.
"""
from coopr.pyomo import *
mod = AbstractModel()
mod.set = Set(initialize=[1, 2])
mod.param = Param(mod.set, initialize=lambda m, i: i+10)
# This expression should always be greater than param
mod.expression = Expression(mod.set, initialize=lambda m, i: m.param[i]+1)
# exp_as_param should have a value identical to expression
mod.exp_as_param = Param(mod.set, initialize=lambda m, i: m.param[i]+1)
# This simple syntax that treats model components as normal variables
# works if both components are parameters. m.param[i] > m.exp_as_param[i]
try:
print "A test treating both components as normal variables works " +\
"if both components are parameters."
mod.build_check = BuildCheck(
mod.set, rule=lambda m, i: m.param[i] > m.exp_as_param[i])
instance = mod.create()
print "The test passed. This wasn't supposed to happen.\n"
except ValueError as e:
print "The test failed as expected!\n"
# This failed check illustrates that expressions cannot be used in the
# same way as parameters. Attempting to access them in the same manner
# will return an expression object that is not evaluated into a value.
try:
print "This method doesn't work when one component is an expression."
mod.del_component('build_check')
mod.build_check = BuildCheck(
mod.set, rule=lambda m, i: m.param[i] > m.expression[i])
instance = mod.create()
print "The test passed. This wasn't supposed to happen.\n"
except ValueError as e:
print "The test failed as expected!\n"
# Wrapping the overall expression in a value() statement will give the
# expected behavior, whether the components are params or expressions.
try:
print "It will work if you wrap the whole test in a value() function."
mod.del_component('build_check')
mod.working_check = BuildCheck(
mod.set, rule=lambda m, i: value(m.param[i] > m.expression[i]))
instance = mod.create()
print "The test passed. This wasn't supposed to happen.\n"
except ValueError as e:
print "The test failed as expected!\n"
# If you keep track of which components are expressions, you can wrap
# them in a value() function to access their value, but keeping track of
# expressions vs parameters could be cumbersome. An alternative method
# of accessing the value is m.expression[i](), but that syntax will
# generate an error if you try to use it on a parameter. Calling
# m.expression[i].value will return the expression object, which isn't
# useful in this sort of mathematical statement, and the .value
# attribute is not defined for parameters.
try:
print "It also works if you wrap one or both components in a value() function."
mod.del_component('build_check')
mod.working_check3 = BuildCheck(
mod.set,
rule=lambda m, i: m.param[i] > value(m.expression[i]))
# Treating both components the same and wrapping them in value()
# functions works but it is too verbose :/
# rule=lambda m, i: value(m.param[i]) > value(m.expression[i]))
instance = mod.create()
print "The test passed. This wasn't supposed to happen.\n"
except ValueError as e:
print "The test failed as expected!\n"
| apache-2.0 |
raspberrydie/Jasper | client/tts.py | 9 | 21928 | # -*- coding: utf-8-*-
"""
A Speaker handles audio output from Jasper to the user
Speaker methods:
say - output 'phrase' as speech
play - play the audio in 'filename'
is_available - returns True if the platform supports this implementation
"""
import os
import platform
import re
import tempfile
import subprocess
import pipes
import logging
import wave
import urllib
import urlparse
import requests
from abc import ABCMeta, abstractmethod
import argparse
import yaml
try:
import mad
import gtts
except ImportError:
pass
import diagnose
import jasperpath
class AbstractTTSEngine(object):
"""
Generic parent class for all speakers
"""
__metaclass__ = ABCMeta
@classmethod
def get_config(cls):
return {}
@classmethod
def get_instance(cls):
config = cls.get_config()
instance = cls(**config)
return instance
@classmethod
@abstractmethod
def is_available(cls):
return diagnose.check_executable('aplay')
def __init__(self, **kwargs):
self._logger = logging.getLogger(__name__)
@abstractmethod
def say(self, phrase, *args):
pass
def play(self, filename):
# FIXME: Use platform-independent audio-output here
# See issue jasperproject/jasper-client#188
cmd = ['aplay', '-D', 'hw:1,0', str(filename)]
self._logger.debug('Executing %s', ' '.join([pipes.quote(arg)
for arg in cmd]))
with tempfile.TemporaryFile() as f:
subprocess.call(cmd, stdout=f, stderr=f)
f.seek(0)
output = f.read()
if output:
self._logger.debug("Output was: '%s'", output)
class AbstractMp3TTSEngine(AbstractTTSEngine):
"""
Generic class that implements the 'play' method for mp3 files
"""
@classmethod
def is_available(cls):
return (super(AbstractMp3TTSEngine, cls).is_available() and
diagnose.check_python_import('mad'))
def play_mp3(self, filename):
mf = mad.MadFile(filename)
with tempfile.NamedTemporaryFile(suffix='.wav') as f:
wav = wave.open(f, mode='wb')
wav.setframerate(mf.samplerate())
wav.setnchannels(1 if mf.mode() == mad.MODE_SINGLE_CHANNEL else 2)
# 4L is the sample width of 32 bit audio
wav.setsampwidth(4L)
frame = mf.read()
while frame is not None:
wav.writeframes(frame)
frame = mf.read()
wav.close()
self.play(f.name)
class DummyTTS(AbstractTTSEngine):
"""
Dummy TTS engine that logs phrases with INFO level instead of synthesizing
speech.
"""
SLUG = "dummy-tts"
@classmethod
def is_available(cls):
return True
def say(self, phrase):
self._logger.info(phrase)
def play(self, filename):
self._logger.debug("Playback of file '%s' requested")
pass
class EspeakTTS(AbstractTTSEngine):
"""
Uses the eSpeak speech synthesizer included in the Jasper disk image
Requires espeak to be available
"""
SLUG = "espeak-tts"
def __init__(self, voice='default+m3', pitch_adjustment=40,
words_per_minute=160):
super(self.__class__, self).__init__()
self.voice = voice
self.pitch_adjustment = pitch_adjustment
self.words_per_minute = words_per_minute
@classmethod
def get_config(cls):
# FIXME: Replace this as soon as we have a config module
config = {}
# HMM dir
# Try to get hmm_dir from config
profile_path = jasperpath.config('profile.yml')
if os.path.exists(profile_path):
with open(profile_path, 'r') as f:
profile = yaml.safe_load(f)
if 'espeak-tts' in profile:
if 'voice' in profile['espeak-tts']:
config['voice'] = profile['espeak-tts']['voice']
if 'pitch_adjustment' in profile['espeak-tts']:
config['pitch_adjustment'] = \
profile['espeak-tts']['pitch_adjustment']
if 'words_per_minute' in profile['espeak-tts']:
config['words_per_minute'] = \
profile['espeak-tts']['words_per_minute']
return config
@classmethod
def is_available(cls):
return (super(cls, cls).is_available() and
diagnose.check_executable('espeak'))
def say(self, phrase):
self._logger.debug("Saying '%s' with '%s'", phrase, self.SLUG)
with tempfile.NamedTemporaryFile(suffix='.wav', delete=False) as f:
fname = f.name
cmd = ['espeak', '-v', self.voice,
'-p', self.pitch_adjustment,
'-s', self.words_per_minute,
'-w', fname,
phrase]
cmd = [str(x) for x in cmd]
self._logger.debug('Executing %s', ' '.join([pipes.quote(arg)
for arg in cmd]))
with tempfile.TemporaryFile() as f:
subprocess.call(cmd, stdout=f, stderr=f)
f.seek(0)
output = f.read()
if output:
self._logger.debug("Output was: '%s'", output)
self.play(fname)
os.remove(fname)
class FestivalTTS(AbstractTTSEngine):
"""
Uses the festival speech synthesizer
Requires festival (text2wave) to be available
"""
SLUG = 'festival-tts'
@classmethod
def is_available(cls):
if (super(cls, cls).is_available() and
diagnose.check_executable('text2wave') and
diagnose.check_executable('festival')):
logger = logging.getLogger(__name__)
cmd = ['festival', '--pipe']
with tempfile.SpooledTemporaryFile() as out_f:
with tempfile.SpooledTemporaryFile() as in_f:
logger.debug('Executing %s', ' '.join([pipes.quote(arg)
for arg in cmd]))
subprocess.call(cmd, stdin=in_f, stdout=out_f,
stderr=out_f)
out_f.seek(0)
output = out_f.read().strip()
if output:
logger.debug("Output was: '%s'", output)
return ('No default voice found' not in output)
return False
def say(self, phrase):
self._logger.debug("Saying '%s' with '%s'", phrase, self.SLUG)
cmd = ['text2wave']
with tempfile.NamedTemporaryFile(suffix='.wav') as out_f:
with tempfile.SpooledTemporaryFile() as in_f:
in_f.write(phrase)
in_f.seek(0)
with tempfile.SpooledTemporaryFile() as err_f:
self._logger.debug('Executing %s',
' '.join([pipes.quote(arg)
for arg in cmd]))
subprocess.call(cmd, stdin=in_f, stdout=out_f,
stderr=err_f)
err_f.seek(0)
output = err_f.read()
if output:
self._logger.debug("Output was: '%s'", output)
self.play(out_f.name)
class FliteTTS(AbstractTTSEngine):
"""
Uses the flite speech synthesizer
Requires flite to be available
"""
SLUG = 'flite-tts'
def __init__(self, voice=''):
super(self.__class__, self).__init__()
self.voice = voice if voice and voice in self.get_voices() else ''
@classmethod
def get_voices(cls):
cmd = ['flite', '-lv']
voices = []
with tempfile.SpooledTemporaryFile() as out_f:
subprocess.call(cmd, stdout=out_f)
out_f.seek(0)
for line in out_f:
if line.startswith('Voices available: '):
voices.extend([x.strip() for x in line[18:].split()
if x.strip()])
return voices
@classmethod
def get_config(cls):
# FIXME: Replace this as soon as we have a config module
config = {}
# HMM dir
# Try to get hmm_dir from config
profile_path = jasperpath.config('profile.yml')
if os.path.exists(profile_path):
with open(profile_path, 'r') as f:
profile = yaml.safe_load(f)
if 'flite-tts' in profile:
if 'voice' in profile['flite-tts']:
config['voice'] = profile['flite-tts']['voice']
return config
@classmethod
def is_available(cls):
return (super(cls, cls).is_available() and
diagnose.check_executable('flite') and
len(cls.get_voices()) > 0)
def say(self, phrase):
self._logger.debug("Saying '%s' with '%s'", phrase, self.SLUG)
cmd = ['flite']
if self.voice:
cmd.extend(['-voice', self.voice])
cmd.extend(['-t', phrase])
with tempfile.NamedTemporaryFile(suffix='.wav', delete=False) as f:
fname = f.name
cmd.append(fname)
with tempfile.SpooledTemporaryFile() as out_f:
self._logger.debug('Executing %s',
' '.join([pipes.quote(arg)
for arg in cmd]))
subprocess.call(cmd, stdout=out_f, stderr=out_f)
out_f.seek(0)
output = out_f.read().strip()
if output:
self._logger.debug("Output was: '%s'", output)
self.play(fname)
os.remove(fname)
class MacOSXTTS(AbstractTTSEngine):
"""
Uses the OS X built-in 'say' command
"""
SLUG = "osx-tts"
@classmethod
def is_available(cls):
return (platform.system().lower() == 'darwin' and
diagnose.check_executable('say') and
diagnose.check_executable('afplay'))
def say(self, phrase):
self._logger.debug("Saying '%s' with '%s'", phrase, self.SLUG)
cmd = ['say', str(phrase)]
self._logger.debug('Executing %s', ' '.join([pipes.quote(arg)
for arg in cmd]))
with tempfile.TemporaryFile() as f:
subprocess.call(cmd, stdout=f, stderr=f)
f.seek(0)
output = f.read()
if output:
self._logger.debug("Output was: '%s'", output)
def play(self, filename):
cmd = ['afplay', str(filename)]
self._logger.debug('Executing %s', ' '.join([pipes.quote(arg)
for arg in cmd]))
with tempfile.TemporaryFile() as f:
subprocess.call(cmd, stdout=f, stderr=f)
f.seek(0)
output = f.read()
if output:
self._logger.debug("Output was: '%s'", output)
class PicoTTS(AbstractTTSEngine):
"""
Uses the svox-pico-tts speech synthesizer
Requires pico2wave to be available
"""
SLUG = "pico-tts"
def __init__(self, language="en-US"):
super(self.__class__, self).__init__()
self.language = language
@classmethod
def is_available(cls):
return (super(cls, cls).is_available() and
diagnose.check_executable('pico2wave'))
@classmethod
def get_config(cls):
# FIXME: Replace this as soon as we have a config module
config = {}
# HMM dir
# Try to get hmm_dir from config
profile_path = jasperpath.config('profile.yml')
if os.path.exists(profile_path):
with open(profile_path, 'r') as f:
profile = yaml.safe_load(f)
if 'pico-tts' in profile and 'language' in profile['pico-tts']:
config['language'] = profile['pico-tts']['language']
return config
@property
def languages(self):
cmd = ['pico2wave', '-l', 'NULL',
'-w', os.devnull,
'NULL']
with tempfile.SpooledTemporaryFile() as f:
subprocess.call(cmd, stderr=f)
f.seek(0)
output = f.read()
pattern = re.compile(r'Unknown language: NULL\nValid languages:\n' +
r'((?:[a-z]{2}-[A-Z]{2}\n)+)')
matchobj = pattern.match(output)
if not matchobj:
raise RuntimeError("pico2wave: valid languages not detected")
langs = matchobj.group(1).split()
return langs
def say(self, phrase):
self._logger.debug("Saying '%s' with '%s'", phrase, self.SLUG)
with tempfile.NamedTemporaryFile(suffix='.wav', delete=False) as f:
fname = f.name
cmd = ['pico2wave', '--wave', fname]
if self.language not in self.languages:
raise ValueError("Language '%s' not supported by '%s'",
self.language, self.SLUG)
cmd.extend(['-l', self.language])
cmd.append(phrase)
self._logger.debug('Executing %s', ' '.join([pipes.quote(arg)
for arg in cmd]))
with tempfile.TemporaryFile() as f:
subprocess.call(cmd, stdout=f, stderr=f)
f.seek(0)
output = f.read()
if output:
self._logger.debug("Output was: '%s'", output)
self.play(fname)
os.remove(fname)
class GoogleTTS(AbstractMp3TTSEngine):
"""
Uses the Google TTS online translator
Requires pymad and gTTS to be available
"""
SLUG = "google-tts"
def __init__(self, language='en'):
super(self.__class__, self).__init__()
self.language = language
@classmethod
def is_available(cls):
return (super(cls, cls).is_available() and
diagnose.check_python_import('gtts') and
diagnose.check_network_connection())
@classmethod
def get_config(cls):
# FIXME: Replace this as soon as we have a config module
config = {}
# HMM dir
# Try to get hmm_dir from config
profile_path = jasperpath.config('profile.yml')
if os.path.exists(profile_path):
with open(profile_path, 'r') as f:
profile = yaml.safe_load(f)
if ('google-tts' in profile and
'language' in profile['google-tts']):
config['language'] = profile['google-tts']['language']
return config
@property
def languages(self):
langs = ['af', 'sq', 'ar', 'hy', 'ca', 'zh-CN', 'zh-TW', 'hr', 'cs',
'da', 'nl', 'en', 'eo', 'fi', 'fr', 'de', 'el', 'ht', 'hi',
'hu', 'is', 'id', 'it', 'ja', 'ko', 'la', 'lv', 'mk', 'no',
'pl', 'pt', 'ro', 'ru', 'sr', 'sk', 'es', 'sw', 'sv', 'ta',
'th', 'tr', 'vi', 'cy']
return langs
def say(self, phrase):
self._logger.debug("Saying '%s' with '%s'", phrase, self.SLUG)
if self.language not in self.languages:
raise ValueError("Language '%s' not supported by '%s'",
self.language, self.SLUG)
tts = gtts.gTTS(text=phrase, lang=self.language)
with tempfile.NamedTemporaryFile(suffix='.mp3', delete=False) as f:
tmpfile = f.name
tts.save(tmpfile)
self.play_mp3(tmpfile)
os.remove(tmpfile)
class MaryTTS(AbstractTTSEngine):
"""
Uses the MARY Text-to-Speech System (MaryTTS)
MaryTTS is an open-source, multilingual Text-to-Speech Synthesis platform
written in Java.
Please specify your own server instead of using the demonstration server
(http://mary.dfki.de:59125/) to save bandwidth and to protect your privacy.
"""
SLUG = "mary-tts"
def __init__(self, server="mary.dfki.de", port="59125", language="en_GB",
voice="dfki-spike"):
super(self.__class__, self).__init__()
self.server = server
self.port = port
self.netloc = '{server}:{port}'.format(server=self.server,
port=self.port)
self.language = language
self.voice = voice
self.session = requests.Session()
@property
def languages(self):
try:
r = self.session.get(self._makeurl('/locales'))
r.raise_for_status()
except requests.exceptions.RequestException:
self._logger.critical("Communication with MaryTTS server at %s " +
"failed.", self.netloc)
raise
return r.text.splitlines()
@property
def voices(self):
r = self.session.get(self._makeurl('/voices'))
r.raise_for_status()
return [line.split()[0] for line in r.text.splitlines()]
@classmethod
def get_config(cls):
# FIXME: Replace this as soon as we have a config module
config = {}
# HMM dir
# Try to get hmm_dir from config
profile_path = jasperpath.config('profile.yml')
if os.path.exists(profile_path):
with open(profile_path, 'r') as f:
profile = yaml.safe_load(f)
if 'mary-tts' in profile:
if 'server' in profile['mary-tts']:
config['server'] = profile['mary-tts']['server']
if 'port' in profile['mary-tts']:
config['port'] = profile['mary-tts']['port']
if 'language' in profile['mary-tts']:
config['language'] = profile['mary-tts']['language']
if 'voice' in profile['mary-tts']:
config['voice'] = profile['mary-tts']['voice']
return config
@classmethod
def is_available(cls):
return (super(cls, cls).is_available() and
diagnose.check_network_connection())
def _makeurl(self, path, query={}):
query_s = urllib.urlencode(query)
urlparts = ('http', self.netloc, path, query_s, '')
return urlparse.urlunsplit(urlparts)
def say(self, phrase):
self._logger.debug("Saying '%s' with '%s'", phrase, self.SLUG)
if self.language not in self.languages:
raise ValueError("Language '%s' not supported by '%s'"
% (self.language, self.SLUG))
if self.voice not in self.voices:
raise ValueError("Voice '%s' not supported by '%s'"
% (self.voice, self.SLUG))
query = {'OUTPUT_TYPE': 'AUDIO',
'AUDIO': 'WAVE_FILE',
'INPUT_TYPE': 'TEXT',
'INPUT_TEXT': phrase,
'LOCALE': self.language,
'VOICE': self.voice}
r = self.session.get(self._makeurl('/process', query=query))
with tempfile.NamedTemporaryFile(suffix='.wav', delete=False) as f:
f.write(r.content)
tmpfile = f.name
self.play(tmpfile)
os.remove(tmpfile)
def get_default_engine_slug():
return 'osx-tts' if platform.system().lower() == 'darwin' else 'espeak-tts'
def get_engine_by_slug(slug=None):
"""
Returns:
A speaker implementation available on the current platform
Raises:
ValueError if no speaker implementation is supported on this platform
"""
if not slug or type(slug) is not str:
raise TypeError("Invalid slug '%s'", slug)
selected_engines = filter(lambda engine: hasattr(engine, "SLUG") and
engine.SLUG == slug, get_engines())
if len(selected_engines) == 0:
raise ValueError("No TTS engine found for slug '%s'" % slug)
else:
if len(selected_engines) > 1:
print("WARNING: Multiple TTS engines found for slug '%s'. " +
"This is most certainly a bug." % slug)
engine = selected_engines[0]
if not engine.is_available():
raise ValueError(("TTS engine '%s' is not available (due to " +
"missing dependencies, etc.)") % slug)
return engine
def get_engines():
def get_subclasses(cls):
subclasses = set()
for subclass in cls.__subclasses__():
subclasses.add(subclass)
subclasses.update(get_subclasses(subclass))
return subclasses
return [tts_engine for tts_engine in
list(get_subclasses(AbstractTTSEngine))
if hasattr(tts_engine, 'SLUG') and tts_engine.SLUG]
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Jasper TTS module')
parser.add_argument('--debug', action='store_true',
help='Show debug messages')
args = parser.parse_args()
logging.basicConfig()
if args.debug:
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
engines = get_engines()
available_engines = []
for engine in get_engines():
if engine.is_available():
available_engines.append(engine)
disabled_engines = list(set(engines).difference(set(available_engines)))
print("Available TTS engines:")
for i, engine in enumerate(available_engines, start=1):
print("%d. %s" % (i, engine.SLUG))
print("")
print("Disabled TTS engines:")
for i, engine in enumerate(disabled_engines, start=1):
print("%d. %s" % (i, engine.SLUG))
print("")
for i, engine in enumerate(available_engines, start=1):
print("%d. Testing engine '%s'..." % (i, engine.SLUG))
engine.get_instance().say("This is a test.")
print("Done.")
| mit |
gigglesninja/senior-design | ardupilot/Tools/LogAnalyzer/UnitTest.py | 160 | 8893 | #!/usr/bin/env python
#
#
# Unit and regression tests for the LogAnalyzer code
#
#
# TODO: implement more unit+regression tests
import DataflashLog
import traceback
try:
# test DataflashLog reading 1
logdata = DataflashLog.DataflashLog()
logdata.read("examples/robert_lefebvre_octo_PM.log", ignoreBadlines=False)
assert(logdata.filename == "examples/robert_lefebvre_octo_PM.log")
assert(logdata.vehicleType == "ArduCopter")
assert(logdata.firmwareVersion == "V3.0.1")
assert(logdata.firmwareHash == "5c6503e2")
assert(logdata.freeRAM == 1331)
assert(logdata.hardwareType == "APM 2")
assert(len(logdata.formats) == 27)
assert(logdata.formats['GPS'].labels == ['Status', 'Time', 'NSats', 'HDop', 'Lat', 'Lng', 'RelAlt', 'Alt', 'Spd', 'GCrs'])
assert(logdata.formats['ATT'].labels == ['RollIn', 'Roll', 'PitchIn', 'Pitch', 'YawIn', 'Yaw', 'NavYaw'])
assert(logdata.parameters == {'RC7_REV': 1.0, 'MNT_MODE': 3.0, 'LOITER_LON_P': 1.0, 'FLTMODE1': 1.0, 'FLTMODE3': 0.0, 'FLTMODE2': 6.0, 'TUNE_HIGH': 10000.0, 'FLTMODE4': 5.0, 'FLTMODE6': 2.0, 'SYSID_SW_TYPE': 10.0, 'LOITER_LON_D': 0.0, 'RC5_REV': 1.0, 'THR_RATE_IMAX': 300.0, 'MNT_RC_IN_PAN': 0.0, 'RC2_MIN': 1110.0, 'LOITER_LON_I': 0.5, 'HLD_LON_P': 1.0, 'STB_RLL_I': 0.0, 'LOW_VOLT': 10.5, 'MNT_CONTROL_Y': 0.0, 'MNT_CONTROL_X': 0.0, 'FRAME': 1.0, 'MNT_CONTROL_Z': 0.0, 'OF_PIT_IMAX': 100.0, 'AHRS_ORIENTATION': 0.0, 'SIMPLE': 0.0, 'RC2_MAX': 1929.0, 'MNT_JSTICK_SPD': 0.0, 'RC8_FUNCTION': 0.0, 'INS_ACCSCAL_X': 0.992788, 'ACRO_P': 4.5, 'MNT_ANGMIN_ROL': -4500.0, 'OF_RLL_P': 2.5, 'STB_RLL_P': 3.5, 'STB_YAW_P': 3.0, 'SR0_RAW_SENS': 2.0, 'FLTMODE5': 0.0, 'RATE_YAW_I': 0.02, 'MAG_ENABLE': 1.0, 'MNT_RETRACT_Y': 0.0, 'MNT_RETRACT_X': 0.0, 'RATE_YAW_IMAX': 800.0, 'WPNAV_SPEED_DN': 150.0, 'WP_YAW_BEHAVIOR': 2.0, 'RC11_REV': 1.0, 'SYSID_THISMAV': 1.0, 'SR0_EXTRA1': 10.0, 'SR0_EXTRA2': 10.0, 'ACRO_BAL_PITCH': 200.0, 'STB_YAW_I': 0.0, 'INS_ACCSCAL_Z': 0.97621, 'INS_ACCSCAL_Y': 1.00147, 'LED_MODE': 9.0, 'FS_GCS_ENABLE': 0.0, 'MNT_RC_IN_ROLL': 0.0, 'INAV_TC_Z': 8.0, 'RATE_PIT_IMAX': 4500.0, 'HLD_LON_IMAX': 3000.0, 'THR_RATE_I': 0.0, 'SR3_EXTRA1': 0.0, 'STB_PIT_IMAX': 800.0, 'AHRS_TRIM_Z': 0.0, 'RC2_REV': 1.0, 'INS_MPU6K_FILTER': 20.0, 'THR_MIN': 130.0, 'AHRS_TRIM_Y': 0.021683, 'RC11_DZ': 0.0, 'THR_MAX': 1000.0, 'SR3_EXTRA2': 0.0, 'MNT_NEUTRAL_Z': 0.0, 'THR_MID': 300.0, 'MNT_NEUTRAL_X': 0.0, 'AMP_PER_VOLT': 18.002001, 'SR0_POSITION': 3.0, 'MNT_STAB_PAN': 0.0, 'FS_BATT_ENABLE': 0.0, 'LAND_SPEED': 50.0, 'OF_PIT_D': 0.12, 'SR0_PARAMS': 50.0, 'COMPASS_ORIENT': 0.0, 'WPNAV_ACCEL': 200.0, 'THR_ACCEL_IMAX': 5000.0, 'SR3_POSITION': 0.0, 'WPNAV_RADIUS': 100.0, 'WP_TOTAL': 14.0, 'RC8_MAX': 1856.0, 'OF_PIT_P': 2.5, 'SR3_RAW_SENS': 0.0, 'RTL_ALT_FINAL': 0.0, 'SR3_PARAMS': 0.0, 'SR0_EXTRA3': 2.0, 'LOITER_LAT_I': 0.5, 'RC6_DZ': 0.0, 'RC4_TRIM': 1524.0, 'RATE_RLL_P': 0.07, 'LOITER_LAT_D': 0.0, 'STB_PIT_P': 3.5, 'OF_PIT_I': 0.5, 'RATE_RLL_I': 1.0, 'AHRS_TRIM_X': 0.003997, 'RC3_REV': 1.0, 'STB_PIT_I': 0.0, 'FS_THR_ENABLE': 0.0, 'LOITER_LAT_P': 1.0, 'AHRS_RP_P': 0.1, 'FENCE_ACTION': 1.0, 'TOY_RATE': 1.0, 'RATE_RLL_D': 0.006, 'RC5_MIN': 1151.0, 'RC5_TRIM': 1676.0, 'STB_RLL_IMAX': 800.0, 'RC4_DZ': 40.0, 'AHRS_YAW_P': 0.1, 'RC11_TRIM': 1500.0, 'MOT_TCRV_ENABLE': 1.0, 'CAM_TRIGG_TYPE': 1.0, 'STB_YAW_IMAX': 800.0, 'RC4_MAX': 1942.0, 'LOITER_LAT_IMAX': 400.0, 'CH7_OPT': 9.0, 'RC11_FUNCTION': 7.0, 'SR0_EXT_STAT': 2.0, 'SONAR_TYPE': 0.0, 'RC3_MAX': 1930.0, 'RATE_YAW_D': 0.0, 'FENCE_ALT_MAX': 30.0, 'COMPASS_MOT_Y': 0.0, 'AXIS_ENABLE': 1.0, 'FENCE_ENABLE': 0.0, 'RC10_DZ': 0.0, 'PILOT_VELZ_MAX': 250.0, 'BATT_CAPACITY': 1760.0, 'FS_THR_VALUE': 975.0, 'RC4_MIN': 1115.0, 'MNT_ANGMAX_TIL': 4500.0, 'RTL_LOIT_TIME': 5000.0, 'ARMING_CHECK': 1.0, 'THR_RATE_P': 6.0, 'OF_RLL_IMAX': 100.0, 'RC6_MIN': 971.0, 'SR0_RAW_CTRL': 0.0, 'RC6_MAX': 2078.0, 'RC5_MAX': 1829.0, 'LOITER_LON_IMAX': 400.0, 'MNT_STAB_TILT': 0.0, 'MOT_TCRV_MIDPCT': 52.0, 'COMPASS_OFS_Z': -5.120774, 'COMPASS_OFS_Y': 46.709824, 'COMPASS_OFS_X': -20.490345, 'THR_ALT_I': 0.0, 'RC10_TRIM': 1500.0, 'INS_PRODUCT_ID': 88.0, 'RC11_MIN': 1100.0, 'FS_GPS_ENABLE': 1.0, 'HLD_LAT_IMAX': 3000.0, 'RC3_TRIM': 1476.0, 'RC6_FUNCTION': 0.0, 'TRIM_THROTTLE': 260.0, 'MNT_STAB_ROLL': 0.0, 'INAV_TC_XY': 2.5, 'RC1_DZ': 30.0, 'MNT_RETRACT_Z': 0.0, 'THR_ACC_ENABLE': 1.0, 'LOG_BITMASK': 830.0, 'TUNE_LOW': 0.0, 'CIRCLE_RATE': 5.0, 'CAM_DURATION': 10.0, 'MNT_NEUTRAL_Y': 0.0, 'RC10_MIN': 1100.0, 'INS_ACCOFFS_X': 
-0.019376, 'THR_RATE_D': 0.0, 'INS_ACCOFFS_Z': 1.370947, 'RC4_REV': 1.0, 'CIRCLE_RADIUS': 10.0, 'RATE_RLL_IMAX': 4500.0, 'HLD_LAT_P': 1.0, 'AHRS_GPS_MINSATS': 6.0, 'FLOW_ENABLE': 0.0, 'RC8_REV': 1.0, 'SONAR_GAIN': 0.2, 'RC2_TRIM': 1521.0, 'WP_INDEX': 0.0, 'RC1_REV': 1.0, 'RC7_DZ': 0.0, 'AHRS_GPS_USE': 1.0, 'MNT_ANGMIN_PAN': -4500.0, 'SR3_RC_CHAN': 0.0, 'COMPASS_LEARN': 0.0, 'ACRO_TRAINER': 1.0, 'CAM_SERVO_OFF': 1100.0, 'RC5_DZ': 0.0, 'SCHED_DEBUG': 0.0, 'RC11_MAX': 1900.0, 'AHRS_WIND_MAX': 0.0, 'SR3_EXT_STAT': 0.0, 'MNT_ANGMAX_PAN': 4500.0, 'MNT_ANGMAX_ROL': 4500.0, 'RC_SPEED': 490.0, 'SUPER_SIMPLE': 0.0, 'VOLT_DIVIDER': 10.0, 'COMPASS_MOTCT': 0.0, 'SR3_RAW_CTRL': 0.0, 'SONAR_ENABLE': 0.0, 'INS_ACCOFFS_Y': 0.362242, 'SYSID_SW_MREV': 120.0, 'WPNAV_LOIT_SPEED': 1000.0, 'BATT_MONITOR': 4.0, 'MNT_RC_IN_TILT': 8.0, 'CH8_OPT': 0.0, 'RTL_ALT': 1000.0, 'SR0_RC_CHAN': 2.0, 'RC1_MIN': 1111.0, 'RSSI_PIN': -1.0, 'MOT_TCRV_MAXPCT': 93.0, 'GND_ABS_PRESS': 101566.97, 'RC1_MAX': 1936.0, 'FENCE_TYPE': 3.0, 'RC5_FUNCTION': 0.0, 'OF_RLL_D': 0.12, 'BATT_VOLT_PIN': 13.0, 'WPNAV_SPEED': 1000.0, 'RC7_MAX': 1884.0, 'CAM_SERVO_ON': 1300.0, 'RATE_PIT_I': 1.0, 'RC7_MIN': 969.0, 'AHRS_COMP_BETA': 0.1, 'OF_RLL_I': 0.5, 'COMPASS_DEC': 0.0, 'RC3_MIN': 1113.0, 'RC2_DZ': 30.0, 'FENCE_RADIUS': 30.0, 'HLD_LON_I': 0.0, 'ACRO_BAL_ROLL': 200.0, 'COMPASS_AUTODEC': 1.0, 'SR3_EXTRA3': 0.0, 'COMPASS_USE': 1.0, 'RC10_MAX': 1900.0, 'RATE_PIT_P': 0.07, 'GND_TEMP': 21.610104, 'RC7_TRIM': 970.0, 'RC10_REV': 1.0, 'RATE_YAW_P': 0.2, 'THR_ALT_P': 1.0, 'RATE_PIT_D': 0.006, 'ESC': 0.0, 'MNT_ANGMIN_TIL': -4500.0, 'SERIAL3_BAUD': 57.0, 'RC8_MIN': 968.0, 'THR_ALT_IMAX': 300.0, 'SYSID_MYGCS': 255.0, 'INS_GYROFFS_Y': 0.581989, 'TUNE': 0.0, 'RC8_TRIM': 970.0, 'RC3_DZ': 30.0, 'AHRS_GPS_GAIN': 1.0, 'THR_ACCEL_D': 0.0, 'TELEM_DELAY': 0.0, 'THR_ACCEL_I': 0.5, 'COMPASS_MOT_X': 0.0, 'COMPASS_MOT_Z': 0.0, 'RC10_FUNCTION': 0.0, 'INS_GYROFFS_X': -0.001698, 'INS_GYROFFS_Z': 0.01517, 'RC6_TRIM': 1473.0, 'THR_ACCEL_P': 1.2, 'RC8_DZ': 0.0, 'HLD_LAT_I': 0.0, 'RC7_FUNCTION': 0.0, 'RC6_REV': 1.0, 'BATT_CURR_PIN': 12.0, 'WPNAV_SPEED_UP': 250.0, 'RC1_TRIM': 1524.0})
assert(logdata.messages == {})
assert(logdata.modeChanges == {2204: ('LOITER', 269), 4594: ('STABILIZE', 269), 644: ('ALT_HOLD', 269), 4404: ('ALT_HOLD', 269)})
assert(logdata.channels['GPS']['NSats'].min() == 6)
assert(logdata.channels['GPS']['NSats'].max() == 8)
assert(logdata.channels['GPS']['HDop'].listData[0] == (552, 4.68))
assert(logdata.channels['GPS']['HDop'].listData[44] == (768, 4.67))
assert(logdata.channels['GPS']['HDop'].listData[157] == (1288, 2.28))
assert(logdata.channels['CTUN']['ThrOut'].listData[5] == (321, 139))
assert(logdata.channels['CTUN']['ThrOut'].listData[45] == (409, 242))
assert(logdata.channels['CTUN']['ThrOut'].listData[125] == (589, 266))
assert(logdata.channels['CTUN']['CRate'].listData[3] == (317, 35))
assert(logdata.channels['CTUN']['CRate'].listData[51] == (421, 31))
assert(logdata.channels['CTUN']['CRate'].listData[115] == (563, -8))
assert(int(logdata.filesizeKB) == 302)
assert(logdata.durationSecs == 155)
assert(logdata.lineCount == 4750)
# test LogIterator class
lit = DataflashLog.LogIterator(logdata)
assert(lit.currentLine == 0)
assert(lit.iterators == {'CURR': (0, 310), 'ERR': (0, 307), 'NTUN': (0, 2206), 'CTUN': (0, 308), 'GPS': (0, 552), 'CMD': (0, 607), 'D32': (0, 305), 'ATT': (0, 311), 'EV': (0, 306), 'DU32': (0, 309), 'PM': (0, 479)})
lit.jump(500)
assert(lit.iterators == {'CURR': (9, 514), 'ERR': (1, 553), 'NTUN': (0, 2206), 'CTUN': (87, 500), 'GPS': (0, 552), 'CMD': (0, 607), 'D32': (0, 305), 'ATT': (83, 501), 'EV': (4, 606), 'DU32': (9, 513), 'PM': (1, 719)})
assert(lit['CTUN']['ThrIn'] == 450)
assert(lit['ATT']['RollIn'] == 11.19)
assert(lit['CURR']['CurrTot'] == 25.827288)
assert(lit['D32']['Value'] == 11122)
lit.next()
assert(lit.iterators == {'CURR': (9, 514), 'ERR': (1, 553), 'NTUN': (0, 2206), 'CTUN': (88, 502), 'GPS': (0, 552), 'CMD': (0, 607), 'D32': (0, 305), 'ATT': (83, 501), 'EV': (4, 606), 'DU32': (9, 513), 'PM': (1, 719)})
lit.jump(4750)
lit.next()
assert(lit.currentLine == 4751)
assert(lit['ATT']['Roll'] == 2.99)
# TODO: unit test DataflashLog reading 2
# ...
# TODO: unit test the log test classes
# ...
print "All unit/regression tests GOOD\n"
except Exception as e:
print "Error found: " + traceback.format_exc()
print "UNIT TEST FAILED\n"
| gpl-2.0 |
vathpela/pytrace | test.py | 1 | 2157 | #!/usr/bin/python3
import pdb
import sys
from module import *
def print_it(tp, tl, time, s):
print("%s %s.%d: %s" % (time, tp, tl, s))
def print_if_9(tp, tl, time, s):
if tl >= 9:
print("%s %s.%d: %s" % (time, tp, tl, s))
tl = trace.Logger("debug", 9, print_if_9)
loggers = [
{'trace_point':'debug', 'trace_level':9, 'logger':tl},
{'trace_point':'default', 'trace_level':1, 'logger':None},
{'trace_point':'debug', 'trace_level':1, 'logger':None},
{'trace_point':'ingress', 'trace_level':9, 'logger':None},
{'trace_point':'egress', 'trace_level':9, 'logger':None},
]
del tl
def makeloggers():
for logger in loggers:
if not logger['logger']:
print("creating a logger for %(trace_point)s.%(trace_level)d" % logger)
logger['logger'] = trace.Logger(logger['trace_point'],
logger['trace_level'], print_it)
makeloggers()
print("instantiating otherstuff.Foo() as x")
x = otherstuff.Foo()
print("calling x.bar()")
x.bar()
# pdb.set_trace()
x.log(1, "foo")
loggers += [
{'trace_point':'ingress', 'trace_level':3, 'logger':None},
{'trace_point':'egress', 'trace_level':3, 'logger':None},
]
makeloggers()
print("defining a, b, c")
@trace.TracedFunction
def a():
x.bar()
@trace.TracedFunction
def b():
a()
@trace.TracedFunction
def c():
b()
c.log(1, "baz")
c.log.ingress(9, "foo")
# print("dir(c): %s" % (dir(c),))
print("calling c")
c()
y = trace.LogFunction()
y.ingress(1, "x")
loggers += [
{'trace_point':'ingress', 'trace_level':5, 'logger':None},
{'trace_point':'egress', 'trace_level':5, 'logger':None},
]
makeloggers()
print("calling c")
c()
loggers += [
{'trace_point':'ingress', 'trace_level':7, 'logger':None},
{'trace_point':'egress', 'trace_level':7, 'logger':None},
]
makeloggers()
print("calling c")
c()
loggers += [
{'trace_point':'ingress', 'trace_level':9, 'logger':None},
{'trace_point':'egress', 'trace_level':9, 'logger':None},
]
makeloggers()
print("calling c")
c()
| gpl-3.0 |
xiaoxiamii/scikit-learn | sklearn/feature_selection/univariate_selection.py | 78 | 23706 | """Univariate features selection."""
# Authors: V. Michel, B. Thirion, G. Varoquaux, A. Gramfort, E. Duchesnay.
# L. Buitinck, A. Joly
# License: BSD 3 clause
import numpy as np
import warnings
from scipy import special, stats
from scipy.sparse import issparse
from ..base import BaseEstimator
from ..preprocessing import LabelBinarizer
from ..utils import (as_float_array, check_array, check_X_y, safe_sqr,
safe_mask)
from ..utils.extmath import norm, safe_sparse_dot
from ..utils.validation import check_is_fitted
from .base import SelectorMixin
def _clean_nans(scores):
"""
Fixes Issue #1240: NaNs can't be properly compared, so change them to the
smallest value of scores's dtype. -inf seems to be unreliable.
"""
# XXX where should this function be called? fit? scoring functions
# themselves?
scores = as_float_array(scores, copy=True)
scores[np.isnan(scores)] = np.finfo(scores.dtype).min
return scores
######################################################################
# Scoring functions
# The following function is a rewriting of scipy.stats.f_oneway
# Contrary to the scipy.stats.f_oneway implementation it does not
# copy the data while keeping the inputs unchanged.
def f_oneway(*args):
"""Performs a 1-way ANOVA.
The one-way ANOVA tests the null hypothesis that 2 or more groups have
the same population mean. The test is applied to samples from two or
more groups, possibly with differing sizes.
Read more in the :ref:`User Guide <univariate_feature_selection>`.
Parameters
----------
sample1, sample2, ... : array_like, sparse matrices
The sample measurements should be given as arguments.
Returns
-------
F-value : float
The computed F-value of the test.
p-value : float
The associated p-value from the F-distribution.
Notes
-----
The ANOVA test has important assumptions that must be satisfied in order
for the associated p-value to be valid.
1. The samples are independent
2. Each sample is from a normally distributed population
3. The population standard deviations of the groups are all equal. This
property is known as homoscedasticity.
If these assumptions are not true for a given set of data, it may still be
possible to use the Kruskal-Wallis H-test (`scipy.stats.kruskal`_) although
with some loss of power.
The algorithm is from Heiman[2], pp.394-7.
See ``scipy.stats.f_oneway`` that should give the same results while
being less efficient.
References
----------
.. [1] Lowry, Richard. "Concepts and Applications of Inferential
Statistics". Chapter 14.
http://faculty.vassar.edu/lowry/ch14pt1.html
.. [2] Heiman, G.W. Research Methods in Statistics. 2002.
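    Examples
    --------
    A minimal sketch with two hand-made groups (the numbers are arbitrary and
    only illustrate the calling convention):
    >>> import numpy as np
    >>> group1 = np.array([[6.1], [5.8], [6.4], [6.0]])
    >>> group2 = np.array([[7.2], [6.9], [7.5], [7.1]])
    >>> F, pval = f_oneway(group1, group2)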
"""
n_classes = len(args)
args = [as_float_array(a) for a in args]
n_samples_per_class = np.array([a.shape[0] for a in args])
n_samples = np.sum(n_samples_per_class)
ss_alldata = sum(safe_sqr(a).sum(axis=0) for a in args)
sums_args = [np.asarray(a.sum(axis=0)) for a in args]
square_of_sums_alldata = sum(sums_args) ** 2
square_of_sums_args = [s ** 2 for s in sums_args]
sstot = ss_alldata - square_of_sums_alldata / float(n_samples)
ssbn = 0.
for k, _ in enumerate(args):
ssbn += square_of_sums_args[k] / n_samples_per_class[k]
ssbn -= square_of_sums_alldata / float(n_samples)
sswn = sstot - ssbn
dfbn = n_classes - 1
dfwn = n_samples - n_classes
msb = ssbn / float(dfbn)
msw = sswn / float(dfwn)
constant_features_idx = np.where(msw == 0.)[0]
if (np.nonzero(msb)[0].size != msb.size and constant_features_idx.size):
warnings.warn("Features %s are constant." % constant_features_idx,
UserWarning)
f = msb / msw
# flatten matrix to vector in sparse case
f = np.asarray(f).ravel()
prob = special.fdtrc(dfbn, dfwn, f)
return f, prob
def f_classif(X, y):
"""Compute the ANOVA F-value for the provided sample.
Read more in the :ref:`User Guide <univariate_feature_selection>`.
Parameters
----------
X : {array-like, sparse matrix} shape = [n_samples, n_features]
        The set of regressors that will be tested sequentially.
y : array of shape(n_samples)
        The target vector (class labels).
Returns
-------
F : array, shape = [n_features,]
The set of F values.
pval : array, shape = [n_features,]
The set of p-values.
See also
--------
chi2: Chi-squared stats of non-negative features for classification tasks.
f_regression: F-value between label/feature for regression tasks.
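    Examples
    --------
    A minimal sketch on a tiny toy dataset (values are arbitrary and only
    illustrate the calling convention):
    >>> import numpy as np
    >>> from sklearn.feature_selection import f_classif
    >>> X = np.array([[1., 2.], [2., 4.], [3., 1.], [4., 3.]])
    >>> y = np.array([0, 0, 1, 1])
    >>> F, pval = f_classif(X, y)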
"""
X, y = check_X_y(X, y, ['csr', 'csc', 'coo'])
args = [X[safe_mask(X, y == k)] for k in np.unique(y)]
return f_oneway(*args)
def _chisquare(f_obs, f_exp):
"""Fast replacement for scipy.stats.chisquare.
Version from https://github.com/scipy/scipy/pull/2525 with additional
optimizations.
"""
f_obs = np.asarray(f_obs, dtype=np.float64)
k = len(f_obs)
# Reuse f_obs for chi-squared statistics
chisq = f_obs
chisq -= f_exp
chisq **= 2
chisq /= f_exp
chisq = chisq.sum(axis=0)
return chisq, special.chdtrc(k - 1, chisq)
def chi2(X, y):
"""Compute chi-squared stats between each non-negative feature and class.
This score can be used to select the n_features features with the
highest values for the test chi-squared statistic from X, which must
contain only non-negative features such as booleans or frequencies
(e.g., term counts in document classification), relative to the classes.
Recall that the chi-square test measures dependence between stochastic
variables, so using this function "weeds out" the features that are the
most likely to be independent of class and therefore irrelevant for
classification.
Read more in the :ref:`User Guide <univariate_feature_selection>`.
Parameters
----------
X : {array-like, sparse matrix}, shape = (n_samples, n_features_in)
Sample vectors.
y : array-like, shape = (n_samples,)
Target vector (class labels).
Returns
-------
chi2 : array, shape = (n_features,)
chi2 statistics of each feature.
pval : array, shape = (n_features,)
p-values of each feature.
Notes
-----
Complexity of this algorithm is O(n_classes * n_features).
See also
--------
    f_classif: ANOVA F-value between label/feature for classification tasks.
f_regression: F-value between label/feature for regression tasks.
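    Examples
    --------
    A minimal sketch using small non-negative count data (values are
    arbitrary and only illustrate the calling convention):
    >>> import numpy as np
    >>> from sklearn.feature_selection import chi2
    >>> X = np.array([[1, 0, 3], [0, 2, 1], [2, 1, 0], [1, 1, 4]])
    >>> y = np.array([0, 1, 0, 1])
    >>> chi2_stats, pval = chi2(X, y)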
"""
# XXX: we might want to do some of the following in logspace instead for
# numerical stability.
X = check_array(X, accept_sparse='csr')
if np.any((X.data if issparse(X) else X) < 0):
raise ValueError("Input X must be non-negative.")
Y = LabelBinarizer().fit_transform(y)
if Y.shape[1] == 1:
Y = np.append(1 - Y, Y, axis=1)
observed = safe_sparse_dot(Y.T, X) # n_classes * n_features
feature_count = X.sum(axis=0).reshape(1, -1)
class_prob = Y.mean(axis=0).reshape(1, -1)
expected = np.dot(class_prob.T, feature_count)
return _chisquare(observed, expected)
def f_regression(X, y, center=True):
"""Univariate linear regression tests.
Quick linear model for testing the effect of a single regressor,
sequentially for many regressors.
This is done in 3 steps:
1. The regressor of interest and the data are orthogonalized
wrt constant regressors.
2. The cross correlation between data and regressors is computed.
3. It is converted to an F score then to a p-value.
Read more in the :ref:`User Guide <univariate_feature_selection>`.
Parameters
----------
X : {array-like, sparse matrix} shape = (n_samples, n_features)
        The set of regressors that will be tested sequentially.
y : array of shape(n_samples).
        The target vector.
center : True, bool,
If true, X and y will be centered.
Returns
-------
F : array, shape=(n_features,)
F values of features.
pval : array, shape=(n_features,)
p-values of F-scores.
See also
--------
    f_classif: ANOVA F-value between label/feature for classification tasks.
chi2: Chi-squared stats of non-negative features for classification tasks.
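    Examples
    --------
    A minimal sketch on a tiny toy regression problem (values are arbitrary
    and only illustrate the calling convention):
    >>> import numpy as np
    >>> from sklearn.feature_selection import f_regression
    >>> X = np.array([[1., 5.], [2., 1.], [3., 4.], [4., 2.]])
    >>> y = np.array([1.5, 2.0, 3.5, 4.1])
    >>> F, pval = f_regression(X, y)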
"""
if issparse(X) and center:
raise ValueError("center=True only allowed for dense data")
X, y = check_X_y(X, y, ['csr', 'csc', 'coo'], dtype=np.float)
if center:
y = y - np.mean(y)
X = X.copy('F') # faster in fortran
X -= X.mean(axis=0)
# compute the correlation
corr = safe_sparse_dot(y, X)
# XXX could use corr /= row_norms(X.T) here, but the test doesn't pass
corr /= np.asarray(np.sqrt(safe_sqr(X).sum(axis=0))).ravel()
corr /= norm(y)
# convert to p-value
degrees_of_freedom = y.size - (2 if center else 1)
F = corr ** 2 / (1 - corr ** 2) * degrees_of_freedom
pv = stats.f.sf(F, 1, degrees_of_freedom)
return F, pv
######################################################################
# Base classes
class _BaseFilter(BaseEstimator, SelectorMixin):
"""Initialize the univariate feature selection.
Parameters
----------
score_func : callable
Function taking two arrays X and y, and returning a pair of arrays
(scores, pvalues).
"""
def __init__(self, score_func):
self.score_func = score_func
def fit(self, X, y):
"""Run score function on (X, y) and get the appropriate features.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
The training input samples.
y : array-like, shape = [n_samples]
The target values (class labels in classification, real numbers in
regression).
Returns
-------
self : object
Returns self.
"""
X, y = check_X_y(X, y, ['csr', 'csc'])
if not callable(self.score_func):
raise TypeError("The score function should be a callable, %s (%s) "
"was passed."
% (self.score_func, type(self.score_func)))
self._check_params(X, y)
self.scores_, self.pvalues_ = self.score_func(X, y)
self.scores_ = np.asarray(self.scores_)
self.pvalues_ = np.asarray(self.pvalues_)
return self
def _check_params(self, X, y):
pass
######################################################################
# Specific filters
######################################################################
class SelectPercentile(_BaseFilter):
"""Select features according to a percentile of the highest scores.
Read more in the :ref:`User Guide <univariate_feature_selection>`.
Parameters
----------
score_func : callable
Function taking two arrays X and y, and returning a pair of arrays
(scores, pvalues).
percentile : int, optional, default=10
Percent of features to keep.
Attributes
----------
scores_ : array-like, shape=(n_features,)
Scores of features.
pvalues_ : array-like, shape=(n_features,)
p-values of feature scores.
Notes
-----
Ties between features with equal scores will be broken in an unspecified
way.
See also
--------
    f_classif: ANOVA F-value between label/feature for classification tasks.
chi2: Chi-squared stats of non-negative features for classification tasks.
f_regression: F-value between label/feature for regression tasks.
SelectKBest: Select features based on the k highest scores.
SelectFpr: Select features based on a false positive rate test.
SelectFdr: Select features based on an estimated false discovery rate.
SelectFwe: Select features based on family-wise error rate.
GenericUnivariateSelect: Univariate feature selector with configurable mode.
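    Examples
    --------
    A minimal sketch (the data and percentile are arbitrary and only
    illustrate typical usage):
    >>> import numpy as np
    >>> from sklearn.feature_selection import SelectPercentile, f_classif
    >>> X = np.array([[1., 2., 7.], [2., 4., 1.], [3., 1., 6.], [4., 3., 2.]])
    >>> y = np.array([0, 0, 1, 1])
    >>> selector = SelectPercentile(f_classif, percentile=50)
    >>> X_new = selector.fit_transform(X, y)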
"""
def __init__(self, score_func=f_classif, percentile=10):
super(SelectPercentile, self).__init__(score_func)
self.percentile = percentile
def _check_params(self, X, y):
if not 0 <= self.percentile <= 100:
raise ValueError("percentile should be >=0, <=100; got %r"
% self.percentile)
def _get_support_mask(self):
check_is_fitted(self, 'scores_')
# Cater for NaNs
if self.percentile == 100:
return np.ones(len(self.scores_), dtype=np.bool)
elif self.percentile == 0:
return np.zeros(len(self.scores_), dtype=np.bool)
scores = _clean_nans(self.scores_)
        threshold = stats.scoreatpercentile(scores,
                                            100 - self.percentile)
        mask = scores > threshold
        ties = np.where(scores == threshold)[0]
if len(ties):
max_feats = len(scores) * self.percentile // 100
kept_ties = ties[:max_feats - mask.sum()]
mask[kept_ties] = True
return mask
class SelectKBest(_BaseFilter):
"""Select features according to the k highest scores.
Read more in the :ref:`User Guide <univariate_feature_selection>`.
Parameters
----------
score_func : callable
Function taking two arrays X and y, and returning a pair of arrays
(scores, pvalues).
k : int or "all", optional, default=10
Number of top features to select.
The "all" option bypasses selection, for use in a parameter search.
Attributes
----------
scores_ : array-like, shape=(n_features,)
Scores of features.
pvalues_ : array-like, shape=(n_features,)
p-values of feature scores.
Notes
-----
Ties between features with equal scores will be broken in an unspecified
way.
See also
--------
    f_classif: ANOVA F-value between label/feature for classification tasks.
chi2: Chi-squared stats of non-negative features for classification tasks.
f_regression: F-value between label/feature for regression tasks.
SelectPercentile: Select features based on percentile of the highest scores.
SelectFpr: Select features based on a false positive rate test.
SelectFdr: Select features based on an estimated false discovery rate.
SelectFwe: Select features based on family-wise error rate.
GenericUnivariateSelect: Univariate feature selector with configurable mode.
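    Examples
    --------
    A minimal sketch (the data and k are arbitrary and only illustrate
    typical usage):
    >>> import numpy as np
    >>> from sklearn.feature_selection import SelectKBest, chi2
    >>> X = np.array([[1, 0, 3], [0, 2, 1], [2, 1, 0], [1, 1, 4]])
    >>> y = np.array([0, 1, 0, 1])
    >>> X_new = SelectKBest(chi2, k=2).fit_transform(X, y)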
"""
def __init__(self, score_func=f_classif, k=10):
super(SelectKBest, self).__init__(score_func)
self.k = k
def _check_params(self, X, y):
if not (self.k == "all" or 0 <= self.k <= X.shape[1]):
raise ValueError("k should be >=0, <= n_features; got %r."
"Use k='all' to return all features."
% self.k)
def _get_support_mask(self):
check_is_fitted(self, 'scores_')
if self.k == 'all':
return np.ones(self.scores_.shape, dtype=bool)
elif self.k == 0:
return np.zeros(self.scores_.shape, dtype=bool)
else:
scores = _clean_nans(self.scores_)
mask = np.zeros(scores.shape, dtype=bool)
# Request a stable sort. Mergesort takes more memory (~40MB per
# megafeature on x86-64).
mask[np.argsort(scores, kind="mergesort")[-self.k:]] = 1
return mask
class SelectFpr(_BaseFilter):
"""Filter: Select the pvalues below alpha based on a FPR test.
FPR test stands for False Positive Rate test. It controls the total
amount of false detections.
Read more in the :ref:`User Guide <univariate_feature_selection>`.
Parameters
----------
score_func : callable
Function taking two arrays X and y, and returning a pair of arrays
(scores, pvalues).
alpha : float, optional
The highest p-value for features to be kept.
Attributes
----------
scores_ : array-like, shape=(n_features,)
Scores of features.
pvalues_ : array-like, shape=(n_features,)
p-values of feature scores.
See also
--------
    f_classif: ANOVA F-value between label/feature for classification tasks.
chi2: Chi-squared stats of non-negative features for classification tasks.
f_regression: F-value between label/feature for regression tasks.
SelectPercentile: Select features based on percentile of the highest scores.
SelectKBest: Select features based on the k highest scores.
SelectFdr: Select features based on an estimated false discovery rate.
SelectFwe: Select features based on family-wise error rate.
GenericUnivariateSelect: Univariate feature selector with configurable mode.
"""
def __init__(self, score_func=f_classif, alpha=5e-2):
super(SelectFpr, self).__init__(score_func)
self.alpha = alpha
def _get_support_mask(self):
check_is_fitted(self, 'scores_')
return self.pvalues_ < self.alpha
class SelectFdr(_BaseFilter):
"""Filter: Select the p-values for an estimated false discovery rate
This uses the Benjamini-Hochberg procedure. ``alpha`` is an upper bound
on the expected false discovery rate.
Read more in the :ref:`User Guide <univariate_feature_selection>`.
Parameters
----------
score_func : callable
Function taking two arrays X and y, and returning a pair of arrays
(scores, pvalues).
alpha : float, optional
The highest uncorrected p-value for features to keep.
Attributes
----------
scores_ : array-like, shape=(n_features,)
Scores of features.
pvalues_ : array-like, shape=(n_features,)
p-values of feature scores.
References
----------
http://en.wikipedia.org/wiki/False_discovery_rate
See also
--------
    f_classif: ANOVA F-value between label/feature for classification tasks.
chi2: Chi-squared stats of non-negative features for classification tasks.
f_regression: F-value between label/feature for regression tasks.
SelectPercentile: Select features based on percentile of the highest scores.
SelectKBest: Select features based on the k highest scores.
SelectFpr: Select features based on a false positive rate test.
SelectFwe: Select features based on family-wise error rate.
GenericUnivariateSelect: Univariate feature selector with configurable mode.
"""
def __init__(self, score_func=f_classif, alpha=5e-2):
super(SelectFdr, self).__init__(score_func)
self.alpha = alpha
def _get_support_mask(self):
check_is_fitted(self, 'scores_')
n_features = len(self.pvalues_)
sv = np.sort(self.pvalues_)
selected = sv[sv <= float(self.alpha) / n_features
* np.arange(n_features)]
if selected.size == 0:
return np.zeros_like(self.pvalues_, dtype=bool)
return self.pvalues_ <= selected.max()
class SelectFwe(_BaseFilter):
"""Filter: Select the p-values corresponding to Family-wise error rate
Read more in the :ref:`User Guide <univariate_feature_selection>`.
Parameters
----------
score_func : callable
Function taking two arrays X and y, and returning a pair of arrays
(scores, pvalues).
alpha : float, optional
The highest uncorrected p-value for features to keep.
Attributes
----------
scores_ : array-like, shape=(n_features,)
Scores of features.
pvalues_ : array-like, shape=(n_features,)
p-values of feature scores.
See also
--------
    f_classif: ANOVA F-value between label/feature for classification tasks.
chi2: Chi-squared stats of non-negative features for classification tasks.
f_regression: F-value between label/feature for regression tasks.
SelectPercentile: Select features based on percentile of the highest scores.
SelectKBest: Select features based on the k highest scores.
SelectFpr: Select features based on a false positive rate test.
SelectFdr: Select features based on an estimated false discovery rate.
GenericUnivariateSelect: Univariate feature selector with configurable mode.
"""
def __init__(self, score_func=f_classif, alpha=5e-2):
super(SelectFwe, self).__init__(score_func)
self.alpha = alpha
def _get_support_mask(self):
check_is_fitted(self, 'scores_')
return (self.pvalues_ < self.alpha / len(self.pvalues_))
######################################################################
# Generic filter
######################################################################
# TODO this class should fit on either p-values or scores,
# depending on the mode.
class GenericUnivariateSelect(_BaseFilter):
"""Univariate feature selector with configurable strategy.
Read more in the :ref:`User Guide <univariate_feature_selection>`.
Parameters
----------
score_func : callable
Function taking two arrays X and y, and returning a pair of arrays
(scores, pvalues).
mode : {'percentile', 'k_best', 'fpr', 'fdr', 'fwe'}
Feature selection mode.
param : float or int depending on the feature selection mode
Parameter of the corresponding mode.
Attributes
----------
scores_ : array-like, shape=(n_features,)
Scores of features.
pvalues_ : array-like, shape=(n_features,)
p-values of feature scores.
See also
--------
    f_classif: ANOVA F-value between label/feature for classification tasks.
chi2: Chi-squared stats of non-negative features for classification tasks.
f_regression: F-value between label/feature for regression tasks.
SelectPercentile: Select features based on percentile of the highest scores.
SelectKBest: Select features based on the k highest scores.
SelectFpr: Select features based on a false positive rate test.
SelectFdr: Select features based on an estimated false discovery rate.
SelectFwe: Select features based on family-wise error rate.
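    Examples
    --------
    A minimal sketch selecting the 2 best features via the 'k_best' mode
    (data values are arbitrary and only illustrate typical usage):
    >>> import numpy as np
    >>> from sklearn.feature_selection import GenericUnivariateSelect, f_classif
    >>> X = np.array([[1., 2., 7.], [2., 4., 1.], [3., 1., 6.], [4., 3., 2.]])
    >>> y = np.array([0, 0, 1, 1])
    >>> transformer = GenericUnivariateSelect(f_classif, mode='k_best', param=2)
    >>> X_new = transformer.fit_transform(X, y)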
"""
_selection_modes = {'percentile': SelectPercentile,
'k_best': SelectKBest,
'fpr': SelectFpr,
'fdr': SelectFdr,
'fwe': SelectFwe}
def __init__(self, score_func=f_classif, mode='percentile', param=1e-5):
super(GenericUnivariateSelect, self).__init__(score_func)
self.mode = mode
self.param = param
def _make_selector(self):
selector = self._selection_modes[self.mode](score_func=self.score_func)
# Now perform some acrobatics to set the right named parameter in
# the selector
possible_params = selector._get_param_names()
possible_params.remove('score_func')
selector.set_params(**{possible_params[0]: self.param})
return selector
def _check_params(self, X, y):
if self.mode not in self._selection_modes:
raise ValueError("The mode passed should be one of %s, %r,"
" (type %s) was passed."
% (self._selection_modes.keys(), self.mode,
type(self.mode)))
self._make_selector()._check_params(X, y)
def _get_support_mask(self):
check_is_fitted(self, 'scores_')
selector = self._make_selector()
selector.pvalues_ = self.pvalues_
selector.scores_ = self.scores_
return selector._get_support_mask()
| bsd-3-clause |
sashinde/VulkanTools | loader/vk-loader-generate.py | 4 | 16672 | #!/usr/bin/env python3
#
# Copyright (c) 2015-2016 The Khronos Group Inc.
# Copyright (c) 2015-2016 Valve Corporation
# Copyright (c) 2015-2016 LunarG, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Jon Ashburn <[email protected]>
#
import os, sys
# add main repo directory so vulkan.py can be imported. This needs to be a complete path.
ld_path = os.path.dirname(os.path.abspath(__file__))
main_path = os.path.abspath(ld_path + "/../")
sys.path.append(main_path)
import vulkan
def generate_get_proc_addr_check(name):
return " if (!%s || %s[0] != 'v' || %s[1] != 'k')\n" \
" return NULL;" % ((name,) * 3)
class Subcommand(object):
def __init__(self, argv):
self.argv = argv
self.headers = vulkan.headers
self.protos = vulkan.protos
def run(self):
print(self.generate())
def _requires_special_trampoline_code(self, name):
# Don't be cute trying to use a general rule to programmatically populate this list
        # it just obfuscates what is going on!
wsi_creates_dispatchable_object = ["CreateSwapchainKHR"]
creates_dispatchable_object = ["CreateDevice", "GetDeviceQueue", "AllocateCommandBuffers"] + wsi_creates_dispatchable_object
if name in creates_dispatchable_object:
return True
else:
return False
def _is_loader_non_trampoline_entrypoint(self, proto):
if proto.name in ["GetDeviceProcAddr", "EnumeratePhysicalDevices", "EnumerateLayers", "DbgRegisterMsgCallback", "DbgUnregisterMsgCallback", "DbgSetGlobalOption", "DestroyInstance"]:
return True
return not self.is_dispatchable_object_first_param(proto)
def is_dispatchable_object_first_param(self, proto):
in_objs = proto.object_in_params()
non_dispatch_objs = []
param0 = proto.params[0]
return (len(in_objs) > 0) and (in_objs[0].ty == param0.ty) and (param0.ty not in non_dispatch_objs)
def generate(self):
copyright = self.generate_copyright()
header = self.generate_header()
body = self.generate_body()
footer = self.generate_footer()
contents = []
if copyright:
contents.append(copyright)
if header:
contents.append(header)
if body:
contents.append(body)
if footer:
contents.append(footer)
return "\n\n".join(contents)
def generate_copyright(self):
return """/* THIS FILE IS GENERATED. DO NOT EDIT. */
/*
* Copyright (c) 2015-2016 The Khronos Group Inc.
* Copyright (c) 2015-2016 Valve Corporation
* Copyright (c) 2015-2016 LunarG, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Author: Jon Ashburn <[email protected]>
* Author: Chia-I Wu <[email protected]>
* Author: Courtney Goeltzenleuchter <[email protected]>
*/"""
def generate_header(self):
return "\n".join(["#include <" + h + ">" for h in self.headers])
def generate_body(self):
pass
def generate_footer(self):
pass
class DevExtTrampolineSubcommand(Subcommand):
def generate_header(self):
lines = []
lines.append("#include \"vk_loader_platform.h\"")
lines.append("#include \"loader.h\"")
lines.append("#if defined(__linux__)")
lines.append("#pragma GCC optimize(3) // force gcc to use tail-calls")
lines.append("#endif")
return "\n".join(lines)
def generate_body(self):
lines = []
for i in range(250):
lines.append('\nVKAPI_ATTR void VKAPI_CALL vkDevExt%s(VkDevice device)' % i)
lines.append('{')
lines.append(' const struct loader_dev_dispatch_table *disp;')
lines.append(' disp = loader_get_dev_dispatch(device);')
lines.append(' disp->ext_dispatch.DevExt[%s](device);' % i)
lines.append('}')
lines.append('')
lines.append('void *loader_get_dev_ext_trampoline(uint32_t index)')
lines.append('{')
lines.append(' switch (index) {')
for i in range(250):
lines.append(' case %s:' % i)
lines.append(' return vkDevExt%s;' % i)
lines.append(' }')
lines.append(' return NULL;')
lines.append('}')
return "\n".join(lines)
class LoaderEntrypointsSubcommand(Subcommand):
def generate_header(self):
return "#include \"loader.h\""
def _generate_object_setup(self, proto):
method = "loader_init_dispatch"
cond = "res == VK_SUCCESS"
setup = []
if not self._requires_special_trampoline_code(proto.name):
return setup
if "Get" in proto.name:
method = "loader_set_dispatch"
if proto.name == "GetSwapchainInfoKHR":
ptype = proto.params[-3].name
psize = proto.params[-2].name
pdata = proto.params[-1].name
cond = ("%s == VK_SWAP_CHAIN_INFO_TYPE_PERSISTENT_IMAGES_KHR && "
"%s && %s" % (ptype, pdata, cond))
setup.append("VkSwapchainImageInfoKHR *info = %s;" % pdata)
setup.append("size_t count = *%s / sizeof(*info), i;" % psize)
setup.append("for (i = 0; i < count; i++) {")
setup.append(" %s(info[i].image, disp);" % method)
setup.append(" %s(info[i].memory, disp);" % method)
setup.append("}")
else:
obj_params = proto.object_out_params()
for param in obj_params:
setup.append("%s(*%s, disp);" % (method, param.name))
if setup:
joined = "\n ".join(setup)
setup = []
setup.append(" if (%s) {" % cond)
setup.append(" " + joined)
setup.append(" }")
return "\n".join(setup)
def _generate_loader_dispatch_entrypoints(self, qual=""):
if qual:
qual += " "
funcs = []
for proto in self.protos:
if self._is_loader_non_trampoline_entrypoint(proto):
continue
func = []
obj_setup = self._generate_object_setup(proto)
func.append(qual + proto.c_func(prefix="vk", attr="VKAPI"))
func.append("{")
# declare local variables
func.append(" const VkLayerDispatchTable *disp;")
if proto.ret != 'void' and obj_setup:
func.append(" VkResult res;")
func.append("")
# get dispatch table
func.append(" disp = loader_get_dispatch(%s);" %
proto.params[0].name)
func.append("")
# dispatch!
dispatch = "disp->%s;" % proto.c_call()
if proto.ret == 'void':
func.append(" " + dispatch)
elif not obj_setup:
func.append(" return " + dispatch)
else:
func.append(" res = " + dispatch)
func.append(obj_setup)
func.append("")
func.append(" return res;")
func.append("}")
funcs.append("\n".join(func))
return "\n\n".join(funcs)
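# Sketch of a trampoline emitted by _generate_loader_dispatch_entrypoints().
# The exact signature comes from proto.c_func(), which is defined elsewhere,
# so the parameter list below is only an illustrative assumption:
#
#   LOADER_EXPORT void VKAPI vkCmdDraw(VkCommandBuffer commandBuffer, ...)
#   {
#       const VkLayerDispatchTable *disp;
#
#       disp = loader_get_dispatch(commandBuffer);
#
#       disp->CmdDraw(commandBuffer, ...);
#   }
#
# Entrypoints with a non-void return and object setup also declare
# "VkResult res;", assign the dispatch result to it, run the block built by
# _generate_object_setup() and then "return res;".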
def generate_body(self):
body = [self._generate_loader_dispatch_entrypoints("LOADER_EXPORT")]
return "\n\n".join(body)
class DispatchTableOpsSubcommand(Subcommand):
def run(self):
if len(self.argv) != 1:
print("DispatchTableOpsSubcommand: <prefix> unspecified")
return
self.prefix = self.argv[0]
super().run()
def generate_header(self):
return "\n".join(["#include <vulkan/vulkan.h>",
"#include <vkLayer.h>",
"#include <string.h>",
"#include \"loader_platform.h\""])
def _generate_init(self, type):
stmts = []
func = []
if type == "device":
for proto in self.protos:
if self.is_dispatchable_object_first_param(proto) or proto.name == "CreateInstance":
stmts.append("table->%s = (PFN_vk%s) gpa(gpu, \"vk%s\");" %
(proto.name, proto.name, proto.name))
else:
stmts.append("table->%s = vk%s; /* non-dispatchable */" %
(proto.name, proto.name))
func.append("static inline void %s_init_device_dispatch_table(VkLayerDispatchTable *table,"
% self.prefix)
func.append("%s PFN_vkGetDeviceProcAddr gpa,"
% (" " * len(self.prefix)))
func.append("%s VkPhysicalDevice gpu)"
% (" " * len(self.prefix)))
else:
for proto in self.protos:
if proto.params[0].ty != "VkInstance" and proto.params[0].ty != "VkPhysicalDevice":
continue
stmts.append("table->%s = vk%s;" % (proto.name, proto.name))
func.append("static inline void %s_init_instance_dispatch_table(VkLayerInstanceDispatchTable *table)"
% self.prefix)
func.append("{")
func.append(" %s" % "\n ".join(stmts))
func.append("}")
return "\n".join(func)
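# For a prefix of "my_layer" the device branch above would emit roughly the
# following (the table entries come from self.protos, so the two shown here
# are illustrative only):
#
#   static inline void my_layer_init_device_dispatch_table(VkLayerDispatchTable *table,
#                                                          PFN_vkGetDeviceProcAddr gpa,
#                                                          VkPhysicalDevice gpu)
#   {
#       table->CreateDevice = (PFN_vkCreateDevice) gpa(gpu, "vkCreateDevice");
#       table->QueueSubmit = (PFN_vkQueueSubmit) gpa(gpu, "vkQueueSubmit");
#       /* ... one assignment per prototype ... */
#   }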
def _generate_lookup(self):
lookups = []
for proto in self.protos:
if self.is_dispatchable_object_first_param(proto):
lookups.append("if (!strcmp(name, \"%s\"))" % (proto.name))
lookups.append(" return (void *) table->%s;"
% (proto.name))
func = []
func.append("static inline void *%s_lookup_dispatch_table(const VkLayerDispatchTable *table,"
% self.prefix)
func.append("%s const char *name)"
% (" " * len(self.prefix)))
func.append("{")
func.append(generate_get_proc_addr_check("name"))
func.append("")
func.append(" name += 2;")
func.append(" %s" % "\n ".join(lookups))
func.append("")
func.append(" return NULL;")
func.append("}")
return "\n".join(func)
def generate_body(self):
body = [self._generate_init("device"),
self._generate_lookup(),
self._generate_init("instance")]
return "\n\n".join(body)
class WinDefFileSubcommand(Subcommand):
def run(self):
library_exports = {
"all": [],
}
if len(self.argv) != 2 or self.argv[1] not in library_exports:
print("WinDefFileSubcommand: <library-name> {%s}" %
"|".join(library_exports.keys()))
return
self.library = self.argv[0]
self.exports = library_exports[self.argv[1]]
super().run()
def generate_copyright(self):
return """; THIS FILE IS GENERATED. DO NOT EDIT.
;;;; Begin Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Copyright (c) 2015-2016 The Khronos Group Inc.
; Copyright (c) 2015-2016 Valve Corporation
; Copyright (c) 2015-2016 LunarG, Inc.
;
; Licensed under the Apache License, Version 2.0 (the "License");
; you may not use this file except in compliance with the License.
; You may obtain a copy of the License at
;
; http://www.apache.org/licenses/LICENSE-2.0
;
; Unless required by applicable law or agreed to in writing, software
; distributed under the License is distributed on an "AS IS" BASIS,
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
; See the License for the specific language governing permissions and
; limitations under the License.
;
;
; Author: Jon Ashburn <[email protected]>
;;;; End Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"""
def generate_header(self):
return "; The following is required on Windows, for exporting symbols from the DLL"
def generate_body(self):
body = []
body.append("LIBRARY " + self.library)
body.append("EXPORTS")
for proto in self.protos:
if self.exports and proto.name not in self.exports:
continue
# This was intended to reject WSI calls, but actually rejects ALL extensions
# TODO: Make this WSI-extension specific
# if proto.name.endswith("KHR"):
# continue
body.append(" vk" + proto.name)
return "\n".join(body)
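# The generated module-definition file is plain text; for a library named
# "vulkan-1" the body built above starts roughly like this (the exported names
# depend on self.protos and self.exports):
#
#   LIBRARY vulkan-1
#   EXPORTS
#      vkCreateInstance
#      vkDestroyInstance
#      ...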
class LoaderGetProcAddrSubcommand(Subcommand):
def run(self):
self.prefix = "vk"
# we could get the list from argv if wanted
self.intercepted = [proto.name for proto in self.protos]
for proto in self.protos:
if proto.name == "GetDeviceProcAddr":
self.gpa = proto
super().run()
def generate_header(self):
return "\n".join(["#include <string.h>"])
def generate_body(self):
lookups = []
for proto in self.protos:
if proto.name not in self.intercepted:
lookups.append("/* no %s%s */" % (self.prefix, proto.name))
continue
lookups.append("if (!strcmp(name, \"%s\"))" % proto.name)
lookups.append(" return (%s) %s%s;" %
(self.gpa.ret, self.prefix, proto.name))
special_lookups = []
for proto in self.protos:
if self._is_loader_non_trampoline_entrypoint(proto) or self._requires_special_trampoline_code(proto.name):
special_lookups.append("if (!strcmp(name, \"%s\"))" % proto.name)
special_lookups.append(" return (%s) %s%s;" %
(self.gpa.ret, self.prefix, proto.name))
else:
continue
body = []
body.append("static inline %s globalGetProcAddr(const char *name)" %
self.gpa.ret)
body.append("{")
body.append(generate_get_proc_addr_check("name"))
body.append("")
body.append(" name += 2;")
body.append(" %s" % "\n ".join(lookups))
body.append("")
body.append(" return NULL;")
body.append("}")
body.append("")
body.append("static inline void *loader_non_passthrough_gpa(const char *name)")
body.append("{")
body.append(generate_get_proc_addr_check("name"))
body.append("")
body.append(" name += 2;")
body.append(" %s" % "\n ".join(special_lookups))
body.append("")
body.append(" return NULL;")
body.append("}")
return "\n".join(body)
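# Sketch of the output: globalGetProcAddr() first runs the check emitted by
# generate_get_proc_addr_check(), skips the leading "vk" via "name += 2" and
# then falls through strcmp() tests such as
#
#   if (!strcmp(name, "CreateInstance"))
#       return (PFN_vkVoidFunction) vkCreateInstance;
#
# The cast shown assumes self.gpa.ret is PFN_vkVoidFunction; the real cast is
# whatever vkGetDeviceProcAddr is declared to return.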
def main():
wsi = {
"Win32",
"Android",
"Xcb",
"Xlib",
"Wayland",
"Mir"
}
subcommands = {
"dev-ext-trampoline": DevExtTrampolineSubcommand,
"loader-entrypoints": LoaderEntrypointsSubcommand,
"dispatch-table-ops": DispatchTableOpsSubcommand,
"win-def-file": WinDefFileSubcommand,
"loader-get-proc-addr": LoaderGetProcAddrSubcommand,
}
if len(sys.argv) < 3 or sys.argv[1] not in wsi or sys.argv[2] not in subcommands:
print("Usage: %s <wsi> <subcommand> [options]" % sys.argv[0])
print()
print("Available wsi (displayservers) are: %s" % " ".join(wsi))
print("Available subcommands are: %s" % " ".join(subcommands))
exit(1)
subcmd = subcommands[sys.argv[2]](sys.argv[3:])
subcmd.run()
if __name__ == "__main__":
main()
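# Example invocation (assuming this generator is saved as vk-generate.py and
# that the module providing self.protos is importable; the file names after
# the redirection are arbitrary):
#
#   python vk-generate.py Xcb dispatch-table-ops my_layer > dispatch_table_ops.h
#   python vk-generate.py Win32 win-def-file vulkan-1 all > vulkan-1.def
#
# The first argument picks the window-system integration, the second selects
# the subcommand, and the remaining arguments are handed to Subcommand.run().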
| apache-2.0 |
jmbeuken/abinit | scripts/post_processing/abinit_eignc_to_bandstructure.py | 3 | 47417 | #!/usr/bin/python
#=================================================================#
# Script to plot a bandstructure from an abinit _EIG.nc netcdf #
# file, from a Wannier90 bandstructure file, or from a DFT #
# _EIG.nc file + GW file + bandstructure _EIG.nc file combination #
#=================================================================#
#########
#IMPORTS#
#########
import numpy as N
import matplotlib.pyplot as P
import netCDF4 as nc
import sys
import os
import argparse
import time
#############
##VARIABLES##
#############
class VariableContainer:pass
#Constants
csts = VariableContainer()
csts.hartree2ev = N.float(27.211396132)
csts.ev2hartree = N.float(1/csts.hartree2ev)
csts.sqrtpi = N.float(N.sqrt(N.pi))
csts.invsqrtpi = N.float(1/csts.sqrtpi)
csts.TOLKPTS = N.float(0.00001)
###########
##CLASSES##
###########
class PolynomialFit(object):
def __init__(self):
self.degree = 2
class EigenvalueContainer(object):
nsppol = None
nkpt = None
mband = None
eigenvalues = None
units = None
wtk = None
filename = None
filefullpath = None
bd_indices = None
eigenvalue_type = None
kpoints = None
#kpoint_sampling_type: can be Monkhorst-Pack or Bandstructure
KPT_W90_TOL = N.float(1.0e-6)
KPT_DFT_TOL = N.float(1.0e-8)
kpoint_sampling_type = 'Monkhorst-Pack'
inputgvectors = None
gvectors = None
special_kpoints = None
special_kpoints_names = None
special_kpoints_indices = None
kpoint_path_values = None
kpoint_reduced_path_values = None
kpoint_path_length = None
#reduced_norm = None
norm_paths = None
norm_reduced_paths = None
def __init__(self,directory=None,filename=None):
if filename == None:return
if directory == None:directory='.'
self.filename = filename
self.filefullpath = '%s/%s' %(directory,filename)
self.file_open(self.filefullpath)
def set_kpoint_sampling_type(self,kpoint_sampling_type):
if kpoint_sampling_type != 'Monkhorst-Pack' and kpoint_sampling_type != 'Bandstructure':
print 'ERROR: kpoint_sampling_type "%s" does not exist' %kpoint_sampling_type
print ' it should be "Monkhorst-Pack" or "Bandstructure" ... exit'
sys.exit()
self.kpoint_sampling_type = kpoint_sampling_type
def correct_kpt(self,kpoint,tolerance=N.float(1.0e-6)):
kpt_correct = N.array(kpoint,N.float)
changed = False
for ii in range(3):
if N.allclose(kpoint[ii],N.float(1.0/3.0),atol=tolerance):
kpt_correct[ii] = N.float(1.0/3.0)
changed = True
elif N.allclose(kpoint[ii],N.float(1.0/6.0),atol=tolerance):
kpt_correct[ii] = N.float(1.0/6.0)
changed = True
elif N.allclose(kpoint[ii],N.float(-1.0/6.0),atol=tolerance):
kpt_correct[ii] = N.float(-1.0/6.0)
changed = True
elif N.allclose(kpoint[ii],N.float(-1.0/3.0),atol=tolerance):
kpt_correct[ii] = N.float(-1.0/3.0)
changed = True
if changed:
print 'COMMENT: kpoint %15.12f %15.12f %15.12f has been changed to %15.12f %15.12f %15.12f' %(kpoint[0],kpoint[1],kpoint[2],kpt_correct[0],kpt_correct[1],kpt_correct[2])
return kpt_correct
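# Example (hypothetical input): correct_kpt([0.33333331, 0.5, -0.16666668])
# returns [1/3, 0.5, -1/6] and prints a COMMENT line, because the first and
# last components lie within `tolerance` of 1/3 and -1/6, while 0.5 is left
# untouched.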
def find_special_kpoints(self,gvectors=None):
if self.kpoint_sampling_type != 'Bandstructure':
print 'ERROR: special kpoints are useful only for bandstructures ... returning from find_special_kpoints'
return
if self.eigenvalue_type == 'W90':
correct_kpt_tolerance = N.float(1.0e-4)
KPT_TOL = self.KPT_W90_TOL
elif self.eigenvalue_type == 'DFT':
correct_kpt_tolerance = N.float(1.0e-6)
KPT_TOL = self.KPT_DFT_TOL
else:
print 'ERROR: eigenvalue_type is "%s" while it should be "W90" or "DFT" ... returning find_special_kpoints' %self.eigenvalue_type
return
if gvectors == None:
self.inputgvectors = False
self.gvectors = N.identity(3,N.float)
else:
if N.shape(gvectors) != (3, 3):
print 'ERROR: wrong gvectors ... exiting now'
sys.exit()
self.inputgvectors = True
self.gvectors = gvectors
full_kpoints = N.zeros((self.nkpt,3),N.float)
for ikpt in range(self.nkpt):
full_kpoints[ikpt,:] = self.kpoints[ikpt,0]*self.gvectors[0,:]+self.kpoints[ikpt,1]*self.gvectors[1,:]+self.kpoints[ikpt,2]*self.gvectors[2,:]
delta_kpt = full_kpoints[1,:]-full_kpoints[0,:]
self.special_kpoints_indices = list()
self.special_kpoints = list()
self.special_kpoints_indices.append(0)
self.special_kpoints.append(self.correct_kpt(self.kpoints[0,:],tolerance=correct_kpt_tolerance))
for ikpt in range(1,self.nkpt-1):
thisdelta = full_kpoints[ikpt+1,:]-full_kpoints[ikpt,:]
if not N.allclose(thisdelta,delta_kpt,atol=KPT_TOL):
delta_kpt = thisdelta
self.special_kpoints_indices.append(ikpt)
self.special_kpoints.append(self.correct_kpt(self.kpoints[ikpt,:],tolerance=correct_kpt_tolerance))
self.special_kpoints_indices.append(N.shape(self.kpoints)[0]-1)
self.special_kpoints.append(self.correct_kpt(self.kpoints[-1,:],tolerance=correct_kpt_tolerance))
print 'Special Kpoints : '
print ' {0:d} : {1[0]: 8.8f} {1[1]: 8.8f} {1[2]: 8.8f}'.format(1,self.kpoints[0,:])
self.norm_paths = N.zeros((N.shape(self.special_kpoints_indices)[0]-1),N.float)
self.norm_reduced_paths = N.zeros((N.shape(self.special_kpoints_indices)[0]-1),N.float)
for ispkpt in range(1,N.shape(self.special_kpoints_indices)[0]):
self.norm_paths[ispkpt-1] = N.linalg.norm(full_kpoints[self.special_kpoints_indices[ispkpt]]-full_kpoints[self.special_kpoints_indices[ispkpt-1]])
self.norm_reduced_paths[ispkpt-1] = N.linalg.norm(self.special_kpoints[ispkpt]-self.special_kpoints[ispkpt-1])
print ' {2:d}-{3:d} path length : {0: 8.8f} | reduced path length : {1: 8.8f}'.\
format(self.norm_paths[ispkpt-1],self.norm_reduced_paths[ispkpt-1],ispkpt,ispkpt+1)
print ' {0:d} : {1[0]: 8.8f} {1[1]: 8.8f} {1[2]: 8.8f}'.format(ispkpt+1,self.kpoints[self.special_kpoints_indices[ispkpt],:])
self.kpoint_path_length = N.sum(self.norm_paths)
self.kpoint_reduced_path_length = N.sum(self.norm_reduced_paths)
self.normalized_kpoint_path_norm = self.norm_paths/self.kpoint_path_length
self.normalized_kpoint_reduced_path_norm = self.norm_reduced_paths/self.kpoint_reduced_path_length
kptredpathval = list()
kptpathval = list()
kptredpathval.append(N.float(0.0))
kptpathval.append(N.float(0.0))
curlen = N.float(0.0)
redcurlen = N.float(0.0)
for ispkpt in range(1,N.shape(self.special_kpoints_indices)[0]):
kptredpathval.extend(N.linspace(redcurlen,redcurlen+self.norm_reduced_paths[ispkpt-1],self.special_kpoints_indices[ispkpt]-self.special_kpoints_indices[ispkpt-1]+1)[1:])
kptpathval.extend(N.linspace(curlen,curlen+self.norm_paths[ispkpt-1],self.special_kpoints_indices[ispkpt]-self.special_kpoints_indices[ispkpt-1]+1)[1:])
redcurlen = redcurlen + self.norm_reduced_paths[ispkpt-1]
curlen = curlen + self.norm_paths[ispkpt-1]
self.kpoint_path_values = N.array(kptpathval,N.float)
self.kpoint_reduced_path_values = N.array(kptredpathval,N.float)
self.normalized_kpoint_path_values = self.kpoint_path_values/self.kpoint_path_length
self.normalized_kpoint_reduced_path_values = self.kpoint_reduced_path_values/self.kpoint_reduced_path_length
self.special_kpoints = N.array(self.special_kpoints,N.float)
def file_open(self,filefullpath):
if filefullpath[-3:] == '_GW':
self.gw_file_open(filefullpath)
elif filefullpath[-7:] == '_EIG.nc':
self.nc_eig_open(filefullpath)
elif filefullpath[-4:] == '.dat':
self.wannier_bs_file_open(filefullpath)
def has_eigenvalue(self,nsppol,isppol,kpoint,iband):
if self.nsppol != nsppol:
return False
for ikpt in range(self.nkpt):
if N.absolute(self.kpoints[ikpt,0]-kpoint[0]) < csts.TOLKPTS and \
N.absolute(self.kpoints[ikpt,1]-kpoint[1]) < csts.TOLKPTS and \
N.absolute(self.kpoints[ikpt,2]-kpoint[2]) < csts.TOLKPTS:
if iband >= self.bd_indices[isppol,ikpt,0]-1 and iband < self.bd_indices[isppol,ikpt,1]:
return True
return False
return False
def get_eigenvalue(self,nsppol,isppol,kpoint,iband):
for ikpt in range(self.nkpt):
if N.absolute(self.kpoints[ikpt,0]-kpoint[0]) < csts.TOLKPTS and \
N.absolute(self.kpoints[ikpt,1]-kpoint[1]) < csts.TOLKPTS and \
N.absolute(self.kpoints[ikpt,2]-kpoint[2]) < csts.TOLKPTS:
return self.eigenvalues[isppol,ikpt,iband]
def wannier_bs_file_open(self,filefullpath):
if not (os.path.isfile(filefullpath)):
print 'ERROR : file "%s" does not exist' %filefullpath
print '... exiting now ...'
sys.exit()
print 'WARNING: no spin polarization reading yet for Wannier90 bandstructure files!'
self.eigenvalue_type = 'W90'
self.nsppol = None
self.nkpt = None
self.mband = None
self.eigenvalues = None
self.units = None
self.filefullpath = filefullpath
reader = open(self.filefullpath,'r')
filedata = reader.readlines()
reader.close()
for iline in range(len(filedata)):
if filedata[iline].strip() == '':
self.nkpt = iline
break
self.mband = N.int(len(filedata)/self.nkpt)
self.nsppol = 1
self.eigenvalues = N.zeros([self.nsppol,self.nkpt,self.mband],N.float)
self.kpoints = N.zeros([self.nkpt,3],N.float)
iline = 0
kpt_file = '%s.kpt' %filefullpath[:-4]
if os.path.isfile(kpt_file):
reader = open(kpt_file,'r')
kptdata = reader.readlines()
reader.close()
if N.int(kptdata[0]) != self.nkpt:
print 'ERROR : the number of kpoints in file "%s" is not the same as in "%s" ... exit' %(self.filefullpath,kpt_file)
sys.exit()
for ikpt in range(self.nkpt):
linesplit = kptdata[ikpt+1].split()
self.kpoints[ikpt,0] = N.float(linesplit[0])
self.kpoints[ikpt,1] = N.float(linesplit[1])
self.kpoints[ikpt,2] = N.float(linesplit[2])
else:
for ikpt in range(self.nkpt):
self.kpoints[ikpt,0] = N.float(filedata[ikpt].split()[0])
for iband in range(self.mband):
for ikpt in range(self.nkpt):
self.eigenvalues[0,ikpt,iband] = N.float(filedata[iline].split()[1])
iline = iline+1
iline = iline+1
self.eigenvalues = self.eigenvalues*csts.ev2hartree
self.units = 'Hartree'
def gw_file_open(self,filefullpath):
if not (os.path.isfile(filefullpath)):
print 'ERROR : file "%s" does not exist' %filefullpath
print '... exiting now ...'
sys.exit()
self.eigenvalue_type = 'GW'
self.nsppol = None
self.nkpt = None
self.mband = None
self.eigenvalues = None
self.units = None
self.filefullpath = filefullpath
reader = open(self.filefullpath,'r')
filedata = reader.readlines()
reader.close()
self.nkpt = N.int(filedata[0].split()[0])
self.kpoints = N.ones([self.nkpt,3],N.float)
self.nsppol = N.int(filedata[0].split()[1])
self.bd_indices = N.zeros((self.nsppol,self.nkpt,2),N.int)
icur = 1
nbd_kpt = N.zeros([self.nsppol,self.nkpt],N.int)
for isppol in range(self.nsppol):
for ikpt in range(self.nkpt):
self.kpoints[ikpt,:] = N.array(filedata[icur].split()[:],N.float)
icur = icur + 1
nbd_kpt[isppol,ikpt] = N.int(filedata[icur])
self.bd_indices[isppol,ikpt,0] = N.int(filedata[icur+1].split()[0])
self.bd_indices[isppol,ikpt,1] = N.int(filedata[icur+nbd_kpt[isppol,ikpt]].split()[0])
icur = icur + nbd_kpt[isppol,ikpt] + 1
self.mband = N.max(self.bd_indices[:,:,1])
self.eigenvalues = N.zeros([self.nsppol,self.nkpt,self.mband],N.float)
self.eigenvalues[:,:,:] = N.nan
ii = 3
for isppol in range(self.nsppol):
for ikpt in range(self.nkpt):
for iband in range(self.bd_indices[isppol,ikpt,0]-1,self.bd_indices[isppol,ikpt,1]):
self.eigenvalues[isppol,ikpt,iband] = N.float(filedata[ii].split()[1])
ii = ii + 1
ii = ii + 2
self.eigenvalues = csts.ev2hartree*self.eigenvalues
self.units = 'Hartree'
def pfit_gw_file_write(self,polyfitlist,directory=None,filename=None,bdgw=None,energy_pivots=None,gwec=None):
if filename == None:return
if directory == None:directory='.'
filefullpath = '%s/%s' %(directory,filename)
if (os.path.isfile(filefullpath)):
user_input = raw_input('WARNING : file "%s" exists, do you want to overwrite it ? (y/n)' %filefullpath)
if not (user_input == 'y' or user_input == 'Y'):
return
writer = open(filefullpath,'w')
writer.write('%12s%12s\n' %(self.nkpt,self.nsppol))
if gwec == None:
for ikpt in range(self.nkpt):
for isppol in range(self.nsppol):
writer.write('%10.6f%10.6f%10.6f\n' %(self.kpoints[ikpt,0],self.kpoints[ikpt,1],self.kpoints[ikpt,2]))
writer.write('%4i\n' %(bdgw[1]-bdgw[0]+1))
for iband in range(bdgw[0]-1,bdgw[1]):
delta = N.polyval(polyfitlist[-1],csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband])
for ipivot in range(len(energy_pivots)):
if csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband] <= energy_pivots[ipivot]:
delta = N.polyval(polyfitlist[ipivot],csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband])
break
writer.write('%6i%9.4f%9.4f%9.4f\n' %(iband+1,csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband]+delta,delta,0.0))
else:
for ikpt in range(self.nkpt):
for isppol in range(self.nsppol):
writer.write('%10.6f%10.6f%10.6f\n' %(self.kpoints[ikpt,0],self.kpoints[ikpt,1],self.kpoints[ikpt,2]))
writer.write('%4i\n' %(bdgw[1]-bdgw[0]+1))
for iband in range(bdgw[0]-1,bdgw[1]):
if gwec.has_eigenvalue(self.nsppol,isppol,self.kpoints[ikpt],iband):
gw_eig = gwec.get_eigenvalue(self.nsppol,isppol,self.kpoints[ikpt],iband)
writer.write('%6i%9.4f%9.4f%9.4f\n' %(iband+1,csts.hartree2ev*gw_eig,csts.hartree2ev*(gw_eig-self.eigenvalues[isppol,ikpt,iband]),0.0))
else:
delta = N.polyval(polyfitlist[-1],csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband])
for ipivot in range(len(energy_pivots)):
if csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband] <= energy_pivots[ipivot]:
delta = N.polyval(polyfitlist[ipivot],csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband])
break
writer.write('%6i%9.4f%9.4f%9.4f\n' %(iband+1,csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband]+delta,delta,0.0))
writer.close()
def pfit_dft_to_gw_bs_write(self,polyfitlist,directory=None,filename=None,bdgw=None,energy_pivots=None,gwec=None):
if filename == None:return
if directory == None:directory='.'
filefullpath = '%s/%s' %(directory,filename)
if (os.path.isfile(filefullpath)):
user_input = raw_input('WARNING : file "%s" exists, do you want to overwrite it ? (y/n)' %filefullpath)
if not (user_input == 'y' or user_input == 'Y'):
return
writer = open(filefullpath,'w')
if gwec == None:
for ikpt in range(self.nkpt):
writer.write('%s' %ikpt)
for isppol in range(self.nsppol):
for iband in range(bdgw[0]-1,bdgw[1]):
delta = N.polyval(polyfitlist[-1],csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband])
for ipivot in range(len(energy_pivots)):
if csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband] <= energy_pivots[ipivot]:
delta = N.polyval(polyfitlist[ipivot],csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband])
break
writer.write(' %s' %(csts.hartree2ev*self.eigenvalues[isppol,ikpt,iband]+delta))
writer.write('\n')
else:
print 'NOT SUPPORTED YET'
sys.exit()
writer.close()
def nc_eig_open(self,filefullpath):
if not (os.path.isfile(filefullpath)):
print 'ERROR : file "%s" does not exist' %filefullpath
print '... exiting now ...'
sys.exit()
ncdata = nc.Dataset(filefullpath)
self.eigenvalue_type = 'DFT'
self.nsppol = None
self.nkpt = None
self.mband = None
self.eigenvalues = None
self.units = None
self.filefullpath = filefullpath
for dimname,dimobj in ncdata.dimensions.iteritems():
if dimname == 'nsppol':self.nsppol = N.int(len(dimobj))
if dimname == 'nkpt':self.nkpt = N.int(len(dimobj))
if dimname == 'mband':self.mband = N.int(len(dimobj))
for varname in ncdata.variables:
if varname == 'Eigenvalues':
varobj = ncdata.variables[varname]
varshape = N.shape(varobj[:])
self.units = None
for attrname in varobj.ncattrs():
if attrname == 'units':
self.units = varobj.getncattr(attrname)
if self.units == None:
print 'WARNING : units are not specified'
print '... assuming "Hartree" units ...'
self.units = 'Hartree'
elif self.units != 'Hartree':
print 'ERROR : units are unknown : "%s"' %self.units
print '... exiting now ...'
sys.exit()
self.eigenvalues = N.reshape(N.array(varobj,N.float),varshape)
self.nsppol = varshape[0]
self.nkpt = varshape[1]
self.kpoints = -1*N.ones((self.nkpt,3),N.float)
self.mband = varshape[2]
self.bd_indices = N.zeros((self.nsppol,self.nkpt,2),N.int)
self.bd_indices[:,:,0] = 1
self.bd_indices[:,:,1] = self.mband
break
for varname in ncdata.variables:
if varname == 'Kptns':
varobj = ncdata.variables[varname]
varshape = N.shape(varobj[:])
self.kpoints = N.reshape(N.array(varobj,N.float),varshape)
def write_bandstructure_to_file(self,filename,option_kpts='bohrm1_units'):
#if option_kpts is set to 'normalized', the path of the bandstructure will be normalized to 1 (and special k-points correctly chosen)
if self.kpoint_sampling_type != 'Bandstructure':
print 'ERROR: kpoint_sampling_type is not "Bandstructure" ... returning from write_bandstructure_to_file'
return
if self.nsppol > 1:
print 'ERROR: number of spins is more than 1, this is not fully tested ... use with care !'
writer = open(filename,'w')
writer.write('# BANDSTRUCTURE FILE FROM DAVID\'S SCRIPT\n')
writer.write('# nsppol = %s\n' %self.nsppol)
writer.write('# nband = %s\n' %self.mband)
writer.write('# eigenvalue_type = %s\n' %self.eigenvalue_type)
if self.inputgvectors:
writer.write('# inputgvectors = 1 (%s)\n' %self.inputgvectors)
else:
writer.write('# inputgvectors = 0 (%s)\n' %self.inputgvectors)
writer.write('# gvectors(1) = %20.17f %20.17f %20.17f \n' %(self.gvectors[0,0],self.gvectors[0,1],self.gvectors[0,2]))
writer.write('# gvectors(2) = %20.17f %20.17f %20.17f \n' %(self.gvectors[1,0],self.gvectors[1,1],self.gvectors[1,2]))
writer.write('# gvectors(3) = %20.17f %20.17f %20.17f \n' %(self.gvectors[2,0],self.gvectors[2,1],self.gvectors[2,2]))
writer.write('# special_kpoints_number = %s\n' %(len(self.special_kpoints_indices)))
writer.write('# list of special kpoints : (given in reduced coordinates, value_path is in Bohr^-1, value_red_path has its total path normalized to 1)\n')
for ii in range(len(self.special_kpoints_indices)):
ispkpt = self.special_kpoints_indices[ii]
spkpt = self.special_kpoints[ii]
writer.write('# special_kpt_index %5s : %20.17f %20.17f %20.17f (value_path = %20.17f | value_red_path = %20.17f)\n' %(ispkpt,spkpt[0],spkpt[1],spkpt[2],self.kpoint_path_values[ispkpt],self.kpoint_reduced_path_values[ispkpt]))
writer.write('# special_kpoints_names :\n')
for ii in range(len(self.special_kpoints_indices)):
ispkpt = self.special_kpoints_indices[ii]
spkpt = self.special_kpoints[ii]
writer.write('# special_kpt_name %3s : "%s" : %20.17f %20.17f %20.17f\n' %(ii+1,self.special_kpoints_names[ii],spkpt[0],spkpt[1],spkpt[2]))
writer.write('# kpoint_path_length = %20.17f \n' %(self.kpoint_path_length))
writer.write('# kpoint_path_number = %s \n' %(self.nkpt))
if self.inputgvectors:
writer.write('# kpoint_path_units = %s\n' %(option_kpts))
else:
writer.write('# kpoint_path_units = %s (!!! CONSIDERING UNITARY GVECTORS MATRIX !!!)\n' %(option_kpts))
writer.write('#BEGIN\n')
if option_kpts == 'bohrm1_units':
values_path = self.kpoint_path_values
elif option_kpts == 'reduced':
values_path = self.kpoint_reduced_path_values
elif option_kpts == 'bohrm1_units_normalized':
values_path = self.normalized_kpoint_path_values
elif option_kpts == 'reduced_normalized':
values_path = self.normalized_kpoint_reduced_path_values
else:
print 'ERROR: wrong option_kpts ... exit'
writer.write('... CANCELLED (wrong option_kpts)')
writer.close()
sys.exit()
for isppol in range(self.nsppol):
writer.write('#isppol %s\n' %isppol)
for iband in range(self.mband):
writer.write('#iband %5s (band number %s)\n' %(iband,iband+1))
for ikpt in range(self.nkpt):
writer.write('%20.17f %20.17f\n' %(values_path[ikpt],self.eigenvalues[isppol,ikpt,iband]))
writer.write('\n')
writer.write('#END\n')
writer.write('\n#KPT_LIST\n')
for ikpt in range(self.nkpt):
writer.write('# %6d : %20.17f %20.17f %20.17f\n' %(ikpt,self.kpoints[ikpt,0],self.kpoints[ikpt,1],self.kpoints[ikpt,2]))
writer.close()
def read_bandstructure_from_file(self,filename):
reader = open(filename,'r')
bs_data = reader.readlines()
reader.close()
self.gvectors = N.identity(3,N.float)
self.kpoint_sampling_type = 'Bandstructure'
self.special_kpoints_indices = list()
self.special_kpoints = list()
for ii in range(len(bs_data)):
if bs_data[ii] == '#BEGIN\n':
ibegin = ii
break
elif bs_data[ii][:10] == '# nsppol =':
self.nsppol = N.int(bs_data[ii][10:])
elif bs_data[ii][:9] == '# nband =':
self.mband = N.int(bs_data[ii][9:])
elif bs_data[ii][:19] == '# eigenvalue_type =':
self.eigenvalue_type = bs_data[ii][19:].strip()
elif bs_data[ii][:17] == '# inputgvectors =':
tt = N.int(bs_data[ii][18])
if tt == 1:
self.inputgvectors = True
elif tt == 0:
self.inputgvectors = False
else:
print 'ERROR: reading inputgvectors ... exit'
sys.exit()
elif bs_data[ii][:15] == '# gvectors(1) =':
sp = bs_data[ii][15:].split()
self.gvectors[0,0] = N.float(sp[0])
self.gvectors[0,1] = N.float(sp[1])
self.gvectors[0,2] = N.float(sp[2])
elif bs_data[ii][:15] == '# gvectors(2) =':
sp = bs_data[ii][15:].split()
self.gvectors[1,0] = N.float(sp[0])
self.gvectors[1,1] = N.float(sp[1])
self.gvectors[1,2] = N.float(sp[2])
elif bs_data[ii][:15] == '# gvectors(3) =':
sp = bs_data[ii][15:].split()
self.gvectors[2,0] = N.float(sp[0])
self.gvectors[2,1] = N.float(sp[1])
self.gvectors[2,2] = N.float(sp[2])
elif bs_data[ii][:26] == '# special_kpoints_number =':
special_kpoints_number = N.int(bs_data[ii][26:])
self.special_kpoints_names = ['']*special_kpoints_number
elif bs_data[ii][:22] == '# special_kpt_index':
sp = bs_data[ii][22:].split()
self.special_kpoints_indices.append(N.int(sp[0]))
self.special_kpoints.append(N.array([sp[2],sp[3],sp[4]]))
elif bs_data[ii][:21] == '# special_kpt_name':
sp = bs_data[ii][21:].split()
ispkpt = N.int(sp[0])-1
self.special_kpoints_names[ispkpt] = sp[2][1:-1]
elif bs_data[ii][:22] == '# kpoint_path_length =':
self.kpoint_path_length = N.float(bs_data[ii][22:])
elif bs_data[ii][:22] == '# kpoint_path_number =':
self.nkpt = N.int(bs_data[ii][22:])
elif bs_data[ii][:21] == '# kpoint_path_units =':
kpoint_path_units = bs_data[ii][21:].strip()
self.special_kpoints_indices = N.array(self.special_kpoints_indices,N.int)
self.special_kpoints = N.array(self.special_kpoints,N.float)
if len(self.special_kpoints_indices) != special_kpoints_number or len(self.special_kpoints) != special_kpoints_number:
print 'ERROR: reading the special kpoints ... exit'
sys.exit()
self.kpoint_path_values = N.zeros([self.nkpt],N.float)
self.kpoint_reduced_path_values = N.zeros([self.nkpt],N.float)
if kpoint_path_units == 'bohrm1_units':
jj = 0
for ii in range(ibegin+1,len(bs_data)):
if bs_data[ii][:7] == '#isppol' or bs_data[ii][:6] == '#iband':continue
if bs_data[ii] == '\n':
break
self.kpoint_path_values[jj] = N.float(bs_data[ii].split()[0])
jj = jj + 1
if jj != self.nkpt:
print 'ERROR: reading bandstructure file ... exit'
sys.exit()
self.normalized_kpoint_path_values = self.kpoint_path_values/self.kpoint_path_length
if kpoint_path_units == 'bohrm1_units_normalized':
jj = 0
for ii in range(ibegin+1,len(bs_data)):
if bs_data[ii][:7] == '#isppol' or bs_data[ii][:6] == '#iband':continue
if bs_data[ii] == '\n':
break
self.normalized_kpoint_path_values[jj] = N.float(bs_data[ii].split()[0])
jj = jj + 1
if jj != self.nkpt:
print 'ERROR: reading bandstructure file ... exit'
sys.exit()
self.kpoint_path_values = self.normalized_kpoint_path_values*self.kpoint_path_length
elif kpoint_path_units == 'reduced_normalized':
self.normalized_kpoint_reduced_path_values = N.zeros([self.nkpt],N.float)
jj = 0
for ii in range(ibegin+1,len(bs_data)):
if bs_data[ii][:7] == '#isppol' or bs_data[ii][:6] == '#iband':continue
if bs_data[ii] == '\n':
break
self.normalized_kpoint_reduced_path_values[jj] = N.float(bs_data[ii].split()[0])
jj = jj + 1
if jj != self.nkpt:
print 'ERROR: reading bandstructure file ... exit'
sys.exit()
self.kpoint_reduced_path_values = self.normalized_kpoint_reduced_path_values*self.kpoint_reduced_path_length
elif kpoint_path_units == 'reduced':
jj = 0
for ii in range(ibegin+1,len(bs_data)):
if bs_data[ii][:7] == '#isppol' or bs_data[ii][:6] == '#iband':continue
if bs_data[ii] == '\n':
break
self.kpoint_reduced_path_values[jj] = N.float(bs_data[ii].split()[0])
jj = jj + 1
if jj != self.nkpt:
print 'ERROR: reading bandstructure file ... exit'
sys.exit()
self.normalized_kpoint_reduced_path_values = self.kpoint_reduced_path_values/self.kpoint_reduced_path_length
self.eigenvalues = N.zeros([self.nsppol,self.nkpt,self.mband],N.float)
check_nband = 0
for ii in range(ibegin+1,len(bs_data)):
if bs_data[ii][:7] == '#isppol':
isppol = N.int(bs_data[ii][7:])
elif bs_data[ii][:6] == '#iband':
iband = N.int(bs_data[ii][6:].split()[0])
ikpt = 0
elif bs_data[ii][:4] == '#END':
break
elif bs_data[ii] == '\n':
check_nband = check_nband + 1
else:
self.eigenvalues[isppol,ikpt,iband] = N.float(bs_data[ii].split()[1])
ikpt = ikpt + 1
def check_gw_vs_dft_parameters(dftec,gwec):
if gwec.eigenvalue_type != 'GW' or dftec.eigenvalue_type != 'DFT':
print 'ERROR: eigenvalue files do not contain GW and DFT eigenvalues ... exiting now'
sys.exit()
if dftec.nsppol != gwec.nsppol or dftec.nkpt != gwec.nkpt:
print 'ERROR: the number of spins/kpoints is not the same in the GW and DFT files used to make the interpolation ... exiting now'
sys.exit()
for ikpt in range(dftec.nkpt):
if N.absolute(dftec.kpoints[ikpt,0]-gwec.kpoints[ikpt,0]) > csts.TOLKPTS or \
N.absolute(dftec.kpoints[ikpt,1]-gwec.kpoints[ikpt,1]) > csts.TOLKPTS or \
N.absolute(dftec.kpoints[ikpt,2]-gwec.kpoints[ikpt,2]) > csts.TOLKPTS:
print 'ERROR: the kpoints are not the same in the GW and DFT files used to make the interpolation ... exiting now'
sys.exit()
def plot_gw_vs_dft_eig(dftec,gwec,vbm_index,energy_pivots=None,polyfit_degrees=None):
if gwec.eigenvalue_type != 'GW' or dftec.eigenvalue_type != 'DFT':
print 'ERROR: eigenvalue containers do not contain GW and DFT eigenvalues ... exiting now'
sys.exit()
if dftec.nsppol != gwec.nsppol or dftec.nkpt != gwec.nkpt:
print 'ERROR: the number of spins/kpoints is not the same in the GW and DFT containers ... exiting now'
sys.exit()
valdftarray = N.array([],N.float)
conddftarray = N.array([],N.float)
valgwarray = N.array([],N.float)
condgwarray = N.array([],N.float)
for isppol in range(dftec.nsppol):
for ikpt in range(dftec.nkpt):
ibdmin = N.max([dftec.bd_indices[isppol,ikpt,0],gwec.bd_indices[isppol,ikpt,0]])-1
ibdmax = N.min([dftec.bd_indices[isppol,ikpt,1],gwec.bd_indices[isppol,ikpt,1]])-1
valdftarray = N.append(valdftarray,csts.hartree2ev*dftec.eigenvalues[isppol,ikpt,ibdmin:vbm_index])
valgwarray = N.append(valgwarray,csts.hartree2ev*gwec.eigenvalues[isppol,ikpt,ibdmin:vbm_index])
conddftarray = N.append(conddftarray,csts.hartree2ev*dftec.eigenvalues[isppol,ikpt,vbm_index:ibdmax+1])
condgwarray = N.append(condgwarray,csts.hartree2ev*gwec.eigenvalues[isppol,ikpt,vbm_index:ibdmax+1])
if energy_pivots == None:
if plot_figures == 1:
P.figure(1)
P.hold(True)
P.grid(True)
P.plot(valdftarray,valgwarray,'bx')
P.plot(conddftarray,condgwarray,'rx')
P.xlabel('DFT eigenvalues (in eV)')
P.ylabel('GW eigenvalues (in eV)')
P.figure(2)
P.hold(True)
P.grid(True)
P.plot(valdftarray,valgwarray-valdftarray,'bx')
P.plot(conddftarray,condgwarray-conddftarray,'rx')
P.xlabel('DFT eigenvalues (in eV)')
P.ylabel('GW correction to the DFT eigenvalues (in eV)')
P.show()
return
polyfitlist = list()
if len(polyfit_degrees) == 1:
print 'ERROR: making a fit with only one interval is not allowed ... exiting now'
sys.exit()
dftarray = N.append(valdftarray,conddftarray)
gwarray = N.append(valgwarray,condgwarray)
dftarray_list = list()
gwarray_list = list()
for iinterval in range(len(polyfit_degrees)):
tmpdftarray = N.array([],N.float)
tmpgwarray = N.array([],N.float)
if iinterval == 0:
emin = None
emax = energy_pivots[0]
for ii in range(len(dftarray)):
if dftarray[ii] <= emax:
tmpdftarray = N.append(tmpdftarray,[dftarray[ii]])
tmpgwarray = N.append(tmpgwarray,[gwarray[ii]])
elif iinterval == len(polyfit_degrees)-1:
emin = energy_pivots[-1]
emax = None
for ii in range(len(dftarray)):
if dftarray[ii] >= emin:
tmpdftarray = N.append(tmpdftarray,[dftarray[ii]])
tmpgwarray = N.append(tmpgwarray,[gwarray[ii]])
else:
emin = energy_pivots[iinterval-1]
emax = energy_pivots[iinterval]
for ii in range(len(dftarray)):
if dftarray[ii] >= emin and dftarray[ii] <= emax:
tmpdftarray = N.append(tmpdftarray,[dftarray[ii]])
tmpgwarray = N.append(tmpgwarray,[gwarray[ii]])
dftarray_list.append(tmpdftarray)
gwarray_list.append(tmpgwarray)
pfit = N.polyfit(tmpdftarray,tmpgwarray-tmpdftarray,polyfit_degrees[iinterval])
polyfitlist.append(pfit)
if plot_figures == 1:
linspace_npoints = 200
valpoly_x = N.linspace(N.min(valdftarray),N.max(valdftarray),linspace_npoints)
condpoly_x = N.linspace(N.min(conddftarray),N.max(conddftarray),linspace_npoints)
P.figure(3)
P.hold(True)
P.grid(True)
P.plot(valdftarray,valgwarray-valdftarray,'bx')
P.plot(conddftarray,condgwarray-conddftarray,'rx')
[x_min,x_max] = P.xlim()
for iinterval in range(len(polyfit_degrees)):
if iinterval == 0:
tmppoly_x = N.linspace(x_min,energy_pivots[iinterval],linspace_npoints)
elif iinterval == len(polyfit_degrees)-1:
tmppoly_x = N.linspace(energy_pivots[iinterval-1],x_max,linspace_npoints)
else:
tmppoly_x = N.linspace(energy_pivots[iinterval-1],energy_pivots[iinterval],linspace_npoints)
P.plot(tmppoly_x,N.polyval(polyfitlist[iinterval],tmppoly_x),'k')
for ipivot in range(len(energy_pivots)):
en = energy_pivots[ipivot]
P.plot([en,en],[N.polyval(polyfitlist[ipivot],en),N.polyval(polyfitlist[ipivot+1],en)],'k-.')
P.xlabel('DFT eigenvalues (in eV)')
P.ylabel('GW correction to the DFT eigenvalues (in eV)')
P.figure(4)
P.hold(True)
P.grid(True)
for iinterval in range(len(polyfit_degrees)):
P.plot(dftarray_list[iinterval],gwarray_list[iinterval]-dftarray_list[iinterval]-N.polyval(polyfitlist[iinterval],dftarray_list[iinterval]),'bx')
[x_min,x_max] = P.xlim()
P.plot([x_min,x_max],[0,0],'k-')
P.xlabel('DFT eigenvalues (in eV)')
P.ylabel('Error in the fit (in eV)')
P.show()
return polyfitlist
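# Typical follow-up (sketch; the numeric arguments are placeholders): the list
# of per-interval polynomials returned here is meant to be passed, together
# with the same energy_pivots, to EigenvalueContainer.pfit_gw_file_write() or
# pfit_dft_to_gw_bs_write(), which apply N.polyval() to each DFT eigenvalue to
# build the GW-corrected values, e.g.
#
#   polyfitlist = plot_gw_vs_dft_eig(ec_dft, ec_gw, vbm_index=4,
#                                    energy_pivots=[0.0], polyfit_degrees=[1, 1])
#   ec_dft_bs.pfit_gw_file_write(polyfitlist, directory='.', filename='my_GW',
#                                bdgw=[1, 8], energy_pivots=[0.0])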
def compare_bandstructures(ec_ref,ec_test):
nspkpt_ref = len(ec_ref.special_kpoints)
nspkpt_test = len(ec_test.special_kpoints)
if nspkpt_ref != nspkpt_test:
print 'ERROR: The number of special kpoints is different in the two files ... exit'
sys.exit()
eig_type_ref = ec_ref.eigenvalue_type
eig_type_test = ec_test.eigenvalue_type
print eig_type_ref,eig_type_test
if eig_type_ref == 'DFT' and eig_type_test == 'W90':
TOL_KPTS = N.float(1.0e-4)
else:
TOL_KPTS = N.float(1.0e-6)
print TOL_KPTS
for ispkpt in range(nspkpt_ref):
print 'difference between the two :',ec_ref.special_kpoints[ispkpt,:]-ec_test.special_kpoints[ispkpt,:]
if not N.allclose(ec_ref.special_kpoints[ispkpt,:],ec_test.special_kpoints[ispkpt,:],atol=TOL_KPTS):
print 'ERROR: The kpoints are not the same :'
print ' Kpt #%s ' %ispkpt
print ' Reference => %20.17f %20.17f %20.17f' %(ec_ref.special_kpoints[ispkpt,0],ec_ref.special_kpoints[ispkpt,1],ec_ref.special_kpoints[ispkpt,2])
print ' Compared => %20.17f %20.17f %20.17f' %(ec_test.special_kpoints[ispkpt,0],ec_test.special_kpoints[ispkpt,1],ec_test.special_kpoints[ispkpt,2])
print ' ... exit'
sys.exit()
mband_comparison = N.min([ec_ref.mband,ec_test.mband])
if mband_comparison < ec_ref.mband:
print 'Number of bands in the test bandstructure is lower than the number of bands in the reference (%s)' %ec_ref.mband
print ' => Comparison will proceed with %s bands' %ec_test.mband
elif mband_comparison < ec_test.mband:
print 'Number of bands in the reference bandstructure is lower than the number of bands in the test bandstructure (%s)' %ec_test.mband
print ' => Comparison will only proceed with %s bands of the test bandstructure' %ec_ref.mband
else:
print 'Number of bands in the reference and test bandstructure is the same'
print ' => Comparison will proceed with %s bands' %mband_comparison
# eig_test_ref_path = ec_ref.eigenvalues[:,:,:mband_comparison]
rmsd_per_band = N.zeros([ec_ref.nsppol,mband_comparison],N.float)
nrmsd_per_band = N.zeros([ec_ref.nsppol,mband_comparison],N.float)
mae_per_band = N.zeros([ec_ref.nsppol,mband_comparison],N.float)
for isppol in range(ec_ref.nsppol):
for iband in range(mband_comparison):
interp = N.interp(ec_ref.normalized_kpoint_path_values,ec_test.normalized_kpoint_path_values,ec_test.eigenvalues[isppol,:,iband])
rmsd_per_band[isppol,iband] = N.sqrt(N.sum((csts.hartree2ev*interp-csts.hartree2ev*ec_ref.eigenvalues[isppol,:,iband])**2)/ec_ref.nkpt)
mae_per_band[isppol,iband] = N.sum(N.abs(csts.hartree2ev*interp-csts.hartree2ev*ec_ref.eigenvalues[isppol,:,iband]))/ec_ref.nkpt
P.figure(1)
P.plot(mae_per_band[0,:])
P.figure(2)
P.plot(rmsd_per_band[0,:])
P.show()
def get_gvectors():
if os.path.isfile('.gvectors.bsinfo'):
print 'File ".gvectors.bsinfo" found with the following gvectors information :'
try:
gvectors_reader = open('.gvectors.bsinfo','r')
gvectors_data = gvectors_reader.readlines()
gvectors_reader.close()
trial_gvectors = N.identity(3,N.float)
trial_gvectors[0,0] = N.float(gvectors_data[0].split()[0])
trial_gvectors[0,1] = N.float(gvectors_data[0].split()[1])
trial_gvectors[0,2] = N.float(gvectors_data[0].split()[2])
trial_gvectors[1,0] = N.float(gvectors_data[1].split()[0])
trial_gvectors[1,1] = N.float(gvectors_data[1].split()[1])
trial_gvectors[1,2] = N.float(gvectors_data[1].split()[2])
trial_gvectors[2,0] = N.float(gvectors_data[2].split()[0])
trial_gvectors[2,1] = N.float(gvectors_data[2].split()[1])
trial_gvectors[2,2] = N.float(gvectors_data[2].split()[2])
print ' gvectors(1) = [ %20.17f %20.17f %20.17f ]' %(trial_gvectors[0,0],trial_gvectors[0,1],trial_gvectors[0,2])
print ' gvectors(2) = [ %20.17f %20.17f %20.17f ]' %(trial_gvectors[1,0],trial_gvectors[1,1],trial_gvectors[1,2])
print ' gvectors(3) = [ %20.17f %20.17f %20.17f ]' %(trial_gvectors[2,0],trial_gvectors[2,1],trial_gvectors[2,2])
except:
print 'ERROR: file ".gvectors.bsinfo" might be corrupted (empty or not formatted correctly ...)'
print ' you should remove the file and start again or check the file ... exit'
sys.exit()
test = raw_input('Press <ENTER> to use these gvectors (any other character to enter manually other gvectors)\n')
if test == '':
gvectors = trial_gvectors
else:
gvectors = N.identity(3,N.float)
test = raw_input('Enter G1 (example : "0.153 0 0") : \n')
gvectors[0,0] = N.float(test.split()[0])
gvectors[0,1] = N.float(test.split()[1])
gvectors[0,2] = N.float(test.split()[2])
test = raw_input('Enter G2 (example : "0.042 1.023 0") : \n')
gvectors[1,0] = N.float(test.split()[0])
gvectors[1,1] = N.float(test.split()[1])
gvectors[1,2] = N.float(test.split()[2])
test = raw_input('Enter G3 (example : "0 0 1.432") : \n')
gvectors[2,0] = N.float(test.split()[0])
gvectors[2,1] = N.float(test.split()[1])
gvectors[2,2] = N.float(test.split()[2])
test = raw_input('Do you want to overwrite the gvectors contained in the file ".gvectors.bsinfo" ? (<ENTER> for yes, anything else for no)\n')
if test == '':
print 'Writing gvectors to file ".gvectors.bsinfo" ...'
gvectors_writer = open('.gvectors.bsinfo','w')
gvectors_writer.write('%20.17f %20.17f %20.17f\n' %(gvectors[0,0],gvectors[0,1],gvectors[0,2]))
gvectors_writer.write('%20.17f %20.17f %20.17f\n' %(gvectors[1,0],gvectors[1,1],gvectors[1,2]))
gvectors_writer.write('%20.17f %20.17f %20.17f\n' %(gvectors[2,0],gvectors[2,1],gvectors[2,2]))
gvectors_writer.close()
print '... done'
else:
test = raw_input('Do you want to enter the reciprocal space primitive vectors (y/n)\n')
if test == 'y':
gvectors = N.identity(3,N.float)
test = raw_input('Enter G1 (example : "0.153 0 0") : ')
gvectors[0,0] = N.float(test.split()[0])
gvectors[0,1] = N.float(test.split()[1])
gvectors[0,2] = N.float(test.split()[2])
test = raw_input('Enter G2 (example : "0.042 1.023 0") : ')
gvectors[1,0] = N.float(test.split()[0])
gvectors[1,1] = N.float(test.split()[1])
gvectors[1,2] = N.float(test.split()[2])
test = raw_input('Enter G3 (example : "0 0 1.432") : ')
gvectors[2,0] = N.float(test.split()[0])
gvectors[2,1] = N.float(test.split()[1])
gvectors[2,2] = N.float(test.split()[2])
test = raw_input('Do you want to write the gvectors to file ".gvectors.bsinfo" ? (<ENTER> for yes, anything else for no)\n')
if test == '':
print 'Writing gvectors to file ".gvectors.bsinfo" ...'
gvectors_writer = open('.gvectors.bsinfo','w')
gvectors_writer.write('%20.17f %20.17f %20.17f\n' %(gvectors[0,0],gvectors[0,1],gvectors[0,2]))
gvectors_writer.write('%20.17f %20.17f %20.17f\n' %(gvectors[1,0],gvectors[1,1],gvectors[1,2]))
gvectors_writer.write('%20.17f %20.17f %20.17f\n' %(gvectors[2,0],gvectors[2,1],gvectors[2,2]))
gvectors_writer.close()
print '... done'
else:
gvectors = None
return gvectors
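# The ".gvectors.bsinfo" cache handled above is a plain three-line text file,
# one reciprocal-lattice vector per line (values below are illustrative only):
#
#   0.15300000000000000  0.00000000000000000  0.00000000000000000
#   0.04200000000000000  1.02300000000000002  0.00000000000000000
#   0.00000000000000000  0.00000000000000000  1.43200000000000005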
# Parse the command line options
parser = argparse.ArgumentParser(description='Tool for plotting dft bandstructures')
parser.add_argument('files',help='files to be opened',nargs=1)
args = parser.parse_args()
args_dict = vars(args)
if args_dict['files']:
print 'will open the file'
else:
print 'ERROR: you should provide some bandstructure file ! exiting now ...'
sys.exit()
dft_file = args_dict['files'][0]
gvectors = get_gvectors()
ec_dft = EigenvalueContainer(directory='.',filename=dft_file)
ec_dft.set_kpoint_sampling_type('Bandstructure')
ec_dft.find_special_kpoints(gvectors)
print 'Number of bands in the file : %s' %(N.shape(ec_dft.eigenvalues)[2])
test = raw_input('Enter the number of bands to be plotted (<ENTER> : %s) : \n' %(N.shape(ec_dft.eigenvalues)[2]))
if test == '':
nbd_plot = N.shape(ec_dft.eigenvalues)[2]
else:
nbd_plot = N.int(test)
if nbd_plot > N.shape(ec_dft.eigenvalues)[2]:
print 'ERROR: the number of bands to be plotted is larger than the number available ... exit'
sys.exit()
ec_dft.special_kpoints_names = ['']*len(ec_dft.special_kpoints_indices)
for ii in range(len(ec_dft.special_kpoints_indices)):
ec_dft.special_kpoints_names[ii] = 'k%s' %(ii+1)
print 'List of special kpoints :'
for ii in range(len(ec_dft.special_kpoints_indices)):
spkpt = ec_dft.kpoints[ec_dft.special_kpoints_indices[ii]]
print ' Kpoint %s : %s %s %s' %(ii+1,spkpt[0],spkpt[1],spkpt[2])
print 'Enter the name of the %s special k-points :' %(len(ec_dft.special_kpoints_indices))
test = raw_input('')
if len(test.split()) == len(ec_dft.special_kpoints_indices):
for ii in range(len(ec_dft.special_kpoints_indices)):
ec_dft.special_kpoints_names[ii] = test.split()[ii]
test = raw_input('Enter base name for bandstructure file : \n')
ec_dft.write_bandstructure_to_file('%s.bandstructure' %test)
P.figure(1,figsize=(3.464,5))
P.hold('on')
P.grid('on')
P.xticks(N.take(ec_dft.kpoint_reduced_path_values,N.array(ec_dft.special_kpoints_indices,N.int)),ec_dft.special_kpoints_names)
if ec_dft.nsppol == 1:
for iband in range(nbd_plot):
P.plot(ec_dft.kpoint_reduced_path_values,ec_dft.eigenvalues[0,:,iband]*csts.hartree2ev,'k-',linewidth=2)
elif ec_dft.nsppol == 2:
for iband in range(nbd_plot):
P.plot(ec_dft.kpoint_reduced_path_values,ec_dft.eigenvalues[0,:,iband]*csts.hartree2ev,'k-',linewidth=2)
P.plot(ec_dft.kpoint_reduced_path_values,ec_dft.eigenvalues[1,:,iband]*csts.hartree2ev,'r-',linewidth=2)
P.show()
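# Example session (assuming a bandstructure _EIG.nc file produced by abinit is
# at hand):
#
#   python abinit_eignc_to_bandstructure.py mysystem_bandstructure_EIG.nc
#
# The script then asks interactively for the reciprocal-lattice vectors (or
# reuses .gvectors.bsinfo), the number of bands to plot, names for the special
# k-points and a base name for the ".bandstructure" output file before showing
# the matplotlib figure.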
| gpl-3.0 |
hdinsight/hue | desktop/libs/aws/src/aws/s3/s3stat.py | 6 | 2614 | # Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import stat
import posixpath
from aws.s3 import s3datetime_to_timestamp
class S3Stat(object):
DIR_MODE = 0777 | stat.S_IFDIR
FILE_MODE = 0666 | stat.S_IFREG
def __init__(self, name, path, isDir, size, mtime):
self.name = name
self.path = path
self.isDir = isDir
self.size = size
self.mtime = mtime
def __getitem__(self, key):
try:
return getattr(self, key)
except AttributeError:
raise KeyError(key)
def __setitem__(self, key, value):
# What about derivable values?
setattr(self, key, value)
@property
def type(self):
return 'DIRECTORY' if self.isDir else 'FILE'
@property
def mode(self):
return S3Stat.DIR_MODE if self.isDir else S3Stat.FILE_MODE
@property
def user(self):
return ''
@property
def group(self):
return ''
@property
def atime(self):
return self.mtime
@property
def aclBit(self):
return False
@classmethod
def from_bucket(cls, bucket):
return cls(bucket.name, 's3://%s' % bucket.name, True, 0, 0)
@classmethod
def from_key(cls, key, is_dir=False):
if key.name:
name = posixpath.basename(key.name[:-1] if key.name[-1] == '/' else key.name)
path = 's3://%s/%s' % (key.bucket.name, key.name)
else:
name = ''
path = 's3://%s' % key.bucket.name
size = key.size or 0
mtime = s3datetime_to_timestamp(key.last_modified) if key.last_modified else 0
return cls(name, path, is_dir, size, mtime)
@classmethod
def for_s3_root(cls):
return cls('S3', 's3://', True, 0, 0)
def to_json_dict(self):
"""
Returns a dictionary for easy serialization
"""
keys = ('path', 'size', 'atime', 'mtime', 'mode', 'user', 'group', 'aclBit')
res = {}
for k in keys:
res[k] = self[k]
return res
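# Minimal usage sketch (boto-style bucket/key objects are assumed, as in the
# rest of this module):
#
#   st = S3Stat.from_key(key)      # uses key.name, key.size, key.last_modified
#   st.to_json_dict()              # {'path': 's3://bucket/obj', 'size': ..., ...}
#   S3Stat.for_s3_root().isDir     # True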
| apache-2.0 |
ajaxsys/dict-admin | pygments/util.py | 5 | 6867 | # -*- coding: utf-8 -*-
"""
pygments.util
~~~~~~~~~~~~~
Utility functions.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import sys
import codecs
split_path_re = re.compile(r'[/\\ ]')
doctype_lookup_re = re.compile(r'''(?smx)
(<\?.*?\?>)?\s*
<!DOCTYPE\s+(
[a-zA-Z_][a-zA-Z0-9]*\s+
[a-zA-Z_][a-zA-Z0-9]*\s+
"[^"]*")
[^>]*>
''')
tag_re = re.compile(r'<(.+?)(\s.*?)?>.*?</.+?>(?uism)')
class ClassNotFound(ValueError):
"""
If one of the get_*_by_* functions didn't find a matching class.
"""
class OptionError(Exception):
pass
def get_choice_opt(options, optname, allowed, default=None, normcase=False):
string = options.get(optname, default)
if normcase:
string = string.lower()
if string not in allowed:
raise OptionError('Value for option %s must be one of %s' %
(optname, ', '.join(map(str, allowed))))
return string
def get_bool_opt(options, optname, default=None):
string = options.get(optname, default)
if isinstance(string, bool):
return string
elif isinstance(string, int):
return bool(string)
elif not isinstance(string, basestring):
raise OptionError('Invalid type %r for option %s; use '
'1/0, yes/no, true/false, on/off' % (
string, optname))
elif string.lower() in ('1', 'yes', 'true', 'on'):
return True
elif string.lower() in ('0', 'no', 'false', 'off'):
return False
else:
raise OptionError('Invalid value %r for option %s; use '
'1/0, yes/no, true/false, on/off' % (
string, optname))
def get_int_opt(options, optname, default=None):
string = options.get(optname, default)
try:
return int(string)
except TypeError:
raise OptionError('Invalid type %r for option %s; you '
'must give an integer value' % (
string, optname))
except ValueError:
raise OptionError('Invalid value %r for option %s; you '
'must give an integer value' % (
string, optname))
def get_list_opt(options, optname, default=None):
val = options.get(optname, default)
if isinstance(val, basestring):
return val.split()
elif isinstance(val, (list, tuple)):
return list(val)
else:
raise OptionError('Invalid type %r for option %s; you '
'must give a list value' % (
val, optname))
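# Usage sketch for the get_*_opt helpers above (the option dict is made up):
#
#   opts = {'tabsize': '4', 'stripnl': 'no', 'filenames': '*.py *.pyw'}
#   get_int_opt(opts, 'tabsize', 8)        # -> 4
#   get_bool_opt(opts, 'stripnl', True)    # -> False
#   get_list_opt(opts, 'filenames', [])    # -> ['*.py', '*.pyw']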
def docstring_headline(obj):
if not obj.__doc__:
return ''
res = []
for line in obj.__doc__.strip().splitlines():
if line.strip():
res.append(" " + line.strip())
else:
break
return ''.join(res).lstrip()
def make_analysator(f):
"""
Return a static text analysation function that
returns float values.
"""
def text_analyse(text):
try:
rv = f(text)
except Exception:
return 0.0
if not rv:
return 0.0
try:
return min(1.0, max(0.0, float(rv)))
except ValueError:
return 0.0
text_analyse.__doc__ = f.__doc__
return staticmethod(text_analyse)
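# Usage sketch (hypothetical lexer): make_analysator is meant to wrap an
# analyse_text heuristic inside a lexer class body, clamping the result into
# the 0.0..1.0 range and turning any exception or falsy value into 0.0:
#
#   class FooLexer(Lexer):
#       def analyse_text(text):
#           return 5.0 if text.startswith('#!foo') else 0   # clamped to 1.0 / 0.0
#       analyse_text = make_analysator(analyse_text)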
def shebang_matches(text, regex):
"""
Check if the given regular expression matches the last part of the
shebang if one exists.
>>> from pygments.util import shebang_matches
>>> shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?')
True
>>> shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?')
True
>>> shebang_matches('#!/usr/bin/python-ruby', r'python(2\.\d)?')
False
>>> shebang_matches('#!/usr/bin/python/ruby', r'python(2\.\d)?')
False
>>> shebang_matches('#!/usr/bin/startsomethingwith python',
... r'python(2\.\d)?')
True
It also checks for common windows executable file extensions::
>>> shebang_matches('#!C:\\Python2.4\\Python.exe', r'python(2\.\d)?')
True
Parameters (``'-f'`` or ``'--foo'`` are ignored so ``'perl'`` does
the same as ``'perl -e'``)
Note that this method automatically searches the whole string (eg:
the regular expression is wrapped in ``'^$'``)
"""
index = text.find('\n')
if index >= 0:
first_line = text[:index].lower()
else:
first_line = text.lower()
if first_line.startswith('#!'):
try:
found = [x for x in split_path_re.split(first_line[2:].strip())
if x and not x.startswith('-')][-1]
except IndexError:
return False
regex = re.compile('^%s(\.(exe|cmd|bat|bin))?$' % regex, re.IGNORECASE)
if regex.search(found) is not None:
return True
return False
def doctype_matches(text, regex):
"""
Check if the doctype matches a regular expression (if present).
Note that this method only checks the first part of a DOCTYPE.
eg: 'html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"'
"""
m = doctype_lookup_re.match(text)
if m is None:
return False
doctype = m.group(2)
return re.compile(regex).match(doctype.strip()) is not None
def html_doctype_matches(text):
"""
Check if the file looks like it has a html doctype.
"""
return doctype_matches(text, r'html\s+PUBLIC\s+"-//W3C//DTD X?HTML.*')
_looks_like_xml_cache = {}
def looks_like_xml(text):
"""
Check if a doctype exists or if we have some tags.
"""
key = hash(text)
try:
return _looks_like_xml_cache[key]
except KeyError:
m = doctype_lookup_re.match(text)
if m is not None:
return True
rv = tag_re.search(text[:1000]) is not None
_looks_like_xml_cache[key] = rv
return rv
# Python 2/3 compatibility
if sys.version_info < (3,0):
b = bytes = str
u_prefix = 'u'
import StringIO, cStringIO
BytesIO = cStringIO.StringIO
StringIO = StringIO.StringIO
uni_open = codecs.open
else:
import builtins
bytes = builtins.bytes
u_prefix = ''
def b(s):
if isinstance(s, str):
return bytes(map(ord, s))
elif isinstance(s, bytes):
return s
else:
raise TypeError("Invalid argument %r for b()" % (s,))
import io
BytesIO = io.BytesIO
StringIO = io.StringIO
uni_open = builtins.open
| bsd-3-clause |
stevenliuit/neon | neon/models/tests/test_rbm.py | 13 | 3470 | # ----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
"""
Tests for restricted boltzmann machine (RBM)
- create fake inputs (cudanet class with one small minibatch of 2D data)
- create a fake instance of the RBM class with the model structure from
yaml replaced by some small weight init / nodes parameters
- precompute the output values we expect for a gradient update and
numerically compare that we get them.
"""
from nose.plugins.attrib import attr
from nose.tools import nottest
import numpy as np
from neon.layers.boltzmann import RBMLayer
from neon.params import GaussianValGen
from neon.transforms.logistic import Logistic
from neon.transforms.sum_squared import SumSquaredDiffs
from neon.util.testing import assert_tensor_near_equal
@attr('cuda')
class TestCudaRBM:
def setup(self):
from neon.backends.cc2 import GPU, GPUTensor
# TODO: remove randomness from expected target results
self.be = GPU(rng_seed=0)
# reusable fake data
self.inputs = GPUTensor(np.ones((2, 100)))
# create fake layer
nin = 2
conf = {'name': 'testlayer', 'num_nodes': 2,
'weight_init': GaussianValGen(backend=self.be, loc=0.0,
scale=0.01)}
lr_params = {'learning_rate': 0.01}
thislr = {'type': 'gradient_descent', 'lr_params': lr_params}
activation = Logistic()
self.layer = RBMLayer(name=conf['name'])
# create fake cost
self.cost = SumSquaredDiffs(olayer=self.layer)
self.layer.initialize({'backend': self.be, 'batch_size': 100,
'lrule_init': thislr, 'nin': nin,
'nout': conf['num_nodes'],
'activation': activation,
'weight_init': conf['weight_init']})
def test_cudanet_positive(self):
self.layer.positive(self.inputs)
target = np.array([0.50541031, 0.50804842],
dtype='float32')
assert_tensor_near_equal(self.layer.p_hid_plus.asnumpyarray()[:, 0],
target)
def test_cudanet_negative(self):
self.layer.positive(self.inputs)
self.layer.negative(self.inputs)
target = np.array([0.50274211, 0.50407821],
dtype='float32')
assert_tensor_near_equal(self.layer.p_hid_minus.asnumpyarray()[:, 0],
target)
@nottest # TODO: remove randomness
def test_cudanet_cost(self):
self.layer.positive(self.inputs)
self.layer.negative(self.inputs)
thecost = self.cost.apply_function(self.inputs)
target = 106.588943481
assert_tensor_near_equal(thecost, target)
| apache-2.0 |
erickt/hue | desktop/core/ext-py/Django-1.6.10/tests/admin_scripts/tests.py | 52 | 81725 | # -*- coding: utf-8 -*-
"""
A series of tests to establish that the command-line management tools work as
advertised - especially with regard to the handling of the DJANGO_SETTINGS_MODULE
and default settings.py files.
"""
from __future__ import unicode_literals
import os
import re
import shutil
import socket
import subprocess
import sys
import codecs
import django
from django import conf, get_version
from django.conf import settings
from django.core.management import BaseCommand, CommandError
from django.db import connection
from django.test.runner import DiscoverRunner
from django.test.utils import str_prefix
from django.utils import unittest
from django.utils.encoding import force_text
from django.utils._os import upath
from django.utils.six import StringIO
from django.test import LiveServerTestCase
test_dir = os.path.realpath(os.path.join(os.environ['DJANGO_TEST_TEMP_DIR'], 'test_project'))
if not os.path.exists(test_dir):
os.mkdir(test_dir)
open(os.path.join(test_dir, '__init__.py'), 'w').close()
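# The empty __init__.py makes the temporary test_project directory importable
# as a package, so the settings modules written below can be loaded by their
# dotted path (e.g. 'test_project.settings').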
custom_templates_dir = os.path.join(os.path.dirname(__file__), 'custom_templates')
class AdminScriptTestCase(unittest.TestCase):
def write_settings(self, filename, apps=None, is_dir=False, sdict=None):
if is_dir:
settings_dir = os.path.join(test_dir, filename)
os.mkdir(settings_dir)
settings_file_path = os.path.join(settings_dir, '__init__.py')
else:
settings_file_path = os.path.join(test_dir, filename)
with open(settings_file_path, 'w') as settings_file:
settings_file.write('# Settings file automatically generated by admin_scripts test case\n')
exports = [
'DATABASES',
'ROOT_URLCONF',
'SECRET_KEY',
]
for s in exports:
if hasattr(settings, s):
o = getattr(settings, s)
if not isinstance(o, dict):
o = "'%s'" % o
settings_file.write("%s = %s\n" % (s, o))
if apps is None:
apps = ['django.contrib.auth', 'django.contrib.contenttypes', 'admin_scripts']
settings_file.write("INSTALLED_APPS = %s\n" % apps)
if sdict:
for k, v in sdict.items():
settings_file.write("%s = %s\n" % (k, v))
def remove_settings(self, filename, is_dir=False):
full_name = os.path.join(test_dir, filename)
if is_dir:
shutil.rmtree(full_name)
else:
os.remove(full_name)
# Also try to remove the compiled file; if it exists, it could
# mess up later tests that depend upon the .py file not existing
try:
if sys.platform.startswith('java'):
# Jython produces module$py.class files
os.remove(re.sub(r'\.py$', '$py.class', full_name))
else:
# CPython produces module.pyc files
os.remove(full_name + 'c')
except OSError:
pass
# Also remove a __pycache__ directory, if it exists
cache_name = os.path.join(test_dir, '__pycache__')
if os.path.isdir(cache_name):
shutil.rmtree(cache_name)
def _ext_backend_paths(self):
"""
Returns the paths for any external backend packages.
"""
paths = []
first_package_re = re.compile(r'(^[^\.]+)\.')
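        # Captures the top-level package of each backend's dotted ENGINE path;
        # anything other than 'django' is treated as an external backend below.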
for backend in settings.DATABASES.values():
result = first_package_re.findall(backend['ENGINE'])
if result and result != ['django']:
backend_pkg = __import__(result[0])
backend_dir = os.path.dirname(backend_pkg.__file__)
paths.append(os.path.dirname(backend_dir))
return paths
def run_test(self, script, args, settings_file=None, apps=None):
project_dir = test_dir
base_dir = os.path.dirname(test_dir)
# The base dir for Django's tests is one level up.
tests_dir = os.path.dirname(os.path.dirname(__file__))
# The base dir for Django is one level above the test dir. We don't use
# `import django` to figure that out, so we don't pick up a Django
# from site-packages or similar.
django_dir = os.path.dirname(tests_dir)
ext_backend_base_dirs = self._ext_backend_paths()
# Remember the old environment
old_django_settings_module = os.environ.get('DJANGO_SETTINGS_MODULE', None)
if sys.platform.startswith('java'):
python_path_var_name = 'JYTHONPATH'
else:
python_path_var_name = 'PYTHONPATH'
old_python_path = os.environ.get(python_path_var_name, None)
old_cwd = os.getcwd()
# Set the test environment
if settings_file:
os.environ['DJANGO_SETTINGS_MODULE'] = settings_file
elif 'DJANGO_SETTINGS_MODULE' in os.environ:
del os.environ['DJANGO_SETTINGS_MODULE']
python_path = [base_dir, django_dir, tests_dir]
python_path.extend(ext_backend_base_dirs)
os.environ[python_path_var_name] = os.pathsep.join(python_path)
# Move to the test directory and run
os.chdir(test_dir)
out, err = subprocess.Popen([sys.executable, script] + args,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
universal_newlines=True).communicate()
# Restore the old environment
if old_django_settings_module:
os.environ['DJANGO_SETTINGS_MODULE'] = old_django_settings_module
if old_python_path:
os.environ[python_path_var_name] = old_python_path
# Move back to the old working directory
os.chdir(old_cwd)
return out, err
def run_django_admin(self, args, settings_file=None):
script_dir = os.path.abspath(os.path.join(os.path.dirname(upath(django.__file__)), 'bin'))
return self.run_test(os.path.join(script_dir, 'django-admin.py'), args, settings_file)
def run_manage(self, args, settings_file=None):
def safe_remove(path):
try:
os.remove(path)
except OSError:
pass
conf_dir = os.path.dirname(upath(conf.__file__))
template_manage_py = os.path.join(conf_dir, 'project_template', 'manage.py')
test_manage_py = os.path.join(test_dir, 'manage.py')
shutil.copyfile(template_manage_py, test_manage_py)
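        # The copied template still contains the '{{ project_name }}' placeholder;
        # replace it so the script points at the temporary test_project package.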
with open(test_manage_py, 'r') as fp:
manage_py_contents = fp.read()
manage_py_contents = manage_py_contents.replace(
"{{ project_name }}", "test_project")
with open(test_manage_py, 'w') as fp:
fp.write(manage_py_contents)
self.addCleanup(safe_remove, test_manage_py)
return self.run_test('./manage.py', args, settings_file)
def assertNoOutput(self, stream):
"Utility assertion: assert that the given stream is empty"
self.assertEqual(len(stream), 0, "Stream should be empty: actually contains '%s'" % stream)
def assertOutput(self, stream, msg):
"Utility assertion: assert that the given message exists in the output"
stream = force_text(stream)
self.assertTrue(msg in stream, "'%s' does not match actual output text '%s'" % (msg, stream))
def assertNotInOutput(self, stream, msg):
"Utility assertion: assert that the given message doesn't exist in the output"
stream = force_text(stream)
self.assertFalse(msg in stream, "'%s' matches actual output text '%s'" % (msg, stream))
##########################################################################
# DJANGO ADMIN TESTS
# This first series of test classes checks the environment processing
# of the django-admin.py script
##########################################################################
class DjangoAdminNoSettings(AdminScriptTestCase):
"A series of tests for django-admin.py when there is no settings.py file."
def test_builtin_command(self):
"no settings: django-admin builtin commands fail with an error when no settings provided"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_bad_settings(self):
"no settings: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['sqlall', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_builtin_with_bad_environment(self):
"no settings: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
class DjangoAdminDefaultSettings(AdminScriptTestCase):
"""A series of tests for django-admin.py when using a settings.py file that
contains the test application.
"""
def setUp(self):
self.write_settings('settings.py')
def tearDown(self):
self.remove_settings('settings.py')
def test_builtin_command(self):
"default: django-admin builtin commands fail with an error when no settings provided"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_settings(self):
"default: django-admin builtin commands succeed if settings are provided as argument"
args = ['sqlall', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_environment(self):
"default: django-admin builtin commands succeed if settings are provided in the environment"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_bad_settings(self):
"default: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['sqlall', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_builtin_with_bad_environment(self):
"default: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_custom_command(self):
"default: django-admin can't execute user commands if it isn't provided settings"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"default: django-admin can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
def test_custom_command_with_environment(self):
"default: django-admin can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
class DjangoAdminFullPathDefaultSettings(AdminScriptTestCase):
"""A series of tests for django-admin.py when using a settings.py file that
contains the test application specified using a full path.
"""
def setUp(self):
self.write_settings('settings.py', ['django.contrib.auth', 'django.contrib.contenttypes', 'admin_scripts'])
def tearDown(self):
self.remove_settings('settings.py')
def test_builtin_command(self):
"fulldefault: django-admin builtin commands fail with an error when no settings provided"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_settings(self):
"fulldefault: django-admin builtin commands succeed if a settings file is provided"
args = ['sqlall', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_environment(self):
"fulldefault: django-admin builtin commands succeed if the environment contains settings"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_bad_settings(self):
"fulldefault: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['sqlall', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_builtin_with_bad_environment(self):
"fulldefault: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_custom_command(self):
"fulldefault: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"fulldefault: django-admin can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
def test_custom_command_with_environment(self):
"fulldefault: django-admin can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
class DjangoAdminMinimalSettings(AdminScriptTestCase):
"""A series of tests for django-admin.py when using a settings.py file that
doesn't contain the test application.
"""
def setUp(self):
self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
def tearDown(self):
self.remove_settings('settings.py')
def test_builtin_command(self):
"minimal: django-admin builtin commands fail with an error when no settings provided"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_settings(self):
"minimal: django-admin builtin commands fail if settings are provided as argument"
args = ['sqlall', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'App with label admin_scripts could not be found')
def test_builtin_with_environment(self):
"minimal: django-admin builtin commands fail if settings are provided in the environment"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(out)
self.assertOutput(err, 'App with label admin_scripts could not be found')
def test_builtin_with_bad_settings(self):
"minimal: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['sqlall', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_builtin_with_bad_environment(self):
"minimal: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_custom_command(self):
"minimal: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"minimal: django-admin can't execute user commands, even if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_environment(self):
"minimal: django-admin can't execute user commands, even if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
class DjangoAdminAlternateSettings(AdminScriptTestCase):
"""A series of tests for django-admin.py when using a settings file
with a name other than 'settings.py'.
"""
def setUp(self):
self.write_settings('alternate_settings.py')
def tearDown(self):
self.remove_settings('alternate_settings.py')
def test_builtin_command(self):
"alternate: django-admin builtin commands fail with an error when no settings provided"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_settings(self):
"alternate: django-admin builtin commands succeed if settings are provided as argument"
args = ['sqlall', '--settings=test_project.alternate_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_environment(self):
"alternate: django-admin builtin commands succeed if settings are provided in the environment"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_bad_settings(self):
"alternate: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['sqlall', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_builtin_with_bad_environment(self):
"alternate: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_custom_command(self):
"alternate: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"alternate: django-admin can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.alternate_settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
def test_custom_command_with_environment(self):
"alternate: django-admin can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_django_admin(args, 'test_project.alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
class DjangoAdminMultipleSettings(AdminScriptTestCase):
"""A series of tests for django-admin.py when multiple settings files
(including the default 'settings.py') are available. The default settings
file is insufficient for performing the operations described, so the
alternate settings must be used by the running script.
"""
def setUp(self):
self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
self.write_settings('alternate_settings.py')
def tearDown(self):
self.remove_settings('settings.py')
self.remove_settings('alternate_settings.py')
def test_builtin_command(self):
"alternate: django-admin builtin commands fail with an error when no settings provided"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_settings(self):
"alternate: django-admin builtin commands succeed if settings are provided as argument"
args = ['sqlall', '--settings=test_project.alternate_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_environment(self):
"alternate: django-admin builtin commands succeed if settings are provided in the environment"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_bad_settings(self):
"alternate: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['sqlall', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_builtin_with_bad_environment(self):
"alternate: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_custom_command(self):
"alternate: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"alternate: django-admin can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.alternate_settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
def test_custom_command_with_environment(self):
"alternate: django-admin can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_django_admin(args, 'test_project.alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
class DjangoAdminSettingsDirectory(AdminScriptTestCase):
"""
A series of tests for django-admin.py when the settings file is in a
directory. (see #9751).
"""
def setUp(self):
self.write_settings('settings', is_dir=True)
def tearDown(self):
self.remove_settings('settings', is_dir=True)
def test_setup_environ(self):
"directory: startapp creates the correct directory"
args = ['startapp', 'settings_test']
app_path = os.path.join(test_dir, 'settings_test')
out, err = self.run_django_admin(args, 'test_project.settings')
self.addCleanup(shutil.rmtree, app_path)
self.assertNoOutput(err)
self.assertTrue(os.path.exists(app_path))
def test_setup_environ_custom_template(self):
"directory: startapp creates the correct directory with a custom template"
template_path = os.path.join(custom_templates_dir, 'app_template')
args = ['startapp', '--template', template_path, 'custom_settings_test']
app_path = os.path.join(test_dir, 'custom_settings_test')
out, err = self.run_django_admin(args, 'test_project.settings')
self.addCleanup(shutil.rmtree, app_path)
self.assertNoOutput(err)
self.assertTrue(os.path.exists(app_path))
self.assertTrue(os.path.exists(os.path.join(app_path, 'api.py')))
def test_builtin_command(self):
"directory: django-admin builtin commands fail with an error when no settings provided"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_bad_settings(self):
"directory: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['sqlall', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_builtin_with_bad_environment(self):
"directory: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_custom_command(self):
"directory: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_builtin_with_settings(self):
"directory: django-admin builtin commands succeed if settings are provided as argument"
args = ['sqlall', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_environment(self):
"directory: django-admin builtin commands succeed if settings are provided in the environment"
args = ['sqlall', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
##########################################################################
# MANAGE.PY TESTS
# This next series of test classes checks the environment processing
# of the generated manage.py script
##########################################################################
class ManageNoSettings(AdminScriptTestCase):
"A series of tests for manage.py when there is no settings.py file."
def test_builtin_command(self):
"no settings: manage.py builtin commands fail with an error when no settings provided"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'test_project.settings'")
def test_builtin_with_bad_settings(self):
"no settings: manage.py builtin commands fail if settings file (from argument) doesn't exist"
args = ['sqlall', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_builtin_with_bad_environment(self):
"no settings: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
class ManageDefaultSettings(AdminScriptTestCase):
"""A series of tests for manage.py when using a settings.py file that
contains the test application.
"""
def setUp(self):
self.write_settings('settings.py')
def tearDown(self):
self.remove_settings('settings.py')
def test_builtin_command(self):
"default: manage.py builtin commands succeed when default settings are appropriate"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_settings(self):
"default: manage.py builtin commands succeed if settings are provided as argument"
args = ['sqlall', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_environment(self):
"default: manage.py builtin commands succeed if settings are provided in the environment"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_bad_settings(self):
"default: manage.py builtin commands succeed if settings file (from argument) doesn't exist"
args = ['sqlall', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_builtin_with_bad_environment(self):
"default: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_custom_command(self):
"default: manage.py can execute user commands when default settings are appropriate"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
def test_custom_command_with_settings(self):
"default: manage.py can execute user commands when settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
def test_custom_command_with_environment(self):
"default: manage.py can execute user commands when settings are provided in environment"
args = ['noargs_command']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
class ManageFullPathDefaultSettings(AdminScriptTestCase):
"""A series of tests for manage.py when using a settings.py file that
contains the test application specified using a full path.
"""
def setUp(self):
self.write_settings('settings.py', ['django.contrib.auth', 'django.contrib.contenttypes', 'admin_scripts'])
def tearDown(self):
self.remove_settings('settings.py')
def test_builtin_command(self):
"fulldefault: manage.py builtin commands succeed when default settings are appropriate"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_settings(self):
"fulldefault: manage.py builtin commands succeed if settings are provided as argument"
args = ['sqlall', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_environment(self):
"fulldefault: manage.py builtin commands succeed if settings are provided in the environment"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_bad_settings(self):
"fulldefault: manage.py builtin commands succeed if settings file (from argument) doesn't exist"
args = ['sqlall', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_builtin_with_bad_environment(self):
"fulldefault: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_custom_command(self):
"fulldefault: manage.py can execute user commands when default settings are appropriate"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
def test_custom_command_with_settings(self):
"fulldefault: manage.py can execute user commands when settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
def test_custom_command_with_environment(self):
"fulldefault: manage.py can execute user commands when settings are provided in environment"
args = ['noargs_command']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
class ManageMinimalSettings(AdminScriptTestCase):
"""A series of tests for manage.py when using a settings.py file that
doesn't contain the test application.
"""
def setUp(self):
self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
def tearDown(self):
self.remove_settings('settings.py')
def test_builtin_command(self):
"minimal: manage.py builtin commands fail with an error when no settings provided"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, 'App with label admin_scripts could not be found')
def test_builtin_with_settings(self):
"minimal: manage.py builtin commands fail if settings are provided as argument"
args = ['sqlall', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, 'App with label admin_scripts could not be found')
def test_builtin_with_environment(self):
"minimal: manage.py builtin commands fail if settings are provided in the environment"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(out)
self.assertOutput(err, 'App with label admin_scripts could not be found')
def test_builtin_with_bad_settings(self):
"minimal: manage.py builtin commands fail if settings file (from argument) doesn't exist"
args = ['sqlall', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_builtin_with_bad_environment(self):
"minimal: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_custom_command(self):
"minimal: manage.py can't execute user commands without appropriate settings"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"minimal: manage.py can't execute user commands, even if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_environment(self):
"minimal: manage.py can't execute user commands, even if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
class ManageAlternateSettings(AdminScriptTestCase):
"""A series of tests for manage.py when using a settings file
with a name other than 'settings.py'.
"""
def setUp(self):
self.write_settings('alternate_settings.py')
def tearDown(self):
self.remove_settings('alternate_settings.py')
def test_builtin_command(self):
"alternate: manage.py builtin commands fail with an error when no default settings provided"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'test_project.settings'")
def test_builtin_with_settings(self):
"alternate: manage.py builtin commands work with settings provided as argument"
args = ['sqlall', '--settings=alternate_settings', 'admin_scripts']
out, err = self.run_manage(args)
expected = ('create table %s'
% connection.ops.quote_name('admin_scripts_article'))
self.assertTrue(expected.lower() in out.lower())
self.assertNoOutput(err)
def test_builtin_with_environment(self):
"alternate: manage.py builtin commands work if settings are provided in the environment"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args, 'alternate_settings')
expected = ('create table %s'
% connection.ops.quote_name('admin_scripts_article'))
self.assertTrue(expected.lower() in out.lower())
self.assertNoOutput(err)
def test_builtin_with_bad_settings(self):
"alternate: manage.py builtin commands fail if settings file (from argument) doesn't exist"
args = ['sqlall', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_builtin_with_bad_environment(self):
"alternate: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_custom_command(self):
"alternate: manage.py can't execute user commands without settings"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'test_project.settings'")
def test_custom_command_with_settings(self):
"alternate: manage.py can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=alternate_settings']
out, err = self.run_manage(args)
self.assertOutput(out, str_prefix("EXECUTE:NoArgsCommand options=[('pythonpath', None), ('settings', 'alternate_settings'), ('traceback', None), ('verbosity', %(_)s'1')]"))
self.assertNoOutput(err)
def test_custom_command_with_environment(self):
"alternate: manage.py can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_manage(args, 'alternate_settings')
self.assertOutput(out, str_prefix("EXECUTE:NoArgsCommand options=[('pythonpath', None), ('settings', None), ('traceback', None), ('verbosity', %(_)s'1')]"))
self.assertNoOutput(err)
class ManageMultipleSettings(AdminScriptTestCase):
"""A series of tests for manage.py when multiple settings files
(including the default 'settings.py') are available. The default settings
file is insufficient for performing the operations described, so the
alternate settings must be used by the running script.
"""
def setUp(self):
self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
self.write_settings('alternate_settings.py')
def tearDown(self):
self.remove_settings('settings.py')
self.remove_settings('alternate_settings.py')
def test_builtin_command(self):
"multiple: manage.py builtin commands fail with an error when no settings provided"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, 'App with label admin_scripts could not be found.')
def test_builtin_with_settings(self):
"multiple: manage.py builtin commands succeed if settings are provided as argument"
args = ['sqlall', '--settings=alternate_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_environment(self):
"multiple: manage.py can execute builtin commands if settings are provided in the environment"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args, 'alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, 'CREATE TABLE')
def test_builtin_with_bad_settings(self):
"multiple: manage.py builtin commands fail if settings file (from argument) doesn't exist"
args = ['sqlall', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_builtin_with_bad_environment(self):
"multiple: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "Could not import settings 'bad_settings'")
def test_custom_command(self):
"multiple: manage.py can't execute user commands using default settings"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"multiple: manage.py can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=alternate_settings']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
def test_custom_command_with_environment(self):
"multiple: manage.py can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_manage(args, 'alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:NoArgsCommand")
class ManageSettingsWithSettingsErrors(AdminScriptTestCase):
"""
Tests for manage.py when using the default settings.py file containing
runtime errors.
"""
def tearDown(self):
self.remove_settings('settings.py')
def write_settings_with_import_error(self, filename):
settings_file_path = os.path.join(test_dir, filename)
with open(settings_file_path, 'w') as settings_file:
settings_file.write('# Settings file automatically generated by admin_scripts test case\n')
settings_file.write('# The next line will cause an import error:\nimport foo42bar\n')
def test_import_error(self):
"""
        import error: manage.py builtin commands show useful diagnostic info
        when a settings module with an import error is provided (#14130).
"""
self.write_settings_with_import_error('settings.py')
args = ['sqlall', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named")
self.assertOutput(err, "foo42bar")
def test_attribute_error(self):
"""
        manage.py builtin commands do not swallow an AttributeError caused by
        bad settings (#18845).
"""
self.write_settings('settings.py', sdict={'BAD_VAR': 'INSTALLED_APPS.crash'})
args = ['collectstatic', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "AttributeError: 'list' object has no attribute 'crash'")
def test_key_error(self):
self.write_settings('settings.py', sdict={'BAD_VAR': 'DATABASES["blah"]'})
args = ['collectstatic', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "KeyError: 'blah'")
class ManageValidate(AdminScriptTestCase):
def tearDown(self):
self.remove_settings('settings.py')
def test_nonexistent_app(self):
"manage.py validate reports an error on a non-existent app in INSTALLED_APPS"
self.write_settings('settings.py', apps=['admin_scriptz.broken_app'], sdict={'USE_I18N': False})
args = ['validate']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, 'No module named')
self.assertOutput(err, 'admin_scriptz')
def test_broken_app(self):
"manage.py validate reports an ImportError if an app's models.py raises one on import"
self.write_settings('settings.py', apps=['admin_scripts.broken_app'])
args = ['validate']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, 'ImportError')
def test_complex_app(self):
"manage.py validate does not raise an ImportError validating a complex app with nested calls to load_app"
self.write_settings('settings.py',
apps=['admin_scripts.complex_app', 'admin_scripts.simple_app'],
sdict={'DEBUG': True})
args = ['validate']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, '0 errors found')
def test_app_with_import(self):
"manage.py validate does not raise errors when an app imports a base class that itself has an abstract base"
self.write_settings('settings.py',
apps=['admin_scripts.app_with_import',
'django.contrib.comments',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sites'],
sdict={'DEBUG': True})
args = ['validate']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, '0 errors found')
class CustomTestRunner(DiscoverRunner):
def __init__(self, *args, **kwargs):
assert 'liveserver' not in kwargs
super(CustomTestRunner, self).__init__(*args, **kwargs)
def run_tests(self, test_labels, extra_tests=None, **kwargs):
pass
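        # Intentionally a no-op: ManageTestCommand below only checks how the
        # test command handles its environment (e.g. --liveserver), not whether
        # any tests actually run.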
class ManageTestCommand(AdminScriptTestCase):
def setUp(self):
from django.core.management.commands.test import Command as TestCommand
self.cmd = TestCommand()
def test_liveserver(self):
"""
Ensure that the --liveserver option sets the environment variable
correctly.
Refs #2879.
"""
# Backup original state
address_predefined = 'DJANGO_LIVE_TEST_SERVER_ADDRESS' in os.environ
old_address = os.environ.get('DJANGO_LIVE_TEST_SERVER_ADDRESS')
self.cmd.handle(verbosity=0, testrunner='admin_scripts.tests.CustomTestRunner')
# Original state hasn't changed
self.assertEqual('DJANGO_LIVE_TEST_SERVER_ADDRESS' in os.environ, address_predefined)
self.assertEqual(os.environ.get('DJANGO_LIVE_TEST_SERVER_ADDRESS'), old_address)
self.cmd.handle(verbosity=0, testrunner='admin_scripts.tests.CustomTestRunner',
liveserver='blah')
# Variable was correctly set
self.assertEqual(os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'], 'blah')
# Restore original state
if address_predefined:
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = old_address
else:
del os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS']
class ManageRunserver(AdminScriptTestCase):
def setUp(self):
from django.core.management.commands.runserver import Command
def monkey_run(*args, **options):
return
self.cmd = Command()
self.cmd.run = monkey_run
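        # run() is stubbed out so that handle() only parses the address/port
        # options and never actually starts a development server.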
def assertServerSettings(self, addr, port, ipv6=None, raw_ipv6=False):
self.assertEqual(self.cmd.addr, addr)
self.assertEqual(self.cmd.port, port)
self.assertEqual(self.cmd.use_ipv6, ipv6)
self.assertEqual(self.cmd._raw_ipv6, raw_ipv6)
def test_runserver_addrport(self):
self.cmd.handle()
self.assertServerSettings('127.0.0.1', '8000')
self.cmd.handle(addrport="1.2.3.4:8000")
self.assertServerSettings('1.2.3.4', '8000')
self.cmd.handle(addrport="7000")
self.assertServerSettings('127.0.0.1', '7000')
@unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6")
def test_runner_addrport_ipv6(self):
self.cmd.handle(addrport="", use_ipv6=True)
self.assertServerSettings('::1', '8000', ipv6=True, raw_ipv6=True)
self.cmd.handle(addrport="7000", use_ipv6=True)
self.assertServerSettings('::1', '7000', ipv6=True, raw_ipv6=True)
self.cmd.handle(addrport="[2001:0db8:1234:5678::9]:7000")
self.assertServerSettings('2001:0db8:1234:5678::9', '7000', ipv6=True, raw_ipv6=True)
def test_runner_hostname(self):
self.cmd.handle(addrport="localhost:8000")
self.assertServerSettings('localhost', '8000')
self.cmd.handle(addrport="test.domain.local:7000")
self.assertServerSettings('test.domain.local', '7000')
@unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6")
def test_runner_hostname_ipv6(self):
self.cmd.handle(addrport="test.domain.local:7000", use_ipv6=True)
self.assertServerSettings('test.domain.local', '7000', ipv6=True)
def test_runner_ambiguous(self):
# Only 4 characters, all of which could be in an ipv6 address
self.cmd.handle(addrport="beef:7654")
self.assertServerSettings('beef', '7654')
# Uses only characters that could be in an ipv6 address
self.cmd.handle(addrport="deadbeef:7654")
self.assertServerSettings('deadbeef', '7654')
class ManageRunserverEmptyAllowedHosts(AdminScriptTestCase):
def setUp(self):
self.write_settings('settings.py', sdict={
'ALLOWED_HOSTS': [],
'DEBUG': False,
})
def tearDown(self):
self.remove_settings('settings.py')
def test_empty_allowed_hosts_error(self):
out, err = self.run_manage(['runserver'])
self.assertNoOutput(out)
self.assertOutput(err, 'CommandError: You must set settings.ALLOWED_HOSTS if DEBUG is False.')
##########################################################################
# COMMAND PROCESSING TESTS
# Check that user-space commands are correctly handled - in particular,
# that arguments to the commands are correctly parsed and processed.
##########################################################################
class CommandTypes(AdminScriptTestCase):
"Tests for the various types of base command types that can be defined."
def setUp(self):
self.write_settings('settings.py')
def tearDown(self):
self.remove_settings('settings.py')
def test_version(self):
"version is handled as a special case"
args = ['version']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, get_version())
def test_version_alternative(self):
"--version is equivalent to version"
args1, args2 = ['version'], ['--version']
self.assertEqual(self.run_manage(args1), self.run_manage(args2))
def test_help(self):
"help is handled as a special case"
args = ['help']
out, err = self.run_manage(args)
self.assertOutput(out, "Usage: manage.py subcommand [options] [args]")
self.assertOutput(out, "Type 'manage.py help <subcommand>' for help on a specific subcommand.")
self.assertOutput(out, '[django]')
self.assertOutput(out, 'startapp')
self.assertOutput(out, 'startproject')
def test_help_commands(self):
"help --commands shows the list of all available commands"
args = ['help', '--commands']
out, err = self.run_manage(args)
self.assertNotInOutput(out, 'Usage:')
self.assertNotInOutput(out, 'Options:')
self.assertNotInOutput(out, '[django]')
self.assertOutput(out, 'startapp')
self.assertOutput(out, 'startproject')
self.assertNotInOutput(out, '\n\n')
def test_help_alternative(self):
"--help is equivalent to help"
args1, args2 = ['help'], ['--help']
self.assertEqual(self.run_manage(args1), self.run_manage(args2))
    def test_help_short_alternative(self):
"-h is handled as a short form of --help"
args1, args2 = ['--help'], ['-h']
self.assertEqual(self.run_manage(args1), self.run_manage(args2))
def test_specific_help(self):
"--help can be used on a specific command"
args = ['sqlall', '--help']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "Prints the CREATE TABLE, custom SQL and CREATE INDEX SQL statements for the given model module name(s).")
def test_base_command(self):
"User BaseCommands can execute when a label is provided"
args = ['base_command', 'testlabel']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, str_prefix("EXECUTE:BaseCommand labels=('testlabel',), options=[('option_a', '1'), ('option_b', '2'), ('option_c', '3'), ('pythonpath', None), ('settings', None), ('traceback', None), ('verbosity', %(_)s'1')]"))
def test_base_command_no_label(self):
"User BaseCommands can execute when no labels are provided"
args = ['base_command']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, str_prefix("EXECUTE:BaseCommand labels=(), options=[('option_a', '1'), ('option_b', '2'), ('option_c', '3'), ('pythonpath', None), ('settings', None), ('traceback', None), ('verbosity', %(_)s'1')]"))
def test_base_command_multiple_label(self):
"User BaseCommands can execute when no labels are provided"
args = ['base_command', 'testlabel', 'anotherlabel']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, str_prefix("EXECUTE:BaseCommand labels=('testlabel', 'anotherlabel'), options=[('option_a', '1'), ('option_b', '2'), ('option_c', '3'), ('pythonpath', None), ('settings', None), ('traceback', None), ('verbosity', %(_)s'1')]"))
def test_base_command_with_option(self):
"User BaseCommands can execute with options when a label is provided"
args = ['base_command', 'testlabel', '--option_a=x']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, str_prefix("EXECUTE:BaseCommand labels=('testlabel',), options=[('option_a', 'x'), ('option_b', '2'), ('option_c', '3'), ('pythonpath', None), ('settings', None), ('traceback', None), ('verbosity', %(_)s'1')]"))
def test_base_command_with_options(self):
"User BaseCommands can execute with multiple options when a label is provided"
args = ['base_command', 'testlabel', '-a', 'x', '--option_b=y']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, str_prefix("EXECUTE:BaseCommand labels=('testlabel',), options=[('option_a', 'x'), ('option_b', 'y'), ('option_c', '3'), ('pythonpath', None), ('settings', None), ('traceback', None), ('verbosity', %(_)s'1')]"))
def test_base_run_from_argv(self):
"""
Test run_from_argv properly terminates even with custom execute() (#19665)
Also test proper traceback display.
"""
command = BaseCommand()
def raise_command_error(*args, **kwargs):
raise CommandError("Custom error")
old_stderr = sys.stderr
sys.stderr = err = StringIO()
try:
command.execute = lambda args: args # This will trigger TypeError
# If the Exception is not CommandError it should always
# raise the original exception.
with self.assertRaises(TypeError):
command.run_from_argv(['', ''])
# If the Exception is CommandError and --traceback is not present
# this command should raise a SystemExit and don't print any
# traceback to the stderr.
command.execute = raise_command_error
err.truncate(0)
with self.assertRaises(SystemExit):
command.run_from_argv(['', ''])
err_message = err.getvalue()
self.assertNotIn("Traceback", err_message)
self.assertIn("CommandError", err_message)
# If the Exception is CommandError and --traceback is present
# this command should raise the original CommandError as if it
# were not a CommandError.
err.truncate(0)
with self.assertRaises(CommandError):
command.run_from_argv(['', '', '--traceback'])
finally:
sys.stderr = old_stderr
def test_run_from_argv_non_ascii_error(self):
"""
        Test that a non-ASCII CommandError message does not raise a
        UnicodeDecodeError in run_from_argv.
"""
def raise_command_error(*args, **kwargs):
raise CommandError("Erreur personnalisée")
command = BaseCommand()
command.execute = raise_command_error
command.stderr = StringIO()
with self.assertRaises(SystemExit):
command.run_from_argv(['', ''])
def test_noargs(self):
"NoArg Commands can be executed"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, str_prefix("EXECUTE:NoArgsCommand options=[('pythonpath', None), ('settings', None), ('traceback', None), ('verbosity', %(_)s'1')]"))
def test_noargs_with_args(self):
"NoArg Commands raise an error if an argument is provided"
args = ['noargs_command', 'argument']
out, err = self.run_manage(args)
self.assertOutput(err, "Error: Command doesn't accept any arguments")
def test_app_command(self):
"User AppCommands can execute when a single app name is provided"
args = ['app_command', 'auth']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:AppCommand app=<module 'django.contrib.auth.models'")
self.assertOutput(out, "module 'django.contrib.auth.models' from")
self.assertOutput(out, str_prefix("'>, options=[('pythonpath', None), ('settings', None), ('traceback', None), ('verbosity', %(_)s'1')]"))
def test_app_command_no_apps(self):
"User AppCommands raise an error when no app name is provided"
args = ['app_command']
out, err = self.run_manage(args)
self.assertOutput(err, 'Error: Enter at least one appname.')
def test_app_command_multiple_apps(self):
"User AppCommands raise an error when multiple app names are provided"
args = ['app_command', 'auth', 'contenttypes']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:AppCommand app=<module 'django.contrib.auth.models'")
self.assertOutput(out, "module 'django.contrib.auth.models' from")
self.assertOutput(out, str_prefix("'>, options=[('pythonpath', None), ('settings', None), ('traceback', None), ('verbosity', %(_)s'1')]"))
self.assertOutput(out, "EXECUTE:AppCommand app=<module 'django.contrib.contenttypes.models'")
self.assertOutput(out, "module 'django.contrib.contenttypes.models' from")
self.assertOutput(out, str_prefix("'>, options=[('pythonpath', None), ('settings', None), ('traceback', None), ('verbosity', %(_)s'1')]"))
def test_app_command_invalid_appname(self):
"User AppCommands can execute when a single app name is provided"
args = ['app_command', 'NOT_AN_APP']
out, err = self.run_manage(args)
self.assertOutput(err, "App with label NOT_AN_APP could not be found")
def test_app_command_some_invalid_appnames(self):
"User AppCommands can execute when some of the provided app names are invalid"
args = ['app_command', 'auth', 'NOT_AN_APP']
out, err = self.run_manage(args)
self.assertOutput(err, "App with label NOT_AN_APP could not be found")
def test_label_command(self):
"User LabelCommands can execute when a label is provided"
args = ['label_command', 'testlabel']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, str_prefix("EXECUTE:LabelCommand label=testlabel, options=[('pythonpath', None), ('settings', None), ('traceback', None), ('verbosity', %(_)s'1')]"))
def test_label_command_no_label(self):
"User LabelCommands raise an error if no label is provided"
args = ['label_command']
out, err = self.run_manage(args)
self.assertOutput(err, 'Enter at least one label')
def test_label_command_multiple_label(self):
"User LabelCommands are executed multiple times if multiple labels are provided"
args = ['label_command', 'testlabel', 'anotherlabel']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, str_prefix("EXECUTE:LabelCommand label=testlabel, options=[('pythonpath', None), ('settings', None), ('traceback', None), ('verbosity', %(_)s'1')]"))
self.assertOutput(out, str_prefix("EXECUTE:LabelCommand label=anotherlabel, options=[('pythonpath', None), ('settings', None), ('traceback', None), ('verbosity', %(_)s'1')]"))
class ArgumentOrder(AdminScriptTestCase):
"""Tests for 2-stage argument parsing scheme.
django-admin command arguments are parsed in 2 parts; the core arguments
(--settings, --traceback and --pythonpath) are parsed using a Lax parser.
This Lax parser ignores any unknown options. Then the full settings are
    passed to the command parser, which extracts the options of interest to the
individual command.
"""
def setUp(self):
self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
self.write_settings('alternate_settings.py')
def tearDown(self):
self.remove_settings('settings.py')
self.remove_settings('alternate_settings.py')
def test_setting_then_option(self):
"Options passed after settings are correctly handled"
args = ['base_command', 'testlabel', '--settings=alternate_settings', '--option_a=x']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, str_prefix("EXECUTE:BaseCommand labels=('testlabel',), options=[('option_a', 'x'), ('option_b', '2'), ('option_c', '3'), ('pythonpath', None), ('settings', 'alternate_settings'), ('traceback', None), ('verbosity', %(_)s'1')]"))
def test_setting_then_short_option(self):
"Short options passed after settings are correctly handled"
        args = ['base_command', 'testlabel', '--settings=alternate_settings', '-a', 'x']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, str_prefix("EXECUTE:BaseCommand labels=('testlabel',), options=[('option_a', 'x'), ('option_b', '2'), ('option_c', '3'), ('pythonpath', None), ('settings', 'alternate_settings'), ('traceback', None), ('verbosity', %(_)s'1')]"))
def test_option_then_setting(self):
"Options passed before settings are correctly handled"
args = ['base_command', 'testlabel', '--option_a=x', '--settings=alternate_settings']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, str_prefix("EXECUTE:BaseCommand labels=('testlabel',), options=[('option_a', 'x'), ('option_b', '2'), ('option_c', '3'), ('pythonpath', None), ('settings', 'alternate_settings'), ('traceback', None), ('verbosity', %(_)s'1')]"))
def test_short_option_then_setting(self):
"Short options passed before settings are correctly handled"
args = ['base_command', 'testlabel', '-a', 'x', '--settings=alternate_settings']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, str_prefix("EXECUTE:BaseCommand labels=('testlabel',), options=[('option_a', 'x'), ('option_b', '2'), ('option_c', '3'), ('pythonpath', None), ('settings', 'alternate_settings'), ('traceback', None), ('verbosity', %(_)s'1')]"))
def test_option_then_setting_then_option(self):
"Options are correctly handled when they are passed before and after a setting"
args = ['base_command', 'testlabel', '--option_a=x', '--settings=alternate_settings', '--option_b=y']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, str_prefix("EXECUTE:BaseCommand labels=('testlabel',), options=[('option_a', 'x'), ('option_b', 'y'), ('option_c', '3'), ('pythonpath', None), ('settings', 'alternate_settings'), ('traceback', None), ('verbosity', %(_)s'1')]"))
class StartProject(LiveServerTestCase, AdminScriptTestCase):
available_apps = [
'admin_scripts',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
]
def test_wrong_args(self):
"Make sure passing the wrong kinds of arguments raises a CommandError"
out, err = self.run_django_admin(['startproject'])
self.assertNoOutput(out)
self.assertOutput(err, "you must provide a project name")
def test_simple_project(self):
"Make sure the startproject management command creates a project"
args = ['startproject', 'testproject']
testproject_dir = os.path.join(test_dir, 'testproject')
self.addCleanup(shutil.rmtree, testproject_dir, True)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
# running again..
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "already exists")
def test_invalid_project_name(self):
"Make sure the startproject management command validates a project name"
for bad_name in ('7testproject', '../testproject'):
args = ['startproject', bad_name]
testproject_dir = os.path.join(test_dir, bad_name)
self.addCleanup(shutil.rmtree, testproject_dir, True)
out, err = self.run_django_admin(args)
self.assertOutput(err, "Error: '%s' is not a valid project name. "
"Please make sure the name begins with a letter or underscore." % bad_name)
self.assertFalse(os.path.exists(testproject_dir))
def test_simple_project_different_directory(self):
"Make sure the startproject management command creates a project in a specific directory"
args = ['startproject', 'testproject', 'othertestproject']
testproject_dir = os.path.join(test_dir, 'othertestproject')
os.mkdir(testproject_dir)
self.addCleanup(shutil.rmtree, testproject_dir)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'manage.py')))
# running again..
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "already exists")
def test_custom_project_template(self):
"Make sure the startproject management command is able to use a different project template"
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['startproject', '--template', template_path, 'customtestproject']
testproject_dir = os.path.join(test_dir, 'customtestproject')
self.addCleanup(shutil.rmtree, testproject_dir, True)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir')))
def test_template_dir_with_trailing_slash(self):
"Ticket 17475: Template dir passed has a trailing path separator"
template_path = os.path.join(custom_templates_dir, 'project_template' + os.sep)
args = ['startproject', '--template', template_path, 'customtestproject']
testproject_dir = os.path.join(test_dir, 'customtestproject')
self.addCleanup(shutil.rmtree, testproject_dir, True)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir')))
def test_custom_project_template_from_tarball_by_path(self):
"Make sure the startproject management command is able to use a different project template from a tarball"
template_path = os.path.join(custom_templates_dir, 'project_template.tgz')
args = ['startproject', '--template', template_path, 'tarballtestproject']
testproject_dir = os.path.join(test_dir, 'tarballtestproject')
self.addCleanup(shutil.rmtree, testproject_dir, True)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))
def test_custom_project_template_from_tarball_to_alternative_location(self):
"Startproject can use a project template from a tarball and create it in a specified location"
template_path = os.path.join(custom_templates_dir, 'project_template.tgz')
args = ['startproject', '--template', template_path, 'tarballtestproject', 'altlocation']
testproject_dir = os.path.join(test_dir, 'altlocation')
os.mkdir(testproject_dir)
self.addCleanup(shutil.rmtree, testproject_dir)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))
def test_custom_project_template_from_tarball_by_url(self):
"Make sure the startproject management command is able to use a different project template from a tarball via a url"
template_url = '%s/admin_scripts/custom_templates/project_template.tgz' % self.live_server_url
args = ['startproject', '--template', template_url, 'urltestproject']
testproject_dir = os.path.join(test_dir, 'urltestproject')
self.addCleanup(shutil.rmtree, testproject_dir, True)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))
def test_project_template_tarball_url(self):
"Startproject management command handles project template tar/zip balls from non-canonical urls"
template_url = '%s/admin_scripts/custom_templates/project_template.tgz/' % self.live_server_url
args = ['startproject', '--template', template_url, 'urltestproject']
testproject_dir = os.path.join(test_dir, 'urltestproject')
self.addCleanup(shutil.rmtree, testproject_dir, True)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))
def test_file_without_extension(self):
"Make sure the startproject management command is able to render custom files"
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['startproject', '--template', template_path, 'customtestproject', '-e', 'txt', '-n', 'Procfile']
testproject_dir = os.path.join(test_dir, 'customtestproject')
self.addCleanup(shutil.rmtree, testproject_dir, True)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir')))
base_path = os.path.join(testproject_dir, 'additional_dir')
for f in ('Procfile', 'additional_file.py', 'requirements.txt'):
self.assertTrue(os.path.exists(os.path.join(base_path, f)))
with open(os.path.join(base_path, f)) as fh:
self.assertEqual(fh.read(),
'# some file for customtestproject test project')
def test_custom_project_template_context_variables(self):
"Make sure template context variables are rendered with proper values"
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['startproject', '--template', template_path, 'another_project', 'project_dir']
testproject_dir = os.path.join(test_dir, 'project_dir')
os.mkdir(testproject_dir)
self.addCleanup(shutil.rmtree, testproject_dir)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
test_manage_py = os.path.join(testproject_dir, 'manage.py')
with open(test_manage_py, 'r') as fp:
content = force_text(fp.read())
self.assertIn("project_name = 'another_project'", content)
self.assertIn("project_directory = '%s'" % testproject_dir, content)
def test_no_escaping_of_project_variables(self):
"Make sure template context variables are not html escaped"
# We're using a custom command so we need the alternate settings
self.write_settings('alternate_settings.py')
self.addCleanup(self.remove_settings, 'alternate_settings.py')
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['custom_startproject', '--template', template_path, 'another_project', 'project_dir', '--extra', '<&>', '--settings=alternate_settings']
testproject_dir = os.path.join(test_dir, 'project_dir')
os.mkdir(testproject_dir)
self.addCleanup(shutil.rmtree, testproject_dir)
out, err = self.run_manage(args)
self.assertNoOutput(err)
test_manage_py = os.path.join(testproject_dir, 'additional_dir', 'extra.py')
with open(test_manage_py, 'r') as fp:
content = fp.read()
self.assertIn("<&>", content)
def test_custom_project_destination_missing(self):
"""
Make sure an exception is raised when the provided
destination directory doesn't exist
"""
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['startproject', '--template', template_path, 'yet_another_project', 'project_dir2']
testproject_dir = os.path.join(test_dir, 'project_dir2')
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Destination directory '%s' does not exist, please create it first." % testproject_dir)
self.assertFalse(os.path.exists(testproject_dir))
def test_custom_project_template_with_non_ascii_templates(self):
"Ticket 18091: Make sure the startproject management command is able to render templates with non-ASCII content"
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['startproject', '--template', template_path, '--extension=txt', 'customtestproject']
testproject_dir = os.path.join(test_dir, 'customtestproject')
self.addCleanup(shutil.rmtree, testproject_dir, True)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
path = os.path.join(testproject_dir, 'ticket-18091-non-ascii-template.txt')
with codecs.open(path, 'r', encoding='utf-8') as f:
self.assertEqual(f.read().splitlines(False), [
'Some non-ASCII text for testing ticket #18091:',
'üäö €'])
class DiffSettings(AdminScriptTestCase):
"""Tests for diffsettings management command."""
def test_basic(self):
"""Runs without error and emits settings diff."""
self.write_settings('settings_to_diff.py', sdict={'FOO': '"bar"'})
self.addCleanup(self.remove_settings, 'settings_to_diff.py')
args = ['diffsettings', '--settings=settings_to_diff']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "FOO = 'bar' ###")
def test_all(self):
"""The all option also shows settings with the default value."""
self.write_settings('settings_to_diff.py', sdict={'STATIC_URL': 'None'})
self.addCleanup(self.remove_settings, 'settings_to_diff.py')
args = ['diffsettings', '--settings=settings_to_diff', '--all']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "### STATIC_URL = None")
class Dumpdata(AdminScriptTestCase):
"""Tests for dumpdata management command."""
def setUp(self):
self.write_settings('settings.py')
def tearDown(self):
self.remove_settings('settings.py')
def test_pks_parsing(self):
"""Regression for #20509
Test would raise an exception rather than printing an error message.
"""
args = ['dumpdata', '--pks=1']
out, err = self.run_manage(args)
self.assertOutput(err, "You can only use --pks option with one model")
self.assertNoOutput(out)
| apache-2.0 |
robios/PyTES | pytes/Util.py | 1 | 32573 | import warnings
import numpy as np
import time
from struct import unpack
from scipy.stats import norm
from scipy.signal import tukey
from Filter import median_filter
import Analysis, Filter, Constants
def savefits(data, filename, vmax=1.0, sps=1e6, bits=14, noise=False, clobber=True):
"""
Save pulse/noise to FITS file
"""
import pyfits as pf
# Prepare data
data = (np.asarray(data)/vmax*2**(bits-1)).round()
# Column Name
if noise:
colname = 'NoiseRec'
else:
colname = 'PulseRec'
# Columns
col_t = pf.Column(name='TIME', format='1D', unit='s', array=np.zeros(data.shape[0], dtype=int))
col_data = pf.Column(name=colname, format='%dI' % data.shape[1], unit='V', array=data)
cols = pf.ColDefs([col_t, col_data])
tbhdu = pf.BinTableHDU.from_columns(cols)
# Name of extension
exthdr = tbhdu.header
exthdr['EXTNAME'] = ('Record', 'name of this binary table extension')
exthdr['EXTVER'] = (1, 'extension version number')
# Add more attributes
exthdr['TSCAL2'] = (vmax/2**(bits-1), '[V/ch]')
exthdr['TZERO2'] = (0., '[V]')
exthdr['THSCL2'] = (sps**-1, '[s/bin] horizontal resolution of record')
exthdr['THZER2'] = (0, '[s] horizontal offset of record')
exthdr['THSAM2'] = (data.shape[1], 'sample number of record')
exthdr['THUNI2'] = ('s', 'physical unit of sampling step of record')
exthdr['TRMIN2'] = (-2**(bits-1)+1, '[channel] minimum number of each sample')
exthdr['TRMAX2'] = (2**(bits-1)-1, '[channel] maximum number of each sample')
exthdr['TRBIN2'] = (1, '[channel] default bin number of each sample')
# More attributes
exthdr['TSTART'] = (0, 'start time of experiment in total second')
exthdr['TSTOP'] = (0, 'end time of experiment in total second')
exthdr['TEND'] = (0, 'end time of experiment (obsolete)')
exthdr['DATE'] = (time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()), 'file creation date (UT)')
# We anyway need Primary HDU
hdu = pf.PrimaryHDU()
# Write to FITS
thdulist = pf.HDUList([hdu, tbhdu])
with warnings.catch_warnings():
warnings.simplefilter("ignore")
thdulist.writeto(filename, clobber=clobber)
def fopen(filename):
"""
Read FITS file
Parameters
==========
    filename: file name to read
Returns
=======
t: time array
wave: waveform array
"""
import pyfits as pf
# Open fits file and get pulse/noise data
header = pf.open(filename)
wave = header[1].data.field(1).copy()
dt = header[1].header['THSCL2']
t = np.arange(wave.shape[-1]) * dt
header.close()
return t, wave
def yopen(filenumber, summary=False, nf=None, tmin=None, tmax=None, raw=False):
"""
Read Yokogawa WVF file
Parameters
==========
filenumber: file number to read
summary: to summary waves (default: False)
nf: sigmas for valid data using median noise filter, None to disable noise filter (default: None)
    tmin: lower boundary of time for partial extraction, scalar or list (Default: None)
    tmax: upper boundary of time for partial extraction, scalar or list (Default: None)
raw: returns raw data without scaling/offsetting if True (Default: False)
Returns
=======
if summary is False:
[ t1, d1, t2, d2, t3, d3, ... ]
if summary is True:
[ t1, d1, err1, t2, d2, err2, ... ]
if raw is True:
t1 is a tuple of (hres1, hofs1, vres1, vofs1)
where t1 is timing for 1st ch, d1 is data for 1st ch, err1 is error (1sigma) for 1st ch, and so on.
"""
# Read header (HDR)
h = open(str(filenumber) + ".HDR")
lines = h.readlines()
h.close()
# Parse $PublicInfo
for line in lines:
token = line.split()
if len(token) > 0:
# Check endian
if token[0] == "Endian":
endian = '>' if token[1] == "Big" else '<'
# Check data format
if token[0] == "DataFormat":
format = token[1]
assert format == "Block"
# Check # of groups
if token[0] == "GroupNumber":
groups = int(token[1])
# Check # of total traces
if token[0] == "TraceTotalNumber":
ttraces = int(token[1])
# Check data offset
if token[0] == "DataOffset":
offset = int(token[1])
# Initialize containers
traces = [None] * groups # Number of traces for each group
blocks = [None] * ttraces # Number of blocks for each trace
bsizes = [None] * ttraces # Block size for each trace
vres = [None] * ttraces # VResolution for each trace
voffset = [None] * ttraces # VOffset for each trace
hres = [None] * ttraces # HResolution for each trace
hoffset = [None] * ttraces # HOffset for each trace
# Parse $Group
for line in lines:
token = line.split()
if len(token) > 0:
# Read current group number
if token[0][:6] == "$Group":
cgn = int(token[0][6:]) - 1 # Current group number (minus 1)
# Check # of traces in this group
if token[0] == "TraceNumber":
traces[cgn] = int(token[1])
traceofs = np.sum(traces[:cgn], dtype=int)
# Check # of Blocks
if token[0] == "BlockNumber":
blocks[traceofs:traceofs+traces[cgn]] = [ int(token[1]) ] * traces[cgn]
# Check Block Size
if token[0] == "BlockSize":
bsizes[traceofs:traceofs+traces[cgn]] = [ int(s) for s in token[1:] ]
# Check VResolusion
if token[0] == "VResolution":
vres[traceofs:traceofs+traces[cgn]] = [ float(res) for res in token[1:] ]
# Check VOffset
if token[0] == "VOffset":
voffset[traceofs:traceofs+traces[cgn]] = [ float(ofs) for ofs in token[1:] ]
# Check VDataType
if token[0] == "VDataType":
assert token[1] == "IS2"
# Check HResolution
if token[0] == "HResolution":
hres[traceofs:traceofs+traces[cgn]] = [ float(res) for res in token[1:] ]
# Check HOffset
if token[0] == "HOffset":
hoffset[traceofs:traceofs+traces[cgn]] = [ float(ofs) for ofs in token[1:] ]
# Data Initialization
time = [ np.array(range(bsizes[t])) * hres[t] + hoffset[t] for t in range(ttraces) ]
data = [ [None] * blocks[t] for t in range(ttraces) ]
# Open WVF
f = open(str(filenumber) + ".WVF", 'rb')
f.seek(offset)
# Read WVF
if format == "Block":
# Block format (assuming block size is the same for all the traces in Block format)
for b in range(blocks[0]):
for t in range(ttraces):
if raw:
data[t][b] = np.array(unpack(endian + 'h'*bsizes[t], f.read(bsizes[t]*2)), dtype='int64')
else:
data[t][b] = np.array(unpack(endian + 'h'*bsizes[t], f.read(bsizes[t]*2))) * vres[t] + voffset[t]
else:
# Trace format
for t in range(ttraces):
for b in range(blocks[t]):
if raw:
data[t][b] = np.array(unpack(endian + 'h'*bsizes[t], f.read(bsizes[t]*2)), dtype='int64')
else:
data[t][b] = np.array(unpack(endian + 'h'*bsizes[t], f.read(bsizes[t]*2))) * vres[t] + voffset[t]
# Array conversion
for t in range(ttraces):
if raw:
data[t] = np.array(data[t], dtype='int64')
else:
data[t] = np.array(data[t])
# Tmin/Tmax filtering
for t in range(ttraces):
if type(tmin) == list or type(tmax) == list:
if not (type(tmin) == list and type(tmax) == list and len(tmin) == len(tmax)):
raise ValueError("tmin and tmax both have to be list and have to have the same length.")
mask = np.add.reduce([ (time[t] >= _tmin) & (time[t] < _tmax) for (_tmax, _tmin) in zip(tmax, tmin)], dtype=bool)
else:
_tmin = np.min(time[t]) if tmin is None else tmin
_tmax = np.max(time[t]) + 1 if tmax is None else tmax
mask = (time[t] >= _tmin) & (time[t] < _tmax)
data[t] = data[t][:, mask]
time[t] = time[t][mask]
f.close()
if summary is False:
# Return wave data as is
if raw:
return [ [ (hres[t], hoffset[t], vres[t], voffset[t]), data[t] ] for t in range(ttraces) ]
else:
return [ [ time[t], data[t] ] for t in range(ttraces) ]
else:
if nf is None:
# Noise filter is off
if raw:
return [ [ (hres[t], hoffset[t], vres[t], voffset[t]), np.mean(data[t].astype(dtype='float64'), axis=0), np.std(data[t].astype(dtype='float64'), axis=0, ddof=1) ]
for t in range(ttraces) ]
else:
return [ [ time[t], np.mean(data[t], axis=0), np.std(data[t], axis=0, ddof=1) ]
for t in range(ttraces) ]
else:
# Noise filter is on
if raw:
return [ [ (hres[t], hoffset[t], vres[t], voffset[t]),
np.apply_along_axis(lambda a: np.mean(a[median_filter(a, nf)]), 0, data[t].astype(dtype='float64')),
np.apply_along_axis(lambda a: np.std(a[median_filter(a, nf)], ddof=1), 0, data[t].astype(dtype='float64')) ]
for t in range(ttraces) ]
else:
return [ [ time[t],
np.apply_along_axis(lambda a: np.mean(a[median_filter(a, nf)]), 0, data[t]),
np.apply_along_axis(lambda a: np.std(a[median_filter(a, nf)], ddof=1), 0, data[t]) ]
for t in range(ttraces) ]
def popen(filename, ch=None, raw=False):
"""
Read pls file
Parameters
==========
filename: file name to read
ch: returns data only for the given channel if given (Default: None)
raw: returns raw data without scaling/offsetting if True (Default: False)
Returns
=======
if raw is True:
[ header, vres, vofs, hres, hofs, tick, num, data, edata ]
else:
[ header, t, tick, num, data, edata ]
"""
# Initialize
header = {'COMMENT': []}
vres = {}
vofs = {}
hres = {}
hofs = {}
tick = {}
num = {}
data = {}
edata = {}
# Parser
def parser():
"""
PLS Data Parser (generator)
"""
# Initialization
samples = -1
extra = 0
chunk = ''
isHeader = True
while True:
while len(chunk) < 2:
chunk += yield
# Get the magic character
magic = chunk[0]
if isHeader and magic == 'C':
# Comment
while len(chunk) < 80:
chunk += yield
header['COMMENT'].append(chunk[2:80])
chunk = chunk[80:]
elif isHeader and magic == 'V':
# Version
while len(chunk) < 80:
chunk += yield
header['VERSION'] = chunk[2:80]
chunk = chunk[80:]
elif isHeader and magic == 'O':
# Date
while len(chunk) < 10:
chunk += yield
_m, _d, _y = map(int, chunk[2:10].split())
header['DATE'] = "%d/%d/%d" % (_y, _m, _d)
chunk = chunk[10:]
elif isHeader and magic == 'S':
# Number of Samples
while len(chunk) < 7:
chunk += yield
header['SAMPLES'] = samples = int(chunk[2:7])
chunk = chunk[7:]
elif isHeader and magic == 'E':
# Extra Bytes
while len(chunk) < 7:
chunk += yield
header['EXTRA'] = extra = int(chunk[2:7])
chunk = chunk[7:]
elif isHeader and magic == 'P':
# Discriminator
while len(chunk) < 78:
chunk += yield
_dis = chunk[2:78].split()
if _dis[0] == '01':
header['ULD'] = eval(_dis[1])
elif _dis[0] == '02':
header['LLD'] = eval(_dis[1])
chunk = chunk[78:]
elif isHeader and magic == 'N':
# Normalization
while len(chunk) < 47:
chunk += yield
_ch, _hofs, _hres, _vofs, _vres = chunk[2:47].split()
_ch = int(_ch)
vres[_ch] = eval(_vres)
vofs[_ch] = eval(_vofs)
hres[_ch] = eval(_hres)
hofs[_ch] = eval(_hofs)
chunk = chunk[47:]
elif magic == 'D':
# Data
isHeader = False
if samples < 0:
raise ValueError("Invalid number of samples.")
while len(chunk) < (11 + samples*2):
chunk += yield
_ch, _tick, _num = unpack('<BII', chunk[2:11])
if not data.has_key(_ch):
data[_ch] = bytearray()
tick[_ch] = []
num[_ch] = []
edata[_ch] = bytearray()
data[_ch] += chunk[11:11 + samples*2]
tick[_ch].append(_tick)
num[_ch].append(_num)
edata[_ch] += chunk[11 + samples*2:11 + samples*2 + extra]
chunk = chunk[11 + samples*2 + extra:]
else:
# Skip unknown magic
chunk = chunk[1:]
# Open pls file and read by chunks
# Start parser
p = parser()
p.next()
# Read by chunk and parse it
with open(filename, 'rb') as f:
while True:
chunk = f.read(1024*1024) # read 1 MB
if not chunk:
break
p.send(chunk)
# Convert buffer to numpy array
for k in ([ch] if ch else data.keys()):
data[k] = np.frombuffer(data[k], dtype='>i2').reshape(-1, header['SAMPLES'])
edata[k] = np.frombuffer(edata[k], dtype='>u1').reshape(-1, header['SAMPLES'])
if raw:
if ch:
return header, vres[ch], vofs[ch], hres[ch], hofs[ch], tick[ch], num[ch], data[ch], edata[ch]
else:
return header, vres, vofs, hres, hofs, tick, num, data, edata
else:
t = {}
for k in ([ch] if ch else data.keys()):
# Normalize data using res/ofs
t[k] = (np.arange(header['SAMPLES']) + hofs[k]) * hres[k]
data[k] = (np.asarray(data[k]) + vofs[k]) * vres[k]
if ch:
return header, t[ch], tick[ch], num[ch], data[ch], edata[ch]
else:
return header, t, tick, num, data, edata
def tesana(t, p, n, lpfc=None, hpfc=None, binsize=1, max_shift=10,
thre=0.4, filt=None, nulldc=False, offset=False, center=False, sigma=3,
gain=None, dsr=None, shift=False, ocmethod="ols", flip=False, atom="Mn",
kbfit=False, ignorekb=False, method="mle",
rshunt=None, tbias=None, ites=None, ka_min=80, kb_min=40,
tex=False, plotting=True, savedat=False, session="Unnamed"):
"""
Perform TES Analysis
Parameters (and their default values):
t: time data (array-like)
p: pulse data (array-like)
n: noise data (array-like)
lpfc: low-pass filter cut-off frequency in bins (Default: None)
hpfc: high-pass filter cut-off frequency in bins (Default: None)
        binsize: energy bin size for histograms and fittings (only for ls and cs) in eV (Default: 1)
max_shift: maximum allowed shifts to calculate maximum cross correlation (Default: 10)
thre: correlation threshold for offset correction (Default: 0.4)
filt: window function (hanning/hamming/blackman/tukey) (Default: None)
nulldc: nullify the DC bin when template generation (Default: False)
offset: subtract DC offset (Default: False)
center: centering pulse rise (Default: False)
sigma: sigmas for median filter (Default: 3)
gain: feedback gain for current-space conversion (Default: None)
dsr: down-sampling rate (Default: None)
shift: treat dE as energy shift instead of scaling (Default: False)
ocmethod: offset correction fitting method (ols/odr) (Default: ols)
flip: flip x and y when offset correction fitting (Default: False)
atom: atom to fit (Default: Mn)
kbfit: fit Kb line (Default: False)
ignorekb: ignore Kb line when linearity correction (Default: False)
method: fitting method (mle/ls/cs) (Default: mle)
rshunt: shunt resistance value for r-space conversion (Default: None)
tbias: TES bias current for r-space conversion (Default: None)
ites: TES current for r-space conversion (Default: None)
ka_min: minimum counts to group bins for Ka line (valid only for ls/cs fittings) (Default: 80)
        kb_min: minimum counts to group bins for Kb line (valid only for ls/cs fittings) (Default: 40)
tex: use TeX for plots (Default: False)
plotting: generate and save plots (Default: True)
savedat: save data to files (Default: False)
session: session name for plots and data files (Default: Unnamed)
Note:
- Use offset option when using filt option
- Consider using center option when using filt option
"""
if plotting:
# Import matplotlib
import matplotlib
matplotlib.use('Agg')
matplotlib.rcParams['text.usetex'] = str(tex)
from pylab import figure, plot, errorbar, hist, axvline, xlim, ylim, loglog, xlabel, ylabel, legend, tight_layout, savefig
print "Session: %s" % session
# Preparation
p = np.asarray(p)
n = np.asarray(n)
t = np.asarray(t)
dt = np.diff(t)[0]
df = (dt * t.shape[-1])**-1
# Subtract offset
if offset:
ofs = np.median(n)
p -= ofs
n -= ofs
# Convert to current-space if needed
if gain:
print "Converting to current-space"
p /= gain
n /= gain
# Convert to resistance-space
Rspace = False
if gain and rshunt and tbias and ites:
print "Converting to resistance-space"
ofs = np.median(n)
p += (ites - ofs)
n += (ites - ofs)
# Convert to resistance
p = (tbias - p) * rshunt / p
n = (tbias - n) * rshunt / n
Rspace = True
# Down-sample
if dsr > 1:
p = p[:,:p.shape[-1]/dsr*dsr].reshape(p.shape[0], -1, dsr).mean(axis=-1)
n = n[:,:n.shape[-1]/dsr*dsr].reshape(n.shape[0], -1, dsr).mean(axis=-1)
dt *= dsr
t = t[::dsr]
# Pulse centering (for filtering)
if center:
# Roll pulse to the center
r = p.shape[-1] / 2 - np.median(abs(p - Filter.offset(p)[:, np.newaxis]).argmax(axis=-1))
p = np.hstack((p[...,-r:], p[...,:-r]))
# Calculate offset (needs to be done before applying filter)
if p.size > 0:
offset = Filter.offset(p)
# Generate Filter
if filt is None:
pass
else:
if filt.lower() == "hanning":
f = np.hanning(p.shape[-1])
elif filt.lower() == "hamming":
f = np.hamming(p.shape[-1])
elif filt.lower() == "blackman":
f = np.blackman(p.shape[-1])
elif filt.lower() == "tukey":
f = tukey(p.shape[-1])
else:
raise ValueError('Unsupported filter: %s' % filt.lower())
print "Window filter function: %s" % filt.lower()
# Amplitude correction
cf = f.sum() / len(f)
p *= (f / cf)
n *= (f / cf)
# Equivalent noise bandwidth correction
enb = len(f)*(f**2).sum()/f.sum()**2
df *= enb
if p.size > 0:
# Calculate averaged pulse
avgp = Filter.average_pulse(p, max_shift=max_shift)
if savedat:
np.savetxt('%s-averagepulse.dat' % session, np.vstack((t, avgp)).T,
header='Time (s), Averaged Pulse (%s)' % ('R' if Rspace else ('A' if gain else 'V')), delimiter='\t')
if plotting:
figure()
plot(t, avgp)
xlabel('Time$\quad$(s)')
ylabel('Averaged Pulse$\quad$(%s)' % ('R' if Rspace else ('A' if gain else 'V')))
tight_layout()
savefig('%s-averagepulse.pdf' % session)
# Calculate averaged pulse spectrum
avgps = np.sqrt(Filter.power(avgp)) / df
if savedat:
np.savetxt('%s-avgpulse-power.dat' % session, np.vstack((np.arange(len(avgps))*df, avgps)).T,
header='Frequency (Hz), Average Pulse Power (%s/srHz)' % ('R' if Rspace else ('A' if gain else 'V')), delimiter='\t')
if plotting:
avgps[0] = 0 # for better plot
figure()
plot(np.arange(len(avgps))*df, avgps)
loglog()
xlabel('Frequency$\quad$(Hz)')
ylabel('Average Pulse Power$\quad$(%s/Hz)' % ('R' if Rspace else ('A' if gain else 'V')))
tight_layout()
savefig('%s-avgpulse-power.pdf' % session)
if n.size > 0:
# Plot noise spectrum
avgns = np.sqrt(Filter.average_noise(n) / df)
if savedat:
np.savetxt('%s-noise.dat' % session, np.vstack((np.arange(len(avgns))*df, avgns)).T,
header='Frequency (Hz), Noise (%s/srHz)' % ('R' if Rspace else ('A' if gain else 'V')), delimiter='\t')
if plotting:
avgns[0] = 0 # for better plot
figure()
plot(np.arange(len(avgns))*df, avgns)
loglog()
xlabel('Frequency$\quad$(Hz)')
ylabel('Noise$\quad$(%s/$\sqrt{\mathrm{Hz}}$)' % ('R' if Rspace else ('A' if gain else 'V')))
tight_layout()
savefig('%s-noise.pdf' % session)
if p.size > 0 and n.size > 0:
# Generate template
tmpl, sn = Filter.generate_template(p, n, lpfc=lpfc, hpfc=hpfc, nulldc=nulldc, max_shift=max_shift)
if savedat:
np.savetxt('%s-template.dat' % session, np.vstack((t, tmpl)).T,
header='Time (s), Template (A.U.)', delimiter='\t')
np.savetxt('%s-sn.dat' % session, np.vstack((np.arange(len(sn))*df, sn/np.sqrt(df))).T,
header='Frequency (Hz), S/N (/srHz)', delimiter='\t')
if plotting:
# Plot template
figure()
plot(t, tmpl)
xlabel('Time$\quad$(s)')
ylabel('Template$\quad$(A.U.)')
tight_layout()
savefig('%s-template.pdf' % session)
# Plot SNR
figure()
plot(np.arange(len(sn))*df, sn/np.sqrt(df))
loglog()
xlabel('Frequency$\quad$(Hz)')
ylabel('S/N$\quad$(/$\sqrt{\mathrm{Hz}}$)')
tight_layout()
savefig('%s-sn.pdf' % session)
# Calculate baseline resolution
print "Resolving power: %.2f (%.2f eV @ 5.9 keV)" % (np.sqrt((sn**2).sum()*2), Analysis.baseline(sn))
# Perform optimal filtering
pha_p = Filter.optimal_filter(p, tmpl, max_shift=max_shift)
pha_n = Filter.optimal_filter(n, tmpl, max_shift=0)
# Offset correction
(a, b), coef = Analysis.fit_offset(pha_p, offset, sigma=sigma, method=ocmethod, flip=flip)
if coef > thre:
oc_pha_p = Analysis.offset_correction(pha_p, offset, b)
oc_pha_n = Analysis.offset_correction(pha_n, offset, b)
print "Offset correction with: PHA = %f * (1 + %f * Offset)" % (a, b)
if plotting:
figure()
ka = Analysis.ka(np.vstack((pha_p, offset)).T, sigma=sigma)
plot(ka.T[1], ka.T[0], '.', c='k')
x_min, x_max = xlim()
ofs = np.linspace(x_min, x_max)
label = '$\mathrm{PHA}=%.2f\\times(1+%.2f\\times\mathrm{Offset})$' % (a, b)
plot(ofs, a*(1+b*ofs), 'r-', label=label)
xlabel('Offset$\quad$(V)')
ylabel('PHA$\quad$(V)')
legend(frameon=False)
tight_layout()
savefig('%s-offset.pdf' % session)
else:
oc_pha_p = pha_p
oc_pha_n = pha_n
print "Skipped offset correction: correlation coefficient (%f) is too small" % coef
# Check line database
if "%sKa" % atom not in Constants.LE.keys() or "%sKb" % atom not in Constants.LE.keys():
raise ValueError('Unsupported atom: %s' % atom)
# Linearity correction
pha_line_center = np.asarray([ np.median(Analysis.ka(oc_pha_p, sigma=sigma)), np.median(Analysis.kb(oc_pha_p, sigma=sigma)) ])
line_energy = np.asarray([ Constants.LE['%sKa' % atom], Constants.LE['%sKb' % atom] ])
if ignorekb:
a, b = Analysis.fit_linearity([pha_line_center[0]], [line_energy[0]], deg=1)
print "Linearity correction with: PHA = %e * E" % (b)
else:
a, b = Analysis.fit_linearity(pha_line_center, line_energy, deg=2)
print "Linearity correction with: PHA = %e * E^2 + %e * E" % (a, b)
print "MnKb saturation ratio: %.2f %%" % ((pha_line_center[1]/pha_line_center[0])/(line_energy[1]/line_energy[0])*100)
lc_pha_p = Analysis.linearity_correction(oc_pha_p, a, b)
lc_pha_n = Analysis.linearity_correction(oc_pha_n, a, b)
if savedat:
        np.savetxt('%s-linearity.dat' % session, np.array([pha_line_center[0]]) if ignorekb else pha_line_center[np.newaxis,:],
header='%sKa PHA' % atom if ignorekb else '%sKa PHA, %sKb PHA' % (atom, atom), delimiter='\t')
if plotting:
figure()
x = np.linspace(0, 7e3)
if ignorekb:
plot(line_energy[0]/1e3, pha_line_center[0], '+', color='b')
plot(x/1e3, x*b, 'r--')
else:
plot(line_energy/1e3, pha_line_center, '+', color='b')
plot(x/1e3, x**2*a+x*b, 'r--')
xlim((0, 7))
xlabel('Energy$\quad$(keV)')
ylabel('PHA$\quad$(a.u.)')
tight_layout()
savefig('%s-linearity.pdf' % session)
# Energy Spectrum
if plotting:
figure()
hcount, hbin, hpatch = hist(lc_pha_p[lc_pha_p==lc_pha_p]/1e3, bins=7000/binsize, histtype='stepfilled', color='y')
xlim(0, 7)
xlabel('Energy$\quad$(keV)')
ylabel('Count')
tight_layout()
savefig('%s-spec.pdf' % session)
if savedat:
hcount, hbin = np.histogram(lc_pha_p[lc_pha_p==lc_pha_p]/1e3, bins=7000/binsize)
np.savetxt('%s-spec.dat' % session, np.vstack(((hbin[1:]+hbin[:-1])/2, hcount)).T,
header='Energy (keV), Count', delimiter='\t')
# Line fitting
def _line_fit(data, min, line):
# Fit
(dE, width), (dE_error, width_error), e = Analysis.fit(data, binsize=binsize, min=min, line=line, shift=shift, method=method)
if method == "cs":
chi_squared, dof = e
if method in ("mle", "ls"):
print "%s: %.2f +/- %.2f eV @ Ec%+.2f eV" \
% (line, width, width_error, dE)
elif method == "cs":
print "%s: %.2f +/- %.2f eV @ Ec%+.2f eV (Red. chi^2 = %.1f/%d = %.2f)" \
% (line, width, width_error, dE, chi_squared, dof, chi_squared/dof)
return dE, width, width_error
def _line_spectrum(data, min, line, dE, width, width_error):
# Draw histogram
n, bins = Analysis.histogram(data, binsize=binsize)
if method in ("cs"):
gn, gbins = Analysis.group_bin(n, bins, min=min)
else:
# No grouping in mle and ls
gn, gbins = n, bins
ngn = gn/(np.diff(gbins))
ngn_sigma = np.sqrt(gn)/(np.diff(gbins))
cbins = (gbins[1:]+gbins[:-1])/2
if plotting:
figure()
if width_error is not None:
label = 'FWHM$=%.2f\pm %.2f$ eV' % (width, width_error)
else:
label = 'FWHM$=%.2f$ eV (Fixed)' % width
if method == "cs":
errorbar(cbins, ngn, yerr=ngn_sigma, xerr=np.diff(gbins)/2, capsize=0, ecolor='k', fmt=None, label=label)
else:
hist(data, bins=gbins, weights=np.ones(len(data))/binsize, histtype='step', ec='k', label=label)
E = np.linspace(bins.min(), bins.max(), 1000)
model = Analysis.normalization(ngn, gbins, dE, width, line=line, shift=shift) \
* Analysis.line_model(E, dE, width, line=line, shift=shift, full=True)
# Plot theoretical model
plot(E, model[0], 'r-')
# Plot fine structures
for m in model[1:]:
plot(E, m, 'b--')
xlabel('Energy$\quad$(eV)')
ylabel('Normalized Count$\quad$(count/eV)')
legend(frameon=False)
ymin, ymax = ylim()
ylim(ymin, ymax*1.1)
tight_layout()
savefig("%s-%s.pdf" % (session, line))
if savedat:
np.savetxt('%s-%s.dat' % (session, line), np.vstack((cbins, gn)).T,
header='Energy (keV), Count', delimiter='\t')
## Ka
ka = Analysis.ka(lc_pha_p, sigma=sigma)
dE, width, width_error = _line_fit(ka, ka_min, "%sKa" % atom)
_line_spectrum(ka, ka_min, "%sKa" % atom, dE, width, width_error)
## Kb
kb = Analysis.kb(lc_pha_p, sigma=sigma)
if kbfit:
dE, width, width_error = _line_fit(kb, kb_min, "%sKb" % atom)
else:
width_error = None
_line_spectrum(kb, kb_min, "%sKb" % atom, dE, width, width_error)
## Baseline
f_pha_n = lc_pha_n[Filter.median_filter(lc_pha_n, sigma=sigma)]
baseline = Analysis.sigma2fwhm(np.std(f_pha_n))
print "Baseline resolution: %.2f eV" % baseline
n, bins = Analysis.histogram(f_pha_n, binsize=binsize)
if savedat:
np.savetxt('%s-baseline.dat' % session, np.vstack(((bins[1:]+bins[:-1])/2, n)).T,
header='Energy (keV), Count', delimiter='\t')
if plotting:
figure()
label = 'FWHM$=%.2f$ eV' % baseline
hist(f_pha_n, bins=bins, weights=np.ones(len(f_pha_n))/binsize, histtype='step', ec='k', label=label)
mu, sigma = norm.fit(f_pha_n)
E = np.linspace(bins.min(), bins.max(), 1000)
plot(E, norm.pdf(E, loc=mu, scale=sigma)*len(f_pha_n), 'r-')
xlabel('Energy$\quad$(eV)')
ylabel('Normalized Count$\quad$(count/eV)')
legend(frameon=False)
tight_layout()
savefig('%s-baseline.pdf' % session) | mit |
yongshengwang/hue | desktop/core/ext-py/django-openid-auth-0.5/django_openid_auth/tests/test_store.py | 45 | 8151 | # django-openid-auth - OpenID integration for django.contrib.auth
#
# Copyright (C) 2009-2013 Canonical Ltd.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import time
import unittest
from django.test import TestCase
from openid.association import Association as OIDAssociation
from openid.store.nonce import SKEW
from django_openid_auth.models import Association, Nonce
from django_openid_auth.store import DjangoOpenIDStore
class OpenIDStoreTests(TestCase):
def setUp(self):
super(OpenIDStoreTests, self).setUp()
self.store = DjangoOpenIDStore()
def test_storeAssociation(self):
assoc = OIDAssociation('handle', 'secret', 42, 600, 'HMAC-SHA1')
self.store.storeAssociation('server-url', assoc)
dbassoc = Association.objects.get(
server_url='server-url', handle='handle')
self.assertEquals(dbassoc.server_url, 'server-url')
self.assertEquals(dbassoc.handle, 'handle')
self.assertEquals(dbassoc.secret, 'secret'.encode('base-64'))
self.assertEquals(dbassoc.issued, 42)
self.assertEquals(dbassoc.lifetime, 600)
self.assertEquals(dbassoc.assoc_type, 'HMAC-SHA1')
def test_storeAssociation_update_existing(self):
assoc = OIDAssociation('handle', 'secret', 42, 600, 'HMAC-SHA1')
self.store.storeAssociation('server-url', assoc)
# Now update the association with new information.
assoc = OIDAssociation('handle', 'secret2', 420, 900, 'HMAC-SHA256')
self.store.storeAssociation('server-url', assoc)
dbassoc = Association.objects.get(
server_url='server-url', handle='handle')
self.assertEqual(dbassoc.secret, 'secret2'.encode('base-64'))
self.assertEqual(dbassoc.issued, 420)
self.assertEqual(dbassoc.lifetime, 900)
self.assertEqual(dbassoc.assoc_type, 'HMAC-SHA256')
def test_getAssociation(self):
timestamp = int(time.time())
self.store.storeAssociation(
'server-url', OIDAssociation('handle', 'secret', timestamp, 600,
'HMAC-SHA1'))
assoc = self.store.getAssociation('server-url', 'handle')
self.assertTrue(isinstance(assoc, OIDAssociation))
self.assertEquals(assoc.handle, 'handle')
self.assertEquals(assoc.secret, 'secret')
self.assertEquals(assoc.issued, timestamp)
self.assertEquals(assoc.lifetime, 600)
self.assertEquals(assoc.assoc_type, 'HMAC-SHA1')
def test_getAssociation_unknown(self):
assoc = self.store.getAssociation('server-url', 'unknown')
self.assertEquals(assoc, None)
def test_getAssociation_expired(self):
lifetime = 600
timestamp = int(time.time()) - 2 * lifetime
self.store.storeAssociation(
'server-url', OIDAssociation('handle', 'secret', timestamp,
lifetime, 'HMAC-SHA1'))
# The association is not returned, and is removed from the database.
assoc = self.store.getAssociation('server-url', 'handle')
self.assertEquals(assoc, None)
self.assertRaises(Association.DoesNotExist, Association.objects.get,
server_url='server-url', handle='handle')
def test_getAssociation_no_handle(self):
timestamp = int(time.time())
self.store.storeAssociation(
'server-url', OIDAssociation('handle1', 'secret', timestamp + 1,
600, 'HMAC-SHA1'))
self.store.storeAssociation(
'server-url', OIDAssociation('handle2', 'secret', timestamp,
600, 'HMAC-SHA1'))
# The newest handle is returned.
assoc = self.store.getAssociation('server-url', None)
self.assertNotEquals(assoc, None)
self.assertEquals(assoc.handle, 'handle1')
self.assertEquals(assoc.issued, timestamp + 1)
def test_removeAssociation(self):
timestamp = int(time.time())
self.store.storeAssociation(
'server-url', OIDAssociation('handle', 'secret', timestamp, 600,
'HMAC-SHA1'))
self.assertEquals(
self.store.removeAssociation('server-url', 'handle'), True)
self.assertEquals(
self.store.getAssociation('server-url', 'handle'), None)
def test_removeAssociation_unknown(self):
self.assertEquals(
self.store.removeAssociation('server-url', 'unknown'), False)
def test_useNonce(self):
timestamp = time.time()
# The nonce can only be used once.
self.assertEqual(
self.store.useNonce('server-url', timestamp, 'salt'), True)
self.assertEqual(
self.store.useNonce('server-url', timestamp, 'salt'), False)
self.assertEqual(
self.store.useNonce('server-url', timestamp, 'salt'), False)
def test_useNonce_expired(self):
timestamp = time.time() - 2 * SKEW
self.assertEqual(
self.store.useNonce('server-url', timestamp, 'salt'), False)
def test_useNonce_future(self):
timestamp = time.time() + 2 * SKEW
self.assertEqual(
self.store.useNonce('server-url', timestamp, 'salt'), False)
def test_cleanupNonces(self):
timestamp = time.time()
self.assertEqual(
self.store.useNonce('server1', timestamp, 'salt1'), True)
self.assertEqual(
self.store.useNonce('server2', timestamp, 'salt2'), True)
self.assertEqual(
self.store.useNonce('server3', timestamp, 'salt3'), True)
self.assertEqual(Nonce.objects.count(), 3)
self.assertEqual(
self.store.cleanupNonces(_now=timestamp + 2 * SKEW), 3)
self.assertEqual(Nonce.objects.count(), 0)
# The nonces have now been cleared:
self.assertEqual(
self.store.useNonce('server1', timestamp, 'salt1'), True)
self.assertEqual(
self.store.cleanupNonces(_now=timestamp + 2 * SKEW), 1)
self.assertEqual(
self.store.cleanupNonces(_now=timestamp + 2 * SKEW), 0)
def test_cleanupAssociations(self):
timestamp = int(time.time()) - 100
self.store.storeAssociation(
'server-url', OIDAssociation('handle1', 'secret', timestamp,
50, 'HMAC-SHA1'))
self.store.storeAssociation(
'server-url', OIDAssociation('handle2', 'secret', timestamp,
200, 'HMAC-SHA1'))
self.assertEquals(self.store.cleanupAssociations(), 1)
# The second (non-expired) association is left behind.
self.assertNotEqual(self.store.getAssociation('server-url', 'handle2'),
None)
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
| apache-2.0 |
apanju/GMIO_Odoo | addons/crm/__init__.py | 329 | 1265 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import crm
import crm_segmentation
import crm_lead
import sales_team
import calendar_event
import ir_http
import crm_phonecall
import report
import wizard
import res_partner
import res_config
import base_partner_merge
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Kagami/kisa | lib/twisted/internet/cfreactor.py | 53 | 17431 | # -*- test-case-name: twisted.internet.test.test_core -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
A reactor for integrating with U{CFRunLoop<http://bit.ly/cfrunloop>}, the
CoreFoundation main loop used by MacOS X.
This is useful for integrating Twisted with U{PyObjC<http://pyobjc.sf.net/>}
applications.
"""
__all__ = [
'install',
'CFReactor'
]
import sys
from zope.interface import implements
from twisted.internet.interfaces import IReactorFDSet
from twisted.internet.posixbase import PosixReactorBase, _Waker
from twisted.internet.posixbase import _NO_FILEDESC
from twisted.python import log
from CoreFoundation import (
CFRunLoopAddSource, CFRunLoopRemoveSource, CFRunLoopGetMain, CFRunLoopRun,
CFRunLoopStop, CFRunLoopTimerCreate, CFRunLoopAddTimer,
CFRunLoopTimerInvalidate, kCFAllocatorDefault, kCFRunLoopCommonModes,
CFAbsoluteTimeGetCurrent)
from CFNetwork import (
CFSocketCreateWithNative, CFSocketSetSocketFlags, CFSocketEnableCallBacks,
CFSocketCreateRunLoopSource, CFSocketDisableCallBacks, CFSocketInvalidate,
kCFSocketWriteCallBack, kCFSocketReadCallBack, kCFSocketConnectCallBack,
kCFSocketAutomaticallyReenableReadCallBack,
kCFSocketAutomaticallyReenableWriteCallBack)
_READ = 0
_WRITE = 1
_preserveSOError = 1 << 6
class _WakerPlus(_Waker):
"""
The normal Twisted waker will simply wake up the main loop, which causes an
iteration to run, which in turn causes L{PosixReactorBase.runUntilCurrent}
to get invoked.
L{CFReactor} has a slightly different model of iteration, though: rather
than have each iteration process the thread queue, then timed calls, then
file descriptors, each callback is run as it is dispatched by the CFRunLoop
observer which triggered it.
So this waker needs to not only unblock the loop, but also make sure the
work gets done; so, it reschedules the invocation of C{runUntilCurrent} to
be immediate (0 seconds from now) even if there is no timed call work to
do.
"""
def doRead(self):
"""
Wake up the loop and force C{runUntilCurrent} to run immediately in the
next timed iteration.
"""
result = _Waker.doRead(self)
self.reactor._scheduleSimulate(True)
return result
class CFReactor(PosixReactorBase):
"""
The CoreFoundation reactor.
You probably want to use this via the L{install} API.
@ivar _fdmap: a dictionary, mapping an integer (a file descriptor) to a
4-tuple of:
- source: a C{CFRunLoopSource}; the source associated with this
socket.
- socket: a C{CFSocket} wrapping the file descriptor.
- descriptor: an L{IReadDescriptor} and/or L{IWriteDescriptor}
provider.
- read-write: a 2-C{list} of booleans: respectively, whether this
descriptor is currently registered for reading or registered for
writing.
@ivar _idmap: a dictionary, mapping the id() of an L{IReadDescriptor} or
L{IWriteDescriptor} to a C{fd} in L{_fdmap}. Implemented in this
manner so that we don't have to rely (even more) on the hashability of
L{IReadDescriptor} providers, and we know that they won't be collected
since these are kept in sync with C{_fdmap}. Necessary because the
.fileno() of a file descriptor may change at will, so we need to be
able to look up what its file descriptor I{used} to be, so that we can
look it up in C{_fdmap}
@ivar _cfrunloop: the L{CFRunLoop} pyobjc object wrapped by this reactor.
@ivar _inCFLoop: Is L{CFRunLoopRun} currently running?
@type _inCFLoop: C{bool}
@ivar _currentSimulator: if a CFTimer is currently scheduled with the CF
run loop to run Twisted callLater calls, this is a reference to it.
Otherwise, it is C{None}
"""
implements(IReactorFDSet)
def __init__(self, runLoop=None, runner=None):
self._fdmap = {}
self._idmap = {}
if runner is None:
runner = CFRunLoopRun
self._runner = runner
if runLoop is None:
runLoop = CFRunLoopGetMain()
self._cfrunloop = runLoop
PosixReactorBase.__init__(self)
def installWaker(self):
"""
Override C{installWaker} in order to use L{_WakerPlus}; otherwise this
should be exactly the same as the parent implementation.
"""
if not self.waker:
self.waker = _WakerPlus(self)
self._internalReaders.add(self.waker)
self.addReader(self.waker)
def _socketCallback(self, cfSocket, callbackType,
ignoredAddress, ignoredData, context):
"""
The socket callback issued by CFRunLoop. This will issue C{doRead} or
C{doWrite} calls to the L{IReadDescriptor} and L{IWriteDescriptor}
registered with the file descriptor that we are being notified of.
@param cfSocket: The L{CFSocket} which has got some activity.
@param callbackType: The type of activity that we are being notified
of. Either L{kCFSocketReadCallBack} or L{kCFSocketWriteCallBack}.
@param ignoredAddress: Unused, because this is not used for either of
the callback types we register for.
@param ignoredData: Unused, because this is not used for either of the
callback types we register for.
@param context: The data associated with this callback by
L{CFSocketCreateWithNative} (in L{CFReactor._watchFD}). A 2-tuple
of C{(int, CFRunLoopSource)}.
"""
(fd, smugglesrc) = context
if fd not in self._fdmap:
# Spurious notifications seem to be generated sometimes if you
# CFSocketDisableCallBacks in the middle of an event. I don't know
# about this FD, any more, so let's get rid of it.
CFRunLoopRemoveSource(
self._cfrunloop, smugglesrc, kCFRunLoopCommonModes
)
return
why = None
isRead = False
src, skt, readWriteDescriptor, rw = self._fdmap[fd]
try:
if readWriteDescriptor.fileno() == -1:
why = _NO_FILEDESC
else:
isRead = callbackType == kCFSocketReadCallBack
# CFSocket seems to deliver duplicate read/write notifications
# sometimes, especially a duplicate writability notification
# when first registering the socket. This bears further
# investigation, since I may have been mis-interpreting the
# behavior I was seeing. (Running the full Twisted test suite,
# while thorough, is not always entirely clear.) Until this has
                # been more thoroughly investigated, we consult our own
# reading/writing state flags to determine whether we should
# actually attempt a doRead/doWrite first. -glyph
if isRead:
if rw[_READ]:
why = log.callWithLogger(
readWriteDescriptor, readWriteDescriptor.doRead)
else:
if rw[_WRITE]:
why = log.callWithLogger(
readWriteDescriptor, readWriteDescriptor.doWrite)
except:
why = sys.exc_info()[1]
log.err()
if why:
self._disconnectSelectable(readWriteDescriptor, why, isRead)
def _watchFD(self, fd, descr, flag):
"""
Register a file descriptor with the L{CFRunLoop}, or modify its state
so that it's listening for both notifications (read and write) rather
than just one; used to implement C{addReader} and C{addWriter}.
@param fd: The file descriptor.
@type fd: C{int}
@param descr: the L{IReadDescriptor} or L{IWriteDescriptor}
@param flag: the flag to register for callbacks on, either
L{kCFSocketReadCallBack} or L{kCFSocketWriteCallBack}
"""
if fd == -1:
raise RuntimeError("Invalid file descriptor.")
if fd in self._fdmap:
src, cfs, gotdescr, rw = self._fdmap[fd]
# do I need to verify that it's the same descr?
else:
ctx = []
ctx.append(fd)
cfs = CFSocketCreateWithNative(
kCFAllocatorDefault, fd,
kCFSocketReadCallBack | kCFSocketWriteCallBack |
kCFSocketConnectCallBack,
self._socketCallback, ctx
)
CFSocketSetSocketFlags(
cfs,
kCFSocketAutomaticallyReenableReadCallBack |
kCFSocketAutomaticallyReenableWriteCallBack |
# This extra flag is to ensure that CF doesn't (destructively,
# because destructively is the only way to do it) retrieve
# SO_ERROR and thereby break twisted.internet.tcp.BaseClient,
# which needs SO_ERROR to tell it whether or not it needs to
# call connect_ex a second time.
_preserveSOError
)
src = CFSocketCreateRunLoopSource(kCFAllocatorDefault, cfs, 0)
ctx.append(src)
CFRunLoopAddSource(self._cfrunloop, src, kCFRunLoopCommonModes)
CFSocketDisableCallBacks(
cfs,
kCFSocketReadCallBack | kCFSocketWriteCallBack |
kCFSocketConnectCallBack
)
rw = [False, False]
self._idmap[id(descr)] = fd
self._fdmap[fd] = src, cfs, descr, rw
rw[self._flag2idx(flag)] = True
CFSocketEnableCallBacks(cfs, flag)
def _flag2idx(self, flag):
"""
Convert a C{kCFSocket...} constant to an index into the read/write
state list (C{_READ} or C{_WRITE}) (the 4th element of the value of
C{self._fdmap}).
@param flag: C{kCFSocketReadCallBack} or C{kCFSocketWriteCallBack}
@return: C{_READ} or C{_WRITE}
"""
return {kCFSocketReadCallBack: _READ,
kCFSocketWriteCallBack: _WRITE}[flag]
def _unwatchFD(self, fd, descr, flag):
"""
Unregister a file descriptor with the L{CFRunLoop}, or modify its state
so that it's listening for only one notification (read or write) as
opposed to both; used to implement C{removeReader} and C{removeWriter}.
@param fd: a file descriptor
@type fd: C{int}
@param descr: an L{IReadDescriptor} or L{IWriteDescriptor}
@param flag: L{kCFSocketWriteCallBack} L{kCFSocketReadCallBack}
"""
if id(descr) not in self._idmap:
return
if fd == -1:
# need to deal with it in this case, I think.
realfd = self._idmap[id(descr)]
else:
realfd = fd
src, cfs, descr, rw = self._fdmap[realfd]
CFSocketDisableCallBacks(cfs, flag)
rw[self._flag2idx(flag)] = False
if not rw[_READ] and not rw[_WRITE]:
del self._idmap[id(descr)]
del self._fdmap[realfd]
CFRunLoopRemoveSource(self._cfrunloop, src, kCFRunLoopCommonModes)
CFSocketInvalidate(cfs)
def addReader(self, reader):
"""
Implement L{IReactorFDSet.addReader}.
"""
self._watchFD(reader.fileno(), reader, kCFSocketReadCallBack)
def addWriter(self, writer):
"""
Implement L{IReactorFDSet.addWriter}.
"""
self._watchFD(writer.fileno(), writer, kCFSocketWriteCallBack)
def removeReader(self, reader):
"""
Implement L{IReactorFDSet.removeReader}.
"""
self._unwatchFD(reader.fileno(), reader, kCFSocketReadCallBack)
def removeWriter(self, writer):
"""
Implement L{IReactorFDSet.removeWriter}.
"""
self._unwatchFD(writer.fileno(), writer, kCFSocketWriteCallBack)
def removeAll(self):
"""
Implement L{IReactorFDSet.removeAll}.
"""
allDesc = set([descr for src, cfs, descr, rw in self._fdmap.values()])
allDesc -= set(self._internalReaders)
for desc in allDesc:
self.removeReader(desc)
self.removeWriter(desc)
return list(allDesc)
def getReaders(self):
"""
Implement L{IReactorFDSet.getReaders}.
"""
return [descr for src, cfs, descr, rw in self._fdmap.values()
if rw[_READ]]
def getWriters(self):
"""
Implement L{IReactorFDSet.getWriters}.
"""
return [descr for src, cfs, descr, rw in self._fdmap.values()
if rw[_WRITE]]
def _moveCallLaterSooner(self, tple):
"""
Override L{PosixReactorBase}'s implementation of L{IDelayedCall.reset}
so that it will immediately reschedule. Normally
C{_moveCallLaterSooner} depends on the fact that C{runUntilCurrent} is
always run before the mainloop goes back to sleep, so this forces it to
immediately recompute how long the loop needs to stay asleep.
"""
result = PosixReactorBase._moveCallLaterSooner(self, tple)
self._scheduleSimulate()
return result
_inCFLoop = False
def mainLoop(self):
"""
Run the runner (L{CFRunLoopRun} or something that calls it), which runs
the run loop until C{crash()} is called.
"""
self._inCFLoop = True
try:
self._runner()
finally:
self._inCFLoop = False
_currentSimulator = None
def _scheduleSimulate(self, force=False):
"""
Schedule a call to C{self.runUntilCurrent}. This will cancel the
currently scheduled call if it is already scheduled.
@param force: Even if there are no timed calls, make sure that
C{runUntilCurrent} runs immediately (in a 0-seconds-from-now
            C{CFRunLoopTimer}). This is necessary for calls which need to
trigger behavior of C{runUntilCurrent} other than running timed
calls, such as draining the thread call queue or calling C{crash()}
when the appropriate flags are set.
@type force: C{bool}
"""
if self._currentSimulator is not None:
CFRunLoopTimerInvalidate(self._currentSimulator)
self._currentSimulator = None
timeout = self.timeout()
if force:
timeout = 0.0
if timeout is not None:
fireDate = (CFAbsoluteTimeGetCurrent() + timeout)
def simulate(cftimer, extra):
self._currentSimulator = None
self.runUntilCurrent()
self._scheduleSimulate()
c = self._currentSimulator = CFRunLoopTimerCreate(
kCFAllocatorDefault, fireDate,
0, 0, 0, simulate, None
)
CFRunLoopAddTimer(self._cfrunloop, c, kCFRunLoopCommonModes)
def callLater(self, _seconds, _f, *args, **kw):
"""
Implement L{IReactorTime.callLater}.
"""
delayedCall = PosixReactorBase.callLater(
self, _seconds, _f, *args, **kw
)
self._scheduleSimulate()
return delayedCall
def stop(self):
"""
Implement L{IReactorCore.stop}.
"""
PosixReactorBase.stop(self)
self._scheduleSimulate(True)
def crash(self):
"""
        Implement L{IReactorCore.crash}.
"""
wasStarted = self._started
PosixReactorBase.crash(self)
if self._inCFLoop:
self._stopNow()
else:
if wasStarted:
self.callLater(0, self._stopNow)
def _stopNow(self):
"""
Immediately stop the CFRunLoop (which must be running!).
"""
CFRunLoopStop(self._cfrunloop)
def iterate(self, delay=0):
"""
Emulate the behavior of C{iterate()} for things that want to call it,
by letting the loop run for a little while and then scheduling a timed
call to exit it.
"""
self.callLater(delay, self._stopNow)
self.mainLoop()
def install(runLoop=None, runner=None):
"""
Configure the twisted mainloop to be run inside CFRunLoop.
@param runLoop: the run loop to use.
@param runner: the function to call in order to actually invoke the main
loop. This will default to L{CFRunLoopRun} if not specified. However,
this is not an appropriate choice for GUI applications, as you need to
run NSApplicationMain (or something like it). For example, to run the
Twisted mainloop in a PyObjC application, your C{main.py} should look
something like this::
from PyObjCTools import AppHelper
from twisted.internet.cfreactor import install
install(runner=AppHelper.runEventLoop)
# initialize your application
reactor.run()
@return: The installed reactor.
@rtype: L{CFReactor}
"""
reactor = CFReactor(runLoop=runLoop, runner=runner)
from twisted.internet.main import installReactor
installReactor(reactor)
return reactor
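# Minimal non-GUI usage sketch (assumes the default CFRunLoopRun runner is
# acceptable; install() must run before twisted.internet.reactor is imported):
#
#   from twisted.internet.cfreactor import install
#   install()
#   from twisted.internet import reactor
#   reactor.callLater(1, reactor.stop)
#   reactor.run()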
| cc0-1.0 |
milafrerichs/geonode | geonode/groups/admin.py | 7 | 1218 | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.contrib import admin
from geonode.groups.models import GroupMember, GroupProfile, GroupInvitation
class GroupMemberInline(admin.TabularInline):
model = GroupMember
class GroupAdmin(admin.ModelAdmin):
inlines = [
GroupMemberInline
]
exclude = ['group', ]
admin.site.register(GroupProfile, GroupAdmin)
admin.site.register(GroupInvitation)
| gpl-3.0 |
Allow2CEO/browser-ios | brave/node_modules/ad-block/vendor/depot_tools/third_party/gsutil/gslib/commands/cat.py | 51 | 4387 | # Copyright 2011 Google Inc. All Rights Reserved.
# Copyright 2011, Nexenta Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from gslib.command import Command
from gslib.command import COMMAND_NAME
from gslib.command import COMMAND_NAME_ALIASES
from gslib.command import CONFIG_REQUIRED
from gslib.command import FILE_URIS_OK
from gslib.command import MAX_ARGS
from gslib.command import MIN_ARGS
from gslib.command import PROVIDER_URIS_OK
from gslib.command import SUPPORTED_SUB_ARGS
from gslib.command import URIS_START_ARG
from gslib.exception import CommandException
from gslib.help_provider import HELP_NAME
from gslib.help_provider import HELP_NAME_ALIASES
from gslib.help_provider import HELP_ONE_LINE_SUMMARY
from gslib.help_provider import HELP_TEXT
from gslib.help_provider import HelpType
from gslib.help_provider import HELP_TYPE
from gslib.util import NO_MAX
from gslib.wildcard_iterator import ContainsWildcard
_detailed_help_text = ("""
<B>SYNOPSIS</B>
gsutil cat [-h] uri...
<B>DESCRIPTION</B>
The cat command outputs the contents of one or more URIs to stdout.
It is equivalent to doing:
gsutil cp uri... -
(The final '-' causes gsutil to stream the output to stdout.)
<B>OPTIONS</B>
-h Prints short header for each object. For example:
gsutil cat -h gs://bucket/meeting_notes/2012_Feb/*.txt
""")
class CatCommand(Command):
"""Implementation of gsutil cat command."""
# Command specification (processed by parent class).
command_spec = {
# Name of command.
COMMAND_NAME : 'cat',
# List of command name aliases.
COMMAND_NAME_ALIASES : [],
# Min number of args required by this command.
MIN_ARGS : 0,
# Max number of args required by this command, or NO_MAX.
MAX_ARGS : NO_MAX,
# Getopt-style string specifying acceptable sub args.
SUPPORTED_SUB_ARGS : 'hv',
# True if file URIs acceptable for this command.
FILE_URIS_OK : False,
# True if provider-only URIs acceptable for this command.
PROVIDER_URIS_OK : False,
# Index in args of first URI arg.
URIS_START_ARG : 0,
# True if must configure gsutil before running command.
CONFIG_REQUIRED : True,
}
help_spec = {
# Name of command or auxiliary help info for which this help applies.
HELP_NAME : 'cat',
# List of help name aliases.
HELP_NAME_ALIASES : [],
# Type of help:
HELP_TYPE : HelpType.COMMAND_HELP,
# One line summary of this help.
HELP_ONE_LINE_SUMMARY : 'Concatenate object content to stdout',
# The full help text.
HELP_TEXT : _detailed_help_text,
}
# Command entry point.
def RunCommand(self):
show_header = False
if self.sub_opts:
for o, unused_a in self.sub_opts:
if o == '-h':
show_header = True
elif o == '-v':
self.THREADED_LOGGER.info('WARNING: The %s -v option is no longer'
' needed, and will eventually be removed.\n'
% self.command_name)
printed_one = False
    # Redirect everything other than the object contents to stderr, so that
    # only the object data itself is written to stdout.
cat_outfd = sys.stdout
sys.stdout = sys.stderr
did_some_work = False
for uri_str in self.args:
for uri in self.WildcardIterator(uri_str).IterUris():
if not uri.names_object():
raise CommandException('"%s" command must specify objects.' %
self.command_name)
did_some_work = True
if show_header:
if printed_one:
print
print '==> %s <==' % uri.__str__()
printed_one = True
key = uri.get_key(False, self.headers)
key.get_file(cat_outfd, self.headers)
sys.stdout = cat_outfd
if not did_some_work:
raise CommandException('No URIs matched')
return 0
| mpl-2.0 |
mwv/scikit-learn | examples/linear_model/plot_sgd_loss_functions.py | 249 | 1095 | """
==========================
SGD: convex loss functions
==========================
A plot that compares the various convex loss functions supported by
:class:`sklearn.linear_model.SGDClassifier` .
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
def modified_huber_loss(y_true, y_pred):
z = y_pred * y_true
loss = -4 * z
loss[z >= -1] = (1 - z[z >= -1]) ** 2
loss[z >= 1.] = 0
return loss
xmin, xmax = -4, 4
xx = np.linspace(xmin, xmax, 100)
plt.plot([xmin, 0, 0, xmax], [1, 1, 0, 0], 'k-',
label="Zero-one loss")
plt.plot(xx, np.where(xx < 1, 1 - xx, 0), 'g-',
label="Hinge loss")
plt.plot(xx, -np.minimum(xx, 0), 'm-',
label="Perceptron loss")
plt.plot(xx, np.log2(1 + np.exp(-xx)), 'r-',
label="Log loss")
plt.plot(xx, np.where(xx < 1, 1 - xx, 0) ** 2, 'b-',
label="Squared hinge loss")
plt.plot(xx, modified_huber_loss(xx, 1), 'y--',
label="Modified Huber loss")
plt.ylim((0, 8))
plt.legend(loc="upper right")
plt.xlabel(r"Decision function $f(x)$")
plt.ylabel("$L(y, f(x))$")
plt.show()
| bsd-3-clause |
tinloaf/home-assistant | homeassistant/components/binary_sensor/netatmo.py | 5 | 7407 | """
Support for the Netatmo binary sensors.
These binary sensors are based on events detected by the Netatmo cameras.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.netatmo/.
"""
import logging
import voluptuous as vol
from homeassistant.components.binary_sensor import (
BinarySensorDevice, PLATFORM_SCHEMA)
from homeassistant.components.netatmo import CameraData
from homeassistant.const import CONF_TIMEOUT
from homeassistant.helpers import config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['netatmo']
# These are the available sensors mapped to binary_sensor class
WELCOME_SENSOR_TYPES = {
"Someone known": "motion",
"Someone unknown": "motion",
"Motion": "motion",
}
PRESENCE_SENSOR_TYPES = {
"Outdoor motion": "motion",
"Outdoor human": "motion",
"Outdoor animal": "motion",
"Outdoor vehicle": "motion"
}
TAG_SENSOR_TYPES = {
"Tag Vibration": "vibration",
"Tag Open": "opening"
}
CONF_HOME = 'home'
CONF_CAMERAS = 'cameras'
CONF_WELCOME_SENSORS = 'welcome_sensors'
CONF_PRESENCE_SENSORS = 'presence_sensors'
CONF_TAG_SENSORS = 'tag_sensors'
DEFAULT_TIMEOUT = 90
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_CAMERAS, default=[]):
vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_HOME): cv.string,
vol.Optional(CONF_PRESENCE_SENSORS, default=list(PRESENCE_SENSOR_TYPES)):
vol.All(cv.ensure_list, [vol.In(PRESENCE_SENSOR_TYPES)]),
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
vol.Optional(CONF_WELCOME_SENSORS, default=list(WELCOME_SENSOR_TYPES)):
vol.All(cv.ensure_list, [vol.In(WELCOME_SENSOR_TYPES)]),
})
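# Illustrative configuration.yaml sketch for this schema (the home and camera
# names below are placeholders):
#
#   binary_sensor:
#     - platform: netatmo
#       home: My home
#       timeout: 90
#       cameras:
#         - Front door camera
#       welcome_sensors:
#         - Someone known
#         - Motion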
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the access to Netatmo binary sensor."""
netatmo = hass.components.netatmo
home = config.get(CONF_HOME)
timeout = config.get(CONF_TIMEOUT)
if timeout is None:
timeout = DEFAULT_TIMEOUT
module_name = None
import pyatmo
try:
data = CameraData(netatmo.NETATMO_AUTH, home)
if not data.get_camera_names():
return None
except pyatmo.NoDevice:
return None
welcome_sensors = config.get(
CONF_WELCOME_SENSORS, WELCOME_SENSOR_TYPES)
presence_sensors = config.get(
CONF_PRESENCE_SENSORS, PRESENCE_SENSOR_TYPES)
tag_sensors = config.get(CONF_TAG_SENSORS, TAG_SENSOR_TYPES)
for camera_name in data.get_camera_names():
camera_type = data.get_camera_type(camera=camera_name, home=home)
if camera_type == 'NACamera':
if CONF_CAMERAS in config:
if config[CONF_CAMERAS] != [] and \
camera_name not in config[CONF_CAMERAS]:
continue
for variable in welcome_sensors:
add_entities([NetatmoBinarySensor(
data, camera_name, module_name, home, timeout,
camera_type, variable)], True)
if camera_type == 'NOC':
if CONF_CAMERAS in config:
if config[CONF_CAMERAS] != [] and \
camera_name not in config[CONF_CAMERAS]:
continue
for variable in presence_sensors:
add_entities([NetatmoBinarySensor(
data, camera_name, module_name, home, timeout,
camera_type, variable)], True)
for module_name in data.get_module_names(camera_name):
for variable in tag_sensors:
camera_type = None
add_entities([NetatmoBinarySensor(
data, camera_name, module_name, home, timeout,
camera_type, variable)], True)
class NetatmoBinarySensor(BinarySensorDevice):
"""Represent a single binary sensor in a Netatmo Camera device."""
def __init__(self, data, camera_name, module_name, home,
timeout, camera_type, sensor):
"""Set up for access to the Netatmo camera events."""
self._data = data
self._camera_name = camera_name
self._module_name = module_name
self._home = home
self._timeout = timeout
if home:
self._name = '{} / {}'.format(home, camera_name)
else:
self._name = camera_name
if module_name:
self._name += ' / ' + module_name
self._sensor_name = sensor
self._name += ' ' + sensor
self._cameratype = camera_type
self._state = None
@property
def name(self):
"""Return the name of the Netatmo device and this sensor."""
return self._name
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
if self._cameratype == 'NACamera':
return WELCOME_SENSOR_TYPES.get(self._sensor_name)
if self._cameratype == 'NOC':
return PRESENCE_SENSOR_TYPES.get(self._sensor_name)
return TAG_SENSOR_TYPES.get(self._sensor_name)
@property
def is_on(self):
"""Return true if binary sensor is on."""
return self._state
def update(self):
"""Request an update from the Netatmo API."""
self._data.update()
self._data.update_event()
if self._cameratype == 'NACamera':
if self._sensor_name == "Someone known":
self._state =\
self._data.camera_data.someoneKnownSeen(
self._home, self._camera_name, self._timeout)
elif self._sensor_name == "Someone unknown":
self._state =\
self._data.camera_data.someoneUnknownSeen(
self._home, self._camera_name, self._timeout)
elif self._sensor_name == "Motion":
self._state =\
self._data.camera_data.motionDetected(
self._home, self._camera_name, self._timeout)
elif self._cameratype == 'NOC':
if self._sensor_name == "Outdoor motion":
self._state =\
self._data.camera_data.outdoormotionDetected(
self._home, self._camera_name, self._timeout)
elif self._sensor_name == "Outdoor human":
self._state =\
self._data.camera_data.humanDetected(
self._home, self._camera_name, self._timeout)
elif self._sensor_name == "Outdoor animal":
self._state =\
self._data.camera_data.animalDetected(
self._home, self._camera_name, self._timeout)
elif self._sensor_name == "Outdoor vehicle":
self._state =\
self._data.camera_data.carDetected(
self._home, self._camera_name, self._timeout)
if self._sensor_name == "Tag Vibration":
self._state =\
self._data.camera_data.moduleMotionDetected(
self._home, self._module_name, self._camera_name,
self._timeout)
elif self._sensor_name == "Tag Open":
self._state =\
self._data.camera_data.moduleOpened(
self._home, self._module_name, self._camera_name,
self._timeout)
| apache-2.0 |
cxx-hep/root-cern | interpreter/llvm/src/tools/clang/bindings/python/examples/cindex/cindex-includes.py | 110 | 1644 | #!/usr/bin/env python
#===- cindex-includes.py - cindex/Python Inclusion Graph -----*- python -*--===#
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
"""
A simple command line tool for dumping a Graphviz description (dot) that
describes include dependencies.
"""
def main():
import sys
from clang.cindex import Index
from optparse import OptionParser, OptionGroup
parser = OptionParser("usage: %prog [options] {filename} [clang-args*]")
parser.disable_interspersed_args()
(opts, args) = parser.parse_args()
if len(args) == 0:
        parser.error('invalid number of arguments')
# FIXME: Add an output file option
out = sys.stdout
index = Index.create()
tu = index.parse(None, args)
if not tu:
parser.error("unable to load input")
# A helper function for generating the node name.
def name(f):
if f:
return "\"" + f.name + "\""
# Generate the include graph
out.write("digraph G {\n")
for i in tu.get_includes():
line = " ";
if i.is_input_file:
# Always write the input file as a node just in case it doesn't
# actually include anything. This would generate a 1 node graph.
line += name(i.include)
else:
line += '%s->%s' % (name(i.source), name(i.include))
line += "\n";
out.write(line)
out.write("}\n")
if __name__ == '__main__':
main()
| lgpl-2.1 |
vicky2135/lucious | oscar/lib/python2.7/site-packages/IPython/utils/text.py | 8 | 23504 | # encoding: utf-8
"""
Utilities for working with strings and text.
Inheritance diagram:
.. inheritance-diagram:: IPython.utils.text
:parts: 3
"""
from __future__ import absolute_import
import os
import re
import sys
import textwrap
from string import Formatter
try:
from pathlib import Path
except ImportError:
# Python 2 backport
from pathlib2 import Path
from IPython.testing.skipdoctest import skip_doctest_py3, skip_doctest
from IPython.utils import py3compat
# datetime.strftime date format for ipython
if sys.platform == 'win32':
date_format = "%B %d, %Y"
else:
date_format = "%B %-d, %Y"
class LSString(str):
"""String derivative with a special access attributes.
These are normal strings, but with the special attributes:
.l (or .list) : value as list (split on newlines).
.n (or .nlstr): original value (the string itself).
.s (or .spstr): value as whitespace-separated string.
    .p (or .paths): list of pathlib.Path objects for lines that exist as paths.
Any values which require transformations are computed only once and
cached.
Such strings are very useful to efficiently interact with the shell, which
typically only understands whitespace-separated options for commands."""
def get_list(self):
try:
return self.__list
except AttributeError:
self.__list = self.split('\n')
return self.__list
l = list = property(get_list)
def get_spstr(self):
try:
return self.__spstr
except AttributeError:
self.__spstr = self.replace('\n',' ')
return self.__spstr
s = spstr = property(get_spstr)
def get_nlstr(self):
return self
n = nlstr = property(get_nlstr)
def get_paths(self):
try:
return self.__paths
except AttributeError:
self.__paths = [Path(p) for p in self.split('\n') if os.path.exists(p)]
return self.__paths
p = paths = property(get_paths)
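# Quick sketch of the accessors: s = LSString("a\nb") gives s.l == ['a', 'b'],
# s.s == 'a b' and s.n == 'a\nb'; s.p lists only the lines that exist as paths.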
# FIXME: We need to reimplement type specific displayhook and then add this
# back as a custom printer. This should also be moved outside utils into the
# core.
# def print_lsstring(arg):
# """ Prettier (non-repr-like) and more informative printer for LSString """
# print "LSString (.p, .n, .l, .s available). Value:"
# print arg
#
#
# print_lsstring = result_display.when_type(LSString)(print_lsstring)
class SList(list):
"""List derivative with a special access attributes.
These are normal lists, but with the special attributes:
* .l (or .list) : value as list (the list itself).
* .n (or .nlstr): value as a string, joined on newlines.
* .s (or .spstr): value as a string, joined on spaces.
    * .p (or .paths): list of pathlib.Path objects for items that exist as paths.
Any values which require transformations are computed only once and
cached."""
def get_list(self):
return self
l = list = property(get_list)
def get_spstr(self):
try:
return self.__spstr
except AttributeError:
self.__spstr = ' '.join(self)
return self.__spstr
s = spstr = property(get_spstr)
def get_nlstr(self):
try:
return self.__nlstr
except AttributeError:
self.__nlstr = '\n'.join(self)
return self.__nlstr
n = nlstr = property(get_nlstr)
def get_paths(self):
try:
return self.__paths
except AttributeError:
self.__paths = [Path(p) for p in self if os.path.exists(p)]
return self.__paths
p = paths = property(get_paths)
def grep(self, pattern, prune = False, field = None):
""" Return all strings matching 'pattern' (a regex or callable)
This is case-insensitive. If prune is true, return all items
NOT matching the pattern.
If field is specified, the match must occur in the specified
whitespace-separated field.
Examples::
a.grep( lambda x: x.startswith('C') )
a.grep('Cha.*log', prune=1)
a.grep('chm', field=-1)
"""
def match_target(s):
if field is None:
return s
parts = s.split()
try:
tgt = parts[field]
return tgt
except IndexError:
return ""
if isinstance(pattern, py3compat.string_types):
pred = lambda x : re.search(pattern, x, re.IGNORECASE)
else:
pred = pattern
if not prune:
return SList([el for el in self if pred(match_target(el))])
else:
return SList([el for el in self if not pred(match_target(el))])
def fields(self, *fields):
""" Collect whitespace-separated fields from string list
Allows quick awk-like usage of string lists.
Example data (in var a, created by 'a = !ls -l')::
-rwxrwxrwx 1 ville None 18 Dec 14 2006 ChangeLog
drwxrwxrwx+ 6 ville None 0 Oct 24 18:05 IPython
* ``a.fields(0)`` is ``['-rwxrwxrwx', 'drwxrwxrwx+']``
* ``a.fields(1,0)`` is ``['1 -rwxrwxrwx', '6 drwxrwxrwx+']``
(note the joining by space).
* ``a.fields(-1)`` is ``['ChangeLog', 'IPython']``
IndexErrors are ignored.
Without args, fields() just split()'s the strings.
"""
if len(fields) == 0:
return [el.split() for el in self]
res = SList()
for el in [f.split() for f in self]:
lineparts = []
for fd in fields:
try:
lineparts.append(el[fd])
except IndexError:
pass
if lineparts:
res.append(" ".join(lineparts))
return res
def sort(self,field= None, nums = False):
""" sort by specified fields (see fields())
Example::
a.sort(1, nums = True)
Sorts a by second field, in numerical order (so that 21 > 3)
"""
#decorate, sort, undecorate
if field is not None:
dsu = [[SList([line]).fields(field), line] for line in self]
else:
dsu = [[line, line] for line in self]
if nums:
for i in range(len(dsu)):
numstr = "".join([ch for ch in dsu[i][0] if ch.isdigit()])
try:
n = int(numstr)
except ValueError:
n = 0
dsu[i][0] = n
dsu.sort()
return SList([t[1] for t in dsu])
# FIXME: We need to reimplement type specific displayhook and then add this
# back as a custom printer. This should also be moved outside utils into the
# core.
# def print_slist(arg):
# """ Prettier (non-repr-like) and more informative printer for SList """
# print "SList (.p, .n, .l, .s, .grep(), .fields(), sort() available):"
# if hasattr(arg, 'hideonce') and arg.hideonce:
# arg.hideonce = False
# return
#
# nlprint(arg) # This was a nested list printer, now removed.
#
# print_slist = result_display.when_type(SList)(print_slist)
def indent(instr,nspaces=4, ntabs=0, flatten=False):
"""Indent a string a given number of spaces or tabstops.
indent(str,nspaces=4,ntabs=0) -> indent str by ntabs+nspaces.
Parameters
----------
instr : basestring
The string to be indented.
nspaces : int (default: 4)
The number of spaces to be indented.
ntabs : int (default: 0)
The number of tabs to be indented.
flatten : bool (default: False)
Whether to scrub existing indentation. If True, all lines will be
aligned to the same indentation. If False, existing indentation will
be strictly increased.
Returns
-------
str|unicode : string indented by ntabs and nspaces.
"""
if instr is None:
return
ind = '\t'*ntabs+' '*nspaces
if flatten:
pat = re.compile(r'^\s*', re.MULTILINE)
else:
pat = re.compile(r'^', re.MULTILINE)
outstr = re.sub(pat, ind, instr)
if outstr.endswith(os.linesep+ind):
return outstr[:-len(ind)]
else:
return outstr
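# For example (sketch): indent("foo\nbar", nspaces=2) returns "  foo\n  bar",
# while flatten=True would first strip any existing leading whitespace.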
def list_strings(arg):
"""Always return a list of strings, given a string or list of strings
as input.
Examples
--------
::
In [7]: list_strings('A single string')
Out[7]: ['A single string']
In [8]: list_strings(['A single string in a list'])
Out[8]: ['A single string in a list']
In [9]: list_strings(['A','list','of','strings'])
Out[9]: ['A', 'list', 'of', 'strings']
"""
if isinstance(arg, py3compat.string_types): return [arg]
else: return arg
def marquee(txt='',width=78,mark='*'):
"""Return the input string centered in a 'marquee'.
Examples
--------
::
In [16]: marquee('A test',40)
Out[16]: '**************** A test ****************'
In [17]: marquee('A test',40,'-')
Out[17]: '---------------- A test ----------------'
In [18]: marquee('A test',40,' ')
Out[18]: ' A test '
"""
if not txt:
return (mark*width)[:width]
nmark = (width-len(txt)-2)//len(mark)//2
    if nmark < 0: nmark = 0
marks = mark*nmark
return '%s %s %s' % (marks,txt,marks)
ini_spaces_re = re.compile(r'^(\s+)')
def num_ini_spaces(strng):
"""Return the number of initial spaces in a string"""
ini_spaces = ini_spaces_re.match(strng)
if ini_spaces:
return ini_spaces.end()
else:
return 0
def format_screen(strng):
"""Format a string for screen printing.
This removes some latex-type format codes."""
# Paragraph continue
par_re = re.compile(r'\\$',re.MULTILINE)
strng = par_re.sub('',strng)
return strng
def dedent(text):
"""Equivalent of textwrap.dedent that ignores unindented first line.
This means it will still dedent strings like:
'''foo
is a bar
'''
For use in wrap_paragraphs.
"""
if text.startswith('\n'):
# text starts with blank line, don't ignore the first line
return textwrap.dedent(text)
# split first line
splits = text.split('\n',1)
if len(splits) == 1:
# only one line
return textwrap.dedent(text)
first, rest = splits
# dedent everything but the first line
rest = textwrap.dedent(rest)
return '\n'.join([first, rest])
def wrap_paragraphs(text, ncols=80):
"""Wrap multiple paragraphs to fit a specified width.
This is equivalent to textwrap.wrap, but with support for multiple
paragraphs, as separated by empty lines.
Returns
-------
list of complete paragraphs, wrapped to fill `ncols` columns.
"""
paragraph_re = re.compile(r'\n(\s*\n)+', re.MULTILINE)
text = dedent(text).strip()
paragraphs = paragraph_re.split(text)[::2] # every other entry is space
out_ps = []
indent_re = re.compile(r'\n\s+', re.MULTILINE)
for p in paragraphs:
# presume indentation that survives dedent is meaningful formatting,
# so don't fill unless text is flush.
if indent_re.search(p) is None:
# wrap paragraph
p = textwrap.fill(p, ncols)
out_ps.append(p)
return out_ps
def long_substr(data):
"""Return the longest common substring in a list of strings.
Credit: http://stackoverflow.com/questions/2892931/longest-common-substring-from-more-than-two-strings-python
"""
substr = ''
if len(data) > 1 and len(data[0]) > 0:
for i in range(len(data[0])):
for j in range(len(data[0])-i+1):
if j > len(substr) and all(data[0][i:i+j] in x for x in data):
substr = data[0][i:i+j]
elif len(data) == 1:
substr = data[0]
return substr
def strip_email_quotes(text):
"""Strip leading email quotation characters ('>').
Removes any combination of leading '>' interspersed with whitespace that
appears *identically* in all lines of the input text.
Parameters
----------
text : str
Examples
--------
Simple uses::
In [2]: strip_email_quotes('> > text')
Out[2]: 'text'
In [3]: strip_email_quotes('> > text\\n> > more')
Out[3]: 'text\\nmore'
Note how only the common prefix that appears in all lines is stripped::
In [4]: strip_email_quotes('> > text\\n> > more\\n> more...')
Out[4]: '> text\\n> more\\nmore...'
So if any line has no quote marks ('>') , then none are stripped from any
of them ::
In [5]: strip_email_quotes('> > text\\n> > more\\nlast different')
Out[5]: '> > text\\n> > more\\nlast different'
"""
lines = text.splitlines()
matches = set()
for line in lines:
prefix = re.match(r'^(\s*>[ >]*)', line)
if prefix:
matches.add(prefix.group(1))
else:
break
else:
prefix = long_substr(list(matches))
if prefix:
strip = len(prefix)
text = '\n'.join([ ln[strip:] for ln in lines])
return text
def strip_ansi(source):
"""
Remove ansi escape codes from text.
Parameters
----------
source : str
Source to remove the ansi from
"""
return re.sub(r'\033\[(\d|;)+?m', '', source)
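# For example (sketch): strip_ansi('\x1b[31mred\x1b[0m plain') returns 'red plain'.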
class EvalFormatter(Formatter):
"""A String Formatter that allows evaluation of simple expressions.
Note that this version interprets a : as specifying a format string (as per
standard string formatting), so if slicing is required, you must explicitly
create a slice.
This is to be used in templating cases, such as the parallel batch
script templates, where simple arithmetic on arguments is useful.
Examples
--------
::
In [1]: f = EvalFormatter()
In [2]: f.format('{n//4}', n=8)
Out[2]: '2'
In [3]: f.format("{greeting[slice(2,4)]}", greeting="Hello")
Out[3]: 'll'
"""
def get_field(self, name, args, kwargs):
v = eval(name, kwargs)
return v, name
#XXX: As of Python 3.4, the format string parsing no longer splits on a colon
# inside [], so EvalFormatter can handle slicing. Once we only support 3.4 and
# above, it should be possible to remove FullEvalFormatter.
@skip_doctest_py3
class FullEvalFormatter(Formatter):
"""A String Formatter that allows evaluation of simple expressions.
Any time a format key is not found in the kwargs,
it will be tried as an expression in the kwargs namespace.
Note that this version allows slicing using [1:2], so you cannot specify
a format string. Use :class:`EvalFormatter` to permit format strings.
Examples
--------
::
In [1]: f = FullEvalFormatter()
In [2]: f.format('{n//4}', n=8)
Out[2]: u'2'
In [3]: f.format('{list(range(5))[2:4]}')
Out[3]: u'[2, 3]'
In [4]: f.format('{3*2}')
Out[4]: u'6'
"""
# copied from Formatter._vformat with minor changes to allow eval
# and replace the format_spec code with slicing
def vformat(self, format_string, args, kwargs):
result = []
for literal_text, field_name, format_spec, conversion in \
self.parse(format_string):
# output the literal text
if literal_text:
result.append(literal_text)
# if there's a field, output it
if field_name is not None:
# this is some markup, find the object and do
# the formatting
if format_spec:
# override format spec, to allow slicing:
field_name = ':'.join([field_name, format_spec])
# eval the contents of the field for the object
# to be formatted
obj = eval(field_name, kwargs)
# do any conversion on the resulting object
obj = self.convert_field(obj, conversion)
# format the object and append to the result
result.append(self.format_field(obj, ''))
return u''.join(py3compat.cast_unicode(s) for s in result)
@skip_doctest_py3
class DollarFormatter(FullEvalFormatter):
"""Formatter allowing Itpl style $foo replacement, for names and attribute
access only. Standard {foo} replacement also works, and allows full
evaluation of its arguments.
Examples
--------
::
In [1]: f = DollarFormatter()
In [2]: f.format('{n//4}', n=8)
Out[2]: u'2'
In [3]: f.format('23 * 76 is $result', result=23*76)
Out[3]: u'23 * 76 is 1748'
In [4]: f.format('$a or {b}', a=1, b=2)
Out[4]: u'1 or 2'
"""
_dollar_pattern = re.compile("(.*?)\$(\$?[\w\.]+)")
def parse(self, fmt_string):
for literal_txt, field_name, format_spec, conversion \
in Formatter.parse(self, fmt_string):
# Find $foo patterns in the literal text.
continue_from = 0
txt = ""
for m in self._dollar_pattern.finditer(literal_txt):
new_txt, new_field = m.group(1,2)
# $$foo --> $foo
if new_field.startswith("$"):
txt += new_txt + new_field
else:
yield (txt + new_txt, new_field, "", None)
txt = ""
continue_from = m.end()
# Re-yield the {foo} style pattern
yield (txt + literal_txt[continue_from:], field_name, format_spec, conversion)
#-----------------------------------------------------------------------------
# Utils to columnize a list of string
#-----------------------------------------------------------------------------
def _col_chunks(l, max_rows, row_first=False):
"""Yield successive max_rows-sized column chunks from l."""
if row_first:
ncols = (len(l) // max_rows) + (len(l) % max_rows > 0)
for i in py3compat.xrange(ncols):
yield [l[j] for j in py3compat.xrange(i, len(l), ncols)]
else:
for i in py3compat.xrange(0, len(l), max_rows):
yield l[i:(i + max_rows)]
def _find_optimal(rlist, row_first=False, separator_size=2, displaywidth=80):
"""Calculate optimal info to columnize a list of string"""
for max_rows in range(1, len(rlist) + 1):
col_widths = list(map(max, _col_chunks(rlist, max_rows, row_first)))
sumlength = sum(col_widths)
ncols = len(col_widths)
if sumlength + separator_size * (ncols - 1) <= displaywidth:
break
return {'num_columns': ncols,
'optimal_separator_width': (displaywidth - sumlength) / (ncols - 1) if (ncols - 1) else 0,
'max_rows': max_rows,
'column_widths': col_widths
}
def _get_or_default(mylist, i, default=None):
"""return list item number, or default if don't exist"""
if i >= len(mylist):
return default
else :
return mylist[i]
def compute_item_matrix(items, row_first=False, empty=None, *args, **kwargs) :
"""Returns a nested list, and info to columnize items
Parameters
----------
items
        list of strings to columnize
row_first : (default False)
Whether to compute columns for a row-first matrix instead of
column-first (default).
empty : (default None)
default value to fill list if needed
separator_size : int (default=2)
        How many characters will be used as separation between columns.
displaywidth : int (default=80)
        The width of the display area that the columns must fit into
Returns
-------
strings_matrix
        nested list of strings; the outermost list contains as many lists as
        rows, and each innermost list has as many elements as columns. If the
        total number of elements in `items` does not equal the product of
        rows*columns, the last elements of some lists are filled with `None`.
dict_info
some info to make columnize easier:
num_columns
number of columns
max_rows
maximum number of rows (final number may be less)
column_widths
            list of the widths of each column
optimal_separator_width
best separator width between columns
Examples
--------
::
In [1]: l = ['aaa','b','cc','d','eeeee','f','g','h','i','j','k','l']
...: compute_item_matrix(l, displaywidth=12)
Out[1]:
([['aaa', 'f', 'k'],
['b', 'g', 'l'],
['cc', 'h', None],
['d', 'i', None],
['eeeee', 'j', None]],
{'num_columns': 3,
'column_widths': [5, 1, 1],
'optimal_separator_width': 2,
'max_rows': 5})
"""
info = _find_optimal(list(map(len, items)), row_first, *args, **kwargs)
nrow, ncol = info['max_rows'], info['num_columns']
if row_first:
return ([[_get_or_default(items, r * ncol + c, default=empty) for c in range(ncol)] for r in range(nrow)], info)
else:
return ([[_get_or_default(items, c * nrow + r, default=empty) for c in range(ncol)] for r in range(nrow)], info)
def columnize(items, row_first=False, separator=' ', displaywidth=80, spread=False):
""" Transform a list of strings into a single string with columns.
Parameters
----------
items : sequence of strings
The strings to process.
row_first : (default False)
Whether to compute columns for a row-first matrix instead of
column-first (default).
separator : str, optional [default is two spaces]
The string that separates columns.
    displaywidth : int, optional [default is 80]
        Width of the display in number of characters.
    spread : bool, optional [default is False]
        Whether to widen the separator so the columns spread out to fill
        `displaywidth`.
Returns
-------
The formatted string.
"""
if not items:
return '\n'
matrix, info = compute_item_matrix(items, row_first=row_first, separator_size=len(separator), displaywidth=displaywidth)
if spread:
separator = separator.ljust(int(info['optimal_separator_width']))
fmatrix = [filter(None, x) for x in matrix]
sjoin = lambda x : separator.join([ y.ljust(w, ' ') for y, w in zip(x, info['column_widths'])])
return '\n'.join(map(sjoin, fmatrix))+'\n'
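# For example (sketch): columnize(['a', 'bb', 'ccc']) returns 'a  bb  ccc\n'
# when everything fits on one row within the default displaywidth of 80.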
def get_text_list(list_, last_sep=' and ', sep=", ", wrap_item_with=""):
"""
Return a string with a natural enumeration of items
>>> get_text_list(['a', 'b', 'c', 'd'])
'a, b, c and d'
>>> get_text_list(['a', 'b', 'c'], ' or ')
'a, b or c'
>>> get_text_list(['a', 'b', 'c'], ', ')
'a, b, c'
>>> get_text_list(['a', 'b'], ' or ')
'a or b'
>>> get_text_list(['a'])
'a'
>>> get_text_list([])
''
>>> get_text_list(['a', 'b'], wrap_item_with="`")
'`a` and `b`'
>>> get_text_list(['a', 'b', 'c', 'd'], " = ", sep=" + ")
'a + b + c = d'
"""
if len(list_) == 0:
return ''
if wrap_item_with:
list_ = ['%s%s%s' % (wrap_item_with, item, wrap_item_with) for
item in list_]
if len(list_) == 1:
return list_[0]
return '%s%s%s' % (
sep.join(i for i in list_[:-1]),
last_sep, list_[-1])
| bsd-3-clause |
MrSenko/Nitrate | tcms/profiles/views.py | 1 | 5280 | # -*- coding: utf-8 -*-
from django import http
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
from django.db.models import Q
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.http import require_http_methods
from django.views.decorators.http import require_GET
from django.shortcuts import get_object_or_404
from tcms.core.utils.raw_sql import RawSQL
from tcms.testplans.models import TestPlan
from tcms.testruns.models import TestRun
from tcms.profiles.models import Bookmark
from tcms.profiles.models import UserProfile
from tcms.profiles.forms import BookmarkForm, UserProfileForm
MODULE_NAME = 'profile'
@require_http_methods(['GET', 'POST'])
# @user_passes_test(lambda u: u.username == username)
@login_required
def bookmark(request, username, template_name='profile/bookmarks.html'):
"""
Bookmarks for the user
"""
if username != request.user.username:
return http.HttpResponseRedirect(reverse('django.contrib.auth.views.login'))
else:
up = {'user': request.user}
class BookmarkActions(object):
def __init__(self):
self.ajax_response = {'rc': 0, 'response': 'ok'}
def add(self):
form = BookmarkForm(request.GET)
if not form.is_valid():
ajax_response = {'rc': 1, 'response': form.errors.as_text()}
return http.JsonResponse(ajax_response)
form.save()
return http.JsonResponse(self.ajax_response)
def remove(self):
pks = request.POST.getlist('pk')
bks = Bookmark.objects.filter(pk__in=pks, user=request.user)
bks.delete()
return http.JsonResponse(self.ajax_response)
def render(self):
if request.GET.get('category'):
bks = Bookmark.objects.filter(user=request.user,
category_id=request.GET['category'])
else:
bks = Bookmark.objects.filter(user=request.user)
context_data = {
'user_profile': up,
'bookmarks': bks,
}
return render_to_response(template_name, context_data,
context_instance=RequestContext(request))
def render_form(self):
query = request.GET.copy()
query['a'] = 'add'
form = BookmarkForm(initial=query)
form.populate(user=request.user)
return http.HttpResponse(form.as_p())
action = BookmarkActions()
request_data = request.GET or request.POST
func = getattr(action, request_data.get('a', 'render'))
return func()
@require_http_methods(['GET', 'POST'])
@login_required
@csrf_protect
def profile(request, username, template_name='profile/info.html'):
"""Edit the profiles of the user"""
u = get_object_or_404(User, username=username)
try:
up = UserProfile.get_user_profile(u)
except ObjectDoesNotExist:
up = UserProfile.objects.create(user=u)
message = None
form = UserProfileForm(instance=up)
if request.method == 'POST':
form = UserProfileForm(request.POST, instance=up)
if form.is_valid():
form.save()
message = 'Information successfully updated.'
context_data = {
'user_profile': up,
'form': form,
'message': message,
}
return render_to_response(template_name, context_data,
context_instance=RequestContext(request))
@require_GET
@login_required
def recent(request, username, template_name='profile/recent.html'):
"""List the recent plan/run"""
if username != request.user.username:
return http.HttpResponseRedirect(reverse('django.contrib.auth.views.login'))
else:
up = {'user': request.user}
runs_query = {
'people': request.user,
'is_active': True,
'status': 'running',
}
tps = TestPlan.objects.filter(Q(author=request.user) | Q(owner=request.user))
tps = tps.order_by('-plan_id')
tps = tps.select_related('product', 'type')
tps = tps.extra(select={
'num_runs': RawSQL.num_runs,
})
tps_active = tps.filter(is_active=True)
trs = TestRun.list(runs_query)
latest_fifteen_testruns = trs.order_by('-run_id')[:15]
test_plans_disable_count = tps.count() - tps_active.count()
context_data = {
'module': MODULE_NAME,
'user_profile': up,
'test_plans_count': tps.count(),
'test_plans_disable_count': test_plans_disable_count,
'test_runs_count': trs.count(),
'last_15_test_plans': tps_active[:15],
'last_15_test_runs': latest_fifteen_testruns,
}
return render_to_response(template_name, context_data,
context_instance=RequestContext(request))
@login_required
def redirect_to_profile(request):
return http.HttpResponseRedirect(
reverse('tcms.profiles.views.recent', args=[request.user.username]))
| gpl-2.0 |
saisrisathya/whatsapps | yowsup/registration/regrequest.py | 50 | 1982 | '''
Copyright (c) <2012> Tarek Galal <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR
A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
from yowsup.common.http.warequest import WARequest
from yowsup.common.http.waresponseparser import JSONResponseParser
from yowsup.common.tools import StorageTools
class WARegRequest(WARequest):
def __init__(self,cc, p_in, code):
super(WARegRequest,self).__init__()
idx = StorageTools.getIdentity(cc + p_in)
if idx is None:
raise ValueError("You have to request code first")
self.addParam("cc", cc)
self.addParam("in", p_in)
self.addParam("id", idx)
self.addParam("code", code)
self.url = "v.whatsapp.net/v2/register"
self.pvars = ["status", "login", "pw", "type", "expiration", "kind", "price", "cost", "currency", "price_expiration",
"reason","retry_after"]
self.setParser(JSONResponseParser())
def register(self):
return self.send() | gpl-3.0 |
AZtheAsian/zulip | zerver/migrations/0037_disallow_null_string_id.py | 29 | 1506 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from six.moves import range
from django.db.utils import IntegrityError
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db import migrations, models
def set_string_id_using_domain(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Realm = apps.get_model('zerver', 'Realm')
for realm in Realm.objects.all():
if not realm.string_id:
prefix = realm.domain.split('.')[0]
try:
realm.string_id = prefix
realm.save(update_fields=["string_id"])
continue
except IntegrityError:
pass
            for i in range(1, 100):
                try:
                    realm.string_id = prefix + str(i)
                    realm.save(update_fields=["string_id"])
                    break
                except IntegrityError:
                    pass
            else:
                # Only reached if no candidate string_id could be saved.
                raise RuntimeError("Unable to find a good string_id for realm %s" % (realm,))
class Migration(migrations.Migration):
dependencies = [
('zerver', '0036_rename_subdomain_to_string_id'),
]
operations = [
migrations.RunPython(set_string_id_using_domain),
migrations.AlterField(
model_name='realm',
name='string_id',
field=models.CharField(unique=True, max_length=40),
),
]
| apache-2.0 |
alimony/django | tests/postgres_tests/test_trigram.py | 131 | 1888 | from django.contrib.postgres.search import TrigramDistance, TrigramSimilarity
from django.test import modify_settings
from . import PostgreSQLTestCase
from .models import CharFieldModel, TextFieldModel
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class TrigramTest(PostgreSQLTestCase):
Model = CharFieldModel
@classmethod
def setUpTestData(cls):
cls.Model.objects.bulk_create([
cls.Model(field='Matthew'),
cls.Model(field='Cat sat on mat.'),
cls.Model(field='Dog sat on rug.'),
])
def test_trigram_search(self):
self.assertQuerysetEqual(
self.Model.objects.filter(field__trigram_similar='Mathew'),
['Matthew'],
transform=lambda instance: instance.field,
)
def test_trigram_similarity(self):
search = 'Bat sat on cat.'
self.assertQuerysetEqual(
self.Model.objects.filter(
field__trigram_similar=search,
).annotate(similarity=TrigramSimilarity('field', search)).order_by('-similarity'),
[('Cat sat on mat.', 0.625), ('Dog sat on rug.', 0.333333)],
transform=lambda instance: (instance.field, instance.similarity),
ordered=True,
)
def test_trigram_similarity_alternate(self):
self.assertQuerysetEqual(
self.Model.objects.annotate(
distance=TrigramDistance('field', 'Bat sat on cat.'),
).filter(distance__lte=0.7).order_by('distance'),
[('Cat sat on mat.', 0.375), ('Dog sat on rug.', 0.666667)],
transform=lambda instance: (instance.field, instance.distance),
ordered=True,
)
class TrigramTextFieldTest(TrigramTest):
"""
TextField has the same behavior as CharField regarding trigram lookups.
"""
Model = TextFieldModel
| bsd-3-clause |
erikriver/eduIntelligent-cynin | src/ubify.policy/ubify/policy/__init__.py | 5 | 3717 | ###############################################################################
#cyn.in is an open source Collaborative Knowledge Management Appliance that
#enables teams to seamlessly work together on files, documents and content in
#a secure central environment.
#
#cyn.in v2 an open source appliance is distributed under the GPL v3 license
#along with commercial support options.
#
#cyn.in is a Cynapse Invention.
#
#Copyright (C) 2008 Cynapse India Pvt. Ltd.
#
#This program is free software: you can redistribute it and/or modify it under
#the terms of the GNU General Public License as published by the Free Software
#Foundation, either version 3 of the License, or any later version and observe
#the Additional Terms applicable to this program and must display appropriate
#legal notices. In accordance with Section 7(b) of the GNU General Public
#License version 3, these Appropriate Legal Notices must retain the display of
#the "Powered by cyn.in" AND "A Cynapse Invention" logos. You should have
#received a copy of the detailed Additional Terms License with this program.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
#Public License for more details.
#
#You should have received a copy of the GNU General Public License along with
#this program. If not, see <http://www.gnu.org/licenses/>.
#
#You can contact Cynapse at [email protected] with any problems with cyn.in.
#For any queries regarding the licensing, please send your mails to
# [email protected]
#
#You can also contact Cynapse at:
#802, Building No. 1,
#Dheeraj Sagar, Malad(W)
#Mumbai-400064, India
###############################################################################
#
from Products.PluggableAuthService.interfaces.request import IRequest
from Products.PluggableAuthService.plugins.RequestTypeSniffer import registerSniffer
from Products.PluggableAuthService.plugins.ChallengeProtocolChooser import registerRequestType
from ZPublisher import xmlrpc
from zope.i18nmessageid import MessageFactory
CyninMessageFactory = MessageFactory('cynin')
class IRSSRequest(IRequest):
"""Rss Request"""
def rssSniffer(request):
response = request['RESPONSE']
method = request.get('REQUEST_METHOD', 'GET').upper()
#import pdb;pdb.set_trace()
if len(request.steps) > 0 and request.steps[-1] in ('RSS','rss','atom','itunes'):
return True
if len(request.steps) > 0 and request.steps[-1].startswith('ffxmpp_'):
return True
if len(request.steps) > 0 and request.steps[-1].startswith('basic_login'):
return True
class ICALRequest(IRequest):
"""CAL Request"""
def calSniffer(request):
response = request['RESPONSE']
method = request.get('REQUEST_METHOD', 'GET').upper()
    if '.ics' in request.getURL():
return True
def setup_languageSchemaVocabulary():
from plone.app.controlpanel.language import ILanguageSelectionSchema
defaultlang_schema = ILanguageSelectionSchema.get('default_language')
if defaultlang_schema and defaultlang_schema.vocabularyName != 'plone.app.vocabularies.SupportedContentLanguages':
defaultlang_schema.vocabularyName = 'plone.app.vocabularies.SupportedContentLanguages'
def initialize(context):
"""Intializer called when used as a Zope 2 product."""
registerSniffer(IRSSRequest, rssSniffer)
registerRequestType('RSS',IRSSRequest)
registerSniffer(ICALRequest, calSniffer)
registerRequestType('CAL',ICALRequest)
setup_languageSchemaVocabulary() | gpl-3.0 |
evernym/zeno | stp_core/common/logging/handlers.py | 2 | 2199 | import logging
class CallbackHandler(logging.Handler):
def __init__(self, typestr, default_tags, callback, override_tags):
"""
Initialize the handler.
"""
super().__init__()
self.callback = callback
self.tags = default_tags
self.update_tags(override_tags or {})
self.typestr = typestr
def update_tags(self, override_tags):
self.tags.update(override_tags)
def emit(self, record):
"""
Passes the log record back to the CLI for rendering
"""
should_cb = None
attr_val = None
if hasattr(record, self.typestr):
attr_val = getattr(record, self.typestr)
should_cb = bool(attr_val)
if should_cb is None and record.levelno >= logging.INFO:
should_cb = True
if hasattr(record, 'tags'):
for t in record.tags:
if t in self.tags:
if self.tags[t]:
should_cb = True
continue
else:
should_cb = False
break
if should_cb:
self.callback(record, attr_val)
class CliHandler(CallbackHandler):
def __init__(self, callback, override_tags=None):
default_tags = {
"add_replica": True
}
super().__init__(typestr="cli",
default_tags=default_tags,
callback=callback,
override_tags=override_tags)
class DemoHandler(CallbackHandler):
def __init__(self, callback, override_tags=None):
default_tags = {
"add_replica": True
}
super().__init__(typestr="demo",
default_tags=default_tags,
callback=callback,
override_tags=override_tags)
class TestingHandler(logging.Handler):
def __init__(self, tester):
"""
Initialize the handler.
"""
super().__init__()
self.tester = tester
def emit(self, record):
"""
Captures a record.
"""
self.tester(record)
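# Usage sketch (standard library logging; the lambda is a stand-in tester):
#
#   import logging
#   logger = logging.getLogger("demo")
#   logger.addHandler(TestingHandler(lambda record: print(record.getMessage())))
#   logger.warning("captured by the tester callback")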
| apache-2.0 |
yufengg/tensorflow | tensorflow/contrib/keras/api/keras/callbacks/__init__.py | 30 | 1766 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras callback classes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.keras.python.keras.callbacks import BaseLogger
from tensorflow.contrib.keras.python.keras.callbacks import Callback
from tensorflow.contrib.keras.python.keras.callbacks import CSVLogger
from tensorflow.contrib.keras.python.keras.callbacks import EarlyStopping
from tensorflow.contrib.keras.python.keras.callbacks import History
from tensorflow.contrib.keras.python.keras.callbacks import LambdaCallback
from tensorflow.contrib.keras.python.keras.callbacks import LearningRateScheduler
from tensorflow.contrib.keras.python.keras.callbacks import ModelCheckpoint
from tensorflow.contrib.keras.python.keras.callbacks import ProgbarLogger
from tensorflow.contrib.keras.python.keras.callbacks import ReduceLROnPlateau
from tensorflow.contrib.keras.python.keras.callbacks import RemoteMonitor
from tensorflow.contrib.keras.python.keras.callbacks import TensorBoard
del absolute_import
del division
del print_function
| apache-2.0 |
kthordarson/CouchPotatoServer | libs/enzyme/__init__.py | 168 | 2414 | # -*- coding: utf-8 -*-
# enzyme - Video metadata parser
# Copyright 2011-2012 Antoine Bertin <[email protected]>
# Copyright 2003-2006 Thomas Schueppel <[email protected]>
# Copyright 2003-2006 Dirk Meyer <[email protected]>
#
# This file is part of enzyme.
#
# enzyme is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# enzyme is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with enzyme. If not, see <http://www.gnu.org/licenses/>.
import mimetypes
import os
import sys
from exceptions import *
PARSERS = [('asf', ['video/asf'], ['asf', 'wmv', 'wma']),
('flv', ['video/flv'], ['flv']),
('mkv', ['video/x-matroska', 'application/mkv'], ['mkv', 'mka', 'webm']),
('mp4', ['video/quicktime', 'video/mp4'], ['mov', 'qt', 'mp4', 'mp4a', '3gp', '3gp2', '3g2', 'mk2']),
('mpeg', ['video/mpeg'], ['mpeg', 'mpg', 'mp4', 'ts']),
('ogm', ['application/ogg'], ['ogm', 'ogg', 'ogv']),
('real', ['video/real'], ['rm', 'ra', 'ram']),
('riff', ['video/avi'], ['wav', 'avi'])
]
def parse(path):
"""Parse metadata of the given video
:param string path: path to the video file to parse
:return: a parser corresponding to the video's mimetype or extension
:rtype: :class:`~enzyme.core.AVContainer`
"""
if not os.path.isfile(path):
raise ValueError('Invalid path')
extension = os.path.splitext(path)[1][1:]
mimetype = mimetypes.guess_type(path)[0]
parser_ext = None
parser_mime = None
for (parser_name, parser_mimetypes, parser_extensions) in PARSERS:
if mimetype in parser_mimetypes:
parser_mime = parser_name
if extension in parser_extensions:
parser_ext = parser_name
parser = parser_mime or parser_ext
if not parser:
raise NoParserError()
mod = __import__(parser, globals=globals(), locals=locals(), fromlist=[], level=-1)
with open(path, 'rb') as f:
p = mod.Parser(f)
return p
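# Usage sketch (the path below is a placeholder):
#
#   metadata = parse('/tmp/example.mkv')
#   print(metadata)  # the matching parser instance holding the video metadata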
| gpl-3.0 |
gangadharkadam/tailorerp | erpnext/accounts/party.py | 22 | 6295 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.defaults import get_user_permissions
from frappe.utils import add_days
from erpnext.utilities.doctype.address.address import get_address_display
from erpnext.utilities.doctype.contact.contact import get_contact_details
@frappe.whitelist()
def get_party_details(party=None, account=None, party_type="Customer", company=None,
posting_date=None, price_list=None, currency=None, doctype=None):
return _get_party_details(party, account, party_type,
company, posting_date, price_list, currency, doctype)
def _get_party_details(party=None, account=None, party_type="Customer", company=None,
posting_date=None, price_list=None, currency=None, doctype=None, ignore_permissions=False):
out = frappe._dict(set_account_and_due_date(party, account, party_type, company, posting_date, doctype))
party = out[party_type.lower()]
if not ignore_permissions and not frappe.has_permission(party_type, "read", party):
frappe.throw(_("Not permitted"), frappe.PermissionError)
party = frappe.get_doc(party_type, party)
set_address_details(out, party, party_type)
set_contact_details(out, party, party_type)
set_other_values(out, party, party_type)
set_price_list(out, party, party_type, price_list)
if not out.get("currency"):
out["currency"] = currency
# sales team
if party_type=="Customer":
out["sales_team"] = [{
"sales_person": d.sales_person,
"sales_designation": d.sales_designation,
"allocated_percentage": d.allocated_percentage
} for d in party.get("sales_team")]
return out
def set_address_details(out, party, party_type):
billing_address_field = "customer_address" if party_type == "Lead" \
else party_type.lower() + "_address"
out[billing_address_field] = frappe.db.get_value("Address",
{party_type.lower(): party.name, "is_primary_address":1}, "name")
# address display
out.address_display = get_address_display(out[billing_address_field])
# shipping address
if party_type in ["Customer", "Lead"]:
out.shipping_address_name = frappe.db.get_value("Address",
{party_type.lower(): party.name, "is_shipping_address":1}, "name")
out.shipping_address = get_address_display(out["shipping_address_name"])
def set_contact_details(out, party, party_type):
out.contact_person = frappe.db.get_value("Contact",
{party_type.lower(): party.name, "is_primary_contact":1}, "name")
if not out.contact_person:
return
out.update(get_contact_details(out.contact_person))
def set_other_values(out, party, party_type):
# copy
if party_type=="Customer":
to_copy = ["customer_name", "customer_group", "territory"]
else:
to_copy = ["supplier_name", "supplier_type"]
for f in to_copy:
out[f] = party.get(f)
# fields prepended with default in Customer doctype
for f in ['currency', 'taxes_and_charges'] \
+ (['sales_partner', 'commission_rate'] if party_type=="Customer" else []):
if party.get("default_" + f):
out[f] = party.get("default_" + f)
def set_price_list(out, party, party_type, given_price_list):
# price list
price_list = filter(None, get_user_permissions().get("Price List", []))
if isinstance(price_list, list):
price_list = price_list[0] if len(price_list)==1 else None
if not price_list:
price_list = party.default_price_list
if not price_list and party_type=="Customer":
price_list = frappe.db.get_value("Customer Group",
party.customer_group, "default_price_list")
if not price_list:
price_list = given_price_list
if price_list:
out.price_list_currency = frappe.db.get_value("Price List", price_list, "currency")
out["selling_price_list" if party.doctype=="Customer" else "buying_price_list"] = price_list
def set_account_and_due_date(party, account, party_type, company, posting_date, doctype):
if doctype not in ["Sales Invoice", "Purchase Invoice"]:
# not an invoice
return {
party_type.lower(): party
}
if party:
account = get_party_account(company, party, party_type)
elif account:
party = frappe.db.get_value('Account', account, 'master_name')
account_fieldname = "debit_to" if party_type=="Customer" else "credit_to"
out = {
party_type.lower(): party,
account_fieldname : account,
"due_date": get_due_date(posting_date, party, party_type, account, company)
}
return out
def get_party_account(company, party, party_type):
if not company:
frappe.throw(_("Please select company first."))
if party:
acc_head = frappe.db.get_value("Account", {"master_name":party,
"master_type": party_type, "company": company})
if not acc_head:
create_party_account(party, party_type, company)
return acc_head
def get_due_date(posting_date, party, party_type, account, company):
"""Set Due Date = Posting Date + Credit Days"""
due_date = None
if posting_date:
credit_days = 0
if account:
credit_days = frappe.db.get_value("Account", account, "credit_days")
if party and not credit_days:
credit_days = frappe.db.get_value(party_type, party, "credit_days")
if company and not credit_days:
credit_days = frappe.db.get_value("Company", company, "credit_days")
due_date = add_days(posting_date, credit_days) if credit_days else posting_date
return due_date
def create_party_account(party, party_type, company):
if not company:
frappe.throw(_("Company is required"))
company_details = frappe.db.get_value("Company", company,
["abbr", "receivables_group", "payables_group"], as_dict=True)
if not frappe.db.exists("Account", (party.strip() + " - " + company_details.abbr)):
parent_account = company_details.receivables_group \
if party_type=="Customer" else company_details.payables_group
if not parent_account:
frappe.throw(_("Please enter Account Receivable/Payable group in company master"))
# create
account = frappe.get_doc({
"doctype": "Account",
'account_name': party,
'parent_account': parent_account,
'group_or_ledger':'Ledger',
'company': company,
'master_type': party_type,
'master_name': party,
"freeze_account": "No",
"report_type": "Balance Sheet"
}).insert(ignore_permissions=True)
frappe.msgprint(_("Account Created: {0}").format(account.name))
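# --- Illustrative note (editor's addition, not part of erpnext) ---
# get_due_date() resolves credit days by falling back from the Account to the
# Party and finally the Company record; the date arithmetic itself is just
# frappe.utils.add_days, e.g. add_days("2014-01-31", 30) falls on 2014-03-02.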
| agpl-3.0 |
alazaro/tennis_tournament | django/contrib/gis/tests/geogapp/tests.py | 222 | 4080 | """
Tests for geography support in PostGIS 1.5+
"""
import os
from django.contrib.gis import gdal
from django.contrib.gis.measure import D
from django.test import TestCase
from models import City, County, Zipcode
class GeographyTest(TestCase):
def test01_fixture_load(self):
"Ensure geography features loaded properly."
self.assertEqual(8, City.objects.count())
def test02_distance_lookup(self):
"Testing GeoQuerySet distance lookup support on non-point geography fields."
z = Zipcode.objects.get(code='77002')
cities1 = list(City.objects
.filter(point__distance_lte=(z.poly, D(mi=500)))
.order_by('name')
.values_list('name', flat=True))
cities2 = list(City.objects
.filter(point__dwithin=(z.poly, D(mi=500)))
.order_by('name')
.values_list('name', flat=True))
for cities in [cities1, cities2]:
self.assertEqual(['Dallas', 'Houston', 'Oklahoma City'], cities)
def test03_distance_method(self):
"Testing GeoQuerySet.distance() support on non-point geography fields."
# `GeoQuerySet.distance` is not allowed geometry fields.
htown = City.objects.get(name='Houston')
qs = Zipcode.objects.distance(htown.point)
def test04_invalid_operators_functions(self):
"Ensuring exceptions are raised for operators & functions invalid on geography fields."
# Only a subset of the geometry functions & operator are available
# to PostGIS geography types. For more information, visit:
# http://postgis.refractions.net/documentation/manual-1.5/ch08.html#PostGIS_GeographyFunctions
z = Zipcode.objects.get(code='77002')
# ST_Within not available.
self.assertRaises(ValueError, City.objects.filter(point__within=z.poly).count)
# `@` operator not available.
self.assertRaises(ValueError, City.objects.filter(point__contained=z.poly).count)
# Regression test for #14060, `~=` was never really implemented for PostGIS.
htown = City.objects.get(name='Houston')
self.assertRaises(ValueError, City.objects.get, point__exact=htown.point)
def test05_geography_layermapping(self):
"Testing LayerMapping support on models with geography fields."
# There is a similar test in `layermap` that uses the same data set,
# but the County model here is a bit different.
if not gdal.HAS_GDAL: return
from django.contrib.gis.utils import LayerMapping
# Getting the shapefile and mapping dictionary.
shp_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'data'))
co_shp = os.path.join(shp_path, 'counties', 'counties.shp')
co_mapping = {'name' : 'Name',
'state' : 'State',
'mpoly' : 'MULTIPOLYGON',
}
# Reference county names, number of polygons, and state names.
names = ['Bexar', 'Galveston', 'Harris', 'Honolulu', 'Pueblo']
num_polys = [1, 2, 1, 19, 1] # Number of polygons for each.
st_names = ['Texas', 'Texas', 'Texas', 'Hawaii', 'Colorado']
lm = LayerMapping(County, co_shp, co_mapping, source_srs=4269, unique='name')
lm.save(silent=True, strict=True)
for c, name, num_poly, state in zip(County.objects.order_by('name'), names, num_polys, st_names):
self.assertEqual(4326, c.mpoly.srid)
self.assertEqual(num_poly, len(c.mpoly))
self.assertEqual(name, c.name)
self.assertEqual(state, c.state)
def test06_geography_area(self):
"Testing that Area calculations work on geography columns."
from django.contrib.gis.measure import A
# SELECT ST_Area(poly) FROM geogapp_zipcode WHERE code='77002';
ref_area = 5439084.70637573
tol = 5
z = Zipcode.objects.area().get(code='77002')
self.assertAlmostEqual(z.area.sq_m, ref_area, tol)
| gpl-3.0 |
erochest/threepress-rdfa | bookworm/django_authopenid/urls.py | 7 | 1360 | # -*- coding: utf-8 -*-
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('django_authopenid.views',
# manage account registration
url(r'^%s$' % ('signin/'), 'signin', name='user_signin'),
url(r'^%s$' % ('signout/'), 'signout', name='user_signout'),
url(r'^%s%s$' % ('signin/', 'complete/'), 'complete_signin', name='user_complete_signin'),
url(r'^%s$' % ('register/'), 'register', name='user_register'),
url(r'^%s$' % ('signup/'), 'signup', name='user_signup'),
url(r'^%s$' % 'sendpw/', 'sendpw', name='user_sendpw'),
url(r'^%s%s$' % ('password/', 'confirm/'), 'confirmchangepw',
name='user_confirmchangepw'),
# manage account settings
url(r'^%s$' % 'password/', 'changepw', name='user_changepw'),
url(r'^%s$' % 'email/', 'changeemail', name='user_changeemail'),
url(r'^%s$' % 'openid/', 'changeopenid', name='user_changeopenid'),
url(r'^%s$' % 'delete/', 'delete', name='user_delete'),
)
urlpatterns += patterns('bookworm.library.views',
url(r'^$', 'profile', name='profile'),
)
| bsd-3-clause |
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/examples/transports/plot_legislation/plot_ticpe_taux_implicite.py | 4 | 2264 | # -*- coding: utf-8 -*-
"""
Created on Mon Aug 17 18:06:45 2015
@author: thomas.douenne
TICPE: Taxe intérieure sur la consommation des produits énergétiques
"""
# This script graphically illustrates how the implicit TICPE rate has evolved since 1993.
# The rate is computed for diesel and for unleaded petrol.
# General imports
from pandas import concat
# OpenFisca-specific imports
from openfisca_france_indirect_taxation.examples.utils_example import graph_builder_bar_list
from openfisca_france_indirect_taxation.examples.dataframes_from_legislation.get_accises import get_accises_carburants
from openfisca_france_indirect_taxation.examples.dataframes_from_legislation.get_tva import get_tva_taux_plein
from openfisca_france_indirect_taxation.examples.dataframes_from_legislation.get_prix_carburants import \
get_prix_carburants
# Fetch the legislation parameters and the fuel prices
ticpe = ['ticpe_gazole', 'ticpe_super9598']
accise_diesel = get_accises_carburants(ticpe)
prix_ttc = ['diesel_ttc', 'super_95_ttc']
prix_carburants = get_prix_carburants(prix_ttc)
tva_taux_plein = get_tva_taux_plein()
# Build a dataframe holding these parameters
df_taux_implicite = concat([accise_diesel, prix_carburants, tva_taux_plein], axis = 1)
df_taux_implicite.rename(columns = {'value': 'taux plein tva'}, inplace = True)
# Compute the implicit tax rates from the parameters
df_taux_implicite['taux_implicite_diesel'] = (
df_taux_implicite['accise ticpe gazole'] * (1 + df_taux_implicite['taux plein tva']) /
(df_taux_implicite['prix diesel ttc'] -
(df_taux_implicite['accise ticpe gazole'] * (1 + df_taux_implicite['taux plein tva'])))
)
df_taux_implicite['taux_implicite_sp95'] = (
df_taux_implicite['accise ticpe super9598'] * (1 + df_taux_implicite['taux plein tva']) /
(df_taux_implicite['prix super 95 ttc'] -
(df_taux_implicite['accise ticpe super9598'] * (1 + df_taux_implicite['taux plein tva'])))
)
df_taux_implicite = df_taux_implicite.dropna()
# Draw the graphs
graph_builder_bar_list(df_taux_implicite['taux_implicite_diesel'], 1, 1)
graph_builder_bar_list(df_taux_implicite['taux_implicite_sp95'], 1, 1)
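# --- Illustrative numeric check (editor's addition) ---
# The implicit rate computed above is accise * (1 + TVA) / (prix TTC - accise * (1 + TVA)).
# The figures below are hypothetical, purely to illustrate the formula.
if __name__ == '__main__':
    accise, tva, prix_ttc = 0.60, 0.20, 1.40  # EUR/l, rate, EUR/l (hypothetical)
    taux_implicite = accise * (1 + tva) / (prix_ttc - accise * (1 + tva))
    print(taux_implicite)  # ~1.06: the tax exceeds the pre-tax price of the fuel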
| agpl-3.0 |
BT-fgarbely/odoo | addons/edi/models/res_partner.py | 437 | 4243 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011-2012 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from openerp.osv import osv
from edi import EDIMixin
from openerp import SUPERUSER_ID
_logger = logging.getLogger(__name__)
RES_PARTNER_EDI_STRUCT = {
'name': True,
'ref': True,
'lang': True,
'website': True,
'email': True,
'street': True,
'street2': True,
'zip': True,
'city': True,
'country_id': True,
'state_id': True,
'phone': True,
'fax': True,
'mobile': True,
}
class res_partner(osv.osv, EDIMixin):
_inherit = "res.partner"
def edi_export(self, cr, uid, records, edi_struct=None, context=None):
return super(res_partner,self).edi_export(cr, uid, records,
edi_struct or dict(RES_PARTNER_EDI_STRUCT),
context=context)
def _get_bank_type(self, cr, uid, context=None):
# first option: the "normal" bank type, installed by default
res_partner_bank_type = self.pool.get('res.partner.bank.type')
try:
return self.pool.get('ir.model.data').get_object(cr, uid, 'base', 'bank_normal', context=context).code
except ValueError:
pass
# second option: create a new custom type for EDI or use it if already created, as IBAN type is
# not always appropriate: we need a free-form bank type for max flexibility (users can correct
# data manually after import)
code, label = 'edi_generic', 'Generic Bank Type (auto-created for EDI)'
bank_code_ids = res_partner_bank_type.search(cr, uid, [('code','=',code)], context=context)
if not bank_code_ids:
_logger.info('Normal bank account type is missing, creating '
'a generic bank account type for EDI.')
            res_partner_bank_type.create(cr, SUPERUSER_ID, {'name': label,
                                                             'code': code})
return code
def edi_import(self, cr, uid, edi_document, context=None):
# handle bank info, if any
edi_bank_ids = edi_document.pop('bank_ids', None)
contact_id = super(res_partner,self).edi_import(cr, uid, edi_document, context=context)
if edi_bank_ids:
contact = self.browse(cr, uid, contact_id, context=context)
import_ctx = dict((context or {}),
default_partner_id = contact.id,
default_state=self._get_bank_type(cr, uid, context))
for ext_bank_id, bank_name in edi_bank_ids:
try:
self.edi_import_relation(cr, uid, 'res.partner.bank',
bank_name, ext_bank_id, context=import_ctx)
except osv.except_osv:
# failed to import it, try again with unrestricted default type
_logger.warning('Failed to import bank account using'
'bank type: %s, ignoring', import_ctx['default_state'],
exc_info=True)
return contact_id
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
yan12125/youtube-dl | youtube_dl/extractor/jove.py | 50 | 3075 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
unified_strdate
)
class JoveIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?jove\.com/video/(?P<id>[0-9]+)'
_CHAPTERS_URL = 'http://www.jove.com/video-chapters?videoid={video_id:}'
_TESTS = [
{
'url': 'http://www.jove.com/video/2744/electrode-positioning-montage-transcranial-direct-current',
'md5': '93723888d82dbd6ba8b3d7d0cd65dd2b',
'info_dict': {
'id': '2744',
'ext': 'mp4',
'title': 'Electrode Positioning and Montage in Transcranial Direct Current Stimulation',
'description': 'md5:015dd4509649c0908bc27f049e0262c6',
'thumbnail': r're:^https?://.*\.png$',
'upload_date': '20110523',
}
},
{
'url': 'http://www.jove.com/video/51796/culturing-caenorhabditis-elegans-axenic-liquid-media-creation',
'md5': '914aeb356f416811d911996434811beb',
'info_dict': {
'id': '51796',
'ext': 'mp4',
'title': 'Culturing Caenorhabditis elegans in Axenic Liquid Media and Creation of Transgenic Worms by Microparticle Bombardment',
'description': 'md5:35ff029261900583970c4023b70f1dc9',
'thumbnail': r're:^https?://.*\.png$',
'upload_date': '20140802',
}
},
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
chapters_id = self._html_search_regex(
r'/video-chapters\?videoid=([0-9]+)', webpage, 'chapters id')
chapters_xml = self._download_xml(
self._CHAPTERS_URL.format(video_id=chapters_id),
video_id, note='Downloading chapters XML',
errnote='Failed to download chapters XML')
video_url = chapters_xml.attrib.get('video')
if not video_url:
raise ExtractorError('Failed to get the video URL')
title = self._html_search_meta('citation_title', webpage, 'title')
thumbnail = self._og_search_thumbnail(webpage)
description = self._html_search_regex(
r'<div id="section_body_summary"><p class="jove_content">(.+?)</p>',
webpage, 'description', fatal=False)
publish_date = unified_strdate(self._html_search_meta(
'citation_publication_date', webpage, 'publish date', fatal=False))
comment_count = int(self._html_search_regex(
r'<meta name="num_comments" content="(\d+) Comments?"',
webpage, 'comment count', fatal=False))
return {
'id': video_id,
'title': title,
'url': video_url,
'thumbnail': thumbnail,
'description': description,
'upload_date': publish_date,
'comment_count': comment_count,
}
| unlicense |
JJones131/steam-engage-monitor | lib/jinja2/testsuite/regression.py | 414 | 8382 | # -*- coding: utf-8 -*-
"""
jinja2.testsuite.regression
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests corner cases and bugs.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import unittest
from jinja2.testsuite import JinjaTestCase
from jinja2 import Template, Environment, DictLoader, TemplateSyntaxError, \
TemplateNotFound, PrefixLoader
from jinja2._compat import text_type
env = Environment()
class CornerTestCase(JinjaTestCase):
def test_assigned_scoping(self):
t = env.from_string('''
{%- for item in (1, 2, 3, 4) -%}
[{{ item }}]
{%- endfor %}
{{- item -}}
''')
assert t.render(item=42) == '[1][2][3][4]42'
t = env.from_string('''
{%- for item in (1, 2, 3, 4) -%}
[{{ item }}]
{%- endfor %}
{%- set item = 42 %}
{{- item -}}
''')
assert t.render() == '[1][2][3][4]42'
t = env.from_string('''
{%- set item = 42 %}
{%- for item in (1, 2, 3, 4) -%}
[{{ item }}]
{%- endfor %}
{{- item -}}
''')
assert t.render() == '[1][2][3][4]42'
def test_closure_scoping(self):
t = env.from_string('''
{%- set wrapper = "<FOO>" %}
{%- for item in (1, 2, 3, 4) %}
{%- macro wrapper() %}[{{ item }}]{% endmacro %}
{{- wrapper() }}
{%- endfor %}
{{- wrapper -}}
''')
assert t.render() == '[1][2][3][4]<FOO>'
t = env.from_string('''
{%- for item in (1, 2, 3, 4) %}
{%- macro wrapper() %}[{{ item }}]{% endmacro %}
{{- wrapper() }}
{%- endfor %}
{%- set wrapper = "<FOO>" %}
{{- wrapper -}}
''')
assert t.render() == '[1][2][3][4]<FOO>'
t = env.from_string('''
{%- for item in (1, 2, 3, 4) %}
{%- macro wrapper() %}[{{ item }}]{% endmacro %}
{{- wrapper() }}
{%- endfor %}
{{- wrapper -}}
''')
assert t.render(wrapper=23) == '[1][2][3][4]23'
class BugTestCase(JinjaTestCase):
def test_keyword_folding(self):
env = Environment()
env.filters['testing'] = lambda value, some: value + some
assert env.from_string("{{ 'test'|testing(some='stuff') }}") \
.render() == 'teststuff'
def test_extends_output_bugs(self):
env = Environment(loader=DictLoader({
'parent.html': '(({% block title %}{% endblock %}))'
}))
t = env.from_string('{% if expr %}{% extends "parent.html" %}{% endif %}'
'[[{% block title %}title{% endblock %}]]'
'{% for item in [1, 2, 3] %}({{ item }}){% endfor %}')
assert t.render(expr=False) == '[[title]](1)(2)(3)'
assert t.render(expr=True) == '((title))'
def test_urlize_filter_escaping(self):
tmpl = env.from_string('{{ "http://www.example.org/<foo"|urlize }}')
assert tmpl.render() == '<a href="http://www.example.org/<foo">http://www.example.org/<foo</a>'
def test_loop_call_loop(self):
tmpl = env.from_string('''
{% macro test() %}
{{ caller() }}
{% endmacro %}
{% for num1 in range(5) %}
{% call test() %}
{% for num2 in range(10) %}
{{ loop.index }}
{% endfor %}
{% endcall %}
{% endfor %}
''')
assert tmpl.render().split() == [text_type(x) for x in range(1, 11)] * 5
def test_weird_inline_comment(self):
env = Environment(line_statement_prefix='%')
self.assert_raises(TemplateSyntaxError, env.from_string,
'% for item in seq {# missing #}\n...% endfor')
def test_old_macro_loop_scoping_bug(self):
tmpl = env.from_string('{% for i in (1, 2) %}{{ i }}{% endfor %}'
'{% macro i() %}3{% endmacro %}{{ i() }}')
assert tmpl.render() == '123'
def test_partial_conditional_assignments(self):
tmpl = env.from_string('{% if b %}{% set a = 42 %}{% endif %}{{ a }}')
assert tmpl.render(a=23) == '23'
assert tmpl.render(b=True) == '42'
def test_stacked_locals_scoping_bug(self):
env = Environment(line_statement_prefix='#')
t = env.from_string('''\
# for j in [1, 2]:
# set x = 1
# for i in [1, 2]:
# print x
# if i % 2 == 0:
# set x = x + 1
# endif
# endfor
# endfor
# if a
# print 'A'
# elif b
# print 'B'
# elif c == d
# print 'C'
# else
# print 'D'
# endif
''')
assert t.render(a=0, b=False, c=42, d=42.0) == '1111C'
def test_stacked_locals_scoping_bug_twoframe(self):
t = Template('''
{% set x = 1 %}
{% for item in foo %}
{% if item == 1 %}
{% set x = 2 %}
{% endif %}
{% endfor %}
{{ x }}
''')
rv = t.render(foo=[1]).strip()
assert rv == u'1'
def test_call_with_args(self):
t = Template("""{% macro dump_users(users) -%}
<ul>
{%- for user in users -%}
<li><p>{{ user.username|e }}</p>{{ caller(user) }}</li>
{%- endfor -%}
</ul>
{%- endmacro -%}
{% call(user) dump_users(list_of_user) -%}
<dl>
<dl>Realname</dl>
<dd>{{ user.realname|e }}</dd>
<dl>Description</dl>
<dd>{{ user.description }}</dd>
</dl>
{% endcall %}""")
assert [x.strip() for x in t.render(list_of_user=[{
'username':'apo',
'realname':'something else',
'description':'test'
}]).splitlines()] == [
u'<ul><li><p>apo</p><dl>',
u'<dl>Realname</dl>',
u'<dd>something else</dd>',
u'<dl>Description</dl>',
u'<dd>test</dd>',
u'</dl>',
u'</li></ul>'
]
def test_empty_if_condition_fails(self):
self.assert_raises(TemplateSyntaxError, Template, '{% if %}....{% endif %}')
self.assert_raises(TemplateSyntaxError, Template, '{% if foo %}...{% elif %}...{% endif %}')
self.assert_raises(TemplateSyntaxError, Template, '{% for x in %}..{% endfor %}')
def test_recursive_loop_bug(self):
tpl1 = Template("""
{% for p in foo recursive%}
{{p.bar}}
{% for f in p.fields recursive%}
{{f.baz}}
{{p.bar}}
{% if f.rec %}
{{ loop(f.sub) }}
{% endif %}
{% endfor %}
{% endfor %}
""")
tpl2 = Template("""
{% for p in foo%}
{{p.bar}}
{% for f in p.fields recursive%}
{{f.baz}}
{{p.bar}}
{% if f.rec %}
{{ loop(f.sub) }}
{% endif %}
{% endfor %}
{% endfor %}
""")
def test_else_loop_bug(self):
t = Template('''
{% for x in y %}
{{ loop.index0 }}
{% else %}
{% for i in range(3) %}{{ i }}{% endfor %}
{% endfor %}
''')
self.assertEqual(t.render(y=[]).strip(), '012')
def test_correct_prefix_loader_name(self):
env = Environment(loader=PrefixLoader({
'foo': DictLoader({})
}))
try:
env.get_template('foo/bar.html')
except TemplateNotFound as e:
assert e.name == 'foo/bar.html'
else:
assert False, 'expected error here'
def test_contextfunction_callable_classes(self):
from jinja2.utils import contextfunction
class CallableClass(object):
@contextfunction
def __call__(self, ctx):
return ctx.resolve('hello')
tpl = Template("""{{ callableclass() }}""")
output = tpl.render(callableclass = CallableClass(), hello = 'TEST')
expected = 'TEST'
self.assert_equal(output, expected)
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(CornerTestCase))
suite.addTest(unittest.makeSuite(BugTestCase))
return suite
| apache-2.0 |
asm-products/movie-database-service | ani/lib/python2.7/site-packages/paramiko/config.py | 2 | 10382 | # Copyright (C) 2006-2007 Robey Pointer <[email protected]>
# Copyright (C) 2012 Olle Lundberg <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
Configuration file (aka ``ssh_config``) support.
"""
import fnmatch
import os
import re
import socket
SSH_PORT = 22
class SSHConfig (object):
"""
Representation of config information as stored in the format used by
OpenSSH. Queries can be made via `lookup`. The format is described in
OpenSSH's ``ssh_config`` man page. This class is provided primarily as a
convenience to posix users (since the OpenSSH format is a de-facto
standard on posix) but should work fine on Windows too.
.. versionadded:: 1.6
"""
SETTINGS_REGEX = re.compile(r'(\w+)(?:\s*=\s*|\s+)(.+)')
def __init__(self):
"""
Create a new OpenSSH config object.
"""
self._config = []
def parse(self, file_obj):
"""
Read an OpenSSH config from the given file object.
:param file file_obj: a file-like object to read the config file from
"""
host = {"host": ['*'], "config": {}}
for line in file_obj:
line = line.rstrip('\r\n').lstrip()
if not line or line.startswith('#'):
continue
match = re.match(self.SETTINGS_REGEX, line)
if not match:
raise Exception("Unparsable line %s" % line)
key = match.group(1).lower()
value = match.group(2)
if key == 'host':
self._config.append(host)
host = {
'host': self._get_hosts(value),
'config': {}
}
else:
if value.startswith('"') and value.endswith('"'):
value = value[1:-1]
#identityfile, localforward, remoteforward keys are special cases, since they are allowed to be
# specified multiple times and they should be tried in order
# of specification.
if key in ['identityfile', 'localforward', 'remoteforward']:
if key in host['config']:
host['config'][key].append(value)
else:
host['config'][key] = [value]
elif key not in host['config']:
host['config'][key] = value
self._config.append(host)
def lookup(self, hostname):
"""
Return a dict of config options for a given hostname.
The host-matching rules of OpenSSH's ``ssh_config`` man page are used,
which means that all configuration options from matching host
specifications are merged, with more specific hostmasks taking
precedence. In other words, if ``"Port"`` is set under ``"Host *"``
and also ``"Host *.example.com"``, and the lookup is for
``"ssh.example.com"``, then the port entry for ``"Host *.example.com"``
will win out.
The keys in the returned dict are all normalized to lowercase (look for
``"port"``, not ``"Port"``. The values are processed according to the
rules for substitution variable expansion in ``ssh_config``.
:param str hostname: the hostname to lookup
"""
matches = [
config for config in self._config
if self._allowed(config['host'], hostname)
]
ret = {}
for match in matches:
for key, value in match['config'].items():
if key not in ret:
# Create a copy of the original value,
# else it will reference the original list
# in self._config and update that value too
# when the extend() is being called.
ret[key] = value[:]
elif key == 'identityfile':
ret[key].extend(value)
ret = self._expand_variables(ret, hostname)
return ret
def _allowed(self, hosts, hostname):
match = False
for host in hosts:
if host.startswith('!') and fnmatch.fnmatch(hostname, host[1:]):
return False
elif fnmatch.fnmatch(hostname, host):
match = True
return match
def _expand_variables(self, config, hostname):
"""
Return a dict of config options with expanded substitutions
for a given hostname.
Please refer to man ``ssh_config`` for the parameters that
are replaced.
:param dict config: the config for the hostname
:param str hostname: the hostname that the config belongs to
"""
if 'hostname' in config:
config['hostname'] = config['hostname'].replace('%h', hostname)
else:
config['hostname'] = hostname
if 'port' in config:
port = config['port']
else:
port = SSH_PORT
user = os.getenv('USER')
if 'user' in config:
remoteuser = config['user']
else:
remoteuser = user
host = socket.gethostname().split('.')[0]
fqdn = LazyFqdn(config, host)
homedir = os.path.expanduser('~')
replacements = {'controlpath':
[
('%h', config['hostname']),
('%l', fqdn),
('%L', host),
('%n', hostname),
('%p', port),
('%r', remoteuser),
('%u', user)
],
'identityfile':
[
('~', homedir),
('%d', homedir),
('%h', config['hostname']),
('%l', fqdn),
('%u', user),
('%r', remoteuser)
],
'proxycommand':
[
('%h', config['hostname']),
('%p', port),
('%r', remoteuser)
]
}
for k in config:
if k in replacements:
for find, replace in replacements[k]:
if isinstance(config[k], list):
for item in range(len(config[k])):
if find in config[k][item]:
config[k][item] = config[k][item].\
replace(find, str(replace))
else:
if find in config[k]:
config[k] = config[k].replace(find, str(replace))
return config
def _get_hosts(self, host):
"""
Return a list of host_names from host value.
"""
i, length = 0, len(host)
hosts = []
while i < length:
if host[i] == '"':
end = host.find('"', i + 1)
if end < 0:
raise Exception("Unparsable host %s" % host)
hosts.append(host[i + 1:end])
i = end + 1
elif not host[i].isspace():
end = i + 1
while end < length and not host[end].isspace() and host[end] != '"':
end += 1
hosts.append(host[i:end])
i = end
else:
i += 1
return hosts
class LazyFqdn(object):
"""
Returns the host's fqdn on request as string.
"""
def __init__(self, config, host=None):
self.fqdn = None
self.config = config
self.host = host
def __str__(self):
if self.fqdn is None:
#
# If the SSH config contains AddressFamily, use that when
# determining the local host's FQDN. Using socket.getfqdn() from
# the standard library is the most general solution, but can
# result in noticeable delays on some platforms when IPv6 is
# misconfigured or not available, as it calls getaddrinfo with no
# address family specified, so both IPv4 and IPv6 are checked.
#
# Handle specific option
fqdn = None
address_family = self.config.get('addressfamily', 'any').lower()
if address_family != 'any':
try:
family = socket.AF_INET if address_family == 'inet' \
else socket.AF_INET6
results = socket.getaddrinfo(
self.host,
None,
family,
socket.SOCK_DGRAM,
socket.IPPROTO_IP,
socket.AI_CANONNAME
)
for res in results:
af, socktype, proto, canonname, sa = res
if canonname and '.' in canonname:
fqdn = canonname
break
                    # gaierror -> socket.getaddrinfo() can't resolve self.host
# (which is from socket.gethostname()). Fall back to the
# getfqdn() call below.
except socket.gaierror:
pass
# Handle 'any' / unspecified
if fqdn is None:
fqdn = socket.getfqdn()
# Cache
self.fqdn = fqdn
return self.fqdn
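# --- Illustrative usage sketch (editor's addition, not part of paramiko) ---
# Demonstrates the merge/precedence behaviour documented in SSHConfig.lookup():
# options from every matching Host block are combined and the first block that
# sets a key wins.  The config text and hostname below are hypothetical.
if __name__ == '__main__':
    from StringIO import StringIO
    sample = ("Host *.example.com\n"
              "    Port 2222\n"
              "Host *\n"
              "    User robey\n")
    demo_config = SSHConfig()
    demo_config.parse(StringIO(sample))
    print demo_config.lookup('ssh.example.com')
    # expected keys: hostname 'ssh.example.com', port '2222', user 'robey'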
| agpl-3.0 |
janusnic/wagtail | wagtail/wagtailredirects/models.py | 8 | 2622 | from __future__ import unicode_literals
from django.db import models
from django.utils.six.moves.urllib.parse import urlparse
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel, MultiFieldPanel, PageChooserPanel
class Redirect(models.Model):
old_path = models.CharField(verbose_name=_("Redirect from"), max_length=255, unique=True, db_index=True)
site = models.ForeignKey('wagtailcore.Site', verbose_name=_('Site'), null=True, blank=True, related_name='redirects', db_index=True, editable=False)
is_permanent = models.BooleanField(verbose_name=_("Permanent"), default=True, help_text=_("Recommended. Permanent redirects ensure search engines forget the old page (the 'Redirect from') and index the new page instead."))
redirect_page = models.ForeignKey('wagtailcore.Page', verbose_name=_("Redirect to a page"), null=True, blank=True)
redirect_link = models.URLField(verbose_name=_("Redirect to any URL"), blank=True)
@property
def title(self):
return self.old_path
@property
def link(self):
if self.redirect_page:
return self.redirect_page.url
else:
return self.redirect_link
def get_is_permanent_display(self):
if self.is_permanent:
return "permanent"
else:
return "temporary"
@classmethod
def get_for_site(cls, site=None):
if site:
return cls.objects.filter(models.Q(site=site) | models.Q(site=None))
else:
return cls.objects.all()
@staticmethod
def normalise_path(url):
# Parse url
url_parsed = urlparse(url)
# Path must start with / but not end with /
path = url_parsed[2]
if not path.startswith('/'):
path = '/' + path
if path.endswith('/'):
path = path[:-1]
# Query string components must be sorted alphabetically
query_string = url_parsed[4]
query_string_components = query_string.split('&')
query_string = '&'.join(sorted(query_string_components))
# Add query string to path
if query_string:
path = path + '?' + query_string
return path
def clean(self):
# Normalise old path
self.old_path = Redirect.normalise_path(self.old_path)
class Meta:
verbose_name = _('Redirect')
Redirect.content_panels = [
MultiFieldPanel([
FieldPanel('old_path'),
FieldPanel('is_permanent'),
PageChooserPanel('redirect_page'),
FieldPanel('redirect_link'),
])
]
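# --- Illustrative sketch (editor's addition, not part of wagtail) ---
# Demonstrates the normalisation rules implemented in Redirect.normalise_path():
# a leading slash is added, a trailing slash stripped and query parameters sorted.
if __name__ == '__main__':
    assert Redirect.normalise_path('news/?b=2&a=1') == '/news?a=1&b=2'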
| bsd-3-clause |
mariaantoanelam/Licenta | Lib/test/test_array.py | 5 | 13973 | # CAU: Adaptation of the cpython 2.2 test_array.py for jython 2.2
# Formerly test_jarray.py, now test_array.py so that this
# test completely supersedes the cpython test. It would
# be better to simply complement the cpython test, but that
# test bombs out too early due to basic incompatibilities.
#
# The jarray module is being phased out, with all functionality
# now available in the array module.
from test_support import *
from array import array, zeros
import sys
from java.lang import String
from java.lang.reflect import Array
from java.util import Arrays
print_test('array module (test_array.py)', 1)
def main():
test_jarray() # while it's still supported
test_java_compat()
test_java_object_arrays()
testtype('c', 'c')
for type in (['b', 'h', 'i', 'l', 'f', 'd']):
testtype(type, 1)
#test a mix of known success and failure cases
init_tests()
extend_tests()
fromlist_tests()
unlink(TESTFN)
def test_jarray(): # until it is fully formally removed
# While jarray is still being phased out, just flex the initilaizers.
# The rest of the test for array will catch all the big problems.
import jarray
jarray.array(range(5), 'i')
jarray.array([String("a"), String("b"), String("c")], String)
jarray.zeros(5, 'i')
jarray.zeros(5, String)
def test_java_object_arrays():
jStringArr = array(String, [String("a"), String("b"), String("c")])
verify(Arrays.equals(jStringArr.typecode, str(String)),
"String array typecode of wrong type, expected %s, found %s" %
(jStringArr.typecode, str(String)))
verify(zeros(String, 5) == Array.newInstance(String, 5))
import java # require for eval to work
if jStringArr != eval(str(jStringArr)):
raise TestFailed, "eval(str(%s)) <> %s" % (jStringArr,)*2
def test_java_compat():
print_test('array', 2)
from java import awt
hsb = awt.Color.RGBtoHSB(0,255,255, None)
#print hsb
verify(hsb == array('f', [0.5,1,1]),
"output hsb float array does not correspond to input rgb values")
rgb = apply(awt.Color.HSBtoRGB, tuple(hsb))
#print hex(rgb)
verify(rgb == 0xff00ffff, "output rgb bytes don't match input hsb floats")
print_test('zeros', 2)
hsb1 = zeros('f', 3)
awt.Color.RGBtoHSB(0,255,255, hsb1)
#print hsb, hsb1
verify(hsb == hsb1, "hsb float arrays were not equal")
def testoverflow(type, lowerLimit, upperLimit):
# should not overflow assigning lower limit
if verbose:
print "test overflow: array(%s, [%s])" % (lowerLimit, type)
try:
a = array(type, [lowerLimit])
except:
raise TestFailed("array(%s) overflowed assigning %s" %
(lowerLimit, type))
# should overflow assigning less than lower limit
if verbose:
print "test overflow: array(%s, [%s])" % (lowerLimit-1, type)
try:
a = array(type, [lowerLimit-1])
raise TestFailed, "array(%s) did not overflow assigning %s" %\
(lowerLimit-1, type)
except OverflowError:
pass
# should not overflow assigning upper limit
if verbose:
print "test verflow: array(%s, [%s])" % (upperLimit, type)
try:
a = array(type, [upperLimit])
except:
raise TestFailed, "array(%s) overflowed assigning %s" %\
(upperLimit, type)
# should overflow assigning more than upper limit
if verbose:
print "test overflow: array(%s, [%s])" % (upperLimit+1, type)
try:
a = array(type, [upperLimit+1])
raise TestFailed, "array(%s) did not overflow assigning %s" %\
(upperLimit+1, type)
except OverflowError:
pass
def testtype(type, example):
if verbose:
print "testing type ", type
a = array(type)
a.append(example)
if verbose:
print 40*'*'
print 'array after append: ', a
a.typecode
a.itemsize
if a <> eval(str(a)):
raise TestFailed, "eval(str(%s)) <> %s" % (a,a)
if a.typecode in ('i', 'b', 'h', 'l'):
a.byteswap()
if a.typecode == 'c':
f = open(TESTFN, "w")
f.write("The quick brown fox jumps over the lazy dog.\n")
f.close()
f = open(TESTFN, 'r')
a.fromfile(f, 10)
f.close()
if verbose:
print 'char array with 10 bytes of TESTFN appended: ', a
a.fromlist(['a', 'b', 'c'])
if verbose:
print 'char array with list appended: ', a
a.insert(0, example)
if verbose:
print 'array of %s after inserting another:' % a.typecode, a
f = open(TESTFN, 'w')
a.tofile(f)
f.close()
# This block is just to verify that the operations don't blow up.
a.tolist()
a.tostring()
repr(a)
str(a)
if verbose:
print 'array of %s converted to a list: ' % a.typecode, a.tolist()
if verbose:
print 'array of %s converted to a string: ' \
% a.typecode, a.tostring()
if type == 'c':
a = array(type, "abcde")
a[:-1] = a
if a != array(type, "abcdee"):
raise TestFailed, "array(%s) self-slice-assign (head)" % type
a = array(type, "abcde")
a[1:] = a
if a != array(type, "aabcde"):
raise TestFailed, "array(%s) self-slice-assign (tail)" % type
a = array(type, "abcde")
a[1:-1] = a
if a != array(type, "aabcdee"):
raise TestFailed, "array(%s) self-slice-assign (cntr)" % type
if a.index("e") != 5:
raise TestFailed, "array(%s) index-test" % type
if a.count("a") != 2:
raise TestFailed, "array(%s) count-test" % type
a.remove("e")
if a != array(type, "aabcde"):
raise TestFailed, "array(%s) remove-test" % type
if a.pop(0) != "a":
raise TestFailed, "array(%s) pop-test" % type
if a.pop(1) != "b":
raise TestFailed, "array(%s) pop-test" % type
a.extend(array(type, "xyz"))
if a != array(type, "acdexyz"):
raise TestFailed, "array(%s) extend-test" % type
a.pop()
a.pop()
a.pop()
x = a.pop()
if x != 'e':
raise TestFailed, "array(%s) pop-test" % type
if a != array(type, "acd"):
raise TestFailed, "array(%s) pop-test" % type
a.reverse()
if a != array(type, "dca"):
raise TestFailed, "array(%s) reverse-test" % type
else:
a = array(type, [1, 2, 3, 4, 5])
a[:-1] = a
if a != array(type, [1, 2, 3, 4, 5, 5]):
raise TestFailed, "array(%s) self-slice-assign (head)" % type
a = array(type, [1, 2, 3, 4, 5])
a[1:] = a
if a != array(type, [1, 1, 2, 3, 4, 5]):
raise TestFailed, "array(%s) self-slice-assign (tail)" % type
a = array(type, [1, 2, 3, 4, 5])
a[1:-1] = a
if a != array(type, [1, 1, 2, 3, 4, 5, 5]):
raise TestFailed, "array(%s) self-slice-assign (cntr)" % type
if a.index(5) != 5:
raise TestFailed, "array(%s) index-test" % type
if a.count(1) != 2:
raise TestFailed, "array(%s) count-test" % type
a.remove(5)
if a != array(type, [1, 1, 2, 3, 4, 5]):
raise TestFailed, "array(%s) remove-test" % type
if a.pop(0) != 1:
raise TestFailed, "array(%s) pop-test" % type
if a.pop(1) != 2:
raise TestFailed, "array(%s) pop-test" % type
a.extend(array(type, [7, 8, 9]))
if a != array(type, [1, 3, 4, 5, 7, 8, 9]):
raise TestFailed, "array(%s) extend-test" % type
a.pop()
a.pop()
a.pop()
x = a.pop()
if x != 5:
raise TestFailed, "array(%s) pop-test" % type
if a != array(type, [1, 3, 4]):
raise TestFailed, "array(%s) pop-test" % type
a.reverse()
if a != array(type, [4, 3, 1]):
raise TestFailed, "array(%s) reverse-test" % type
# test that overflow exceptions are raised as expected for assignment
# to array of specific integral types
from math import pow
#check using long inputs
if type in ('b', 'h', 'i', 'l'):
a = array(type)
signedLowerLimit = -1 * long(pow(2, a.itemsize * 8 - 1))
signedUpperLimit = long(pow(2, a.itemsize * 8 - 1)) - 1L
unsignedLowerLimit = 0
unsignedUpperLimit = long(pow(2, a.itemsize * 8)) - 1L
testoverflow(type, signedLowerLimit, signedUpperLimit)
#check using integer inputs - int cannot hold MAXINT+1 nor MININT-1
# so only valid test types are byte and short for this test
if type in ('b', 'h'):
a = array(type)
signedLowerLimit = -1 * int(pow(2, a.itemsize * 8 - 1))
signedUpperLimit = int(pow(2, a.itemsize * 8 - 1)) - 1
unsignedLowerLimit = 0
unsignedUpperLimit = int(pow(2, a.itemsize * 8)) - 1
testoverflow(type, signedLowerLimit, signedUpperLimit)
def init_tests():
test = array('c', ['t','e','s','t'])
verify(init_test( "test: String initialisation", "test", 'c') == test,
"string initialisation failed")
test = array('i', [41,42,43,44])
s = test.tostring();
verify(init_test( "test: String2 initialisation", s, 'i') == test,
"string 2 initialisation failed")
init_test( "test: List initialisation", [1,2,3,4], 'i')
init_test( "test: Tuple initialisation", (1,2,3,4), 'i')
test = array('i', [1,2,3,4])
verify(init_test( "test: array initialisation", test, 'i') == test,
"array init failed")
try:
init_test('test: "broken" list initialisation', [1,2,3,4, 'fred'], 'i')
raise TestFailed, '"broken" list initialisation'
except TypeError:
pass
test = array('i', [1,2,3,4])
try:
init_test('test: "broken" PyArray initialisation', test, 'd')
raise TestFailed, '"broken" PyArray initialisation'
except TypeError:
pass
f = open(TESTFN, "w")
#f.write("\x00\x00\x00\x01")
f.write("test message\nline2\nline3");
f.close();
f = open(TESTFN, "r")
try:
init_test( "test: Invalid initialisation object (file)", f, 'i')
raise TestFailed, "Invalid initialisation object (file)"
except TypeError:
pass
f.close()
try:
init_test( "test: Invalid initialisation object (module)", sys, 'i')
raise TestFailed, "Invalid initialisation object (module)"
except TypeError:
pass
def extend_tests():
test = array('c', 'testextend')
verify(extend_test("test: String extend", "test", "extend", 'c') == test,
"String extend failed")
test = array('i', [1,2,3,4,51,52,53,54]);
verify( extend_test("test: List extend", [1,2,3,4], [51,52,53,54], 'i') == test,
"List extend failed")
test = array('i', (1,2,3,4,51,52,53,54));
verify( extend_test("test: Tuple extend", (1,2,3,4), (51,52,53,54), 'i') == test,
"Tuple extend failed")
try:
extend_test('test: "broken" list extend', [1,2,3,4], [51,52,53,"fred"], 'i')
raise TestFailed, 'test: "broken" list extend'
except TypeError:
pass
a = array('d', [123.45, 567.89])
test = array('i', [1,2,3,4])
try:
assert extend_test("test: Array type mismatch", [1,2,3,4], a, 'i') == test, \
"Array mismatch test failed"
raise TestFailed, "test: Array type mismatch"
except TypeError:
pass
del a
f = open(TESTFN, "r")
try:
extend_test("test: incorrect type extend (file)", [1,2,3,4], f, 'i')
raise TestFailed, "test: incorrect type extend (file)"
except TypeError:
pass
f.close()
try:
extend_test("test: incorrect type extend (module)", (1,2,3,4), sys, 'i')
raise TestFailed, "test: incorrect type extend (module)"
except TypeError:
pass
try:
extend_test("test: incorrect type extend (integer)", [], 456, 'i')
raise TestFailed, "test: incorrect type extend (integer)"
except TypeError:
pass
def fromlist_tests():
test = array('c', ['t','e','s','t','h','e','l','l','o'])
verify(fromlist_test("test: String fromlist", "test", ['h','e','l','l','o'], 'c') == test,
"String fromlist failed")
test = array('i', [1,2,3,4,51,52,53,54])
verify(fromlist_test("test: integer fromlist", [1,2,3,4], [51,52,53,54], 'i') == test,
"integer fromlist failed")
try:
fromlist_test('test: "broken" fromlist (integer)', [1,2,3,4], [51,52,53,"fred"], 'i')
raise TestFailed, 'test: "broken" fromlist (integer)'
except TypeError:
pass
try:
fromlist_test("test: invalid fromlist (tuple)", [1,2,3,4], (51,52,53,54), 'i')
raise TestFailed, "test: invalid fromlist (tuple)"
except TypeError:
pass
def init_test(name, init, typecode):
if verbose:
print 40*'*'
print name, "- type:", typecode
print "initialiser:", init
a = array(typecode, init)
if verbose:
print a
return a
def extend_test(name, init, extend, typecode):
if verbose:
print 40*'*'
print name, "- type:", typecode
a = array(typecode, init)
if verbose:
print "initial:", a
print "extended by:", extend
a.extend(extend)
#if no exceptions then
if verbose:
print "final:", a
return a
def fromlist_test(name, init, listdata, typecode):
if verbose:
print 40*'*'
print name , "- type:", typecode
a = array(typecode, init)
if verbose:
print "initial:", a
print "fromlist source:", listdata
a.fromlist(listdata)
#if no exceptions then
if verbose:
print "final:", a
return a
main()
| mit |
msabramo/ansible | lib/ansible/modules/network/nxos/nxos_aaa_server.py | 19 | 12497 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: nxos_aaa_server
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages AAA server global configuration.
description:
- Manages AAA server global configuration
author:
- Jason Edelman (@jedelman8)
notes:
- The server_type parameter is always required.
- If encrypt_type is not supplied, the global AAA server key will be
stored as encrypted (type 7).
- Changes to the global AAA server key with encrypt_type=0
are not idempotent.
- If global AAA server key is not found, it's shown as "unknown"
- state=default will set the supplied parameters to their default values.
The parameters that you want to default must also be set to default.
If global_key=default, the global key will be removed.
options:
server_type:
description:
- The server type is either radius or tacacs.
required: true
choices: ['radius', 'tacacs']
global_key:
description:
- Global AAA shared secret.
required: false
default: null
encrypt_type:
description:
- The state of encryption applied to the entered global key.
              0 clear text, 7 encrypted. Type-6 encryption is not supported.
required: false
default: null
choices: ['0', '7']
deadtime:
description:
- Duration for which a non-reachable AAA server is skipped,
in minutes. Range is 1-1440. Device default is 0.
required: false
default: null
server_timeout:
description:
- Global AAA server timeout period, in seconds. Range is 1-60.
Device default is 5.
required: false
default: null
directed_request:
description:
- Enables direct authentication requests to AAA server.
Device default is disabled.
required: false
default: null
choices: ['enabled', 'disabled']
state:
description:
- Manage the state of the resource.
required: true
default: present
choices: ['present','default']
'''
EXAMPLES = '''
# Radius Server Basic settings
- name: "Radius Server Basic settings"
nxos_aaa_server:
server_type: radius
server_timeout: 9
deadtime: 20
directed_request: enabled
    host: "{{ inventory_hostname }}"
    username: "{{ un }}"
    password: "{{ pwd }}"
# Tacacs Server Basic settings
- name: "Tacacs Server Basic settings"
nxos_aaa_server:
server_type: tacacs
server_timeout: 8
deadtime: 19
directed_request: disabled
    host: "{{ inventory_hostname }}"
    username: "{{ un }}"
    password: "{{ pwd }}"
# Setting Global Key
- name: "AAA Server Global Key"
nxos_aaa_server:
server_type: radius
global_key: test_key
    host: "{{ inventory_hostname }}"
    username: "{{ un }}"
    password: "{{ pwd }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"deadtime": "22", "directed_request": "enabled",
"server_type": "radius", "server_timeout": "11"}
existing:
description:
- k/v pairs of existing aaa server
type: dict
sample: {"deadtime": "0", "directed_request": "disabled",
"global_key": "unknown", "server_timeout": "5"}
end_state:
description: k/v pairs of aaa params after module execution
returned: always
type: dict
sample: {"deadtime": "22", "directed_request": "enabled",
"global_key": "unknown", "server_timeout": "11"}
state:
description: state as sent in from the playbook
returned: always
type: string
sample: "present"
updates:
description: command sent to the device
returned: always
type: list
sample: ["radius-server deadtime 22", "radius-server timeout 11",
"radius-server directed-request"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re
from ansible.module_utils.nxos import load_config, run_commands
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
def execute_show_command(command, module, command_type='cli_show'):
cmds = [command]
if module.params['transport'] == 'cli':
body = run_commands(module, cmds)
elif module.params['transport'] == 'nxapi':
body = run_commands(module, cmds)
return body
def flatten_list(command_lists):
flat_command_list = []
for command in command_lists:
if isinstance(command, list):
flat_command_list.extend(command)
else:
flat_command_list.append(command)
return flat_command_list
def get_aaa_server_info(server_type, module):
aaa_server_info = {}
server_command = 'show {0}-server'.format(server_type)
request_command = 'show {0}-server directed-request'.format(server_type)
global_key_command = 'show run | sec {0}'.format(server_type)
aaa_regex = '.*{0}-server\skey\s\d\s+(?P<key>\S+).*'.format(server_type)
server_body = execute_show_command(
server_command, module, command_type='cli_show_ascii')[0]
split_server = server_body.splitlines()
for line in split_server:
if line.startswith('timeout'):
aaa_server_info['server_timeout'] = line.split(':')[1]
elif line.startswith('deadtime'):
aaa_server_info['deadtime'] = line.split(':')[1]
request_body = execute_show_command(
request_command, module, command_type='cli_show_ascii')[0]
aaa_server_info['directed_request'] = request_body.replace('\n', '')
key_body = execute_show_command(
global_key_command, module, command_type='cli_show_ascii')[0]
try:
match_global_key = re.match(aaa_regex, key_body, re.DOTALL)
group_key = match_global_key.groupdict()
aaa_server_info['global_key'] = group_key["key"].replace('\"', '')
except (AttributeError, TypeError):
aaa_server_info['global_key'] = 'unknown'
return aaa_server_info
def set_aaa_server_global_key(encrypt_type, key, server_type):
if not encrypt_type:
encrypt_type = ''
return '{0}-server key {1} {2}'.format(
server_type, encrypt_type, key)
def config_aaa_server(params, server_type):
cmds = []
deadtime = params.get('deadtime')
server_timeout = params.get('server_timeout')
directed_request = params.get('directed_request')
encrypt_type = params.get('encrypt_type', '7')
global_key = params.get('global_key')
if deadtime is not None:
cmds.append('{0}-server deadtime {1}'.format(server_type, deadtime))
if server_timeout is not None:
cmds.append('{0}-server timeout {1}'.format(server_type, server_timeout))
if directed_request is not None:
if directed_request == 'enabled':
cmds.append('{0}-server directed-request'.format(server_type))
elif directed_request == 'disabled':
cmds.append('no {0}-server directed-request'.format(server_type))
if global_key is not None:
cmds.append('{0}-server key {1} {2}'.format(server_type, encrypt_type,
global_key))
return cmds
def default_aaa_server(existing, params, server_type):
cmds = []
deadtime = params.get('deadtime')
server_timeout = params.get('server_timeout')
directed_request = params.get('directed_request')
global_key = params.get('global_key')
existing_key = existing.get('global_key')
if deadtime is not None:
cmds.append('no {0}-server deadtime 1'.format(server_type))
if server_timeout is not None:
cmds.append('no {0}-server timeout 1'.format(server_type))
if directed_request is not None:
cmds.append('no {0}-server directed-request'.format(server_type))
if global_key is not None and existing_key is not None:
cmds.append('no {0}-server key 7 {1}'.format(server_type, existing_key))
return cmds
def main():
argument_spec = dict(
server_type=dict(type='str',
choices=['radius', 'tacacs'], required=True),
global_key=dict(type='str'),
encrypt_type=dict(type='str', choices=['0', '7']),
deadtime=dict(type='str'),
server_timeout=dict(type='str'),
directed_request=dict(type='str',
choices=['enabled', 'disabled', 'default']),
state=dict(choices=['default', 'present'], default='present'),
)
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
warnings = list()
check_args(module, warnings)
server_type = module.params['server_type']
global_key = module.params['global_key']
encrypt_type = module.params['encrypt_type']
deadtime = module.params['deadtime']
server_timeout = module.params['server_timeout']
directed_request = module.params['directed_request']
state = module.params['state']
if encrypt_type and not global_key:
module.fail_json(msg='encrypt_type must be used with global_key.')
args = dict(server_type=server_type, global_key=global_key,
encrypt_type=encrypt_type, deadtime=deadtime,
server_timeout=server_timeout, directed_request=directed_request)
changed = False
proposed = dict((k, v) for k, v in args.items() if v is not None)
existing = get_aaa_server_info(server_type, module)
end_state = existing
commands = []
if state == 'present':
if deadtime:
try:
if int(deadtime) < 0 or int(deadtime) > 1440:
raise ValueError
except ValueError:
module.fail_json(
msg='deadtime must be an integer between 0 and 1440')
if server_timeout:
try:
if int(server_timeout) < 1 or int(server_timeout) > 60:
raise ValueError
except ValueError:
module.fail_json(
msg='server_timeout must be an integer between 1 and 60')
delta = dict(set(proposed.items()).difference(
existing.items()))
if delta:
command = config_aaa_server(delta, server_type)
if command:
commands.append(command)
elif state == 'default':
for key, value in proposed.items():
if key != 'server_type' and value != 'default':
module.fail_json(
msg='Parameters must be set to "default"'
'when state=default')
command = default_aaa_server(existing, proposed, server_type)
if command:
commands.append(command)
cmds = flatten_list(commands)
if cmds:
if module.check_mode:
module.exit_json(changed=True, commands=cmds)
else:
changed = True
load_config(module, cmds)
end_state = get_aaa_server_info(server_type, module)
if 'configure' in cmds:
cmds.pop(0)
results = {}
results['proposed'] = proposed
results['existing'] = existing
results['updates'] = cmds
results['changed'] = changed
results['warnings'] = warnings
results['end_state'] = end_state
module.exit_json(**results)
if __name__ == '__main__':
main()
| gpl-3.0 |
levelrf/level_basestation | gr-digital/examples/narrowband/tunnel.py | 59 | 10293 | #!/usr/bin/env python
#
# Copyright 2005,2006,2009,2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
# ////////////////////////////////////////////////////////////////////
#
# This code sets up a virtual ethernet interface (typically
# gr0), and relays packets between the interface and the GNU Radio
# PHY+MAC
#
# What this means in plain language, is that if you've got a couple
# of USRPs on different machines, and if you run this code on those
# machines, you can talk between them using normal TCP/IP
# networking.
#
# ////////////////////////////////////////////////////////////////////
from gnuradio import gr, digital
from gnuradio import eng_notation
from gnuradio.eng_option import eng_option
from optparse import OptionParser
# from current dir
from receive_path import receive_path
from transmit_path import transmit_path
from uhd_interface import uhd_transmitter
from uhd_interface import uhd_receiver
import os, sys
import random, time, struct
#print os.getpid()
#raw_input('Attach and press enter')
# ////////////////////////////////////////////////////////////////////
#
# Use the Universal TUN/TAP device driver to move packets to/from
# kernel
#
# See /usr/src/linux/Documentation/networking/tuntap.txt
#
# ////////////////////////////////////////////////////////////////////
# Linux specific...
# TUNSETIFF ifr flags from <linux/tun_if.h>
IFF_TUN = 0x0001 # tunnel IP packets
IFF_TAP = 0x0002 # tunnel ethernet frames
IFF_NO_PI = 0x1000 # don't pass extra packet info
IFF_ONE_QUEUE = 0x2000 # beats me ;)
def open_tun_interface(tun_device_filename):
from fcntl import ioctl
mode = IFF_TAP | IFF_NO_PI
TUNSETIFF = 0x400454ca
tun = os.open(tun_device_filename, os.O_RDWR)
ifs = ioctl(tun, TUNSETIFF, struct.pack("16sH", "gr%d", mode))
ifname = ifs[:16].strip("\x00")
return (tun, ifname)
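# The descriptor returned above is later serviced with plain
# os.read()/os.write() calls from the cs_mac MAC layer below.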
# ////////////////////////////////////////////////////////////////////
# the flow graph
# ////////////////////////////////////////////////////////////////////
class my_top_block(gr.top_block):
def __init__(self, mod_class, demod_class,
rx_callback, options):
gr.top_block.__init__(self)
# Get the modulation's bits_per_symbol
args = mod_class.extract_kwargs_from_options(options)
symbol_rate = options.bitrate / mod_class(**args).bits_per_symbol()
self.source = uhd_receiver(options.args, symbol_rate,
options.samples_per_symbol,
options.rx_freq, options.rx_gain,
options.spec, options.antenna,
options.verbose)
self.sink = uhd_transmitter(options.args, symbol_rate,
options.samples_per_symbol,
options.tx_freq, options.tx_gain,
options.spec, options.antenna,
options.verbose)
options.samples_per_symbol = self.source._sps
self.txpath = transmit_path(mod_class, options)
self.rxpath = receive_path(demod_class, rx_callback, options)
self.connect(self.txpath, self.sink)
self.connect(self.source, self.rxpath)
def send_pkt(self, payload='', eof=False):
return self.txpath.send_pkt(payload, eof)
def carrier_sensed(self):
"""
Return True if the receive path thinks there's carrier
"""
return self.rxpath.carrier_sensed()
def set_freq(self, target_freq):
"""
Set the center frequency we're interested in.
"""
self.sink.set_freq(target_freq)
self.source.set_freq(target_freq)
# ////////////////////////////////////////////////////////////////////
# Carrier Sense MAC
# ////////////////////////////////////////////////////////////////////
class cs_mac(object):
"""
Prototype carrier sense MAC
Reads packets from the TUN/TAP interface, and sends them to the
PHY. Receives packets from the PHY via phy_rx_callback, and sends
them into the TUN/TAP interface.
Of course, we're not restricted to getting packets via TUN/TAP,
this is just an example.
"""
def __init__(self, tun_fd, verbose=False):
self.tun_fd = tun_fd # file descriptor for TUN/TAP interface
self.verbose = verbose
self.tb = None # top block (access to PHY)
def set_top_block(self, tb):
self.tb = tb
def phy_rx_callback(self, ok, payload):
"""
Invoked by thread associated with PHY to pass received packet up.
Args:
ok: bool indicating whether payload CRC was OK
payload: contents of the packet (string)
"""
if self.verbose:
print "Rx: ok = %r len(payload) = %4d" % (ok, len(payload))
if ok:
os.write(self.tun_fd, payload)
def main_loop(self):
"""
Main loop for MAC.
Only returns if we get an error reading from TUN.
FIXME: may want to check for EINTR and EAGAIN and reissue read
"""
min_delay = 0.001 # seconds
while 1:
payload = os.read(self.tun_fd, 10*1024)
if not payload:
self.tb.send_pkt(eof=True)
break
if self.verbose:
print "Tx: len(payload) = %4d" % (len(payload),)
delay = min_delay
while self.tb.carrier_sensed():
sys.stderr.write('B')
time.sleep(delay)
if delay < 0.050:
delay = delay * 2 # exponential back-off
self.tb.send_pkt(payload)
# /////////////////////////////////////////////////////////////////////////////
# main
# /////////////////////////////////////////////////////////////////////////////
def main():
mods = digital.modulation_utils.type_1_mods()
demods = digital.modulation_utils.type_1_demods()
parser = OptionParser (option_class=eng_option, conflict_handler="resolve")
expert_grp = parser.add_option_group("Expert")
parser.add_option("-m", "--modulation", type="choice", choices=mods.keys(),
default='gmsk',
help="Select modulation from: %s [default=%%default]"
% (', '.join(mods.keys()),))
parser.add_option("-s", "--size", type="eng_float", default=1500,
help="set packet size [default=%default]")
parser.add_option("-v","--verbose", action="store_true", default=False)
expert_grp.add_option("-c", "--carrier-threshold", type="eng_float", default=30,
help="set carrier detect threshold (dB) [default=%default]")
expert_grp.add_option("","--tun-device-filename", default="/dev/net/tun",
help="path to tun device file [default=%default]")
transmit_path.add_options(parser, expert_grp)
receive_path.add_options(parser, expert_grp)
uhd_receiver.add_options(parser)
uhd_transmitter.add_options(parser)
for mod in mods.values():
mod.add_options(expert_grp)
for demod in demods.values():
demod.add_options(expert_grp)
(options, args) = parser.parse_args ()
if len(args) != 0:
parser.print_help(sys.stderr)
sys.exit(1)
# open the TUN/TAP interface
(tun_fd, tun_ifname) = open_tun_interface(options.tun_device_filename)
# Attempt to enable realtime scheduling
r = gr.enable_realtime_scheduling()
if r == gr.RT_OK:
realtime = True
else:
realtime = False
print "Note: failed to enable realtime scheduling"
# instantiate the MAC
mac = cs_mac(tun_fd, verbose=True)
# build the graph (PHY)
tb = my_top_block(mods[options.modulation],
demods[options.modulation],
mac.phy_rx_callback,
options)
mac.set_top_block(tb) # give the MAC a handle for the PHY
if tb.txpath.bitrate() != tb.rxpath.bitrate():
print "WARNING: Transmit bitrate = %sb/sec, Receive bitrate = %sb/sec" % (
eng_notation.num_to_str(tb.txpath.bitrate()),
eng_notation.num_to_str(tb.rxpath.bitrate()))
print "modulation: %s" % (options.modulation,)
print "freq: %s" % (eng_notation.num_to_str(options.tx_freq))
print "bitrate: %sb/sec" % (eng_notation.num_to_str(tb.txpath.bitrate()),)
print "samples/symbol: %3d" % (tb.txpath.samples_per_symbol(),)
tb.rxpath.set_carrier_threshold(options.carrier_threshold)
print "Carrier sense threshold:", options.carrier_threshold, "dB"
print
print "Allocated virtual ethernet interface: %s" % (tun_ifname,)
print "You must now use ifconfig to set its IP address. E.g.,"
print
print " $ sudo ifconfig %s 192.168.200.1" % (tun_ifname,)
print
print "Be sure to use a different address in the same subnet for each machine."
print
tb.start() # Start executing the flow graph (runs in separate threads)
mac.main_loop() # don't expect this to return...
tb.stop() # but if it does, tell flow graph to stop.
tb.wait() # wait for it to finish
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
| gpl-3.0 |
gangadhar-kadam/latestchurcherp | erpnext/hr/doctype/salary_manager/salary_manager.py | 13 | 5930 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cint, flt, nowdate
from frappe import _
from frappe.model.document import Document
class SalaryManager(Document):
def get_emp_list(self):
"""
Returns list of active employees based on selected criteria
		and for whom a salary structure exists
"""
cond = self.get_filter_condition()
cond += self.get_joining_releiving_condition()
emp_list = frappe.db.sql("""
select t1.name
from `tabEmployee` t1, `tabSalary Structure` t2
where t1.docstatus!=2 and t2.docstatus != 2
and t1.name = t2.employee
%s """% cond)
return emp_list
def get_filter_condition(self):
self.check_mandatory()
cond = ''
for f in ['company', 'branch', 'department', 'designation']:
if self.get(f):
				cond += " and t1." + f + " = '" + self.get(f).replace("'", "\\'") + "'"
return cond
def get_joining_releiving_condition(self):
m = self.get_month_details(self.fiscal_year, self.month)
cond = """
and ifnull(t1.date_of_joining, '0000-00-00') <= '%(month_end_date)s'
and ifnull(t1.relieving_date, '2199-12-31') >= '%(month_start_date)s'
""" % m
return cond
def check_mandatory(self):
for f in ['company', 'month', 'fiscal_year']:
if not self.get(f):
frappe.throw(_("Please set {0}").format(f))
def get_month_details(self, year, month):
ysd = frappe.db.get_value("Fiscal Year", year, "year_start_date")
if ysd:
from dateutil.relativedelta import relativedelta
import calendar, datetime
diff_mnt = cint(month)-cint(ysd.month)
if diff_mnt<0:
diff_mnt = 12-int(ysd.month)+cint(month)
msd = ysd + relativedelta(months=diff_mnt) # month start date
month_days = cint(calendar.monthrange(cint(msd.year) ,cint(month))[1]) # days in month
med = datetime.date(msd.year, cint(month), month_days) # month end date
return {
'year': msd.year,
'month_start_date': msd,
'month_end_date': med,
'month_days': month_days
}
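	# Worked example (illustrative): with a fiscal year starting 2015-04-01
	# and month='01', diff_mnt = 1 - 4 = -3, so it is recomputed as
	# 12 - 4 + 1 = 9; msd then becomes 2016-01-01, month_days is 31 and
	# med is 2016-01-31.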
def create_sal_slip(self):
"""
		Creates salary slips for selected employees if not already created
"""
emp_list = self.get_emp_list()
ss_list = []
for emp in emp_list:
if not frappe.db.sql("""select name from `tabSalary Slip`
where docstatus!= 2 and employee = %s and month = %s and fiscal_year = %s and company = %s
""", (emp[0], self.month, self.fiscal_year, self.company)):
ss = frappe.get_doc({
"doctype": "Salary Slip",
"fiscal_year": self.fiscal_year,
"employee": emp[0],
"month": self.month,
"email_check": self.send_email,
"company": self.company,
})
ss.insert()
ss_list.append(ss.name)
return self.create_log(ss_list)
def create_log(self, ss_list):
log = "<p>No employee for the above selected criteria OR salary slip already created</p>"
if ss_list:
log = "<b>Salary Slip Created For</b>\
<br><br>%s" % '<br>'.join(ss_list)
return log
def get_sal_slip_list(self):
"""
Returns list of salary slips based on selected criteria
which are not submitted
"""
cond = self.get_filter_condition()
ss_list = frappe.db.sql("""
select t1.name from `tabSalary Slip` t1
where t1.docstatus = 0 and month = %s and fiscal_year = %s %s
""" % ('%s', '%s', cond), (self.month, self.fiscal_year))
return ss_list
def submit_salary_slip(self):
"""
Submit all salary slips based on selected criteria
"""
ss_list = self.get_sal_slip_list()
not_submitted_ss = []
for ss in ss_list:
ss_obj = frappe.get_doc("Salary Slip",ss[0])
try:
ss_obj.email_check = self.send_email
ss_obj.submit()
except Exception,e:
not_submitted_ss.append(ss[0])
frappe.msgprint(e)
continue
return self.create_submit_log(ss_list, not_submitted_ss)
def create_submit_log(self, all_ss, not_submitted_ss):
log = ''
if not all_ss:
log = "No salary slip found to submit for the above selected criteria"
else:
all_ss = [d[0] for d in all_ss]
submitted_ss = list(set(all_ss) - set(not_submitted_ss))
if submitted_ss:
mail_sent_msg = self.send_email and " (Mail has been sent to the employee)" or ""
log = """
<b>Salary Slips Submitted %s:</b>\
<br><br> %s <br><br>
""" % (mail_sent_msg, '<br>'.join(submitted_ss))
if not_submitted_ss:
log += """
<b>Not Submitted Salary Slips: </b>\
<br><br> %s <br><br> \
Reason: <br>\
May be company email id specified in employee master is not valid. <br> \
Please mention correct email id in employee master or if you don't want to \
send mail, uncheck 'Send Email' checkbox. <br>\
Then try to submit Salary Slip again.
"""% ('<br>'.join(not_submitted_ss))
return log
def get_total_salary(self):
"""
Get total salary amount from submitted salary slip based on selected criteria
"""
cond = self.get_filter_condition()
tot = frappe.db.sql("""
select sum(rounded_total) from `tabSalary Slip` t1
where t1.docstatus = 1 and month = %s and fiscal_year = %s %s
""" % ('%s', '%s', cond), (self.month, self.fiscal_year))
return flt(tot[0][0])
def make_journal_entry(self, salary_account = None):
amount = self.get_total_salary()
default_bank_account = frappe.db.get_value("Company", self.company,
"default_bank_account")
journal_entry = frappe.new_doc('Journal Entry')
journal_entry.voucher_type = 'Bank Entry'
journal_entry.user_remark = _('Payment of salary for the month {0} and year {1}').format(self.month,
self.fiscal_year)
journal_entry.fiscal_year = self.fiscal_year
journal_entry.company = self.company
journal_entry.posting_date = nowdate()
journal_entry.set("accounts", [
{
"account": salary_account,
"debit": amount
},
{
"account": default_bank_account,
"credit": amount
},
])
return journal_entry.as_dict()
| agpl-3.0 |
rizumu/django | tests/file_storage/tests.py | 35 | 30366 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import errno
import os
import shutil
import sys
import tempfile
import threading
import time
import unittest
from datetime import datetime, timedelta
from django.core.cache import cache
from django.core.exceptions import SuspiciousFileOperation, SuspiciousOperation
from django.core.files.base import ContentFile, File
from django.core.files.storage import FileSystemStorage, get_storage_class
from django.core.files.uploadedfile import (
InMemoryUploadedFile, SimpleUploadedFile, TemporaryUploadedFile,
)
from django.db.models.fields.files import FileDescriptor
from django.test import (
LiveServerTestCase, SimpleTestCase, TestCase, override_settings,
)
from django.utils import six
from django.utils._os import upath
from django.utils.six.moves.urllib.request import urlopen
from .models import Storage, temp_storage, temp_storage_location
FILE_SUFFIX_REGEX = '[A-Za-z0-9]{7}'
class GetStorageClassTests(SimpleTestCase):
def test_get_filesystem_storage(self):
"""
get_storage_class returns the class for a storage backend name/path.
"""
self.assertEqual(
get_storage_class('django.core.files.storage.FileSystemStorage'),
FileSystemStorage)
def test_get_invalid_storage_module(self):
"""
        get_storage_class raises an error if the requested import doesn't exist.
"""
with six.assertRaisesRegex(self, ImportError, "No module named '?storage'?"):
get_storage_class('storage.NonExistingStorage')
def test_get_nonexisting_storage_class(self):
"""
        get_storage_class raises an error if the requested class doesn't exist.
"""
self.assertRaises(ImportError, get_storage_class,
'django.core.files.storage.NonExistingStorage')
def test_get_nonexisting_storage_module(self):
"""
        get_storage_class raises an error if the requested module doesn't exist.
"""
# Error message may or may not be the fully qualified path.
with six.assertRaisesRegex(self, ImportError,
"No module named '?(django.core.files.)?non_existing_storage'?"):
get_storage_class(
'django.core.files.non_existing_storage.NonExistingStorage')
class FileStorageDeconstructionTests(unittest.TestCase):
def test_deconstruction(self):
path, args, kwargs = temp_storage.deconstruct()
self.assertEqual(path, "django.core.files.storage.FileSystemStorage")
self.assertEqual(args, tuple())
self.assertEqual(kwargs, {'location': temp_storage_location})
kwargs_orig = {
'location': temp_storage_location,
'base_url': 'http://myfiles.example.com/'
}
storage = FileSystemStorage(**kwargs_orig)
path, args, kwargs = storage.deconstruct()
self.assertEqual(kwargs, kwargs_orig)
class FileStorageTests(unittest.TestCase):
storage_class = FileSystemStorage
def setUp(self):
self.temp_dir = tempfile.mkdtemp()
self.storage = self.storage_class(location=self.temp_dir,
base_url='/test_media_url/')
# Set up a second temporary directory which is ensured to have a mixed
# case name.
self.temp_dir2 = tempfile.mkdtemp(suffix='aBc')
def tearDown(self):
shutil.rmtree(self.temp_dir)
shutil.rmtree(self.temp_dir2)
def test_empty_location(self):
"""
        Makes sure an empty location falls back to the current working directory
"""
storage = self.storage_class(location='')
self.assertEqual(storage.base_location, '')
self.assertEqual(storage.location, upath(os.getcwd()))
def test_file_access_options(self):
"""
Standard file access options are available, and work as expected.
"""
self.assertFalse(self.storage.exists('storage_test'))
f = self.storage.open('storage_test', 'w')
f.write('storage contents')
f.close()
self.assertTrue(self.storage.exists('storage_test'))
f = self.storage.open('storage_test', 'r')
self.assertEqual(f.read(), 'storage contents')
f.close()
self.storage.delete('storage_test')
self.assertFalse(self.storage.exists('storage_test'))
def test_file_accessed_time(self):
"""
File storage returns a Datetime object for the last accessed time of
a file.
"""
self.assertFalse(self.storage.exists('test.file'))
f = ContentFile('custom contents')
f_name = self.storage.save('test.file', f)
atime = self.storage.accessed_time(f_name)
self.assertEqual(atime, datetime.fromtimestamp(
os.path.getatime(self.storage.path(f_name))))
self.assertLess(datetime.now() - self.storage.accessed_time(f_name), timedelta(seconds=2))
self.storage.delete(f_name)
def test_file_created_time(self):
"""
File storage returns a Datetime object for the creation time of
a file.
"""
self.assertFalse(self.storage.exists('test.file'))
f = ContentFile('custom contents')
f_name = self.storage.save('test.file', f)
ctime = self.storage.created_time(f_name)
self.assertEqual(ctime, datetime.fromtimestamp(
os.path.getctime(self.storage.path(f_name))))
self.assertLess(datetime.now() - self.storage.created_time(f_name), timedelta(seconds=2))
self.storage.delete(f_name)
def test_file_modified_time(self):
"""
File storage returns a Datetime object for the last modified time of
a file.
"""
self.assertFalse(self.storage.exists('test.file'))
f = ContentFile('custom contents')
f_name = self.storage.save('test.file', f)
mtime = self.storage.modified_time(f_name)
self.assertEqual(mtime, datetime.fromtimestamp(
os.path.getmtime(self.storage.path(f_name))))
self.assertLess(datetime.now() - self.storage.modified_time(f_name), timedelta(seconds=2))
self.storage.delete(f_name)
def test_file_save_without_name(self):
"""
File storage extracts the filename from the content object if no
name is given explicitly.
"""
self.assertFalse(self.storage.exists('test.file'))
f = ContentFile('custom contents')
f.name = 'test.file'
storage_f_name = self.storage.save(None, f)
self.assertEqual(storage_f_name, f.name)
self.assertTrue(os.path.exists(os.path.join(self.temp_dir, f.name)))
self.storage.delete(storage_f_name)
def test_file_save_with_path(self):
"""
Saving a pathname should create intermediate directories as necessary.
"""
self.assertFalse(self.storage.exists('path/to'))
self.storage.save('path/to/test.file',
ContentFile('file saved with path'))
self.assertTrue(self.storage.exists('path/to'))
with self.storage.open('path/to/test.file') as f:
self.assertEqual(f.read(), b'file saved with path')
self.assertTrue(os.path.exists(
os.path.join(self.temp_dir, 'path', 'to', 'test.file')))
self.storage.delete('path/to/test.file')
def test_save_doesnt_close(self):
with TemporaryUploadedFile('test', 'text/plain', 1, 'utf8') as file:
file.write(b'1')
file.seek(0)
self.assertFalse(file.closed)
self.storage.save('path/to/test.file', file)
self.assertFalse(file.closed)
self.assertFalse(file.file.closed)
file = InMemoryUploadedFile(six.StringIO('1'), '', 'test',
'text/plain', 1, 'utf8')
with file:
self.assertFalse(file.closed)
self.storage.save('path/to/test.file', file)
self.assertFalse(file.closed)
self.assertFalse(file.file.closed)
def test_file_path(self):
"""
File storage returns the full path of a file
"""
self.assertFalse(self.storage.exists('test.file'))
f = ContentFile('custom contents')
f_name = self.storage.save('test.file', f)
self.assertEqual(self.storage.path(f_name),
os.path.join(self.temp_dir, f_name))
self.storage.delete(f_name)
def test_file_url(self):
"""
File storage returns a url to access a given file from the Web.
"""
self.assertEqual(self.storage.url('test.file'),
'%s%s' % (self.storage.base_url, 'test.file'))
# should encode special chars except ~!*()'
# like encodeURIComponent() JavaScript function do
self.assertEqual(self.storage.url(r"""~!*()'@#$%^&*abc`+ =.file"""),
"""/test_media_url/~!*()'%40%23%24%25%5E%26*abc%60%2B%20%3D.file""")
# should translate os path separator(s) to the url path separator
self.assertEqual(self.storage.url("""a/b\\c.file"""),
"""/test_media_url/a/b/c.file""")
self.storage.base_url = None
self.assertRaises(ValueError, self.storage.url, 'test.file')
# #22717: missing ending slash in base_url should be auto-corrected
storage = self.storage_class(location=self.temp_dir,
base_url='/no_ending_slash')
self.assertEqual(
storage.url('test.file'),
'%s%s' % (storage.base_url, 'test.file')
)
def test_listdir(self):
"""
File storage returns a tuple containing directories and files.
"""
self.assertFalse(self.storage.exists('storage_test_1'))
self.assertFalse(self.storage.exists('storage_test_2'))
self.assertFalse(self.storage.exists('storage_dir_1'))
self.storage.save('storage_test_1', ContentFile('custom content'))
self.storage.save('storage_test_2', ContentFile('custom content'))
os.mkdir(os.path.join(self.temp_dir, 'storage_dir_1'))
dirs, files = self.storage.listdir('')
self.assertEqual(set(dirs), {'storage_dir_1'})
self.assertEqual(set(files),
{'storage_test_1', 'storage_test_2'})
self.storage.delete('storage_test_1')
self.storage.delete('storage_test_2')
os.rmdir(os.path.join(self.temp_dir, 'storage_dir_1'))
def test_file_storage_prevents_directory_traversal(self):
"""
File storage prevents directory traversal (files can only be accessed if
they're below the storage location).
"""
self.assertRaises(SuspiciousOperation, self.storage.exists, '..')
self.assertRaises(SuspiciousOperation, self.storage.exists, '/etc/passwd')
def test_file_storage_preserves_filename_case(self):
"""The storage backend should preserve case of filenames."""
# Create a storage backend associated with the mixed case name
# directory.
other_temp_storage = self.storage_class(location=self.temp_dir2)
# Ask that storage backend to store a file with a mixed case filename.
mixed_case = 'CaSe_SeNsItIvE'
file = other_temp_storage.open(mixed_case, 'w')
file.write('storage contents')
file.close()
self.assertEqual(os.path.join(self.temp_dir2, mixed_case),
other_temp_storage.path(mixed_case))
other_temp_storage.delete(mixed_case)
def test_makedirs_race_handling(self):
"""
File storage should be robust against directory creation race conditions.
"""
real_makedirs = os.makedirs
# Monkey-patch os.makedirs, to simulate a normal call, a raced call,
# and an error.
def fake_makedirs(path):
if path == os.path.join(self.temp_dir, 'normal'):
real_makedirs(path)
elif path == os.path.join(self.temp_dir, 'raced'):
real_makedirs(path)
raise OSError(errno.EEXIST, 'simulated EEXIST')
elif path == os.path.join(self.temp_dir, 'error'):
raise OSError(errno.EACCES, 'simulated EACCES')
else:
self.fail('unexpected argument %r' % path)
try:
os.makedirs = fake_makedirs
self.storage.save('normal/test.file',
ContentFile('saved normally'))
with self.storage.open('normal/test.file') as f:
self.assertEqual(f.read(), b'saved normally')
self.storage.save('raced/test.file',
ContentFile('saved with race'))
with self.storage.open('raced/test.file') as f:
self.assertEqual(f.read(), b'saved with race')
# Check that OSErrors aside from EEXIST are still raised.
self.assertRaises(OSError,
self.storage.save, 'error/test.file', ContentFile('not saved'))
finally:
os.makedirs = real_makedirs
def test_remove_race_handling(self):
"""
File storage should be robust against file removal race conditions.
"""
real_remove = os.remove
# Monkey-patch os.remove, to simulate a normal call, a raced call,
# and an error.
def fake_remove(path):
if path == os.path.join(self.temp_dir, 'normal.file'):
real_remove(path)
elif path == os.path.join(self.temp_dir, 'raced.file'):
real_remove(path)
raise OSError(errno.ENOENT, 'simulated ENOENT')
elif path == os.path.join(self.temp_dir, 'error.file'):
raise OSError(errno.EACCES, 'simulated EACCES')
else:
self.fail('unexpected argument %r' % path)
try:
os.remove = fake_remove
self.storage.save('normal.file', ContentFile('delete normally'))
self.storage.delete('normal.file')
self.assertFalse(self.storage.exists('normal.file'))
self.storage.save('raced.file', ContentFile('delete with race'))
self.storage.delete('raced.file')
self.assertFalse(self.storage.exists('normal.file'))
# Check that OSErrors aside from ENOENT are still raised.
self.storage.save('error.file', ContentFile('delete with error'))
self.assertRaises(OSError, self.storage.delete, 'error.file')
finally:
os.remove = real_remove
def test_file_chunks_error(self):
"""
Test behavior when file.chunks() is raising an error
"""
f1 = ContentFile('chunks fails')
def failing_chunks():
raise IOError
f1.chunks = failing_chunks
with self.assertRaises(IOError):
self.storage.save('error.file', f1)
def test_delete_no_name(self):
"""
Calling delete with an empty name should not try to remove the base
storage directory, but fail loudly (#20660).
"""
with self.assertRaises(AssertionError):
self.storage.delete('')
class CustomStorage(FileSystemStorage):
def get_available_name(self, name, max_length=None):
"""
Append numbers to duplicate files rather than underscores, like Trac.
"""
parts = name.split('.')
basename, ext = parts[0], parts[1:]
number = 2
while self.exists(name):
name = '.'.join([basename, str(number)] + ext)
number += 1
return name
class CustomStorageTests(FileStorageTests):
storage_class = CustomStorage
def test_custom_get_available_name(self):
first = self.storage.save('custom_storage', ContentFile('custom contents'))
self.assertEqual(first, 'custom_storage')
second = self.storage.save('custom_storage', ContentFile('more contents'))
self.assertEqual(second, 'custom_storage.2')
self.storage.delete(first)
self.storage.delete(second)
class FileFieldStorageTests(TestCase):
def tearDown(self):
shutil.rmtree(temp_storage_location)
def _storage_max_filename_length(self, storage):
"""
Query filesystem for maximum filename length (e.g. AUFS has 242).
"""
dir_to_test = storage.location
while not os.path.exists(dir_to_test):
dir_to_test = os.path.dirname(dir_to_test)
try:
return os.pathconf(dir_to_test, 'PC_NAME_MAX')
except Exception:
return 255 # Should be safe on most backends
def test_files(self):
self.assertIsInstance(Storage.normal, FileDescriptor)
# An object without a file has limited functionality.
obj1 = Storage()
self.assertEqual(obj1.normal.name, "")
self.assertRaises(ValueError, lambda: obj1.normal.size)
# Saving a file enables full functionality.
obj1.normal.save("django_test.txt", ContentFile("content"))
self.assertEqual(obj1.normal.name, "tests/django_test.txt")
self.assertEqual(obj1.normal.size, 7)
self.assertEqual(obj1.normal.read(), b"content")
obj1.normal.close()
# File objects can be assigned to FileField attributes, but shouldn't
# get committed until the model it's attached to is saved.
obj1.normal = SimpleUploadedFile("assignment.txt", b"content")
dirs, files = temp_storage.listdir("tests")
self.assertEqual(dirs, [])
self.assertNotIn("assignment.txt", files)
obj1.save()
dirs, files = temp_storage.listdir("tests")
self.assertEqual(sorted(files), ["assignment.txt", "django_test.txt"])
# Save another file with the same name.
obj2 = Storage()
obj2.normal.save("django_test.txt", ContentFile("more content"))
obj2_name = obj2.normal.name
six.assertRegex(self, obj2_name, "tests/django_test_%s.txt" % FILE_SUFFIX_REGEX)
self.assertEqual(obj2.normal.size, 12)
obj2.normal.close()
# Deleting an object does not delete the file it uses.
obj2.delete()
obj2.normal.save("django_test.txt", ContentFile("more content"))
self.assertNotEqual(obj2_name, obj2.normal.name)
six.assertRegex(self, obj2.normal.name, "tests/django_test_%s.txt" % FILE_SUFFIX_REGEX)
obj2.normal.close()
def test_filefield_read(self):
# Files can be read in a little at a time, if necessary.
obj = Storage.objects.create(
normal=SimpleUploadedFile("assignment.txt", b"content"))
obj.normal.open()
self.assertEqual(obj.normal.read(3), b"con")
self.assertEqual(obj.normal.read(), b"tent")
self.assertEqual(list(obj.normal.chunks(chunk_size=2)), [b"co", b"nt", b"en", b"t"])
obj.normal.close()
def test_duplicate_filename(self):
# Multiple files with the same name get _(7 random chars) appended to them.
objs = [Storage() for i in range(2)]
for o in objs:
o.normal.save("multiple_files.txt", ContentFile("Same Content"))
try:
names = [o.normal.name for o in objs]
self.assertEqual(names[0], "tests/multiple_files.txt")
six.assertRegex(self, names[1], "tests/multiple_files_%s.txt" % FILE_SUFFIX_REGEX)
finally:
for o in objs:
o.delete()
def test_file_truncation(self):
# Given the max_length is limited, when multiple files get uploaded
# under the same name, then the filename get truncated in order to fit
# in _(7 random chars). When most of the max_length is taken by
# dirname + extension and there are not enough characters in the
# filename to truncate, an exception should be raised.
objs = [Storage() for i in range(2)]
filename = 'filename.ext'
for o in objs:
o.limited_length.save(filename, ContentFile('Same Content'))
try:
# Testing truncation.
names = [o.limited_length.name for o in objs]
self.assertEqual(names[0], 'tests/%s' % filename)
six.assertRegex(self, names[1], 'tests/fi_%s.ext' % FILE_SUFFIX_REGEX)
# Testing exception is raised when filename is too short to truncate.
filename = 'short.longext'
objs[0].limited_length.save(filename, ContentFile('Same Content'))
self.assertRaisesMessage(
SuspiciousFileOperation, 'Storage can not find an available filename',
objs[1].limited_length.save, *(filename, ContentFile('Same Content'))
)
finally:
for o in objs:
o.delete()
@unittest.skipIf(
sys.platform.startswith('win'),
"Windows supports at most 260 characters in a path.",
)
def test_extended_length_storage(self):
# Testing FileField with max_length > 255. Most systems have filename
# length limitation of 255. Path takes extra chars.
filename = (self._storage_max_filename_length(temp_storage) - 4) * 'a' # 4 chars for extension.
obj = Storage()
obj.extended_length.save('%s.txt' % filename, ContentFile('Same Content'))
self.assertEqual(obj.extended_length.name, 'tests/%s.txt' % filename)
self.assertEqual(obj.extended_length.read(), b'Same Content')
obj.extended_length.close()
def test_filefield_default(self):
# Default values allow an object to access a single file.
temp_storage.save('tests/default.txt', ContentFile('default content'))
obj = Storage.objects.create()
self.assertEqual(obj.default.name, "tests/default.txt")
self.assertEqual(obj.default.read(), b"default content")
obj.default.close()
# But it shouldn't be deleted, even if there are no more objects using
# it.
obj.delete()
obj = Storage()
self.assertEqual(obj.default.read(), b"default content")
obj.default.close()
def test_empty_upload_to(self):
# upload_to can be empty, meaning it does not use subdirectory.
obj = Storage()
obj.empty.save('django_test.txt', ContentFile('more content'))
self.assertEqual(obj.empty.name, "./django_test.txt")
self.assertEqual(obj.empty.read(), b"more content")
obj.empty.close()
def test_random_upload_to(self):
# Verify the fix for #5655, making sure the directory is only
# determined once.
obj = Storage()
obj.random.save("random_file", ContentFile("random content"))
self.assertTrue(obj.random.name.endswith("/random_file"))
obj.random.close()
def test_custom_valid_name_callable_upload_to(self):
"""
Storage.get_valid_name() should be called when upload_to is a callable.
"""
obj = Storage()
obj.custom_valid_name.save("random_file", ContentFile("random content"))
# CustomValidNameStorage.get_valid_name() appends '_valid' to the name
self.assertTrue(obj.custom_valid_name.name.endswith("/random_file_valid"))
obj.custom_valid_name.close()
def test_filefield_pickling(self):
# Push an object into the cache to make sure it pickles properly
obj = Storage()
obj.normal.save("django_test.txt", ContentFile("more content"))
obj.normal.close()
cache.set("obj", obj)
self.assertEqual(cache.get("obj").normal.name, "tests/django_test.txt")
def test_file_object(self):
# Create sample file
temp_storage.save('tests/example.txt', ContentFile('some content'))
# Load it as python file object
with open(temp_storage.path('tests/example.txt')) as file_obj:
# Save it using storage and read its content
temp_storage.save('tests/file_obj', file_obj)
self.assertTrue(temp_storage.exists('tests/file_obj'))
with temp_storage.open('tests/file_obj') as f:
self.assertEqual(f.read(), b'some content')
def test_stringio(self):
# Test passing StringIO instance as content argument to save
output = six.StringIO()
output.write('content')
output.seek(0)
# Save it and read written file
temp_storage.save('tests/stringio', output)
self.assertTrue(temp_storage.exists('tests/stringio'))
with temp_storage.open('tests/stringio') as f:
self.assertEqual(f.read(), b'content')
# Tests for a race condition on file saving (#4948).
# This is written in such a way that it'll always pass on platforms
# without threading.
class SlowFile(ContentFile):
def chunks(self):
time.sleep(1)
        return super(SlowFile, self).chunks()
class FileSaveRaceConditionTest(unittest.TestCase):
def setUp(self):
self.storage_dir = tempfile.mkdtemp()
self.storage = FileSystemStorage(self.storage_dir)
self.thread = threading.Thread(target=self.save_file, args=['conflict'])
def tearDown(self):
shutil.rmtree(self.storage_dir)
def save_file(self, name):
name = self.storage.save(name, SlowFile(b"Data"))
def test_race_condition(self):
self.thread.start()
self.save_file('conflict')
self.thread.join()
files = sorted(os.listdir(self.storage_dir))
self.assertEqual(files[0], 'conflict')
six.assertRegex(self, files[1], 'conflict_%s' % FILE_SUFFIX_REGEX)
@unittest.skipIf(sys.platform.startswith('win'), "Windows only partially supports umasks and chmod.")
class FileStoragePermissions(unittest.TestCase):
def setUp(self):
self.umask = 0o027
self.old_umask = os.umask(self.umask)
self.storage_dir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.storage_dir)
os.umask(self.old_umask)
@override_settings(FILE_UPLOAD_PERMISSIONS=0o654)
def test_file_upload_permissions(self):
self.storage = FileSystemStorage(self.storage_dir)
name = self.storage.save("the_file", ContentFile("data"))
actual_mode = os.stat(self.storage.path(name))[0] & 0o777
self.assertEqual(actual_mode, 0o654)
@override_settings(FILE_UPLOAD_PERMISSIONS=None)
def test_file_upload_default_permissions(self):
self.storage = FileSystemStorage(self.storage_dir)
fname = self.storage.save("some_file", ContentFile("data"))
mode = os.stat(self.storage.path(fname))[0] & 0o777
self.assertEqual(mode, 0o666 & ~self.umask)
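    # With the 0o027 umask set in setUp, 0o666 & ~0o027 == 0o640, so the file
    # saved above should end up readable/writable by the owner and readable by
    # the group only (rw-r-----).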
@override_settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765)
def test_file_upload_directory_permissions(self):
self.storage = FileSystemStorage(self.storage_dir)
name = self.storage.save("the_directory/the_file", ContentFile("data"))
dir_mode = os.stat(os.path.dirname(self.storage.path(name)))[0] & 0o777
self.assertEqual(dir_mode, 0o765)
@override_settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=None)
def test_file_upload_directory_default_permissions(self):
self.storage = FileSystemStorage(self.storage_dir)
name = self.storage.save("the_directory/the_file", ContentFile("data"))
dir_mode = os.stat(os.path.dirname(self.storage.path(name)))[0] & 0o777
self.assertEqual(dir_mode, 0o777 & ~self.umask)
class FileStoragePathParsing(unittest.TestCase):
def setUp(self):
self.storage_dir = tempfile.mkdtemp()
self.storage = FileSystemStorage(self.storage_dir)
def tearDown(self):
shutil.rmtree(self.storage_dir)
def test_directory_with_dot(self):
"""Regression test for #9610.
If the directory name contains a dot and the file name doesn't, make
sure we still mangle the file name instead of the directory name.
"""
self.storage.save('dotted.path/test', ContentFile("1"))
self.storage.save('dotted.path/test', ContentFile("2"))
files = sorted(os.listdir(os.path.join(self.storage_dir, 'dotted.path')))
self.assertFalse(os.path.exists(os.path.join(self.storage_dir, 'dotted_.path')))
self.assertEqual(files[0], 'test')
six.assertRegex(self, files[1], 'test_%s' % FILE_SUFFIX_REGEX)
def test_first_character_dot(self):
"""
File names with a dot as their first character don't have an extension,
and the underscore should get added to the end.
"""
self.storage.save('dotted.path/.test', ContentFile("1"))
self.storage.save('dotted.path/.test', ContentFile("2"))
files = sorted(os.listdir(os.path.join(self.storage_dir, 'dotted.path')))
self.assertFalse(os.path.exists(os.path.join(self.storage_dir, 'dotted_.path')))
self.assertEqual(files[0], '.test')
six.assertRegex(self, files[1], '.test_%s' % FILE_SUFFIX_REGEX)
class ContentFileStorageTestCase(unittest.TestCase):
def setUp(self):
self.storage_dir = tempfile.mkdtemp()
self.storage = FileSystemStorage(self.storage_dir)
def tearDown(self):
shutil.rmtree(self.storage_dir)
def test_content_saving(self):
"""
Test that ContentFile can be saved correctly with the filesystem storage,
        whether it was initialized with string or unicode content"""
self.storage.save('bytes.txt', ContentFile(b"content"))
self.storage.save('unicode.txt', ContentFile("español"))
@override_settings(ROOT_URLCONF='file_storage.urls')
class FileLikeObjectTestCase(LiveServerTestCase):
"""
Test file-like objects (#15644).
"""
available_apps = []
def setUp(self):
self.temp_dir = tempfile.mkdtemp()
self.storage = FileSystemStorage(location=self.temp_dir)
def tearDown(self):
shutil.rmtree(self.temp_dir)
def test_urllib2_urlopen(self):
"""
Test the File storage API with a file like object coming from urllib2.urlopen()
"""
file_like_object = urlopen(self.live_server_url + '/')
f = File(file_like_object)
stored_filename = self.storage.save("remote_file.html", f)
remote_file = urlopen(self.live_server_url + '/')
with self.storage.open(stored_filename) as stored_file:
self.assertEqual(stored_file.read(), remote_file.read())
| bsd-3-clause |
menardorama/ReadyNAS-Add-ons | headphones-1.0.0/debian/headphones/apps/headphones/lib/unidecode/x0b9.py | 253 | 4704 | data = (
'ruk', # 0x00
'rut', # 0x01
'rup', # 0x02
'ruh', # 0x03
'rweo', # 0x04
'rweog', # 0x05
'rweogg', # 0x06
'rweogs', # 0x07
'rweon', # 0x08
'rweonj', # 0x09
'rweonh', # 0x0a
'rweod', # 0x0b
'rweol', # 0x0c
'rweolg', # 0x0d
'rweolm', # 0x0e
'rweolb', # 0x0f
'rweols', # 0x10
'rweolt', # 0x11
'rweolp', # 0x12
'rweolh', # 0x13
'rweom', # 0x14
'rweob', # 0x15
'rweobs', # 0x16
'rweos', # 0x17
'rweoss', # 0x18
'rweong', # 0x19
'rweoj', # 0x1a
'rweoc', # 0x1b
'rweok', # 0x1c
'rweot', # 0x1d
'rweop', # 0x1e
'rweoh', # 0x1f
'rwe', # 0x20
'rweg', # 0x21
'rwegg', # 0x22
'rwegs', # 0x23
'rwen', # 0x24
'rwenj', # 0x25
'rwenh', # 0x26
'rwed', # 0x27
'rwel', # 0x28
'rwelg', # 0x29
'rwelm', # 0x2a
'rwelb', # 0x2b
'rwels', # 0x2c
'rwelt', # 0x2d
'rwelp', # 0x2e
'rwelh', # 0x2f
'rwem', # 0x30
'rweb', # 0x31
'rwebs', # 0x32
'rwes', # 0x33
'rwess', # 0x34
'rweng', # 0x35
'rwej', # 0x36
'rwec', # 0x37
'rwek', # 0x38
'rwet', # 0x39
'rwep', # 0x3a
'rweh', # 0x3b
'rwi', # 0x3c
'rwig', # 0x3d
'rwigg', # 0x3e
'rwigs', # 0x3f
'rwin', # 0x40
'rwinj', # 0x41
'rwinh', # 0x42
'rwid', # 0x43
'rwil', # 0x44
'rwilg', # 0x45
'rwilm', # 0x46
'rwilb', # 0x47
'rwils', # 0x48
'rwilt', # 0x49
'rwilp', # 0x4a
'rwilh', # 0x4b
'rwim', # 0x4c
'rwib', # 0x4d
'rwibs', # 0x4e
'rwis', # 0x4f
'rwiss', # 0x50
'rwing', # 0x51
'rwij', # 0x52
'rwic', # 0x53
'rwik', # 0x54
'rwit', # 0x55
'rwip', # 0x56
'rwih', # 0x57
'ryu', # 0x58
'ryug', # 0x59
'ryugg', # 0x5a
'ryugs', # 0x5b
'ryun', # 0x5c
'ryunj', # 0x5d
'ryunh', # 0x5e
'ryud', # 0x5f
'ryul', # 0x60
'ryulg', # 0x61
'ryulm', # 0x62
'ryulb', # 0x63
'ryuls', # 0x64
'ryult', # 0x65
'ryulp', # 0x66
'ryulh', # 0x67
'ryum', # 0x68
'ryub', # 0x69
'ryubs', # 0x6a
'ryus', # 0x6b
'ryuss', # 0x6c
'ryung', # 0x6d
'ryuj', # 0x6e
'ryuc', # 0x6f
'ryuk', # 0x70
'ryut', # 0x71
'ryup', # 0x72
'ryuh', # 0x73
'reu', # 0x74
'reug', # 0x75
'reugg', # 0x76
'reugs', # 0x77
'reun', # 0x78
'reunj', # 0x79
'reunh', # 0x7a
'reud', # 0x7b
'reul', # 0x7c
'reulg', # 0x7d
'reulm', # 0x7e
'reulb', # 0x7f
'reuls', # 0x80
'reult', # 0x81
'reulp', # 0x82
'reulh', # 0x83
'reum', # 0x84
'reub', # 0x85
'reubs', # 0x86
'reus', # 0x87
'reuss', # 0x88
'reung', # 0x89
'reuj', # 0x8a
'reuc', # 0x8b
'reuk', # 0x8c
'reut', # 0x8d
'reup', # 0x8e
'reuh', # 0x8f
'ryi', # 0x90
'ryig', # 0x91
'ryigg', # 0x92
'ryigs', # 0x93
'ryin', # 0x94
'ryinj', # 0x95
'ryinh', # 0x96
'ryid', # 0x97
'ryil', # 0x98
'ryilg', # 0x99
'ryilm', # 0x9a
'ryilb', # 0x9b
'ryils', # 0x9c
'ryilt', # 0x9d
'ryilp', # 0x9e
'ryilh', # 0x9f
'ryim', # 0xa0
'ryib', # 0xa1
'ryibs', # 0xa2
'ryis', # 0xa3
'ryiss', # 0xa4
'rying', # 0xa5
'ryij', # 0xa6
'ryic', # 0xa7
'ryik', # 0xa8
'ryit', # 0xa9
'ryip', # 0xaa
'ryih', # 0xab
'ri', # 0xac
'rig', # 0xad
'rigg', # 0xae
'rigs', # 0xaf
'rin', # 0xb0
'rinj', # 0xb1
'rinh', # 0xb2
'rid', # 0xb3
'ril', # 0xb4
'rilg', # 0xb5
'rilm', # 0xb6
'rilb', # 0xb7
'rils', # 0xb8
'rilt', # 0xb9
'rilp', # 0xba
'rilh', # 0xbb
'rim', # 0xbc
'rib', # 0xbd
'ribs', # 0xbe
'ris', # 0xbf
'riss', # 0xc0
'ring', # 0xc1
'rij', # 0xc2
'ric', # 0xc3
'rik', # 0xc4
'rit', # 0xc5
'rip', # 0xc6
'rih', # 0xc7
'ma', # 0xc8
'mag', # 0xc9
'magg', # 0xca
'mags', # 0xcb
'man', # 0xcc
'manj', # 0xcd
'manh', # 0xce
'mad', # 0xcf
'mal', # 0xd0
'malg', # 0xd1
'malm', # 0xd2
'malb', # 0xd3
'mals', # 0xd4
'malt', # 0xd5
'malp', # 0xd6
'malh', # 0xd7
'mam', # 0xd8
'mab', # 0xd9
'mabs', # 0xda
'mas', # 0xdb
'mass', # 0xdc
'mang', # 0xdd
'maj', # 0xde
'mac', # 0xdf
'mak', # 0xe0
'mat', # 0xe1
'map', # 0xe2
'mah', # 0xe3
'mae', # 0xe4
'maeg', # 0xe5
'maegg', # 0xe6
'maegs', # 0xe7
'maen', # 0xe8
'maenj', # 0xe9
'maenh', # 0xea
'maed', # 0xeb
'mael', # 0xec
'maelg', # 0xed
'maelm', # 0xee
'maelb', # 0xef
'maels', # 0xf0
'maelt', # 0xf1
'maelp', # 0xf2
'maelh', # 0xf3
'maem', # 0xf4
'maeb', # 0xf5
'maebs', # 0xf6
'maes', # 0xf7
'maess', # 0xf8
'maeng', # 0xf9
'maej', # 0xfa
'maec', # 0xfb
'maek', # 0xfc
'maet', # 0xfd
'maep', # 0xfe
'maeh', # 0xff
)
| gpl-2.0 |
Adel-Magebinary/odoo | addons/crm/sales_team.py | 321 | 5053 | # -*- coding: utf-8 -*-
import calendar
from datetime import date
from dateutil import relativedelta
import json
from openerp import tools
from openerp.osv import fields, osv
class crm_case_section(osv.Model):
_inherit = 'crm.case.section'
_inherits = {'mail.alias': 'alias_id'}
def _get_opportunities_data(self, cr, uid, ids, field_name, arg, context=None):
""" Get opportunities-related data for salesteam kanban view
            monthly_open_leads: number of open leads during the last months
            monthly_planned_revenue: planned revenue of opportunities during the last months
"""
obj = self.pool.get('crm.lead')
res = dict.fromkeys(ids, False)
month_begin = date.today().replace(day=1)
date_begin = month_begin - relativedelta.relativedelta(months=self._period_number - 1)
date_end = month_begin.replace(day=calendar.monthrange(month_begin.year, month_begin.month)[1])
lead_pre_domain = [('create_date', '>=', date_begin.strftime(tools.DEFAULT_SERVER_DATE_FORMAT)),
('create_date', '<=', date_end.strftime(tools.DEFAULT_SERVER_DATE_FORMAT)),
('type', '=', 'lead')]
opp_pre_domain = [('date_deadline', '>=', date_begin.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)),
('date_deadline', '<=', date_end.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)),
('type', '=', 'opportunity')]
for id in ids:
res[id] = dict()
lead_domain = lead_pre_domain + [('section_id', '=', id)]
opp_domain = opp_pre_domain + [('section_id', '=', id)]
res[id]['monthly_open_leads'] = json.dumps(self.__get_bar_values(cr, uid, obj, lead_domain, ['create_date'], 'create_date_count', 'create_date', context=context))
res[id]['monthly_planned_revenue'] = json.dumps(self.__get_bar_values(cr, uid, obj, opp_domain, ['planned_revenue', 'date_deadline'], 'planned_revenue', 'date_deadline', context=context))
return res
_columns = {
'resource_calendar_id': fields.many2one('resource.calendar', "Working Time", help="Used to compute open days"),
'stage_ids': fields.many2many('crm.case.stage', 'section_stage_rel', 'section_id', 'stage_id', 'Stages'),
'use_leads': fields.boolean('Leads',
help="The first contact you get with a potential customer is a lead you qualify before converting it into a real business opportunity. Check this box to manage leads in this sales team."),
'use_opportunities': fields.boolean('Opportunities', help="Check this box to manage opportunities in this sales team."),
'monthly_open_leads': fields.function(_get_opportunities_data,
type="char", readonly=True, multi='_get_opportunities_data',
string='Open Leads per Month'),
'monthly_planned_revenue': fields.function(_get_opportunities_data,
type="char", readonly=True, multi='_get_opportunities_data',
string='Planned Revenue per Month'),
'alias_id': fields.many2one('mail.alias', 'Alias', ondelete="restrict", required=True, help="The email address associated with this team. New emails received will automatically create new leads assigned to the team."),
}
    def _auto_init(self, cr, context=None):
        """Installation hook to create aliases for all leads and avoid constraint errors."""
return self.pool.get('mail.alias').migrate_to_alias(cr, self._name, self._table, super(crm_case_section, self)._auto_init,
'crm.lead', self._columns['alias_id'], 'name', alias_prefix='Lead+', alias_defaults={}, context=context)
def _get_stage_common(self, cr, uid, context):
ids = self.pool.get('crm.case.stage').search(cr, uid, [('case_default', '=', 1)], context=context)
return ids
_defaults = {
'stage_ids': _get_stage_common,
'use_leads': True,
'use_opportunities': True,
}
def create(self, cr, uid, vals, context=None):
if context is None:
context = {}
create_context = dict(context, alias_model_name='crm.lead', alias_parent_model_name=self._name)
section_id = super(crm_case_section, self).create(cr, uid, vals, context=create_context)
section = self.browse(cr, uid, section_id, context=context)
self.pool.get('mail.alias').write(cr, uid, [section.alias_id.id], {'alias_parent_thread_id': section_id, 'alias_defaults': {'section_id': section_id, 'type': 'lead'}}, context=context)
return section_id
def unlink(self, cr, uid, ids, context=None):
# Cascade-delete mail aliases as well, as they should not exist without the sales team.
mail_alias = self.pool.get('mail.alias')
alias_ids = [team.alias_id.id for team in self.browse(cr, uid, ids, context=context) if team.alias_id]
res = super(crm_case_section, self).unlink(cr, uid, ids, context=context)
mail_alias.unlink(cr, uid, alias_ids, context=context)
return res
| agpl-3.0 |
mikel-egana-aranguren/SADI-Galaxy-Docker | galaxy-dist/eggs/kombu-3.0.13-py2.7.egg/kombu/transport/virtual/__init__.py | 1 | 27656 | """
kombu.transport.virtual
=======================
Virtual transport implementation.
Emulates the AMQ API for non-AMQ transports.
"""
from __future__ import absolute_import, unicode_literals
import base64
import socket
import sys
import warnings
from array import array
from itertools import count
from multiprocessing.util import Finalize
from time import sleep
from amqp.protocol import queue_declare_ok_t
from kombu.exceptions import ResourceError, ChannelError
from kombu.five import Empty, items, monotonic
from kombu.utils import emergency_dump_state, kwdict, say, uuid
from kombu.utils.compat import OrderedDict
from kombu.utils.encoding import str_to_bytes, bytes_to_str
from kombu.transport import base
from .scheduling import FairCycle
from .exchange import STANDARD_EXCHANGE_TYPES
ARRAY_TYPE_H = 'H' if sys.version_info[0] == 3 else b'H'
UNDELIVERABLE_FMT = """\
Message could not be delivered: No queues bound to exchange {exchange!r} \
using binding key {routing_key!r}.
"""
NOT_EQUIVALENT_FMT = """\
Cannot redeclare exchange {0!r} in vhost {1!r} with \
different type, durable, autodelete or arguments value.\
"""
class Base64(object):
def encode(self, s):
return bytes_to_str(base64.b64encode(str_to_bytes(s)))
def decode(self, s):
return base64.b64decode(str_to_bytes(s))
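# Bodies are coerced to ASCII-safe text (base64 by default, see
# Channel.body_encoding) so that transports which persist payloads as plain
# text or JSON can round-trip binary message bodies.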
class NotEquivalentError(Exception):
"""Entity declaration is not equivalent to the previous declaration."""
pass
class UndeliverableWarning(UserWarning):
"""The message could not be delivered to a queue."""
pass
class BrokerState(object):
#: exchange declarations.
exchanges = None
#: active bindings.
bindings = None
def __init__(self, exchanges=None, bindings=None):
self.exchanges = {} if exchanges is None else exchanges
self.bindings = {} if bindings is None else bindings
def clear(self):
self.exchanges.clear()
self.bindings.clear()
class QoS(object):
"""Quality of Service guarantees.
Only supports `prefetch_count` at this point.
:param channel: AMQ Channel.
:keyword prefetch_count: Initial prefetch count (defaults to 0).
"""
#: current prefetch count value
prefetch_count = 0
#: :class:`~collections.OrderedDict` of active messages.
#: *NOTE*: Can only be modified by the consuming thread.
_delivered = None
#: acks can be done by other threads than the consuming thread.
#: Instead of a mutex, which doesn't perform well here, we mark
#: the delivery tags as dirty, so subsequent calls to append() can remove
#: them.
_dirty = None
#: If disabled, unacked messages won't be restored at shutdown.
restore_at_shutdown = True
def __init__(self, channel, prefetch_count=0):
self.channel = channel
self.prefetch_count = prefetch_count or 0
self._delivered = OrderedDict()
self._delivered.restored = False
self._dirty = set()
self._quick_ack = self._dirty.add
self._quick_append = self._delivered.__setitem__
self._on_collect = Finalize(
self, self.restore_unacked_once, exitpriority=1,
)
def can_consume(self):
"""Return true if the channel can be consumed from.
        Used to ensure the client adheres to currently active
prefetch limits.
"""
pcount = self.prefetch_count
return not pcount or len(self._delivered) - len(self._dirty) < pcount
def can_consume_max_estimate(self):
"""Returns the maximum number of messages allowed to be returned.
Returns an estimated number of messages that a consumer may be allowed
to consume at once from the broker. This is used for services where
bulk 'get message' calls are preferred to many individual 'get message'
calls - like SQS.
returns:
An integer > 0
"""
pcount = self.prefetch_count
if pcount:
return max(pcount - (len(self._delivered) - len(self._dirty)), 0)
def append(self, message, delivery_tag):
"""Append message to transactional state."""
if self._dirty:
self._flush()
self._quick_append(delivery_tag, message)
def get(self, delivery_tag):
return self._delivered[delivery_tag]
def _flush(self):
"""Flush dirty (acked/rejected) tags from."""
dirty = self._dirty
delivered = self._delivered
while 1:
try:
dirty_tag = dirty.pop()
except KeyError:
break
delivered.pop(dirty_tag, None)
def ack(self, delivery_tag):
"""Acknowledge message and remove from transactional state."""
self._quick_ack(delivery_tag)
def reject(self, delivery_tag, requeue=False):
"""Remove from transactional state and requeue message."""
if requeue:
self.channel._restore_at_beginning(self._delivered[delivery_tag])
self._quick_ack(delivery_tag)
def restore_unacked(self):
"""Restore all unacknowledged messages."""
self._flush()
delivered = self._delivered
errors = []
restore = self.channel._restore
pop_message = delivered.popitem
while delivered:
try:
_, message = pop_message()
except KeyError: # pragma: no cover
break
try:
restore(message)
except BaseException as exc:
errors.append((exc, message))
delivered.clear()
return errors
def restore_unacked_once(self):
"""Restores all unacknowledged messages at shutdown/gc collect.
Will only be done once for each instance.
"""
self._on_collect.cancel()
self._flush()
state = self._delivered
if not self.restore_at_shutdown or not self.channel.do_restore:
return
if getattr(state, 'restored', None):
assert not state
return
try:
if state:
say('Restoring {0!r} unacknowledged message(s).',
len(self._delivered))
unrestored = self.restore_unacked()
if unrestored:
errors, messages = list(zip(*unrestored))
say('UNABLE TO RESTORE {0} MESSAGES: {1}',
len(errors), errors)
emergency_dump_state(messages)
finally:
state.restored = True
    def restore_visible(self, *args, **kwargs):
        """Restore any pending unacknowledged messages for visibility_timeout
        style implementations.
Optional: Currently only used by the Redis transport.
"""
pass
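# A minimal usage sketch of QoS (hypothetical driver code; in practice the
# Channel owns and drives its QoS instance):
#
#     qos = QoS(channel, prefetch_count=10)
#     if qos.can_consume():
#         tag = channel._next_delivery_tag()
#         qos.append(message, tag)
#         ...                          # process the message
#         qos.ack(tag)                 # or qos.reject(tag, requeue=True)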
class Message(base.Message):
def __init__(self, channel, payload, **kwargs):
self._raw = payload
properties = payload['properties']
body = payload.get('body')
if body:
body = channel.decode_body(body, properties.get('body_encoding'))
kwargs.update({
'body': body,
'delivery_tag': properties['delivery_tag'],
'content_type': payload.get('content-type'),
'content_encoding': payload.get('content-encoding'),
'headers': payload.get('headers'),
'properties': properties,
'delivery_info': properties.get('delivery_info'),
'postencode': 'utf-8',
})
super(Message, self).__init__(channel, **kwdict(kwargs))
def serializable(self):
props = self.properties
body, _ = self.channel.encode_body(self.body,
props.get('body_encoding'))
headers = dict(self.headers)
# remove compression header
headers.pop('compression', None)
return {
'body': body,
'properties': props,
'content-type': self.content_type,
'content-encoding': self.content_encoding,
'headers': headers,
}
class AbstractChannel(object):
"""This is an abstract class defining the channel methods
you'd usually want to implement in a virtual channel.
Do not subclass directly, but rather inherit from :class:`Channel`
instead.
"""
def _get(self, queue, timeout=None):
"""Get next message from `queue`."""
raise NotImplementedError('Virtual channels must implement _get')
def _put(self, queue, message):
"""Put `message` onto `queue`."""
raise NotImplementedError('Virtual channels must implement _put')
def _purge(self, queue):
"""Remove all messages from `queue`."""
raise NotImplementedError('Virtual channels must implement _purge')
def _size(self, queue):
"""Return the number of messages in `queue` as an :class:`int`."""
return 0
def _delete(self, queue, *args, **kwargs):
"""Delete `queue`.
        This just purges the queue; if you need to do more you can
override this method.
"""
self._purge(queue)
def _new_queue(self, queue, **kwargs):
"""Create new queue.
Your transport can override this method if it needs
to do something whenever a new queue is declared.
"""
pass
def _has_queue(self, queue, **kwargs):
"""Verify that queue exists.
Should return :const:`True` if the queue exists or :const:`False`
otherwise.
"""
return True
def _poll(self, cycle, timeout=None):
"""Poll a list of queues for available messages."""
return cycle.get()
class Channel(AbstractChannel, base.StdChannel):
"""Virtual channel.
:param connection: The transport instance this channel is part of.
"""
#: message class used.
Message = Message
#: QoS class used.
QoS = QoS
#: flag to restore unacked messages when channel
#: goes out of scope.
do_restore = True
#: mapping of exchange types and corresponding classes.
exchange_types = dict(STANDARD_EXCHANGE_TYPES)
#: flag set if the channel supports fanout exchanges.
supports_fanout = False
#: Binary <-> ASCII codecs.
codecs = {'base64': Base64()}
#: Default body encoding.
#: NOTE: ``transport_options['body_encoding']`` will override this value.
body_encoding = 'base64'
#: counter used to generate delivery tags for this channel.
_delivery_tags = count(1)
#: Optional queue where messages with no route is delivered.
#: Set by ``transport_options['deadletter_queue']``.
deadletter_queue = None
# List of options to transfer from :attr:`transport_options`.
from_transport_options = ('body_encoding', 'deadletter_queue')
def __init__(self, connection, **kwargs):
self.connection = connection
self._consumers = set()
self._cycle = None
self._tag_to_queue = {}
self._active_queues = []
self._qos = None
self.closed = False
# instantiate exchange types
self.exchange_types = dict(
(typ, cls(self)) for typ, cls in items(self.exchange_types)
)
try:
self.channel_id = self.connection._avail_channel_ids.pop()
except IndexError:
raise ResourceError(
'No free channel ids, current={0}, channel_max={1}'.format(
len(self.connection.channels),
self.connection.channel_max), (20, 10),
)
topts = self.connection.client.transport_options
for opt_name in self.from_transport_options:
try:
setattr(self, opt_name, topts[opt_name])
except KeyError:
pass
def exchange_declare(self, exchange=None, type='direct', durable=False,
auto_delete=False, arguments=None,
nowait=False, passive=False):
"""Declare exchange."""
type = type or 'direct'
exchange = exchange or 'amq.%s' % type
if passive:
if exchange not in self.state.exchanges:
raise ChannelError(
'NOT_FOUND - no exchange {0!r} in vhost {1!r}'.format(
exchange, self.connection.client.virtual_host or '/'),
(50, 10), 'Channel.exchange_declare', '404',
)
return
try:
prev = self.state.exchanges[exchange]
if not self.typeof(exchange).equivalent(prev, exchange, type,
durable, auto_delete,
arguments):
raise NotEquivalentError(NOT_EQUIVALENT_FMT.format(
exchange, self.connection.client.virtual_host or '/'))
except KeyError:
self.state.exchanges[exchange] = {
'type': type,
'durable': durable,
'auto_delete': auto_delete,
'arguments': arguments or {},
'table': [],
}
def exchange_delete(self, exchange, if_unused=False, nowait=False):
"""Delete `exchange` and all its bindings."""
for rkey, _, queue in self.get_table(exchange):
self.queue_delete(queue, if_unused=True, if_empty=True)
self.state.exchanges.pop(exchange, None)
def queue_declare(self, queue=None, passive=False, **kwargs):
"""Declare queue."""
queue = queue or 'amq.gen-%s' % uuid()
if passive and not self._has_queue(queue, **kwargs):
raise ChannelError(
'NOT_FOUND - no queue {0!r} in vhost {1!r}'.format(
queue, self.connection.client.virtual_host or '/'),
(50, 10), 'Channel.queue_declare', '404',
)
else:
self._new_queue(queue, **kwargs)
return queue_declare_ok_t(queue, self._size(queue), 0)
def queue_delete(self, queue, if_unused=False, if_empty=False, **kwargs):
"""Delete queue."""
if if_empty and self._size(queue):
return
try:
exchange, routing_key, arguments = self.state.bindings[queue]
except KeyError:
return
meta = self.typeof(exchange).prepare_bind(
queue, exchange, routing_key, arguments,
)
self._delete(queue, exchange, *meta)
self.state.bindings.pop(queue, None)
def after_reply_message_received(self, queue):
self.queue_delete(queue)
def exchange_bind(self, destination, source='', routing_key='',
nowait=False, arguments=None):
raise NotImplementedError('transport does not support exchange_bind')
def exchange_unbind(self, destination, source='', routing_key='',
nowait=False, arguments=None):
raise NotImplementedError('transport does not support exchange_unbind')
def queue_bind(self, queue, exchange=None, routing_key='',
arguments=None, **kwargs):
"""Bind `queue` to `exchange` with `routing key`."""
if queue in self.state.bindings:
return
exchange = exchange or 'amq.direct'
table = self.state.exchanges[exchange].setdefault('table', [])
self.state.bindings[queue] = exchange, routing_key, arguments
meta = self.typeof(exchange).prepare_bind(
queue, exchange, routing_key, arguments,
)
table.append(meta)
if self.supports_fanout:
self._queue_bind(exchange, *meta)
def queue_unbind(self, queue, exchange=None, routing_key='',
arguments=None, **kwargs):
raise NotImplementedError('transport does not support queue_unbind')
def list_bindings(self):
return ((queue, exchange, rkey)
for exchange in self.state.exchanges
for rkey, pattern, queue in self.get_table(exchange))
def queue_purge(self, queue, **kwargs):
"""Remove all ready messages from queue."""
return self._purge(queue)
def _next_delivery_tag(self):
return uuid()
def basic_publish(self, message, exchange, routing_key, **kwargs):
"""Publish message."""
message['body'], body_encoding = self.encode_body(
message['body'], self.body_encoding,
)
props = message['properties']
props.update(
body_encoding=body_encoding,
delivery_tag=self._next_delivery_tag(),
)
props['delivery_info'].update(
exchange=exchange,
routing_key=routing_key,
)
if exchange:
return self.typeof(exchange).deliver(
message, exchange, routing_key, **kwargs
)
        # anon exchange: routing_key is the destination queue
return self._put(routing_key, message, **kwargs)
def basic_consume(self, queue, no_ack, callback, consumer_tag, **kwargs):
"""Consume from `queue`"""
self._tag_to_queue[consumer_tag] = queue
self._active_queues.append(queue)
def _callback(raw_message):
message = self.Message(self, raw_message)
if not no_ack:
self.qos.append(message, message.delivery_tag)
return callback(message)
self.connection._callbacks[queue] = _callback
self._consumers.add(consumer_tag)
self._reset_cycle()
def basic_cancel(self, consumer_tag):
"""Cancel consumer by consumer tag."""
if consumer_tag in self._consumers:
self._consumers.remove(consumer_tag)
self._reset_cycle()
queue = self._tag_to_queue.pop(consumer_tag, None)
try:
self._active_queues.remove(queue)
except ValueError:
pass
self.connection._callbacks.pop(queue, None)
def basic_get(self, queue, no_ack=False, **kwargs):
"""Get message by direct access (synchronous)."""
try:
message = self.Message(self, self._get(queue))
if not no_ack:
self.qos.append(message, message.delivery_tag)
return message
except Empty:
pass
def basic_ack(self, delivery_tag):
"""Acknowledge message."""
self.qos.ack(delivery_tag)
def basic_recover(self, requeue=False):
"""Recover unacked messages."""
if requeue:
return self.qos.restore_unacked()
raise NotImplementedError('Does not support recover(requeue=False)')
def basic_reject(self, delivery_tag, requeue=False):
"""Reject message."""
self.qos.reject(delivery_tag, requeue=requeue)
def basic_qos(self, prefetch_size=0, prefetch_count=0,
apply_global=False):
"""Change QoS settings for this channel.
Only `prefetch_count` is supported.
"""
self.qos.prefetch_count = prefetch_count
def get_exchanges(self):
return list(self.state.exchanges)
def get_table(self, exchange):
"""Get table of bindings for `exchange`."""
return self.state.exchanges[exchange]['table']
def typeof(self, exchange, default='direct'):
"""Get the exchange type instance for `exchange`."""
try:
type = self.state.exchanges[exchange]['type']
except KeyError:
type = default
return self.exchange_types[type]
def _lookup(self, exchange, routing_key, default=None):
"""Find all queues matching `routing_key` for the given `exchange`.
Must return the string `default` if no queues matched.
"""
if default is None:
default = self.deadletter_queue
try:
R = self.typeof(exchange).lookup(
self.get_table(exchange),
exchange, routing_key, default,
)
except KeyError:
R = []
if not R and default is not None:
warnings.warn(UndeliverableWarning(UNDELIVERABLE_FMT.format(
exchange=exchange, routing_key=routing_key)),
)
self._new_queue(default)
R = [default]
return R
def _restore(self, message):
"""Redeliver message to its original destination."""
delivery_info = message.delivery_info
message = message.serializable()
message['redelivered'] = True
for queue in self._lookup(
delivery_info['exchange'], delivery_info['routing_key']):
self._put(queue, message)
def _restore_at_beginning(self, message):
return self._restore(message)
def drain_events(self, timeout=None):
if self._consumers and self.qos.can_consume():
if hasattr(self, '_get_many'):
return self._get_many(self._active_queues, timeout=timeout)
return self._poll(self.cycle, timeout=timeout)
raise Empty()
def message_to_python(self, raw_message):
"""Convert raw message to :class:`Message` instance."""
if not isinstance(raw_message, self.Message):
return self.Message(self, payload=raw_message)
return raw_message
def prepare_message(self, body, priority=None, content_type=None,
content_encoding=None, headers=None, properties=None):
"""Prepare message data."""
properties = properties or {}
info = properties.setdefault('delivery_info', {})
info['priority'] = priority or 0
return {'body': body,
'content-encoding': content_encoding,
'content-type': content_type,
'headers': headers or {},
'properties': properties or {}}
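    # Illustrative sketch (not from the original source): for a call like
    # ``prepare_message('hello', priority=0, content_type='text/plain')`` the
    # returned structure is expected to look roughly like:
    #
    #     {'body': 'hello',
    #      'content-encoding': None,
    #      'content-type': 'text/plain',
    #      'headers': {},
    #      'properties': {'delivery_info': {'priority': 0}}}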
def flow(self, active=True):
"""Enable/disable message flow.
:raises NotImplementedError: as flow
is not implemented by the base virtual implementation.
"""
raise NotImplementedError('virtual channels do not support flow.')
def close(self):
"""Close channel, cancel all consumers, and requeue unacked
messages."""
if not self.closed:
self.closed = True
for consumer in list(self._consumers):
self.basic_cancel(consumer)
if self._qos:
self._qos.restore_unacked_once()
if self._cycle is not None:
self._cycle.close()
self._cycle = None
if self.connection is not None:
self.connection.close_channel(self)
self.exchange_types = None
def encode_body(self, body, encoding=None):
if encoding:
return self.codecs.get(encoding).encode(body), encoding
return body, encoding
def decode_body(self, body, encoding=None):
if encoding:
return self.codecs.get(encoding).decode(body)
return body
def _reset_cycle(self):
self._cycle = FairCycle(self._get, self._active_queues, Empty)
def __enter__(self):
return self
def __exit__(self, *exc_info):
self.close()
@property
def state(self):
"""Broker state containing exchanges and bindings."""
return self.connection.state
@property
def qos(self):
""":class:`QoS` manager for this channel."""
if self._qos is None:
self._qos = self.QoS(self)
return self._qos
@property
def cycle(self):
if self._cycle is None:
self._reset_cycle()
return self._cycle
class Management(base.Management):
def __init__(self, transport):
super(Management, self).__init__(transport)
self.channel = transport.client.channel()
def get_bindings(self):
return [dict(destination=q, source=e, routing_key=r)
for q, e, r in self.channel.list_bindings()]
def close(self):
self.channel.close()
class Transport(base.Transport):
"""Virtual transport.
:param client: :class:`~kombu.Connection` instance
"""
Channel = Channel
Cycle = FairCycle
Management = Management
#: :class:`BrokerState` containing declared exchanges and
#: bindings (set by constructor).
state = BrokerState()
#: :class:`~kombu.transport.virtual.scheduling.FairCycle` instance
#: used to fairly drain events from channels (set by constructor).
cycle = None
#: port number used when no port is specified.
default_port = None
#: active channels.
channels = None
#: queue/callback map.
_callbacks = None
#: Time to sleep between unsuccessful polls.
polling_interval = 1.0
#: Max number of channels
channel_max = 65535
def __init__(self, client, **kwargs):
self.client = client
self.channels = []
self._avail_channels = []
self._callbacks = {}
self.cycle = self.Cycle(self._drain_channel, self.channels, Empty)
polling_interval = client.transport_options.get('polling_interval')
if polling_interval is not None:
self.polling_interval = polling_interval
self._avail_channel_ids = array(
ARRAY_TYPE_H, range(self.channel_max, 0, -1),
)
def create_channel(self, connection):
try:
return self._avail_channels.pop()
except IndexError:
channel = self.Channel(connection)
self.channels.append(channel)
return channel
def close_channel(self, channel):
try:
self._avail_channel_ids.append(channel.channel_id)
try:
self.channels.remove(channel)
except ValueError:
pass
finally:
channel.connection = None
def establish_connection(self):
# creates channel to verify connection.
# this channel is then used as the next requested channel.
# (returned by ``create_channel``).
self._avail_channels.append(self.create_channel(self))
return self # for drain events
def close_connection(self, connection):
self.cycle.close()
for l in self._avail_channels, self.channels:
while l:
try:
channel = l.pop()
except (IndexError, KeyError): # pragma: no cover
pass
else:
channel.close()
def drain_events(self, connection, timeout=None):
loop = 0
time_start = monotonic()
get = self.cycle.get
polling_interval = self.polling_interval
while 1:
try:
item, channel = get(timeout=timeout)
except Empty:
if timeout and monotonic() - time_start >= timeout:
raise socket.timeout()
loop += 1
if polling_interval is not None:
sleep(polling_interval)
else:
break
message, queue = item
if not queue or queue not in self._callbacks:
raise KeyError(
'Message for queue {0!r} without consumers: {1}'.format(
queue, message))
self._callbacks[queue](message)
def _drain_channel(self, channel, timeout=None):
return channel.drain_events(timeout=timeout)
@property
def default_connection_params(self):
return {'port': self.default_port, 'hostname': 'localhost'}
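# Illustrative sketch (assumed subclass layout, not part of this module):
# concrete virtual transports typically subclass Channel and Transport and
# override the queue primitives, e.g.:
#
#     class MyChannel(Channel):
#         def _get(self, queue, timeout=None):
#             ...  # pop a message dict for `queue` or raise Empty
#
#         def _put(self, queue, message, **kwargs):
#             ...  # append the message dict to `queue`
#
#     class MyTransport(Transport):
#         Channel = MyChannel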
| gpl-3.0 |
jmontgom10/Mimir_pyPol | oldCode/04b_avgBAABditherHWPimages.py | 1 | 17054 | # -*- coding: utf-8 -*-
"""
Combines all the images for a given (TARGET, FILTER, HWP) combination to
produce a single, average image.
Estimates the sky background level of the on-target position at the time of the
on-target observation using a bracketing pair of off-target observations through
the same HWP polaroid rotation value. Subtracts this background level from
each on-target image to produce background free images. Applies an airmass
correction to each image, and combines these final image to produce a background
free, airmass corrected, average image.
"""
# Core imports
import os
import sys
import copy
import warnings
# Import scipy/numpy packages
import numpy as np
from scipy import ndimage
# Import astropy packages
from astropy.table import Table
import astropy.units as u
from astropy.convolution import Gaussian2DKernel
from astropy.modeling import models, fitting
from astropy.stats import gaussian_fwhm_to_sigma, sigma_clipped_stats
from photutils import (make_source_mask,
MedianBackground, SigmaClip, Background2D)
# Import plotting utilities
from matplotlib import pyplot as plt
# Add the AstroImage class
import astroimage as ai
# Add the header handler to the BaseImage class
from Mimir_header_handler import Mimir_header_handler
ai.reduced.ReducedScience.set_header_handler(Mimir_header_handler)
ai.set_instrument('mimir')
#==============================================================================
# *********************** CUSTOM USER CODE ************************************
# this is where the user specifies where the raw data is stored
# and some of the subdirectory structure to find the actual .FITS images
#==============================================================================
# This is a list of targets for which to process each subgroup (observational
# group... never spanning multiple nights, etc...) instead of combining into a
# single "metagroup" for all observations of that target. The default behavior
# is to go ahead and combine everything into a single, large "metagroup". The
# calibration data should probably not be processed as a metagroup though.
processSubGroupList = []
processSubGroupList = [t.upper() for t in processSubGroupList]
# Define the location of the PPOL reduced data to be read and worked on
PPOL_data = 'C:\\Users\\Jordan\\FITS_data\\Mimir_data\\PPOL_Reduced\\201611\\'
S3_dir = os.path.join(PPOL_data, 'S3_Astrometry')
# This is the location where all pyPol data will be saved
pyPol_data = 'C:\\Users\\Jordan\\FITS_data\\Mimir_data\\pyPol_Reduced\\201611'
# This is the location of the previously generated masks (step 4)
maskDir = os.path.join(pyPol_data, 'Masks')
# Setup new directory for polarimetry data
polarimetryDir = os.path.join(pyPol_data, 'Polarimetry')
if (not os.path.isdir(polarimetryDir)):
os.mkdir(polarimetryDir, 0o755)
HWPDir = os.path.join(polarimetryDir, 'HWPImgs')
if (not os.path.isdir(HWPDir)):
os.mkdir(HWPDir, 0o755)
bkgPlotDir = os.path.join(HWPDir, 'bkgPlots')
if (not os.path.isdir(bkgPlotDir)):
os.mkdir(bkgPlotDir, 0o755)
# # Setup PRISM detector properties
# read_noise = 13.0 # electrons
# effective_gain = 3.3 # electrons/ADU
#########
### Establish the atmospheric extinction (magnitudes/airmass)
#########
# Following table from Hu (2011)
# Data from Gaomeigu Observational Station
# Passband | K'(lambda) [mag/airmass] | K'' [mag/(color*airmass)]
# U 0.560 +/- 0.023 0.061 +/- 0.004
# B 0.336 +/- 0.021 0.012 +/- 0.003
# V 0.198 +/- 0.024 -0.015 +/- 0.004
# R 0.142 +/- 0.021 -0.067 +/- 0.005
# I 0.093 +/- 0.020 0.023 +/- 0.006
# Following table from Schmude (1994)
# Data from Texas A & M University Observatory
# Passband | K(lambda) [mag/airmass] | dispersion on K(lambda)
# U 0.60 +/- 0.05 0.120
# B 0.40 +/- 0.06 0.165
# V 0.26 +/- 0.03 0.084
# R 0.19 +/- 0.03 0.068
# I 0.16 +/- 0.02 0.055
# TODO: Ask Dan about atmospheric extinction from airmass at NIR
kappa = dict(zip(['U', 'B', 'V', 'R', 'I', 'J', 'H', 'K' ],
[0.60, 0.40, 0.26, 0.19, 0.16, 0.05, 0.01, 0.005]))
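# Hedged note (assumption, not from the original pipeline): a coefficient
# kappa [mag/airmass] corresponds to a multiplicative flux correction of
# 10**(0.4*kappa*airmass); the correct_airmass() call used below is assumed to
# apply an equivalent correction, e.g.
#
#     kappa_H, airmass = 0.01, 1.8
#     correction = 10 ** (0.4 * kappa_H * airmass)  # multiply observed flux by this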
# Read in the indexFile data and select the filenames
print('\nReading file index from disk')
indexFile = os.path.join(pyPol_data, 'reducedFileIndex.csv')
fileIndex = Table.read(indexFile, format='ascii.csv')
# Determine which parts of the fileIndex pertain to HEX dither science images
useFiles = np.logical_and(
fileIndex['USE'] == 1,
fileIndex['DITHER_TYPE'] == 'ABBA'
)
useFileRows = np.where(useFiles)
# Cull the file index to only include files selected for use
fileIndex = fileIndex[useFileRows]
# Define an approximate pixel scale
pixScale = 0.5789*(u.arcsec/u.pixel)
# TODO: implement a FWHM seeing cut... not yet working because PSF getter seems
# to be malfunctioning in step 2
#
#
# # Loop through each unique GROUP_ID and test for bad seeing conditions.
# groupByID = fileIndex.group_by(['GROUP_ID'])
# for subGroup in groupByID.groups:
# # Grab the FWHM values for this subGroup
# thisFWHMs = subGroup['FWHM']*u.pixel
#
# # Grab the median and standard deviation of the seeing for this subgroup
# medianSeeing = np.median(thisFWHMs)
# stdSeeing = np.std(thisFWHMs)
#
# # Find bad FWHM values
# badFWHMs = np.logical_not(np.isfinite(subGroup['FWHM']))
# badFWHMs = np.logical_or(
# badFWHMs,
# thisFWHMs <= 0
# )
# badFWHM = np.logical_and(
# badFWHM,
# thisFWHMs > 2.0*u.arcsec
# )
# import pdb; pdb.set_trace()
# Group the fileIndex by...
# 1. Target
# 2. Waveband
fileIndexByTarget = fileIndex.group_by(['TARGET', 'FILTER'])
# Loop through each group
for group in fileIndexByTarget.groups:
# Grab the current group information
thisTarget = str(np.unique(group['TARGET'].data)[0])
thisFilter = str(np.unique(group['FILTER'].data)[0])
# # Skip the Merope nebula for now... not of primary scientific importance
# if thisTarget == 'MEROPE': continue
# Update the user on processing status
print('\nProcessing images for')
print('Target : {0}'.format(thisTarget))
print('Filter : {0}'.format(thisFilter))
# Grab the atmospheric extinction coefficient for this wavelength
thisKappa = kappa[thisFilter]
# Further divide this group by its constituent HWP values
indexByPolAng = group.group_by(['IPPA'])
# Loop over each of the HWP values, as these are independent from
# eachother and should be treated entirely separately from eachother.
for IPPAgroup in indexByPolAng.groups:
# Grab the current HWP information
thisIPPA = np.unique(IPPAgroup['IPPA'].data)[0]
# Update the user on processing status
print('\tIPPA : {0}'.format(thisIPPA))
# For ABBA dithers, we need to compute the background levels on a
# sub-group basis. If this target has not been selected for subGroup
# averaging, then simply append the background subtracted images to a
# cumulative list of images to align and average.
        # Initialize an image list to store all the images for this
# (target, filter, pol-ang) combination
imgList = []
indexByGroupID = IPPAgroup.group_by(['GROUP_ID'])
for subGroup in indexByGroupID.groups:
            # Grab the name of this subGroup
thisSubGroup = str(np.unique(subGroup['OBJECT'])[0])
# if (thisSubGroup != 'NGC2023_R1') and (thisSubGroup != 'NGC2023_R2'): continue
            # Construct the output file name and test if it already exists.
if thisTarget in processSubGroupList:
outFile = '_'.join([thisTarget, thisSubGroup, str(thisIPPA)])
outFile = os.path.join(HWPDir, outFile) + '.fits'
elif thisTarget not in processSubGroupList:
outFile = '_'.join([thisTarget, thisFilter, str(thisIPPA)])
outFile = os.path.join(HWPDir, outFile) + '.fits'
# Test if this file has already been constructed and either skip
# this subgroup or break out of the subgroup loop.
if os.path.isfile(outFile):
print('\t\tFile {0} already exists...'.format(os.path.basename(outFile)))
if thisTarget in processSubGroupList:
continue
elif thisTarget not in processSubGroupList:
break
# Update the user on the current execution status
print('\t\tProcessing images for subgroup {0}'.format(thisSubGroup))
            # Initialize lists to store the A and B images.
AimgList = []
BimgList = []
            # Initialize a list to store the off-target sky background levels
BbkgList = []
            # Initialize lists to store the times of observation
AdatetimeList = []
BdatetimeList = []
# Read in all the images for this subgroup
progressString = '\t\tNumber of Images : {0}'
for iFile, filename in enumerate(subGroup['FILENAME']):
# Update the user on processing status
print(progressString.format(iFile+1), end='\r')
                # Read in a temporary copy of this image
PPOL_file = os.path.join(S3_dir, filename)
tmpImg = ai.reduced.ReducedScience.read(PPOL_file)
# Crop the edges of this image
ny, nx = tmpImg.shape
binningArray = np.array(tmpImg.binning)
# Compute the amount to crop to get a 1000 x 1000 image
cy, cx = (ny - 1000, nx - 1000)
# Compute the crop boundaries and apply them
lf = np.int(np.round(0.5*cx))
rt = lf + 1000
bt = np.int(np.round(0.5*cy))
tp = bt + 1000
tmpImg = tmpImg[bt:tp, lf:rt]
# Grab the on-off target value for this image
thisAB = subGroup['AB'][iFile]
# Place the image in a list and store required background values
if thisAB == 'B':
# Place B images in the BimgList
BimgList.append(tmpImg)
# Place the median value of this off-target image in list
mask = make_source_mask(
tmpImg.data, snr=2, npixels=5, dilate_size=11
)
mean, median, std = sigma_clipped_stats(
tmpImg.data, sigma=3.0, mask=mask
)
BbkgList.append(median)
# Place the time of this image in a list of time values
BdatetimeList.append(tmpImg.julianDate)
if thisAB == 'A':
# Read in any associated masks and store them.
maskFile = os.path.join(maskDir, os.path.basename(filename))
# If there is a mask for this file, then apply it!
if os.path.isfile(maskFile):
# Read in the mask file
tmpMask = ai.reduced.ReducedScience.read(maskFile)
# Crop the mask to match the shape of the original image
                        tmpMask = tmpMask[bt:tp, lf:rt]
# Grab the data to be masked
tmpData = tmpImg.data
# Mask the data and put it back into the tmpImg
maskInds = np.where(tmpMask.data)
tmpData[maskInds] = np.NaN
tmpImg.data = tmpData
                    # Place A images in the AimgList
AimgList.append(tmpImg)
# Place the time of this image in a list of time values
AdatetimeList.append(tmpImg.julianDate)
# Create a new line for shell output
print('')
# Construct an image stack of the off-target images
BimageStack = ai.utilitywrappers.ImageStack(BimgList)
# Build a supersky image from these off-target images
superskyImage = BimageStack.produce_supersky()
# Locate regions outside of a 5% deviation
tmpSuperskyData = superskyImage.data
maskedPix = np.abs(tmpSuperskyData - 1.0) > 0.05
# Get rid of the small stuff and expand the big stuff
maskedPix = ndimage.binary_opening(maskedPix, iterations=2)
maskedPix = ndimage.binary_closing(maskedPix, iterations=2)
maskedPix = ndimage.binary_dilation(maskedPix, iterations=4)
# TODO: Make the box_size and filter_size sensitive to binning.
binningArray = np.array(superskyImage.binning)
box_size = tuple((100/binningArray).astype(int))
filter_size = tuple((10/binningArray).astype(int))
# Setup the sigma clipping and median background estimators
sigma_clip = SigmaClip(sigma=3., iters=10)
bkg_estimator = MedianBackground()
# Compute a smoothed background image
bkgData = Background2D(superskyImage.data,
box_size=box_size, filter_size=filter_size, mask=maskedPix,
sigma_clip=sigma_clip, bkg_estimator=bkg_estimator)
# Construct a smoothed supersky image object
smoothedSuperskyImage = ai.reduced.ReducedScience(
bkgData.background/bkgData.background_median,
uncertainty = bkgData.background_rms,
properties={'unit':u.dimensionless_unscaled}
)
# Interpolate background values to A times
AbkgList = np.interp(
AdatetimeList,
BdatetimeList,
BbkgList,
left=-1e6,
right=-1e6
)
# Cut out any extrapolated data (and corresponding images)
goodInds = np.where(AbkgList > -1e5)
AimgList = np.array(AimgList)[goodInds]
AdatetimeList = np.array(AdatetimeList)[goodInds]
AbkgList = AbkgList[goodInds]
AsubtractedList = []
# Loop through the on-target images and subtract background values
for Aimg, Abkg in zip(AimgList, AbkgList):
# Subtract the interpolated background values from the A images
tmpImg = Aimg - smoothedSuperskyImage*(Abkg*Aimg.unit)
# Apply an airmass correction
tmpImg = tmpImg.correct_airmass(thisKappa)
# Append the subtracted and masked image to the list.
AsubtractedList.append(tmpImg)
# Now that the images have been fully processed, pause to generate
# a plot to store in the "background plots" folder. These plots
# constitute a good sanity check on background subtraction.
plt.plot(BdatetimeList, BbkgList, '-ob')
plt.scatter(AdatetimeList, AbkgList, marker='o', facecolor='r')
plt.xlabel('Julian Date')
plt.ylabel('Background Value [ADU]')
figName = '_'.join([thisTarget, thisSubGroup, str(thisIPPA)])
figName = os.path.join(bkgPlotDir, figName) + '.png'
plt.savefig(figName, dpi=300)
plt.close('all')
# Here is where I need to decide if each subgroup image should be
# computed or if I should just continue with the loop.
if thisTarget.upper() in processSubGroupList:
# Construct an image combiner for the A images
AimgStack = ai.utilitywrappers.ImageStack(AsubtractedList)
# Align the images
AimgStack.align_images_with_wcs(
subPixel=False,
padding=np.NaN
)
# Combine the images
                AoutImg = AimgStack.combine_images()
# Save the image
AoutImg.write(outFile, dtype=np.float64)
else:
# Extend the imgList variable with background corrected images
imgList.extend(AsubtractedList)
if len(imgList) > 0:
# At the exit of the loop, process ALL the files from ALL the groups
# Construct an image combiner for the A images
imgStack = ai.utilitywrappers.ImageStack(imgList)
# Align the images
imgStack.align_images_with_wcs(
subPixel=False,
padding=np.NaN
)
# Combine the images
outImg = imgStack.combine_images()
# Save the image
outImg.write(outFile, dtype=np.float64)
print('\nDone computing average images!')
| mit |
kulicuu/supercollider | editors/sced/scedwin/py/Settings.py | 37 | 2412 | # sced (SuperCollider mode for gedit)
#
# Copyright 2012 Jakob Leben
# Copyright 2009 Artem Popov and other contributors (see AUTHORS)
#
# sced is free software:
# you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from os import path
import simplejson
def locate():
base = path.expandvars("%APPDATA%")
if base is not None:
return path.join(base, "sced.config.json")
else:
return None
def load():
sets = Settings()
try:
filename = locate()
f = open(filename, "r")
except:
print "Could not open configuration file: " + str(filename)
f = None
data = {}
if f is not None:
try:
data = simplejson.load(f)
except:
print "Configuration file not a valid JSON script!"
f.close()
sets.sc_dir = data.get("supercollider-dir")
sets.advanced = data.get("advanced", False)
lang_data = data.get("interpreter", {})
sets.sclang_cmd = lang_data.get("command")
sets.sclang_work_dir = lang_data.get("runtime-dir")
print "Sced settings loaded."
return sets
# map gconf options to gobject properties
class Settings(object):
def __init__(self):
self.sc_dir = None
self.advanced = None
self.sclang_cmd = None
self.sclang_work_dir = None
def save(self):
try:
            os.remove(locate())
        except OSError:
            pass
f = open(locate(), "w")
data = {
"supercollider-dir": self.sc_dir,
"advanced": self.advanced,
"interpreter": {
"command": self.sclang_cmd,
"runtime-dir": self.sclang_work_dir
}
};
simplejson.dump(data, f, indent=" ")
f.close()
print "Sced settings saved."
| gpl-3.0 |
dashmug/django-payments | payments/paypal/__init__.py | 3 | 12996 | from __future__ import unicode_literals
from datetime import timedelta
from decimal import Decimal, ROUND_HALF_UP
from functools import wraps
try:
from itertools import ifilter as filter
except ImportError:
pass
import json
import logging
from django.http import HttpResponseForbidden
from django.shortcuts import redirect
from django.utils import timezone
import requests
from requests.exceptions import HTTPError
from .forms import PaymentForm
from .. import (
BasicProvider, get_credit_card_issuer, PaymentError, RedirectNeeded)
# Get an instance of a logger
logger = logging.getLogger(__name__)
CENTS = Decimal('0.01')
class UnauthorizedRequest(Exception):
pass
def authorize(fun):
@wraps(fun)
def wrapper(*args, **kwargs):
self = args[0]
payment = args[1]
self.access_token = self.get_access_token(payment)
try:
response = fun(*args, **kwargs)
except HTTPError as e:
if e.response.status_code == 401:
last_auth_response = self.get_last_response(
payment, is_auth=True)
if 'access_token' in last_auth_response:
del last_auth_response['access_token']
self.set_response_data(
payment, last_auth_response, is_auth=True)
self.access_token = self.get_access_token(payment)
response = fun(*args, **kwargs)
else:
raise
return response
return wrapper
class PaypalProvider(BasicProvider):
'''
paypal.com payment provider
'''
def __init__(self, client_id, secret,
endpoint='https://api.sandbox.paypal.com', **kwargs):
self.secret = secret
self.client_id = client_id
self.endpoint = endpoint
self.oauth2_url = self.endpoint + '/v1/oauth2/token'
self.payments_url = self.endpoint + '/v1/payments/payment'
self.payment_execute_url = self.payments_url + '/%(id)s/execute/'
self.payment_refund_url = (
self.endpoint + '/v1/payments/capture/{captureId}/refund')
super(PaypalProvider, self).__init__(**kwargs)
def set_response_data(self, payment, response, is_auth=False):
extra_data = json.loads(payment.extra_data or '{}')
if is_auth:
extra_data['auth_response'] = response
else:
extra_data['response'] = response
if 'links' in response:
extra_data['links'] = dict(
(link['rel'], link) for link in response['links'])
payment.extra_data = json.dumps(extra_data)
def set_response_links(self, payment, links):
extra_data = json.loads(payment.extra_data or '{}')
extra_data['links'] = dict((link['rel'], link) for link in links)
payment.extra_data = json.dumps(extra_data)
def set_error_data(self, payment, error):
extra_data = json.loads(payment.extra_data or '{}')
extra_data['error'] = error
payment.extra_data = json.dumps(extra_data)
def _get_links(self, payment):
extra_data = json.loads(payment.extra_data or '{}')
links = extra_data.get('links', {})
return links
@authorize
def post(self, payment, *args, **kwargs):
kwargs['headers'] = {
'Content-Type': 'application/json',
'Authorization': self.access_token}
if 'data' in kwargs:
kwargs['data'] = json.dumps(kwargs['data'])
response = requests.post(*args, **kwargs)
try:
data = response.json()
except ValueError:
data = {}
if 400 <= response.status_code <= 500:
self.set_error_data(payment, data)
logger.debug(data)
message = 'Paypal error'
if response.status_code == 400:
error_data = response.json()
logger.warning(message, extra={
'response': error_data,
'status_code': response.status_code})
message = error_data.get('message', message)
else:
logger.warning(
message, extra={'status_code': response.status_code})
payment.change_status('error', message)
raise PaymentError(message)
else:
self.set_response_data(payment, data)
return data
def get_last_response(self, payment, is_auth=False):
extra_data = json.loads(payment.extra_data or '{}')
if is_auth:
return extra_data.get('auth_response', {})
return extra_data.get('response', {})
def get_access_token(self, payment):
last_auth_response = self.get_last_response(payment, is_auth=True)
created = payment.created
now = timezone.now()
if ('access_token' in last_auth_response and
'expires_in' in last_auth_response and
(created + timedelta(
seconds=last_auth_response['expires_in'])) > now):
return '%s %s' % (last_auth_response['token_type'],
last_auth_response['access_token'])
else:
headers = {'Accept': 'application/json',
'Accept-Language': 'en_US'}
post = {'grant_type': 'client_credentials'}
response = requests.post(self.oauth2_url, data=post,
headers=headers,
auth=(self.client_id, self.secret))
response.raise_for_status()
data = response.json()
last_auth_response.update(data)
self.set_response_data(payment, last_auth_response, is_auth=True)
return '%s %s' % (data['token_type'], data['access_token'])
def get_transactions_items(self, payment):
for purchased_item in payment.get_purchased_items():
price = purchased_item.price.quantize(
CENTS, rounding=ROUND_HALF_UP)
item = {'name': purchased_item.name[:127],
'quantity': str(purchased_item.quantity),
'price': str(price),
'currency': purchased_item.currency,
'sku': purchased_item.sku}
yield item
def get_transactions_data(self, payment):
items = list(self.get_transactions_items(payment))
sub_total = (
payment.total - payment.delivery - payment.tax)
sub_total = sub_total.quantize(CENTS, rounding=ROUND_HALF_UP)
total = payment.total.quantize(CENTS, rounding=ROUND_HALF_UP)
tax = payment.tax.quantize(CENTS, rounding=ROUND_HALF_UP)
delivery = payment.delivery.quantize(
CENTS, rounding=ROUND_HALF_UP)
data = {
'intent': 'sale' if self._capture else 'authorize',
'transactions': [{'amount': {
'total': str(total),
'currency': payment.currency,
'details': {
'subtotal': str(sub_total),
'tax': str(tax),
'shipping': str(delivery)}},
'item_list': {'items': items},
'description': payment.description}]}
return data
def get_product_data(self, payment, extra_data=None):
return_url = self.get_return_url(payment)
data = self.get_transactions_data(payment)
data['redirect_urls'] = {'return_url': return_url,
'cancel_url': return_url}
data['payer'] = {'payment_method': 'paypal'}
return data
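    # Illustrative sketch (abridged, assumed shape): for a capturing provider
    # the payload built above follows the PayPal REST v1 payment body, e.g.:
    #
    #     {'intent': 'sale',
    #      'transactions': [{'amount': {'total': '110.00', 'currency': 'USD',
    #                                   'details': {'subtotal': '100.00',
    #                                               'tax': '10.00',
    #                                               'shipping': '0.00'}},
    #                        'item_list': {'items': [...]},
    #                        'description': '...'}],
    #      'redirect_urls': {'return_url': '...', 'cancel_url': '...'},
    #      'payer': {'payment_method': 'paypal'}}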
def get_form(self, payment, data=None):
if not payment.id:
payment.save()
links = self._get_links(payment)
redirect_to = links.get('approval_url')
if not redirect_to:
payment_data = self.create_payment(payment)
payment.transaction_id = payment_data['id']
links = self._get_links(payment)
redirect_to = links['approval_url']
payment.change_status('waiting')
raise RedirectNeeded(redirect_to['href'])
def process_data(self, payment, request):
success_url = payment.get_success_url()
        if 'token' not in request.GET:
return HttpResponseForbidden('FAILED')
payer_id = request.GET.get('PayerID')
if not payer_id:
if payment.status != 'confirmed':
payment.change_status('rejected')
return redirect(payment.get_failure_url())
else:
return redirect(success_url)
execute_payment = self.execute_payment(payment, payer_id)
transaction = execute_payment['transactions'][0]
related_resources = transaction['related_resources'][0]
resource_key = 'sale' if self._capture else 'authorization'
authorization_links = related_resources[resource_key]['links']
self.set_response_links(payment, authorization_links)
        payment.attrs.payer_info = execute_payment['payer']['payer_info']
if self._capture:
payment.captured_amount = payment.total
payment.change_status('confirmed')
else:
payment.change_status('preauth')
return redirect(success_url)
def create_payment(self, payment, extra_data=None):
product_data = self.get_product_data(payment, extra_data)
payment = self.post(payment, self.payments_url, data=product_data)
return payment
def execute_payment(self, payment, payer_id):
post = {'payer_id': payer_id}
links = self._get_links(payment)
execute_url = links['execute']['href']
return self.post(payment, execute_url, data=post)
def get_amount_data(self, payment, amount=None):
return {
'currency': payment.currency,
'total': str(amount.quantize(
CENTS, rounding=ROUND_HALF_UP))}
def capture(self, payment, amount=None):
if amount is None:
amount = payment.total
amount_data = self.get_amount_data(payment, amount)
capture_data = {
'amount': amount_data,
'is_final_capture': True
}
links = self._get_links(payment)
url = links['capture']['href']
try:
capture = self.post(payment, url, data=capture_data)
except HTTPError as e:
try:
error = e.response.json()
except ValueError:
error = {}
if error.get('name') != 'AUTHORIZATION_ALREADY_COMPLETED':
raise e
capture = {'state': 'completed'}
state = capture['state']
if state == 'completed':
payment.change_status('confirmed')
return amount
elif state in [
'partially_captured', 'partially_refunded']:
return amount
elif state == 'pending':
payment.change_status('waiting')
elif state == 'refunded':
payment.change_status('refunded')
raise PaymentError('Payment already refunded')
def release(self, payment):
links = self._get_links(payment)
url = links['void']['href']
self.post(payment, url)
def refund(self, payment, amount=None):
if amount is None:
amount = payment.captured_amount
amount_data = self.get_amount_data(payment, amount)
refund_data = {'amount': amount_data}
links = self._get_links(payment)
url = links['refund']['href']
self.post(payment, url, data=refund_data)
payment.change_status('refunded')
return amount
class PaypalCardProvider(PaypalProvider):
'''
paypal.com credit card payment provider
'''
def get_form(self, payment, data=None):
if payment.status == 'waiting':
payment.change_status('input')
form = PaymentForm(data, provider=self, payment=payment)
if form.is_valid():
raise RedirectNeeded(payment.get_success_url())
return form
def get_product_data(self, payment, extra_data=None):
extra_data = extra_data or {}
data = self.get_transactions_data(payment)
year = extra_data['expiration'].year
month = extra_data['expiration'].month
number = extra_data['number']
card_type, _card_issuer = get_credit_card_issuer(number)
credit_card = {'number': number,
'type': card_type,
'expire_month': month,
'expire_year': year}
if 'cvv2' in extra_data and extra_data['cvv2']:
credit_card['cvv2'] = extra_data['cvv2']
data['payer'] = {'payment_method': 'credit_card',
'funding_instruments': [{'credit_card': credit_card}]}
return data
def process_data(self, payment, request):
return HttpResponseForbidden('FAILED')
| bsd-3-clause |
Hasimir/cryptography | src/cryptography/hazmat/backends/openssl/hmac.py | 16 | 2952 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.primitives import constant_time, hashes, interfaces
@utils.register_interface(interfaces.MACContext)
@utils.register_interface(hashes.HashContext)
class _HMACContext(object):
def __init__(self, backend, key, algorithm, ctx=None):
self._algorithm = algorithm
self._backend = backend
if ctx is None:
ctx = self._backend._ffi.new("HMAC_CTX *")
self._backend._lib.HMAC_CTX_init(ctx)
ctx = self._backend._ffi.gc(
ctx, self._backend._lib.HMAC_CTX_cleanup
)
evp_md = self._backend._lib.EVP_get_digestbyname(
algorithm.name.encode('ascii'))
if evp_md == self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
res = self._backend._lib.Cryptography_HMAC_Init_ex(
ctx, key, len(key), evp_md, self._backend._ffi.NULL
)
assert res != 0
self._ctx = ctx
self._key = key
algorithm = utils.read_only_property("_algorithm")
def copy(self):
copied_ctx = self._backend._ffi.new("HMAC_CTX *")
self._backend._lib.HMAC_CTX_init(copied_ctx)
copied_ctx = self._backend._ffi.gc(
copied_ctx, self._backend._lib.HMAC_CTX_cleanup
)
res = self._backend._lib.Cryptography_HMAC_CTX_copy(
copied_ctx, self._ctx
)
assert res != 0
return _HMACContext(
self._backend, self._key, self.algorithm, ctx=copied_ctx
)
def update(self, data):
res = self._backend._lib.Cryptography_HMAC_Update(
self._ctx, data, len(data)
)
assert res != 0
def finalize(self):
buf = self._backend._ffi.new("unsigned char[]",
self._backend._lib.EVP_MAX_MD_SIZE)
outlen = self._backend._ffi.new("unsigned int *")
res = self._backend._lib.Cryptography_HMAC_Final(
self._ctx, buf, outlen
)
assert res != 0
assert outlen[0] == self.algorithm.digest_size
self._backend._lib.HMAC_CTX_cleanup(self._ctx)
return self._backend._ffi.buffer(buf)[:outlen[0]]
def verify(self, signature):
digest = self.finalize()
if not constant_time.bytes_eq(digest, signature):
raise InvalidSignature("Signature did not match digest.")
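# Illustrative usage sketch (not part of this module): this backend class is
# normally reached through the public primitives API rather than instantiated
# directly, e.g.:
#
#     from cryptography.hazmat.backends import default_backend
#     from cryptography.hazmat.primitives import hashes, hmac
#
#     h = hmac.HMAC(b"secret-key", hashes.SHA256(), backend=default_backend())
#     h.update(b"message")
#     digest = h.finalize()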
| bsd-3-clause |
MER-GROUP/intellij-community | plugins/hg4idea/testData/bin/hgext/convert/monotone.py | 94 | 12947 | # monotone.py - monotone support for the convert extension
#
# Copyright 2008, 2009 Mikkel Fahnoe Jorgensen <[email protected]> and
# others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
import os, re
from mercurial import util
from common import NoRepo, commit, converter_source, checktool
from common import commandline
from mercurial.i18n import _
class monotone_source(converter_source, commandline):
def __init__(self, ui, path=None, rev=None):
converter_source.__init__(self, ui, path, rev)
commandline.__init__(self, ui, 'mtn')
self.ui = ui
self.path = path
self.automatestdio = False
self.rev = rev
norepo = NoRepo(_("%s does not look like a monotone repository")
% path)
if not os.path.exists(os.path.join(path, '_MTN')):
# Could be a monotone repository (SQLite db file)
try:
f = file(path, 'rb')
header = f.read(16)
f.close()
except IOError:
header = ''
if header != 'SQLite format 3\x00':
raise norepo
# regular expressions for parsing monotone output
space = r'\s*'
name = r'\s+"((?:\\"|[^"])*)"\s*'
value = name
revision = r'\s+\[(\w+)\]\s*'
lines = r'(?:.|\n)+'
self.dir_re = re.compile(space + "dir" + name)
self.file_re = re.compile(space + "file" + name +
"content" + revision)
self.add_file_re = re.compile(space + "add_file" + name +
"content" + revision)
self.patch_re = re.compile(space + "patch" + name +
"from" + revision + "to" + revision)
self.rename_re = re.compile(space + "rename" + name + "to" + name)
self.delete_re = re.compile(space + "delete" + name)
self.tag_re = re.compile(space + "tag" + name + "revision" +
revision)
self.cert_re = re.compile(lines + space + "name" + name +
"value" + value)
attr = space + "file" + lines + space + "attr" + space
self.attr_execute_re = re.compile(attr + '"mtn:execute"' +
space + '"true"')
# cached data
self.manifest_rev = None
self.manifest = None
self.files = None
self.dirs = None
checktool('mtn', abort=False)
def mtnrun(self, *args, **kwargs):
if self.automatestdio:
return self.mtnrunstdio(*args, **kwargs)
else:
return self.mtnrunsingle(*args, **kwargs)
def mtnrunsingle(self, *args, **kwargs):
kwargs['d'] = self.path
return self.run0('automate', *args, **kwargs)
def mtnrunstdio(self, *args, **kwargs):
# Prepare the command in automate stdio format
command = []
for k, v in kwargs.iteritems():
command.append("%s:%s" % (len(k), k))
if v:
command.append("%s:%s" % (len(v), v))
if command:
command.insert(0, 'o')
command.append('e')
command.append('l')
for arg in args:
command += "%s:%s" % (len(arg), arg)
command.append('e')
command = ''.join(command)
self.ui.debug("mtn: sending '%s'\n" % command)
self.mtnwritefp.write(command)
self.mtnwritefp.flush()
return self.mtnstdioreadcommandoutput(command)
def mtnstdioreadpacket(self):
read = None
commandnbr = ''
while read != ':':
read = self.mtnreadfp.read(1)
if not read:
raise util.Abort(_('bad mtn packet - no end of commandnbr'))
commandnbr += read
commandnbr = commandnbr[:-1]
stream = self.mtnreadfp.read(1)
if stream not in 'mewptl':
raise util.Abort(_('bad mtn packet - bad stream type %s') % stream)
read = self.mtnreadfp.read(1)
if read != ':':
raise util.Abort(_('bad mtn packet - no divider before size'))
read = None
lengthstr = ''
while read != ':':
read = self.mtnreadfp.read(1)
if not read:
raise util.Abort(_('bad mtn packet - no end of packet size'))
lengthstr += read
try:
length = long(lengthstr[:-1])
        except (TypeError, ValueError):
raise util.Abort(_('bad mtn packet - bad packet size %s')
% lengthstr)
read = self.mtnreadfp.read(length)
if len(read) != length:
raise util.Abort(_("bad mtn packet - unable to read full packet "
"read %s of %s") % (len(read), length))
return (commandnbr, stream, length, read)
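    # Illustrative note (example packet, not from the original source): automate
    # stdio replies have the form <cmdnbr>:<stream>:<size>:<payload>, so a reply
    # such as '0:m:5:hello' is parsed by the method above into
    # ('0', 'm', 5, 'hello').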
def mtnstdioreadcommandoutput(self, command):
retval = []
while True:
commandnbr, stream, length, output = self.mtnstdioreadpacket()
self.ui.debug('mtn: read packet %s:%s:%s\n' %
(commandnbr, stream, length))
if stream == 'l':
# End of command
if output != '0':
raise util.Abort(_("mtn command '%s' returned %s") %
(command, output))
break
elif stream in 'ew':
# Error, warning output
self.ui.warn(_('%s error:\n') % self.command)
self.ui.warn(output)
elif stream == 'p':
# Progress messages
self.ui.debug('mtn: ' + output)
elif stream == 'm':
# Main stream - command output
retval.append(output)
return ''.join(retval)
def mtnloadmanifest(self, rev):
if self.manifest_rev == rev:
return
self.manifest = self.mtnrun("get_manifest_of", rev).split("\n\n")
self.manifest_rev = rev
self.files = {}
self.dirs = {}
for e in self.manifest:
m = self.file_re.match(e)
if m:
attr = ""
name = m.group(1)
node = m.group(2)
if self.attr_execute_re.match(e):
attr += "x"
self.files[name] = (node, attr)
m = self.dir_re.match(e)
if m:
self.dirs[m.group(1)] = True
def mtnisfile(self, name, rev):
# a non-file could be a directory or a deleted or renamed file
self.mtnloadmanifest(rev)
return name in self.files
def mtnisdir(self, name, rev):
self.mtnloadmanifest(rev)
return name in self.dirs
def mtngetcerts(self, rev):
certs = {"author":"<missing>", "date":"<missing>",
"changelog":"<missing>", "branch":"<missing>"}
certlist = self.mtnrun("certs", rev)
# mtn < 0.45:
# key "[email protected]"
# mtn >= 0.45:
# key [ff58a7ffb771907c4ff68995eada1c4da068d328]
certlist = re.split('\n\n key ["\[]', certlist)
for e in certlist:
m = self.cert_re.match(e)
if m:
name, value = m.groups()
value = value.replace(r'\"', '"')
value = value.replace(r'\\', '\\')
certs[name] = value
# Monotone may have subsecond dates: 2005-02-05T09:39:12.364306
# and all times are stored in UTC
certs["date"] = certs["date"].split('.')[0] + " UTC"
return certs
# implement the converter_source interface:
def getheads(self):
if not self.rev:
return self.mtnrun("leaves").splitlines()
else:
return [self.rev]
def getchanges(self, rev):
revision = self.mtnrun("get_revision", rev).split("\n\n")
files = {}
ignoremove = {}
renameddirs = []
copies = {}
for e in revision:
m = self.add_file_re.match(e)
if m:
files[m.group(1)] = rev
ignoremove[m.group(1)] = rev
m = self.patch_re.match(e)
if m:
files[m.group(1)] = rev
# Delete/rename is handled later when the convert engine
# discovers an IOError exception from getfile,
# but only if we add the "from" file to the list of changes.
m = self.delete_re.match(e)
if m:
files[m.group(1)] = rev
m = self.rename_re.match(e)
if m:
toname = m.group(2)
fromname = m.group(1)
if self.mtnisfile(toname, rev):
ignoremove[toname] = 1
copies[toname] = fromname
files[toname] = rev
files[fromname] = rev
elif self.mtnisdir(toname, rev):
renameddirs.append((fromname, toname))
# Directory renames can be handled only once we have recorded
# all new files
for fromdir, todir in renameddirs:
renamed = {}
for tofile in self.files:
if tofile in ignoremove:
continue
if tofile.startswith(todir + '/'):
renamed[tofile] = fromdir + tofile[len(todir):]
# Avoid chained moves like:
# d1(/a) => d3/d1(/a)
# d2 => d3
ignoremove[tofile] = 1
for tofile, fromfile in renamed.items():
self.ui.debug (_("copying file in renamed directory "
"from '%s' to '%s'")
% (fromfile, tofile), '\n')
files[tofile] = rev
copies[tofile] = fromfile
for fromfile in renamed.values():
files[fromfile] = rev
return (files.items(), copies)
def getfile(self, name, rev):
if not self.mtnisfile(name, rev):
raise IOError # file was deleted or renamed
try:
data = self.mtnrun("get_file_of", name, r=rev)
except Exception:
raise IOError # file was deleted or renamed
self.mtnloadmanifest(rev)
node, attr = self.files.get(name, (None, ""))
return data, attr
def getcommit(self, rev):
extra = {}
certs = self.mtngetcerts(rev)
if certs.get('suspend') == certs["branch"]:
extra['close'] = '1'
return commit(
author=certs["author"],
date=util.datestr(util.strdate(certs["date"], "%Y-%m-%dT%H:%M:%S")),
desc=certs["changelog"],
rev=rev,
parents=self.mtnrun("parents", rev).splitlines(),
branch=certs["branch"],
extra=extra)
def gettags(self):
tags = {}
for e in self.mtnrun("tags").split("\n\n"):
m = self.tag_re.match(e)
if m:
tags[m.group(1)] = m.group(2)
return tags
def getchangedfiles(self, rev, i):
# This function is only needed to support --filemap
# ... and we don't support that
raise NotImplementedError
def before(self):
# Check if we have a new enough version to use automate stdio
version = 0.0
try:
versionstr = self.mtnrunsingle("interface_version")
version = float(versionstr)
except Exception:
raise util.Abort(_("unable to determine mtn automate interface "
"version"))
if version >= 12.0:
self.automatestdio = True
self.ui.debug("mtn automate version %s - using automate stdio\n" %
version)
# launch the long-running automate stdio process
self.mtnwritefp, self.mtnreadfp = self._run2('automate', 'stdio',
'-d', self.path)
# read the headers
read = self.mtnreadfp.readline()
if read != 'format-version: 2\n':
raise util.Abort(_('mtn automate stdio header unexpected: %s')
% read)
while read != '\n':
read = self.mtnreadfp.readline()
if not read:
raise util.Abort(_("failed to reach end of mtn automate "
"stdio headers"))
else:
self.ui.debug("mtn automate version %s - not using automate stdio "
"(automate >= 12.0 - mtn >= 0.46 is needed)\n" % version)
def after(self):
if self.automatestdio:
self.mtnwritefp.close()
self.mtnwritefp = None
self.mtnreadfp.close()
self.mtnreadfp = None
| apache-2.0 |
GaussDing/django | django/contrib/gis/geoip/base.py | 106 | 11135 | import os
import re
from ctypes import c_char_p
from django.contrib.gis.geoip.libgeoip import GEOIP_SETTINGS
from django.contrib.gis.geoip.prototypes import (
GeoIP_country_code_by_addr, GeoIP_country_code_by_name,
GeoIP_country_name_by_addr, GeoIP_country_name_by_name,
GeoIP_database_info, GeoIP_delete, GeoIP_lib_version, GeoIP_open,
GeoIP_record_by_addr, GeoIP_record_by_name,
)
from django.core.validators import ipv4_re
from django.utils import six
from django.utils.encoding import force_bytes
# Regular expressions for recognizing the GeoIP free database editions.
free_regex = re.compile(r'^GEO-\d{3}FREE')
lite_regex = re.compile(r'^GEO-\d{3}LITE')
class GeoIPException(Exception):
pass
class GeoIP(object):
# The flags for GeoIP memory caching.
# GEOIP_STANDARD - read database from filesystem, uses least memory.
#
# GEOIP_MEMORY_CACHE - load database into memory, faster performance
# but uses more memory
#
# GEOIP_CHECK_CACHE - check for updated database. If database has been
# updated, reload filehandle and/or memory cache. This option
# is not thread safe.
#
# GEOIP_INDEX_CACHE - just cache the most frequently accessed index
# portion of the database, resulting in faster lookups than
# GEOIP_STANDARD, but less memory usage than GEOIP_MEMORY_CACHE -
# useful for larger databases such as GeoIP Organization and
# GeoIP City. Note, for GeoIP Country, Region and Netspeed
# databases, GEOIP_INDEX_CACHE is equivalent to GEOIP_MEMORY_CACHE
#
# GEOIP_MMAP_CACHE - load database into mmap shared memory ( not available
# on Windows).
GEOIP_STANDARD = 0
GEOIP_MEMORY_CACHE = 1
GEOIP_CHECK_CACHE = 2
GEOIP_INDEX_CACHE = 4
GEOIP_MMAP_CACHE = 8
cache_options = {opt: None for opt in (0, 1, 2, 4, 8)}
# Paths to the city & country binary databases.
_city_file = ''
_country_file = ''
# Initially, pointers to GeoIP file references are NULL.
_city = None
_country = None
def __init__(self, path=None, cache=0, country=None, city=None):
"""
Initializes the GeoIP object, no parameters are required to use default
settings. Keyword arguments may be passed in to customize the locations
of the GeoIP data sets.
* path: Base directory to where GeoIP data is located or the full path
to where the city or country data files (*.dat) are located.
Assumes that both the city and country data sets are located in
this directory; overrides the GEOIP_PATH settings attribute.
* cache: The cache settings when opening up the GeoIP datasets,
and may be an integer in (0, 1, 2, 4, 8) corresponding to
the GEOIP_STANDARD, GEOIP_MEMORY_CACHE, GEOIP_CHECK_CACHE,
GEOIP_INDEX_CACHE, and GEOIP_MMAP_CACHE, `GeoIPOptions` C API
settings, respectively. Defaults to 0, meaning that the data is read
from the disk.
* country: The name of the GeoIP country data file. Defaults to
'GeoIP.dat'; overrides the GEOIP_COUNTRY settings attribute.
* city: The name of the GeoIP city data file. Defaults to
'GeoLiteCity.dat'; overrides the GEOIP_CITY settings attribute.
"""
# Checking the given cache option.
if cache in self.cache_options:
self._cache = cache
else:
raise GeoIPException('Invalid GeoIP caching option: %s' % cache)
# Getting the GeoIP data path.
if not path:
path = GEOIP_SETTINGS.get('GEOIP_PATH', None)
if not path:
raise GeoIPException('GeoIP path must be provided via parameter or the GEOIP_PATH setting.')
if not isinstance(path, six.string_types):
raise TypeError('Invalid path type: %s' % type(path).__name__)
if os.path.isdir(path):
# Constructing the GeoIP database filenames using the settings
# dictionary. If the database files for the GeoLite country
# and/or city datasets exist, then try and open them.
country_db = os.path.join(path, country or GEOIP_SETTINGS.get('GEOIP_COUNTRY', 'GeoIP.dat'))
if os.path.isfile(country_db):
self._country = GeoIP_open(force_bytes(country_db), cache)
self._country_file = country_db
city_db = os.path.join(path, city or GEOIP_SETTINGS.get('GEOIP_CITY', 'GeoLiteCity.dat'))
if os.path.isfile(city_db):
self._city = GeoIP_open(force_bytes(city_db), cache)
self._city_file = city_db
elif os.path.isfile(path):
# Otherwise, some detective work will be needed to figure
# out whether the given database path is for the GeoIP country
# or city databases.
ptr = GeoIP_open(force_bytes(path), cache)
info = GeoIP_database_info(ptr)
if lite_regex.match(info):
# GeoLite City database detected.
self._city = ptr
self._city_file = path
elif free_regex.match(info):
# GeoIP Country database detected.
self._country = ptr
self._country_file = path
else:
raise GeoIPException('Unable to recognize database edition: %s' % info)
else:
raise GeoIPException('GeoIP path must be a valid file or directory.')
def __del__(self):
# Cleaning any GeoIP file handles lying around.
if GeoIP_delete is None:
return
if self._country:
GeoIP_delete(self._country)
if self._city:
GeoIP_delete(self._city)
def _check_query(self, query, country=False, city=False, city_or_country=False):
"Helper routine for checking the query and database availability."
# Making sure a string was passed in for the query.
if not isinstance(query, six.string_types):
raise TypeError('GeoIP query must be a string, not type %s' % type(query).__name__)
# Extra checks for the existence of country and city databases.
if city_or_country and not (self._country or self._city):
raise GeoIPException('Invalid GeoIP country and city data files.')
elif country and not self._country:
raise GeoIPException('Invalid GeoIP country data file: %s' % self._country_file)
elif city and not self._city:
raise GeoIPException('Invalid GeoIP city data file: %s' % self._city_file)
# Return the query string back to the caller. GeoIP only takes bytestrings.
return force_bytes(query)
def city(self, query):
"""
Returns a dictionary of city information for the given IP address or
Fully Qualified Domain Name (FQDN). Some information in the dictionary
may be undefined (None).
"""
enc_query = self._check_query(query, city=True)
if ipv4_re.match(query):
# If an IP address was passed in
return GeoIP_record_by_addr(self._city, c_char_p(enc_query))
else:
# If a FQDN was passed in.
return GeoIP_record_by_name(self._city, c_char_p(enc_query))
def country_code(self, query):
"Returns the country code for the given IP Address or FQDN."
enc_query = self._check_query(query, city_or_country=True)
if self._country:
if ipv4_re.match(query):
return GeoIP_country_code_by_addr(self._country, enc_query)
else:
return GeoIP_country_code_by_name(self._country, enc_query)
else:
return self.city(query)['country_code']
def country_name(self, query):
"Returns the country name for the given IP Address or FQDN."
enc_query = self._check_query(query, city_or_country=True)
if self._country:
if ipv4_re.match(query):
return GeoIP_country_name_by_addr(self._country, enc_query)
else:
return GeoIP_country_name_by_name(self._country, enc_query)
else:
return self.city(query)['country_name']
def country(self, query):
"""
Returns a dictionary with the country code and name when given an
IP address or a Fully Qualified Domain Name (FQDN). For example, both
'24.124.1.80' and 'djangoproject.com' are valid parameters.
"""
# Returning the country code and name
return {'country_code': self.country_code(query),
'country_name': self.country_name(query),
}
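    # Illustrative usage sketch (hypothetical queries; assumes the GeoLite
    # country/city .dat files are reachable via GEOIP_PATH or ``path``):
    #
    #     g = GeoIP()
    #     g.country('djangoproject.com')   # {'country_code': ..., 'country_name': ...}
    #     g.city('72.14.207.99')           # dict with city, region, lat/lon, ...
    #     g.lat_lon('djangoproject.com')   # (latitude, longitude)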
# #### Coordinate retrieval routines ####
def coords(self, query, ordering=('longitude', 'latitude')):
cdict = self.city(query)
if cdict is None:
return None
else:
return tuple(cdict[o] for o in ordering)
def lon_lat(self, query):
"Returns a tuple of the (longitude, latitude) for the given query."
return self.coords(query)
def lat_lon(self, query):
"Returns a tuple of the (latitude, longitude) for the given query."
return self.coords(query, ('latitude', 'longitude'))
def geos(self, query):
"Returns a GEOS Point object for the given query."
ll = self.lon_lat(query)
if ll:
from django.contrib.gis.geos import Point
return Point(ll, srid=4326)
else:
return None
# #### GeoIP Database Information Routines ####
@property
def country_info(self):
"Returns information about the GeoIP country database."
if self._country is None:
ci = 'No GeoIP Country data in "%s"' % self._country_file
else:
ci = GeoIP_database_info(self._country)
return ci
@property
def city_info(self):
"Returns information about the GeoIP city database."
if self._city is None:
ci = 'No GeoIP City data in "%s"' % self._city_file
else:
ci = GeoIP_database_info(self._city)
return ci
@property
def info(self):
"Returns information about the GeoIP library and databases in use."
info = ''
if GeoIP_lib_version:
info += 'GeoIP Library:\n\t%s\n' % GeoIP_lib_version()
return info + 'Country:\n\t%s\nCity:\n\t%s' % (self.country_info, self.city_info)
# #### Methods for compatibility w/the GeoIP-Python API. ####
@classmethod
def open(cls, full_path, cache):
return GeoIP(full_path, cache)
def _rec_by_arg(self, arg):
if self._city:
return self.city(arg)
else:
return self.country(arg)
region_by_addr = city
region_by_name = city
record_by_addr = _rec_by_arg
record_by_name = _rec_by_arg
country_code_by_addr = country_code
country_code_by_name = country_code
country_name_by_addr = country_name
country_name_by_name = country_name
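# Illustrative usage sketch (not part of the original module; assumes the
# GeoIP country/city datasets referenced by the GEOIP_PATH setting are
# installed):
#
#   from django.contrib.gis.geoip import GeoIP
#   g = GeoIP()                      # picks up GEOIP_PATH from settings
#   g.country('djangoproject.com')   # {'country_code': ..., 'country_name': ...}
#   g.city('72.14.207.99')           # full city record dict, or None
#   g.lat_lon('72.14.207.99')        # (latitude, longitude)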
| bsd-3-clause |
gangadharkadam/johnfrappe | frappe/widgets/event.py | 35 | 1523 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
# Event
# -------------
from __future__ import unicode_literals
import frappe
@frappe.whitelist()
def get_cal_events(m_st, m_end):
# load owned events
res1 = frappe.db.sql("""select name from `tabEvent`
WHERE ifnull(event_date,'2000-01-01') between %s and %s and owner = %s
and event_type != 'Public' and event_type != 'Cancel'""",
(m_st, m_end, frappe.user.name))
# load individual events
res2 = frappe.db.sql("""select t1.name from `tabEvent` t1, `tabEvent User` t2
where ifnull(t1.event_date,'2000-01-01') between %s and %s and t2.person = %s
and t1.name = t2.parent and t1.event_type != 'Cancel'""",
(m_st, m_end, frappe.user.name))
# load role events
roles = frappe.user.get_roles()
	myroles = ['t2.role = "%s"' % r.replace('"', '\\"') for r in roles]
myroles = '(' + (' OR '.join(myroles)) + ')'
res3 = frappe.db.sql("""select t1.name from `tabEvent` t1, `tabEvent Role` t2
where ifnull(t1.event_date,'2000-01-01') between %s and %s
and t1.name = t2.parent and t1.event_type != 'Cancel' and %s""" %
('%s', '%s', myroles), (m_st, m_end))
# load public events
res4 = frappe.db.sql("select name from `tabEvent` \
where ifnull(event_date,'2000-01-01') between %s and %s and event_type='Public'",
(m_st, m_end))
doclist, rl = [], []
for r in res1 + res2 + res3 + res4:
		if r not in rl:
			doclist.append(frappe.get_doc('Event', r[0]))
rl.append(r)
return doclist
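# Illustrative client-side sketch (an assumption, not part of the original
# file): the whitelisted method above is typically called from desk JS by its
# dotted path, e.g.
#
#   frappe.call({
#       method: "frappe.widgets.event.get_cal_events",
#       args: { m_st: "2013-01-01", m_end: "2013-01-31" },
#       callback: function(r) { /* r.message carries the returned doclist */ }
#   });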
| mit |
ansible/ansible | test/sanity/code-smell/docs-build.py | 16 | 5175 | #!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
import shutil
import subprocess
import sys
import tempfile
def main():
base_dir = os.getcwd() + os.path.sep
docs_dir = os.path.abspath('docs/docsite')
# TODO: Remove this temporary hack to constrain 'cryptography' when we have
# a better story for dealing with it.
tmpfd, tmp = tempfile.mkstemp()
requirements_txt = os.path.join(base_dir, 'requirements.txt')
shutil.copy2(requirements_txt, tmp)
lines = []
with open(requirements_txt, 'r') as f:
for line in f.readlines():
if line.strip() == 'cryptography':
line = 'cryptography < 3.4\n'
lines.append(line)
with open(requirements_txt, 'w') as f:
f.writelines(lines)
try:
cmd = ['make', 'core_singlehtmldocs']
sphinx = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=docs_dir)
stdout, stderr = sphinx.communicate()
finally:
shutil.move(tmp, requirements_txt)
stdout = stdout.decode('utf-8')
stderr = stderr.decode('utf-8')
if sphinx.returncode != 0:
sys.stderr.write("Command '%s' failed with status code: %d\n" % (' '.join(cmd), sphinx.returncode))
if stdout.strip():
stdout = simplify_stdout(stdout)
sys.stderr.write("--> Standard Output\n")
sys.stderr.write("%s\n" % stdout.strip())
if stderr.strip():
sys.stderr.write("--> Standard Error\n")
sys.stderr.write("%s\n" % stderr.strip())
sys.exit(1)
with open('docs/docsite/rst_warnings', 'r') as warnings_fd:
output = warnings_fd.read().strip()
lines = output.splitlines()
known_warnings = {
'block-quote-missing-blank-line': r'^Block quote ends without a blank line; unexpected unindent.$',
'literal-block-lex-error': r'^Could not lex literal_block as "[^"]*". Highlighting skipped.$',
'duplicate-label': r'^duplicate label ',
'undefined-label': r'undefined label: ',
'unknown-document': r'unknown document: ',
'toc-tree-missing-document': r'toctree contains reference to nonexisting document ',
'reference-target-not-found': r'[^ ]* reference target not found: ',
'not-in-toc-tree': r"document isn't included in any toctree$",
'unexpected-indentation': r'^Unexpected indentation.$',
'definition-list-missing-blank-line': r'^Definition list ends without a blank line; unexpected unindent.$',
'explicit-markup-missing-blank-line': r'Explicit markup ends without a blank line; unexpected unindent.$',
'toc-tree-glob-pattern-no-match': r"^toctree glob pattern '[^']*' didn't match any documents$",
'unknown-interpreted-text-role': '^Unknown interpreted text role "[^"]*".$',
}
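    # Sphinx warning lines normally have the shape
    # "<path>:[<line>:][<column>:] WARNING|ERROR: <message>"; lines that do
    # not match are surfaced verbatim by the fallback branch below.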
for line in lines:
match = re.search('^(?P<path>[^:]+):((?P<line>[0-9]+):)?((?P<column>[0-9]+):)? (?P<level>WARNING|ERROR): (?P<message>.*)$', line)
if not match:
path = 'docs/docsite/rst/index.rst'
lineno = 0
column = 0
code = 'unknown'
message = line
# surface unknown lines while filtering out known lines to avoid excessive output
print('%s:%d:%d: %s: %s' % (path, lineno, column, code, message))
continue
path = match.group('path')
lineno = int(match.group('line') or 0)
column = int(match.group('column') or 0)
level = match.group('level').lower()
message = match.group('message')
path = os.path.abspath(path)
if path.startswith(base_dir):
path = path[len(base_dir):]
if path.startswith('rst/'):
path = 'docs/docsite/' + path # fix up paths reported relative to `docs/docsite/`
if level == 'warning':
code = 'warning'
for label, pattern in known_warnings.items():
if re.search(pattern, message):
code = label
break
else:
code = 'error'
print('%s:%d:%d: %s: %s' % (path, lineno, column, code, message))
def simplify_stdout(value):
"""Simplify output by omitting earlier 'rendering: ...' messages."""
lines = value.strip().splitlines()
rendering = []
keep = []
def truncate_rendering():
"""Keep last rendering line (if any) with a message about omitted lines as needed."""
if not rendering:
return
notice = rendering[-1]
if len(rendering) > 1:
notice += ' (%d previous rendering line(s) omitted)' % (len(rendering) - 1)
keep.append(notice)
# Could change to rendering.clear() if we do not support python2
rendering[:] = []
for line in lines:
if line.startswith('rendering: '):
rendering.append(line)
continue
truncate_rendering()
keep.append(line)
truncate_rendering()
result = '\n'.join(keep)
return result
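# For example (illustrative), the lines
#   rendering: a.rst
#   rendering: b.rst
#   writing output...
# simplify to
#   rendering: b.rst (1 previous rendering line(s) omitted)
#   writing output...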
if __name__ == '__main__':
main()
| gpl-3.0 |
PaddlePaddle/Paddle | python/paddle/fluid/tests/unittests/sequence/test_sequence_concat.py | 2 | 3737 | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import sys
sys.path.append("../")
from op_test import OpTest
from paddle import fluid
class TestSequenceConcat(OpTest):
def setLoD(self):
self.lod1 = [7, 3]
self.lod2 = [12, 8]
self.out_lod = [19, 11]
def setUp(self):
x1 = np.random.random(size=(10, 80)).astype('float64')
x2 = np.random.random(size=(20, 80)).astype('float64')
self.setLoD()
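        # Expected output: concatenation per LoD segment, i.e. the first
        # sequence of x1 followed by the first sequence of x2, then the
        # second sequence of each input.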
out = np.concatenate((x1[0:self.lod1[0]], x2[0:self.lod2[0]],
x1[self.lod1[0]:], x2[self.lod2[0]:]))
self.op_type = "sequence_concat"
self.inputs = {
'X': [("x1", (x1, [self.lod1])), ("x2", (x2, [self.lod2]))]
}
self.outputs = {"Out": (out, [self.out_lod])}
def test_output(self):
self.check_output()
def test_dx(self):
self.check_grad(inputs_to_check=['x1', 'x2'], output_names="Out")
class TestSequenceConcatCase2(TestSequenceConcat):
def setLoD(self):
self.lod1 = [10, 0]
self.lod2 = [12, 8]
self.out_lod = [22, 8]
class TestSequenceConcatCase3(TestSequenceConcat):
def setLoD(self):
self.lod1 = [10, 0]
self.lod2 = [20, 0]
self.out_lod = [30, 0]
class TestSequenceConcatCase4(TestSequenceConcat):
def setLoD(self):
self.lod1 = [0, 10]
self.lod2 = [0, 20]
self.out_lod = [0, 30]
class TestSequenceConcatCase5(TestSequenceConcat):
def setLoD(self):
self.lod1 = [0, 10]
self.lod2 = [20, 0]
self.out_lod = [20, 10]
class TestSequenceConcatOpError(unittest.TestCase):
def test_errors(self):
def test_input_list():
# the input type must be list
x_data = fluid.layers.data(name='x', shape=[4], dtype='float32')
fluid.layers.sequence_concat(input=x_data)
self.assertRaises(TypeError, test_input_list)
def test_variable1():
# the input element type must be Variable
x1_data = np.array([[3, 5]]).astype('float32')
y1_data = fluid.layers.data(name='y1', shape=[4], dtype='float32')
fluid.layers.sequence_concat(input=[x1_data, y1_data])
def test_variable2():
x2_data = np.array([[3, 5]]).astype('float32')
y2_data = fluid.layers.data(name='y2', shape=[4], dtype='float32')
fluid.layers.sequence_concat(input=[y2_data, x2_data])
for i in range(2):
if i == 0:
self.assertRaises(TypeError, test_variable1)
else:
self.assertRaises(TypeError, test_variable2)
def test_dtype():
# dtype must be 'float32', 'float64', 'int64'
x3_data = fluid.layers.data(name="x3", shape=[3, 5], dtype='int32')
y3_data = fluid.layers.data(name="y3", shape=[3, 5], dtype='int16')
input_list = [x3_data, y3_data]
fluid.layers.sequence_concat(input=input_list)
self.assertRaises(TypeError, test_dtype)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
sampadsaha5/sympy | sympy/polys/compatibility.py | 96 | 56525 | """Compatibility interface between dense and sparse polys. """
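# Illustrative usage sketch (an assumption, not part of the original module):
# PolyRing mixes in IPolys, so the dense-level routines below can be driven
# from a sparse ring, e.g.
#
#   >>> from sympy.polys.rings import ring
#   >>> from sympy.polys.domains import ZZ
#   >>> R, x, y = ring("x,y", ZZ)
#   >>> R.dmp_add(x*y + 1, x)
#   x*y + x + 1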
from __future__ import print_function, division
from sympy.polys.densearith import dup_add_term
from sympy.polys.densearith import dmp_add_term
from sympy.polys.densearith import dup_sub_term
from sympy.polys.densearith import dmp_sub_term
from sympy.polys.densearith import dup_mul_term
from sympy.polys.densearith import dmp_mul_term
from sympy.polys.densearith import dup_add_ground
from sympy.polys.densearith import dmp_add_ground
from sympy.polys.densearith import dup_sub_ground
from sympy.polys.densearith import dmp_sub_ground
from sympy.polys.densearith import dup_mul_ground
from sympy.polys.densearith import dmp_mul_ground
from sympy.polys.densearith import dup_quo_ground
from sympy.polys.densearith import dmp_quo_ground
from sympy.polys.densearith import dup_exquo_ground
from sympy.polys.densearith import dmp_exquo_ground
from sympy.polys.densearith import dup_lshift
from sympy.polys.densearith import dup_rshift
from sympy.polys.densearith import dup_abs
from sympy.polys.densearith import dmp_abs
from sympy.polys.densearith import dup_neg
from sympy.polys.densearith import dmp_neg
from sympy.polys.densearith import dup_add
from sympy.polys.densearith import dmp_add
from sympy.polys.densearith import dup_sub
from sympy.polys.densearith import dmp_sub
from sympy.polys.densearith import dup_add_mul
from sympy.polys.densearith import dmp_add_mul
from sympy.polys.densearith import dup_sub_mul
from sympy.polys.densearith import dmp_sub_mul
from sympy.polys.densearith import dup_mul
from sympy.polys.densearith import dmp_mul
from sympy.polys.densearith import dup_sqr
from sympy.polys.densearith import dmp_sqr
from sympy.polys.densearith import dup_pow
from sympy.polys.densearith import dmp_pow
from sympy.polys.densearith import dup_pdiv
from sympy.polys.densearith import dup_prem
from sympy.polys.densearith import dup_pquo
from sympy.polys.densearith import dup_pexquo
from sympy.polys.densearith import dmp_pdiv
from sympy.polys.densearith import dmp_prem
from sympy.polys.densearith import dmp_pquo
from sympy.polys.densearith import dmp_pexquo
from sympy.polys.densearith import dup_rr_div
from sympy.polys.densearith import dmp_rr_div
from sympy.polys.densearith import dup_ff_div
from sympy.polys.densearith import dmp_ff_div
from sympy.polys.densearith import dup_div
from sympy.polys.densearith import dup_rem
from sympy.polys.densearith import dup_quo
from sympy.polys.densearith import dup_exquo
from sympy.polys.densearith import dmp_div
from sympy.polys.densearith import dmp_rem
from sympy.polys.densearith import dmp_quo
from sympy.polys.densearith import dmp_exquo
from sympy.polys.densearith import dup_max_norm
from sympy.polys.densearith import dmp_max_norm
from sympy.polys.densearith import dup_l1_norm
from sympy.polys.densearith import dmp_l1_norm
from sympy.polys.densearith import dup_expand
from sympy.polys.densearith import dmp_expand
from sympy.polys.densebasic import dup_LC
from sympy.polys.densebasic import dmp_LC
from sympy.polys.densebasic import dup_TC
from sympy.polys.densebasic import dmp_TC
from sympy.polys.densebasic import dmp_ground_LC
from sympy.polys.densebasic import dmp_ground_TC
from sympy.polys.densebasic import dup_degree
from sympy.polys.densebasic import dmp_degree
from sympy.polys.densebasic import dmp_degree_in
from sympy.polys.densebasic import dmp_to_dict
from sympy.polys.densetools import dup_integrate
from sympy.polys.densetools import dmp_integrate
from sympy.polys.densetools import dmp_integrate_in
from sympy.polys.densetools import dup_diff
from sympy.polys.densetools import dmp_diff
from sympy.polys.densetools import dmp_diff_in
from sympy.polys.densetools import dup_eval
from sympy.polys.densetools import dmp_eval
from sympy.polys.densetools import dmp_eval_in
from sympy.polys.densetools import dmp_eval_tail
from sympy.polys.densetools import dmp_diff_eval_in
from sympy.polys.densetools import dup_trunc
from sympy.polys.densetools import dmp_trunc
from sympy.polys.densetools import dmp_ground_trunc
from sympy.polys.densetools import dup_monic
from sympy.polys.densetools import dmp_ground_monic
from sympy.polys.densetools import dup_content
from sympy.polys.densetools import dmp_ground_content
from sympy.polys.densetools import dup_primitive
from sympy.polys.densetools import dmp_ground_primitive
from sympy.polys.densetools import dup_extract
from sympy.polys.densetools import dmp_ground_extract
from sympy.polys.densetools import dup_real_imag
from sympy.polys.densetools import dup_mirror
from sympy.polys.densetools import dup_scale
from sympy.polys.densetools import dup_shift
from sympy.polys.densetools import dup_transform
from sympy.polys.densetools import dup_compose
from sympy.polys.densetools import dmp_compose
from sympy.polys.densetools import dup_decompose
from sympy.polys.densetools import dmp_lift
from sympy.polys.densetools import dup_sign_variations
from sympy.polys.densetools import dup_clear_denoms
from sympy.polys.densetools import dmp_clear_denoms
from sympy.polys.densetools import dup_revert
from sympy.polys.euclidtools import dup_half_gcdex
from sympy.polys.euclidtools import dmp_half_gcdex
from sympy.polys.euclidtools import dup_gcdex
from sympy.polys.euclidtools import dmp_gcdex
from sympy.polys.euclidtools import dup_invert
from sympy.polys.euclidtools import dmp_invert
from sympy.polys.euclidtools import dup_euclidean_prs
from sympy.polys.euclidtools import dmp_euclidean_prs
from sympy.polys.euclidtools import dup_primitive_prs
from sympy.polys.euclidtools import dmp_primitive_prs
from sympy.polys.euclidtools import dup_inner_subresultants
from sympy.polys.euclidtools import dup_subresultants
from sympy.polys.euclidtools import dup_prs_resultant
from sympy.polys.euclidtools import dup_resultant
from sympy.polys.euclidtools import dmp_inner_subresultants
from sympy.polys.euclidtools import dmp_subresultants
from sympy.polys.euclidtools import dmp_prs_resultant
from sympy.polys.euclidtools import dmp_zz_modular_resultant
from sympy.polys.euclidtools import dmp_zz_collins_resultant
from sympy.polys.euclidtools import dmp_qq_collins_resultant
from sympy.polys.euclidtools import dmp_resultant
from sympy.polys.euclidtools import dup_discriminant
from sympy.polys.euclidtools import dmp_discriminant
from sympy.polys.euclidtools import dup_rr_prs_gcd
from sympy.polys.euclidtools import dup_ff_prs_gcd
from sympy.polys.euclidtools import dmp_rr_prs_gcd
from sympy.polys.euclidtools import dmp_ff_prs_gcd
from sympy.polys.euclidtools import dup_zz_heu_gcd
from sympy.polys.euclidtools import dmp_zz_heu_gcd
from sympy.polys.euclidtools import dup_qq_heu_gcd
from sympy.polys.euclidtools import dmp_qq_heu_gcd
from sympy.polys.euclidtools import dup_inner_gcd
from sympy.polys.euclidtools import dmp_inner_gcd
from sympy.polys.euclidtools import dup_gcd
from sympy.polys.euclidtools import dmp_gcd
from sympy.polys.euclidtools import dup_rr_lcm
from sympy.polys.euclidtools import dup_ff_lcm
from sympy.polys.euclidtools import dup_lcm
from sympy.polys.euclidtools import dmp_rr_lcm
from sympy.polys.euclidtools import dmp_ff_lcm
from sympy.polys.euclidtools import dmp_lcm
from sympy.polys.euclidtools import dmp_content
from sympy.polys.euclidtools import dmp_primitive
from sympy.polys.euclidtools import dup_cancel
from sympy.polys.euclidtools import dmp_cancel
from sympy.polys.factortools import dup_trial_division
from sympy.polys.factortools import dmp_trial_division
from sympy.polys.factortools import dup_zz_mignotte_bound
from sympy.polys.factortools import dmp_zz_mignotte_bound
from sympy.polys.factortools import dup_zz_hensel_step
from sympy.polys.factortools import dup_zz_hensel_lift
from sympy.polys.factortools import dup_zz_zassenhaus
from sympy.polys.factortools import dup_zz_irreducible_p
from sympy.polys.factortools import dup_cyclotomic_p
from sympy.polys.factortools import dup_zz_cyclotomic_poly
from sympy.polys.factortools import dup_zz_cyclotomic_factor
from sympy.polys.factortools import dup_zz_factor_sqf
from sympy.polys.factortools import dup_zz_factor
from sympy.polys.factortools import dmp_zz_wang_non_divisors
from sympy.polys.factortools import dmp_zz_wang_lead_coeffs
from sympy.polys.factortools import dup_zz_diophantine
from sympy.polys.factortools import dmp_zz_diophantine
from sympy.polys.factortools import dmp_zz_wang_hensel_lifting
from sympy.polys.factortools import dmp_zz_wang
from sympy.polys.factortools import dmp_zz_factor
from sympy.polys.factortools import dup_ext_factor
from sympy.polys.factortools import dmp_ext_factor
from sympy.polys.factortools import dup_gf_factor
from sympy.polys.factortools import dmp_gf_factor
from sympy.polys.factortools import dup_factor_list
from sympy.polys.factortools import dup_factor_list_include
from sympy.polys.factortools import dmp_factor_list
from sympy.polys.factortools import dmp_factor_list_include
from sympy.polys.factortools import dup_irreducible_p
from sympy.polys.factortools import dmp_irreducible_p
from sympy.polys.rootisolation import dup_sturm
from sympy.polys.rootisolation import dup_root_upper_bound
from sympy.polys.rootisolation import dup_root_lower_bound
from sympy.polys.rootisolation import dup_step_refine_real_root
from sympy.polys.rootisolation import dup_inner_refine_real_root
from sympy.polys.rootisolation import dup_outer_refine_real_root
from sympy.polys.rootisolation import dup_refine_real_root
from sympy.polys.rootisolation import dup_inner_isolate_real_roots
from sympy.polys.rootisolation import dup_inner_isolate_positive_roots
from sympy.polys.rootisolation import dup_inner_isolate_negative_roots
from sympy.polys.rootisolation import dup_isolate_real_roots_sqf
from sympy.polys.rootisolation import dup_isolate_real_roots
from sympy.polys.rootisolation import dup_isolate_real_roots_list
from sympy.polys.rootisolation import dup_count_real_roots
from sympy.polys.rootisolation import dup_count_complex_roots
from sympy.polys.rootisolation import dup_isolate_complex_roots_sqf
from sympy.polys.rootisolation import dup_isolate_all_roots_sqf
from sympy.polys.rootisolation import dup_isolate_all_roots
from sympy.polys.sqfreetools import (
dup_sqf_p, dmp_sqf_p, dup_sqf_norm, dmp_sqf_norm, dup_gf_sqf_part, dmp_gf_sqf_part,
dup_sqf_part, dmp_sqf_part, dup_gf_sqf_list, dmp_gf_sqf_list, dup_sqf_list,
dup_sqf_list_include, dmp_sqf_list, dmp_sqf_list_include, dup_gff_list, dmp_gff_list)
from sympy.polys.galoistools import (
    gf_degree, gf_LC, gf_TC, gf_strip, gf_trunc, gf_normal, gf_from_dict,
gf_to_dict, gf_from_int_poly, gf_to_int_poly, gf_neg, gf_add_ground, gf_sub_ground,
gf_mul_ground, gf_quo_ground, gf_add, gf_sub, gf_mul, gf_sqr, gf_add_mul, gf_sub_mul,
gf_expand, gf_div, gf_rem, gf_quo, gf_exquo, gf_lshift, gf_rshift, gf_pow, gf_pow_mod,
gf_gcd, gf_lcm, gf_cofactors, gf_gcdex, gf_monic, gf_diff, gf_eval, gf_multi_eval,
gf_compose, gf_compose_mod, gf_trace_map, gf_random, gf_irreducible, gf_irred_p_ben_or,
    gf_irred_p_rabin, gf_irreducible_p, gf_sqf_p, gf_sqf_part, gf_sqf_list, gf_Qmatrix,
gf_berlekamp, gf_ddf_zassenhaus, gf_edf_zassenhaus, gf_ddf_shoup, gf_edf_shoup,
gf_zassenhaus, gf_shoup, gf_factor_sqf, gf_factor)
from sympy.utilities import public
@public
class IPolys(object):
symbols = None
ngens = None
domain = None
order = None
gens = None
def drop(self, gen):
pass
def clone(self, symbols=None, domain=None, order=None):
pass
def to_ground(self):
pass
def ground_new(self, element):
pass
def domain_new(self, element):
pass
def from_dict(self, d):
pass
def wrap(self, element):
from sympy.polys.rings import PolyElement
if isinstance(element, PolyElement):
if element.ring == self:
return element
else:
raise NotImplementedError("domain conversions")
else:
return self.ground_new(element)
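    # ``to_dense`` converts a ring (or ground) element into the nested-list
    # "dmp" representation used by the dense modules; ``from_dense`` rebuilds
    # a sparse ring element from such a list.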
def to_dense(self, element):
return self.wrap(element).to_dense()
def from_dense(self, element):
return self.from_dict(dmp_to_dict(element, self.ngens-1, self.domain))
def dup_add_term(self, f, c, i):
return self.from_dense(dup_add_term(self.to_dense(f), c, i, self.domain))
def dmp_add_term(self, f, c, i):
return self.from_dense(dmp_add_term(self.to_dense(f), self.wrap(c).drop(0).to_dense(), i, self.ngens-1, self.domain))
def dup_sub_term(self, f, c, i):
return self.from_dense(dup_sub_term(self.to_dense(f), c, i, self.domain))
def dmp_sub_term(self, f, c, i):
return self.from_dense(dmp_sub_term(self.to_dense(f), self.wrap(c).drop(0).to_dense(), i, self.ngens-1, self.domain))
def dup_mul_term(self, f, c, i):
return self.from_dense(dup_mul_term(self.to_dense(f), c, i, self.domain))
def dmp_mul_term(self, f, c, i):
return self.from_dense(dmp_mul_term(self.to_dense(f), self.wrap(c).drop(0).to_dense(), i, self.ngens-1, self.domain))
def dup_add_ground(self, f, c):
return self.from_dense(dup_add_ground(self.to_dense(f), c, self.domain))
def dmp_add_ground(self, f, c):
return self.from_dense(dmp_add_ground(self.to_dense(f), c, self.ngens-1, self.domain))
def dup_sub_ground(self, f, c):
return self.from_dense(dup_sub_ground(self.to_dense(f), c, self.domain))
def dmp_sub_ground(self, f, c):
return self.from_dense(dmp_sub_ground(self.to_dense(f), c, self.ngens-1, self.domain))
def dup_mul_ground(self, f, c):
return self.from_dense(dup_mul_ground(self.to_dense(f), c, self.domain))
def dmp_mul_ground(self, f, c):
return self.from_dense(dmp_mul_ground(self.to_dense(f), c, self.ngens-1, self.domain))
def dup_quo_ground(self, f, c):
return self.from_dense(dup_quo_ground(self.to_dense(f), c, self.domain))
def dmp_quo_ground(self, f, c):
return self.from_dense(dmp_quo_ground(self.to_dense(f), c, self.ngens-1, self.domain))
def dup_exquo_ground(self, f, c):
return self.from_dense(dup_exquo_ground(self.to_dense(f), c, self.domain))
def dmp_exquo_ground(self, f, c):
return self.from_dense(dmp_exquo_ground(self.to_dense(f), c, self.ngens-1, self.domain))
def dup_lshift(self, f, n):
return self.from_dense(dup_lshift(self.to_dense(f), n, self.domain))
def dup_rshift(self, f, n):
return self.from_dense(dup_rshift(self.to_dense(f), n, self.domain))
def dup_abs(self, f):
return self.from_dense(dup_abs(self.to_dense(f), self.domain))
def dmp_abs(self, f):
return self.from_dense(dmp_abs(self.to_dense(f), self.ngens-1, self.domain))
def dup_neg(self, f):
return self.from_dense(dup_neg(self.to_dense(f), self.domain))
def dmp_neg(self, f):
return self.from_dense(dmp_neg(self.to_dense(f), self.ngens-1, self.domain))
def dup_add(self, f, g):
return self.from_dense(dup_add(self.to_dense(f), self.to_dense(g), self.domain))
def dmp_add(self, f, g):
return self.from_dense(dmp_add(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain))
def dup_sub(self, f, g):
return self.from_dense(dup_sub(self.to_dense(f), self.to_dense(g), self.domain))
def dmp_sub(self, f, g):
return self.from_dense(dmp_sub(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain))
def dup_add_mul(self, f, g, h):
return self.from_dense(dup_add_mul(self.to_dense(f), self.to_dense(g), self.to_dense(h), self.domain))
def dmp_add_mul(self, f, g, h):
return self.from_dense(dmp_add_mul(self.to_dense(f), self.to_dense(g), self.to_dense(h), self.ngens-1, self.domain))
def dup_sub_mul(self, f, g, h):
return self.from_dense(dup_sub_mul(self.to_dense(f), self.to_dense(g), self.to_dense(h), self.domain))
def dmp_sub_mul(self, f, g, h):
return self.from_dense(dmp_sub_mul(self.to_dense(f), self.to_dense(g), self.to_dense(h), self.ngens-1, self.domain))
def dup_mul(self, f, g):
return self.from_dense(dup_mul(self.to_dense(f), self.to_dense(g), self.domain))
def dmp_mul(self, f, g):
return self.from_dense(dmp_mul(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain))
def dup_sqr(self, f):
return self.from_dense(dup_sqr(self.to_dense(f), self.domain))
def dmp_sqr(self, f):
return self.from_dense(dmp_sqr(self.to_dense(f), self.ngens-1, self.domain))
def dup_pow(self, f, n):
return self.from_dense(dup_pow(self.to_dense(f), n, self.domain))
def dmp_pow(self, f, n):
return self.from_dense(dmp_pow(self.to_dense(f), n, self.ngens-1, self.domain))
def dup_pdiv(self, f, g):
q, r = dup_pdiv(self.to_dense(f), self.to_dense(g), self.domain)
return (self.from_dense(q), self.from_dense(r))
def dup_prem(self, f, g):
return self.from_dense(dup_prem(self.to_dense(f), self.to_dense(g), self.domain))
def dup_pquo(self, f, g):
return self.from_dense(dup_pquo(self.to_dense(f), self.to_dense(g), self.domain))
def dup_pexquo(self, f, g):
return self.from_dense(dup_pexquo(self.to_dense(f), self.to_dense(g), self.domain))
def dmp_pdiv(self, f, g):
q, r = dmp_pdiv(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return (self.from_dense(q), self.from_dense(r))
def dmp_prem(self, f, g):
return self.from_dense(dmp_prem(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain))
def dmp_pquo(self, f, g):
return self.from_dense(dmp_pquo(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain))
def dmp_pexquo(self, f, g):
return self.from_dense(dmp_pexquo(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain))
def dup_rr_div(self, f, g):
q, r = dup_rr_div(self.to_dense(f), self.to_dense(g), self.domain)
return (self.from_dense(q), self.from_dense(r))
def dmp_rr_div(self, f, g):
q, r = dmp_rr_div(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return (self.from_dense(q), self.from_dense(r))
def dup_ff_div(self, f, g):
q, r = dup_ff_div(self.to_dense(f), self.to_dense(g), self.domain)
return (self.from_dense(q), self.from_dense(r))
def dmp_ff_div(self, f, g):
q, r = dmp_ff_div(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return (self.from_dense(q), self.from_dense(r))
def dup_div(self, f, g):
q, r = dup_div(self.to_dense(f), self.to_dense(g), self.domain)
return (self.from_dense(q), self.from_dense(r))
def dup_rem(self, f, g):
return self.from_dense(dup_rem(self.to_dense(f), self.to_dense(g), self.domain))
def dup_quo(self, f, g):
return self.from_dense(dup_quo(self.to_dense(f), self.to_dense(g), self.domain))
def dup_exquo(self, f, g):
return self.from_dense(dup_exquo(self.to_dense(f), self.to_dense(g), self.domain))
def dmp_div(self, f, g):
q, r = dmp_div(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return (self.from_dense(q), self.from_dense(r))
def dmp_rem(self, f, g):
return self.from_dense(dmp_rem(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain))
def dmp_quo(self, f, g):
return self.from_dense(dmp_quo(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain))
def dmp_exquo(self, f, g):
return self.from_dense(dmp_exquo(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain))
def dup_max_norm(self, f):
return dup_max_norm(self.to_dense(f), self.domain)
def dmp_max_norm(self, f):
return dmp_max_norm(self.to_dense(f), self.ngens-1, self.domain)
def dup_l1_norm(self, f):
return dup_l1_norm(self.to_dense(f), self.domain)
def dmp_l1_norm(self, f):
return dmp_l1_norm(self.to_dense(f), self.ngens-1, self.domain)
def dup_expand(self, polys):
return self.from_dense(dup_expand(list(map(self.to_dense, polys)), self.domain))
def dmp_expand(self, polys):
return self.from_dense(dmp_expand(list(map(self.to_dense, polys)), self.ngens-1, self.domain))
def dup_LC(self, f):
return dup_LC(self.to_dense(f), self.domain)
def dmp_LC(self, f):
LC = dmp_LC(self.to_dense(f), self.domain)
if isinstance(LC, list):
return self[1:].from_dense(LC)
else:
return LC
def dup_TC(self, f):
return dup_TC(self.to_dense(f), self.domain)
def dmp_TC(self, f):
TC = dmp_TC(self.to_dense(f), self.domain)
if isinstance(TC, list):
return self[1:].from_dense(TC)
else:
return TC
def dmp_ground_LC(self, f):
return dmp_ground_LC(self.to_dense(f), self.ngens-1, self.domain)
def dmp_ground_TC(self, f):
return dmp_ground_TC(self.to_dense(f), self.ngens-1, self.domain)
def dup_degree(self, f):
return dup_degree(self.to_dense(f))
def dmp_degree(self, f):
return dmp_degree(self.to_dense(f), self.ngens-1)
def dmp_degree_in(self, f, j):
return dmp_degree_in(self.to_dense(f), j, self.ngens-1)
def dup_integrate(self, f, m):
return self.from_dense(dup_integrate(self.to_dense(f), m, self.domain))
def dmp_integrate(self, f, m):
return self.from_dense(dmp_integrate(self.to_dense(f), m, self.ngens-1, self.domain))
def dup_diff(self, f, m):
return self.from_dense(dup_diff(self.to_dense(f), m, self.domain))
def dmp_diff(self, f, m):
return self.from_dense(dmp_diff(self.to_dense(f), m, self.ngens-1, self.domain))
def dmp_diff_in(self, f, m, j):
return self.from_dense(dmp_diff_in(self.to_dense(f), m, j, self.ngens-1, self.domain))
def dmp_integrate_in(self, f, m, j):
return self.from_dense(dmp_integrate_in(self.to_dense(f), m, j, self.ngens-1, self.domain))
def dup_eval(self, f, a):
return dup_eval(self.to_dense(f), a, self.domain)
def dmp_eval(self, f, a):
result = dmp_eval(self.to_dense(f), a, self.ngens-1, self.domain)
return self[1:].from_dense(result)
def dmp_eval_in(self, f, a, j):
result = dmp_eval_in(self.to_dense(f), a, j, self.ngens-1, self.domain)
return self.drop(j).from_dense(result)
def dmp_diff_eval_in(self, f, m, a, j):
result = dmp_diff_eval_in(self.to_dense(f), m, a, j, self.ngens-1, self.domain)
return self.drop(j).from_dense(result)
def dmp_eval_tail(self, f, A):
result = dmp_eval_tail(self.to_dense(f), A, self.ngens-1, self.domain)
if isinstance(result, list):
return self[:-len(A)].from_dense(result)
else:
return result
def dup_trunc(self, f, p):
return self.from_dense(dup_trunc(self.to_dense(f), p, self.domain))
def dmp_trunc(self, f, g):
return self.from_dense(dmp_trunc(self.to_dense(f), self[1:].to_dense(g), self.ngens-1, self.domain))
def dmp_ground_trunc(self, f, p):
return self.from_dense(dmp_ground_trunc(self.to_dense(f), p, self.ngens-1, self.domain))
def dup_monic(self, f):
return self.from_dense(dup_monic(self.to_dense(f), self.domain))
def dmp_ground_monic(self, f):
return self.from_dense(dmp_ground_monic(self.to_dense(f), self.ngens-1, self.domain))
def dup_extract(self, f, g):
c, F, G = dup_extract(self.to_dense(f), self.to_dense(g), self.domain)
return (c, self.from_dense(F), self.from_dense(G))
def dmp_ground_extract(self, f, g):
c, F, G = dmp_ground_extract(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return (c, self.from_dense(F), self.from_dense(G))
def dup_real_imag(self, f):
p, q = dup_real_imag(self.wrap(f).drop(1).to_dense(), self.domain)
return (self.from_dense(p), self.from_dense(q))
def dup_mirror(self, f):
return self.from_dense(dup_mirror(self.to_dense(f), self.domain))
def dup_scale(self, f, a):
return self.from_dense(dup_scale(self.to_dense(f), a, self.domain))
def dup_shift(self, f, a):
return self.from_dense(dup_shift(self.to_dense(f), a, self.domain))
def dup_transform(self, f, p, q):
return self.from_dense(dup_transform(self.to_dense(f), self.to_dense(p), self.to_dense(q), self.domain))
def dup_compose(self, f, g):
return self.from_dense(dup_compose(self.to_dense(f), self.to_dense(g), self.domain))
def dmp_compose(self, f, g):
return self.from_dense(dmp_compose(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain))
def dup_decompose(self, f):
components = dup_decompose(self.to_dense(f), self.domain)
return list(map(self.from_dense, components))
def dmp_lift(self, f):
result = dmp_lift(self.to_dense(f), self.ngens-1, self.domain)
return self.to_ground().from_dense(result)
def dup_sign_variations(self, f):
return dup_sign_variations(self.to_dense(f), self.domain)
def dup_clear_denoms(self, f, convert=False):
c, F = dup_clear_denoms(self.to_dense(f), self.domain, convert=convert)
if convert:
ring = self.clone(domain=self.domain.get_ring())
else:
ring = self
return (c, ring.from_dense(F))
def dmp_clear_denoms(self, f, convert=False):
c, F = dmp_clear_denoms(self.to_dense(f), self.ngens-1, self.domain, convert=convert)
if convert:
ring = self.clone(domain=self.domain.get_ring())
else:
ring = self
return (c, ring.from_dense(F))
def dup_revert(self, f, n):
return self.from_dense(dup_revert(self.to_dense(f), n, self.domain))
def dup_half_gcdex(self, f, g):
s, h = dup_half_gcdex(self.to_dense(f), self.to_dense(g), self.domain)
return (self.from_dense(s), self.from_dense(h))
def dmp_half_gcdex(self, f, g):
s, h = dmp_half_gcdex(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return (self.from_dense(s), self.from_dense(h))
def dup_gcdex(self, f, g):
s, t, h = dup_gcdex(self.to_dense(f), self.to_dense(g), self.domain)
return (self.from_dense(s), self.from_dense(t), self.from_dense(h))
def dmp_gcdex(self, f, g):
s, t, h = dmp_gcdex(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return (self.from_dense(s), self.from_dense(t), self.from_dense(h))
def dup_invert(self, f, g):
return self.from_dense(dup_invert(self.to_dense(f), self.to_dense(g), self.domain))
def dmp_invert(self, f, g):
return self.from_dense(dmp_invert(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain))
def dup_euclidean_prs(self, f, g):
prs = dup_euclidean_prs(self.to_dense(f), self.to_dense(g), self.domain)
return list(map(self.from_dense, prs))
def dmp_euclidean_prs(self, f, g):
prs = dmp_euclidean_prs(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return list(map(self.from_dense, prs))
def dup_primitive_prs(self, f, g):
prs = dup_primitive_prs(self.to_dense(f), self.to_dense(g), self.domain)
return list(map(self.from_dense, prs))
def dmp_primitive_prs(self, f, g):
prs = dmp_primitive_prs(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return list(map(self.from_dense, prs))
def dup_inner_subresultants(self, f, g):
prs, sres = dup_inner_subresultants(self.to_dense(f), self.to_dense(g), self.domain)
return (list(map(self.from_dense, prs)), sres)
def dmp_inner_subresultants(self, f, g):
prs, sres = dmp_inner_subresultants(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return (list(map(self.from_dense, prs)), sres)
def dup_subresultants(self, f, g):
prs = dup_subresultants(self.to_dense(f), self.to_dense(g), self.domain)
return list(map(self.from_dense, prs))
def dmp_subresultants(self, f, g):
prs = dmp_subresultants(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return list(map(self.from_dense, prs))
def dup_prs_resultant(self, f, g):
res, prs = dup_prs_resultant(self.to_dense(f), self.to_dense(g), self.domain)
return (res, list(map(self.from_dense, prs)))
def dmp_prs_resultant(self, f, g):
res, prs = dmp_prs_resultant(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return (self[1:].from_dense(res), list(map(self.from_dense, prs)))
def dmp_zz_modular_resultant(self, f, g, p):
res = dmp_zz_modular_resultant(self.to_dense(f), self.to_dense(g), self.domain_new(p), self.ngens-1, self.domain)
return self[1:].from_dense(res)
def dmp_zz_collins_resultant(self, f, g):
res = dmp_zz_collins_resultant(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return self[1:].from_dense(res)
def dmp_qq_collins_resultant(self, f, g):
res = dmp_qq_collins_resultant(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return self[1:].from_dense(res)
def dup_resultant(self, f, g): #, includePRS=False):
return dup_resultant(self.to_dense(f), self.to_dense(g), self.domain) #, includePRS=includePRS)
def dmp_resultant(self, f, g): #, includePRS=False):
res = dmp_resultant(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain) #, includePRS=includePRS)
if isinstance(res, list):
return self[1:].from_dense(res)
else:
return res
def dup_discriminant(self, f):
return dup_discriminant(self.to_dense(f), self.domain)
def dmp_discriminant(self, f):
disc = dmp_discriminant(self.to_dense(f), self.ngens-1, self.domain)
if isinstance(disc, list):
return self[1:].from_dense(disc)
else:
return disc
def dup_rr_prs_gcd(self, f, g):
H, F, G = dup_rr_prs_gcd(self.to_dense(f), self.to_dense(g), self.domain)
return (self.from_dense(H), self.from_dense(F), self.from_dense(G))
def dup_ff_prs_gcd(self, f, g):
H, F, G = dup_ff_prs_gcd(self.to_dense(f), self.to_dense(g), self.domain)
return (self.from_dense(H), self.from_dense(F), self.from_dense(G))
def dmp_rr_prs_gcd(self, f, g):
H, F, G = dmp_rr_prs_gcd(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return (self.from_dense(H), self.from_dense(F), self.from_dense(G))
def dmp_ff_prs_gcd(self, f, g):
H, F, G = dmp_ff_prs_gcd(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return (self.from_dense(H), self.from_dense(F), self.from_dense(G))
def dup_zz_heu_gcd(self, f, g):
H, F, G = dup_zz_heu_gcd(self.to_dense(f), self.to_dense(g), self.domain)
return (self.from_dense(H), self.from_dense(F), self.from_dense(G))
def dmp_zz_heu_gcd(self, f, g):
H, F, G = dmp_zz_heu_gcd(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return (self.from_dense(H), self.from_dense(F), self.from_dense(G))
def dup_qq_heu_gcd(self, f, g):
H, F, G = dup_qq_heu_gcd(self.to_dense(f), self.to_dense(g), self.domain)
return (self.from_dense(H), self.from_dense(F), self.from_dense(G))
def dmp_qq_heu_gcd(self, f, g):
H, F, G = dmp_qq_heu_gcd(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return (self.from_dense(H), self.from_dense(F), self.from_dense(G))
def dup_inner_gcd(self, f, g):
H, F, G = dup_inner_gcd(self.to_dense(f), self.to_dense(g), self.domain)
return (self.from_dense(H), self.from_dense(F), self.from_dense(G))
def dmp_inner_gcd(self, f, g):
H, F, G = dmp_inner_gcd(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return (self.from_dense(H), self.from_dense(F), self.from_dense(G))
def dup_gcd(self, f, g):
H = dup_gcd(self.to_dense(f), self.to_dense(g), self.domain)
return self.from_dense(H)
def dmp_gcd(self, f, g):
H = dmp_gcd(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return self.from_dense(H)
def dup_rr_lcm(self, f, g):
H = dup_rr_lcm(self.to_dense(f), self.to_dense(g), self.domain)
return self.from_dense(H)
def dup_ff_lcm(self, f, g):
H = dup_ff_lcm(self.to_dense(f), self.to_dense(g), self.domain)
return self.from_dense(H)
def dup_lcm(self, f, g):
H = dup_lcm(self.to_dense(f), self.to_dense(g), self.domain)
return self.from_dense(H)
def dmp_rr_lcm(self, f, g):
H = dmp_rr_lcm(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return self.from_dense(H)
def dmp_ff_lcm(self, f, g):
H = dmp_ff_lcm(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return self.from_dense(H)
def dmp_lcm(self, f, g):
H = dmp_lcm(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain)
return self.from_dense(H)
def dup_content(self, f):
cont = dup_content(self.to_dense(f), self.domain)
return cont
def dup_primitive(self, f):
cont, prim = dup_primitive(self.to_dense(f), self.domain)
return cont, self.from_dense(prim)
def dmp_content(self, f):
cont = dmp_content(self.to_dense(f), self.ngens-1, self.domain)
if isinstance(cont, list):
return self[1:].from_dense(cont)
else:
return cont
def dmp_primitive(self, f):
cont, prim = dmp_primitive(self.to_dense(f), self.ngens-1, self.domain)
if isinstance(cont, list):
return (self[1:].from_dense(cont), self.from_dense(prim))
else:
return (cont, self.from_dense(prim))
def dmp_ground_content(self, f):
cont = dmp_ground_content(self.to_dense(f), self.ngens-1, self.domain)
return cont
def dmp_ground_primitive(self, f):
cont, prim = dmp_ground_primitive(self.to_dense(f), self.ngens-1, self.domain)
return (cont, self.from_dense(prim))
def dup_cancel(self, f, g, include=True):
result = dup_cancel(self.to_dense(f), self.to_dense(g), self.domain, include=include)
if not include:
cf, cg, F, G = result
return (cf, cg, self.from_dense(F), self.from_dense(G))
else:
F, G = result
return (self.from_dense(F), self.from_dense(G))
def dmp_cancel(self, f, g, include=True):
result = dmp_cancel(self.to_dense(f), self.to_dense(g), self.ngens-1, self.domain, include=include)
if not include:
cf, cg, F, G = result
return (cf, cg, self.from_dense(F), self.from_dense(G))
else:
F, G = result
return (self.from_dense(F), self.from_dense(G))
def dup_trial_division(self, f, factors):
factors = dup_trial_division(self.to_dense(f), list(map(self.to_dense, factors)), self.domain)
return [ (self.from_dense(g), k) for g, k in factors ]
def dmp_trial_division(self, f, factors):
factors = dmp_trial_division(self.to_dense(f), list(map(self.to_dense, factors)), self.ngens-1, self.domain)
return [ (self.from_dense(g), k) for g, k in factors ]
def dup_zz_mignotte_bound(self, f):
return dup_zz_mignotte_bound(self.to_dense(f), self.domain)
def dmp_zz_mignotte_bound(self, f):
return dmp_zz_mignotte_bound(self.to_dense(f), self.ngens-1, self.domain)
def dup_zz_hensel_step(self, m, f, g, h, s, t):
D = self.to_dense
G, H, S, T = dup_zz_hensel_step(m, D(f), D(g), D(h), D(s), D(t), self.domain)
return (self.from_dense(G), self.from_dense(H), self.from_dense(S), self.from_dense(T))
def dup_zz_hensel_lift(self, p, f, f_list, l):
D = self.to_dense
polys = dup_zz_hensel_lift(p, D(f), list(map(D, f_list)), l, self.domain)
return list(map(self.from_dense, polys))
def dup_zz_zassenhaus(self, f):
factors = dup_zz_zassenhaus(self.to_dense(f), self.domain)
return [ (self.from_dense(g), k) for g, k in factors ]
def dup_zz_irreducible_p(self, f):
return dup_zz_irreducible_p(self.to_dense(f), self.domain)
def dup_cyclotomic_p(self, f, irreducible=False):
return dup_cyclotomic_p(self.to_dense(f), self.domain, irreducible=irreducible)
def dup_zz_cyclotomic_poly(self, n):
F = dup_zz_cyclotomic_poly(n, self.domain)
return self.from_dense(F)
def dup_zz_cyclotomic_factor(self, f):
result = dup_zz_cyclotomic_factor(self.to_dense(f), self.domain)
if result is None:
return result
else:
return list(map(self.from_dense, result))
# E: List[ZZ], cs: ZZ, ct: ZZ
def dmp_zz_wang_non_divisors(self, E, cs, ct):
return dmp_zz_wang_non_divisors(E, cs, ct, self.domain)
# f: Poly, T: List[(Poly, int)], ct: ZZ, A: List[ZZ]
#def dmp_zz_wang_test_points(f, T, ct, A):
# dmp_zz_wang_test_points(self.to_dense(f), T, ct, A, self.ngens-1, self.domain)
# f: Poly, T: List[(Poly, int)], cs: ZZ, E: List[ZZ], H: List[Poly], A: List[ZZ]
def dmp_zz_wang_lead_coeffs(self, f, T, cs, E, H, A):
mv = self[1:]
T = [ (mv.to_dense(t), k) for t, k in T ]
uv = self[:1]
H = list(map(uv.to_dense, H))
f, HH, CC = dmp_zz_wang_lead_coeffs(self.to_dense(f), T, cs, E, H, A, self.ngens-1, self.domain)
return self.from_dense(f), list(map(uv.from_dense, HH)), list(map(mv.from_dense, CC))
# f: List[Poly], m: int, p: ZZ
def dup_zz_diophantine(self, F, m, p):
result = dup_zz_diophantine(list(map(self.to_dense, F)), m, p, self.domain)
return list(map(self.from_dense, result))
# f: List[Poly], c: List[Poly], A: List[ZZ], d: int, p: ZZ
def dmp_zz_diophantine(self, F, c, A, d, p):
result = dmp_zz_diophantine(list(map(self.to_dense, F)), self.to_dense(c), A, d, p, self.ngens-1, self.domain)
return list(map(self.from_dense, result))
# f: Poly, H: List[Poly], LC: List[Poly], A: List[ZZ], p: ZZ
def dmp_zz_wang_hensel_lifting(self, f, H, LC, A, p):
uv = self[:1]
mv = self[1:]
H = list(map(uv.to_dense, H))
LC = list(map(mv.to_dense, LC))
result = dmp_zz_wang_hensel_lifting(self.to_dense(f), H, LC, A, p, self.ngens-1, self.domain)
return list(map(self.from_dense, result))
def dmp_zz_wang(self, f, mod=None, seed=None):
factors = dmp_zz_wang(self.to_dense(f), self.ngens-1, self.domain, mod=mod, seed=seed)
return [ self.from_dense(g) for g in factors ]
def dup_zz_factor_sqf(self, f):
coeff, factors = dup_zz_factor_sqf(self.to_dense(f), self.domain)
return (coeff, [ self.from_dense(g) for g in factors ])
def dup_zz_factor(self, f):
coeff, factors = dup_zz_factor(self.to_dense(f), self.domain)
return (coeff, [ (self.from_dense(g), k) for g, k in factors ])
def dmp_zz_factor(self, f):
coeff, factors = dmp_zz_factor(self.to_dense(f), self.ngens-1, self.domain)
return (coeff, [ (self.from_dense(g), k) for g, k in factors ])
def dup_ext_factor(self, f):
coeff, factors = dup_ext_factor(self.to_dense(f), self.domain)
return (coeff, [ (self.from_dense(g), k) for g, k in factors ])
def dmp_ext_factor(self, f):
coeff, factors = dmp_ext_factor(self.to_dense(f), self.ngens-1, self.domain)
return (coeff, [ (self.from_dense(g), k) for g, k in factors ])
def dup_gf_factor(self, f):
coeff, factors = dup_gf_factor(self.to_dense(f), self.domain)
return (coeff, [ (self.from_dense(g), k) for g, k in factors ])
def dmp_gf_factor(self, f):
coeff, factors = dmp_gf_factor(self.to_dense(f), self.ngens-1, self.domain)
return (coeff, [ (self.from_dense(g), k) for g, k in factors ])
def dup_factor_list(self, f):
coeff, factors = dup_factor_list(self.to_dense(f), self.domain)
return (coeff, [ (self.from_dense(g), k) for g, k in factors ])
def dup_factor_list_include(self, f):
factors = dup_factor_list_include(self.to_dense(f), self.domain)
return [ (self.from_dense(g), k) for g, k in factors ]
def dmp_factor_list(self, f):
coeff, factors = dmp_factor_list(self.to_dense(f), self.ngens-1, self.domain)
return (coeff, [ (self.from_dense(g), k) for g, k in factors ])
def dmp_factor_list_include(self, f):
factors = dmp_factor_list_include(self.to_dense(f), self.ngens-1, self.domain)
return [ (self.from_dense(g), k) for g, k in factors ]
def dup_irreducible_p(self, f):
return dup_irreducible_p(self.to_dense(f), self.domain)
def dmp_irreducible_p(self, f):
return dmp_irreducible_p(self.to_dense(f), self.ngens-1, self.domain)
def dup_sturm(self, f):
seq = dup_sturm(self.to_dense(f), self.domain)
return list(map(self.from_dense, seq))
def dup_sqf_p(self, f):
return dup_sqf_p(self.to_dense(f), self.domain)
def dmp_sqf_p(self, f):
return dmp_sqf_p(self.to_dense(f), self.ngens-1, self.domain)
def dup_sqf_norm(self, f):
s, F, R = dup_sqf_norm(self.to_dense(f), self.domain)
return (s, self.from_dense(F), self.to_ground().from_dense(R))
def dmp_sqf_norm(self, f):
s, F, R = dmp_sqf_norm(self.to_dense(f), self.ngens-1, self.domain)
return (s, self.from_dense(F), self.to_ground().from_dense(R))
def dup_gf_sqf_part(self, f):
return self.from_dense(dup_gf_sqf_part(self.to_dense(f), self.domain))
def dmp_gf_sqf_part(self, f):
        return self.from_dense(dmp_gf_sqf_part(self.to_dense(f), self.ngens-1, self.domain))
def dup_sqf_part(self, f):
return self.from_dense(dup_sqf_part(self.to_dense(f), self.domain))
def dmp_sqf_part(self, f):
return self.from_dense(dmp_sqf_part(self.to_dense(f), self.ngens-1, self.domain))
def dup_gf_sqf_list(self, f, all=False):
coeff, factors = dup_gf_sqf_list(self.to_dense(f), self.domain, all=all)
return (coeff, [ (self.from_dense(g), k) for g, k in factors ])
def dmp_gf_sqf_list(self, f, all=False):
coeff, factors = dmp_gf_sqf_list(self.to_dense(f), self.ngens-1, self.domain, all=all)
return (coeff, [ (self.from_dense(g), k) for g, k in factors ])
def dup_sqf_list(self, f, all=False):
coeff, factors = dup_sqf_list(self.to_dense(f), self.domain, all=all)
return (coeff, [ (self.from_dense(g), k) for g, k in factors ])
def dup_sqf_list_include(self, f, all=False):
factors = dup_sqf_list_include(self.to_dense(f), self.domain, all=all)
return [ (self.from_dense(g), k) for g, k in factors ]
def dmp_sqf_list(self, f, all=False):
coeff, factors = dmp_sqf_list(self.to_dense(f), self.ngens-1, self.domain, all=all)
return (coeff, [ (self.from_dense(g), k) for g, k in factors ])
def dmp_sqf_list_include(self, f, all=False):
factors = dmp_sqf_list_include(self.to_dense(f), self.ngens-1, self.domain, all=all)
return [ (self.from_dense(g), k) for g, k in factors ]
def dup_gff_list(self, f):
factors = dup_gff_list(self.to_dense(f), self.domain)
return [ (self.from_dense(g), k) for g, k in factors ]
def dmp_gff_list(self, f):
factors = dmp_gff_list(self.to_dense(f), self.ngens-1, self.domain)
return [ (self.from_dense(g), k) for g, k in factors ]
def dup_root_upper_bound(self, f):
return dup_root_upper_bound(self.to_dense(f), self.domain)
def dup_root_lower_bound(self, f):
return dup_root_lower_bound(self.to_dense(f), self.domain)
def dup_step_refine_real_root(self, f, M, fast=False):
return dup_step_refine_real_root(self.to_dense(f), M, self.domain, fast=fast)
def dup_inner_refine_real_root(self, f, M, eps=None, steps=None, disjoint=None, fast=False, mobius=False):
return dup_inner_refine_real_root(self.to_dense(f), M, self.domain, eps=eps, steps=steps, disjoint=disjoint, fast=fast, mobius=mobius)
def dup_outer_refine_real_root(self, f, s, t, eps=None, steps=None, disjoint=None, fast=False):
return dup_outer_refine_real_root(self.to_dense(f), s, t, self.domain, eps=eps, steps=steps, disjoint=disjoint, fast=fast)
def dup_refine_real_root(self, f, s, t, eps=None, steps=None, disjoint=None, fast=False):
return dup_refine_real_root(self.to_dense(f), s, t, self.domain, eps=eps, steps=steps, disjoint=disjoint, fast=fast)
def dup_inner_isolate_real_roots(self, f, eps=None, fast=False):
return dup_inner_isolate_real_roots(self.to_dense(f), self.domain, eps=eps, fast=fast)
def dup_inner_isolate_positive_roots(self, f, eps=None, inf=None, sup=None, fast=False, mobius=False):
return dup_inner_isolate_positive_roots(self.to_dense(f), self.domain, eps=eps, inf=inf, sup=sup, fast=fast, mobius=mobius)
def dup_inner_isolate_negative_roots(self, f, inf=None, sup=None, eps=None, fast=False, mobius=False):
return dup_inner_isolate_negative_roots(self.to_dense(f), self.domain, inf=inf, sup=sup, eps=eps, fast=fast, mobius=mobius)
def dup_isolate_real_roots_sqf(self, f, eps=None, inf=None, sup=None, fast=False, blackbox=False):
return dup_isolate_real_roots_sqf(self.to_dense(f), self.domain, eps=eps, inf=inf, sup=sup, fast=fast, blackbox=blackbox)
def dup_isolate_real_roots(self, f, eps=None, inf=None, sup=None, basis=False, fast=False):
return dup_isolate_real_roots(self.to_dense(f), self.domain, eps=eps, inf=inf, sup=sup, basis=basis, fast=fast)
def dup_isolate_real_roots_list(self, polys, eps=None, inf=None, sup=None, strict=False, basis=False, fast=False):
return dup_isolate_real_roots_list(list(map(self.to_dense, polys)), self.domain, eps=eps, inf=inf, sup=sup, strict=strict, basis=basis, fast=fast)
def dup_count_real_roots(self, f, inf=None, sup=None):
return dup_count_real_roots(self.to_dense(f), self.domain, inf=inf, sup=sup)
def dup_count_complex_roots(self, f, inf=None, sup=None, exclude=None):
return dup_count_complex_roots(self.to_dense(f), self.domain, inf=inf, sup=sup, exclude=exclude)
def dup_isolate_complex_roots_sqf(self, f, eps=None, inf=None, sup=None, blackbox=False):
return dup_isolate_complex_roots_sqf(self.to_dense(f), self.domain, eps=eps, inf=inf, sup=sup, blackbox=blackbox)
def dup_isolate_all_roots_sqf(self, f, eps=None, inf=None, sup=None, fast=False, blackbox=False):
return dup_isolate_all_roots_sqf(self.to_dense(f), self.domain, eps=eps, inf=inf, sup=sup, fast=fast, blackbox=blackbox)
def dup_isolate_all_roots(self, f, eps=None, inf=None, sup=None, fast=False):
return dup_isolate_all_roots(self.to_dense(f), self.domain, eps=eps, inf=inf, sup=sup, fast=fast)
def fateman_poly_F_1(self):
from sympy.polys.specialpolys import dmp_fateman_poly_F_1
return tuple(map(self.from_dense, dmp_fateman_poly_F_1(self.ngens-1, self.domain)))
def fateman_poly_F_2(self):
from sympy.polys.specialpolys import dmp_fateman_poly_F_2
return tuple(map(self.from_dense, dmp_fateman_poly_F_2(self.ngens-1, self.domain)))
def fateman_poly_F_3(self):
from sympy.polys.specialpolys import dmp_fateman_poly_F_3
return tuple(map(self.from_dense, dmp_fateman_poly_F_3(self.ngens-1, self.domain)))
def to_gf_dense(self, element):
return gf_strip([ self.domain.dom.convert(c, self.domain) for c in self.wrap(element).to_dense() ])
def from_gf_dense(self, element):
return self.from_dict(dmp_to_dict(element, self.ngens-1, self.domain.dom))
def gf_degree(self, f):
return gf_degree(self.to_gf_dense(f))
def gf_LC(self, f):
return gf_LC(self.to_gf_dense(f), self.domain.dom)
def gf_TC(self, f):
return gf_TC(self.to_gf_dense(f), self.domain.dom)
def gf_strip(self, f):
return self.from_gf_dense(gf_strip(self.to_gf_dense(f)))
    def gf_trunc(self, f):
        return self.from_gf_dense(gf_trunc(self.to_gf_dense(f), self.domain.mod))
    def gf_normal(self, f):
        return self.from_gf_dense(gf_normal(self.to_gf_dense(f), self.domain.mod, self.domain.dom))
def gf_from_dict(self, f):
return self.from_gf_dense(gf_from_dict(f, self.domain.mod, self.domain.dom))
def gf_to_dict(self, f, symmetric=True):
return gf_to_dict(self.to_gf_dense(f), self.domain.mod, symmetric=symmetric)
def gf_from_int_poly(self, f):
return self.from_gf_dense(gf_from_int_poly(f, self.domain.mod))
def gf_to_int_poly(self, f, symmetric=True):
return gf_to_int_poly(self.to_gf_dense(f), self.domain.mod, symmetric=symmetric)
def gf_neg(self, f):
return self.from_gf_dense(gf_neg(self.to_gf_dense(f), self.domain.mod, self.domain.dom))
def gf_add_ground(self, f, a):
return self.from_gf_dense(gf_add_ground(self.to_gf_dense(f), a, self.domain.mod, self.domain.dom))
def gf_sub_ground(self, f, a):
return self.from_gf_dense(gf_sub_ground(self.to_gf_dense(f), a, self.domain.mod, self.domain.dom))
def gf_mul_ground(self, f, a):
return self.from_gf_dense(gf_mul_ground(self.to_gf_dense(f), a, self.domain.mod, self.domain.dom))
def gf_quo_ground(self, f, a):
return self.from_gf_dense(gf_quo_ground(self.to_gf_dense(f), a, self.domain.mod, self.domain.dom))
def gf_add(self, f, g):
return self.from_gf_dense(gf_add(self.to_gf_dense(f), self.to_gf_dense(g), self.domain.mod, self.domain.dom))
def gf_sub(self, f, g):
return self.from_gf_dense(gf_sub(self.to_gf_dense(f), self.to_gf_dense(g), self.domain.mod, self.domain.dom))
def gf_mul(self, f, g):
return self.from_gf_dense(gf_mul(self.to_gf_dense(f), self.to_gf_dense(g), self.domain.mod, self.domain.dom))
def gf_sqr(self, f):
return self.from_gf_dense(gf_sqr(self.to_gf_dense(f), self.domain.mod, self.domain.dom))
def gf_add_mul(self, f, g, h):
return self.from_gf_dense(gf_add_mul(self.to_gf_dense(f), self.to_gf_dense(g), self.to_gf_dense(h), self.domain.mod, self.domain.dom))
def gf_sub_mul(self, f, g, h):
return self.from_gf_dense(gf_sub_mul(self.to_gf_dense(f), self.to_gf_dense(g), self.to_gf_dense(h), self.domain.mod, self.domain.dom))
def gf_expand(self, F):
return self.from_gf_dense(gf_expand(list(map(self.to_gf_dense, F)), self.domain.mod, self.domain.dom))
def gf_div(self, f, g):
q, r = gf_div(self.to_gf_dense(f), self.to_gf_dense(g), self.domain.mod, self.domain.dom)
return self.from_gf_dense(q), self.from_gf_dense(r)
def gf_rem(self, f, g):
return self.from_gf_dense(gf_rem(self.to_gf_dense(f), self.to_gf_dense(g), self.domain.mod, self.domain.dom))
def gf_quo(self, f, g):
return self.from_gf_dense(gf_quo(self.to_gf_dense(f), self.to_gf_dense(g), self.domain.mod, self.domain.dom))
def gf_exquo(self, f, g):
return self.from_gf_dense(gf_exquo(self.to_gf_dense(f), self.to_gf_dense(g), self.domain.mod, self.domain.dom))
def gf_lshift(self, f, n):
return self.from_gf_dense(gf_lshift(self.to_gf_dense(f), n, self.domain.dom))
def gf_rshift(self, f, n):
return self.from_gf_dense(gf_rshift(self.to_gf_dense(f), n, self.domain.dom))
def gf_pow(self, f, n):
return self.from_gf_dense(gf_pow(self.to_gf_dense(f), n, self.domain.mod, self.domain.dom))
def gf_pow_mod(self, f, n, g):
return self.from_gf_dense(gf_pow_mod(self.to_gf_dense(f), n, self.to_gf_dense(g), self.domain.mod, self.domain.dom))
def gf_cofactors(self, f, g):
h, cff, cfg = gf_cofactors(self.to_gf_dense(f), self.to_gf_dense(g), self.domain.mod, self.domain.dom)
return self.from_gf_dense(h), self.from_gf_dense(cff), self.from_gf_dense(cfg)
def gf_gcd(self, f, g):
return self.from_gf_dense(gf_gcd(self.to_gf_dense(f), self.to_gf_dense(g), self.domain.mod, self.domain.dom))
def gf_lcm(self, f, g):
return self.from_gf_dense(gf_lcm(self.to_gf_dense(f), self.to_gf_dense(g), self.domain.mod, self.domain.dom))
def gf_gcdex(self, f, g):
        s, t, h = gf_gcdex(self.to_gf_dense(f), self.to_gf_dense(g), self.domain.mod, self.domain.dom)
        return self.from_gf_dense(s), self.from_gf_dense(t), self.from_gf_dense(h)
def gf_monic(self, f):
return self.from_gf_dense(gf_monic(self.to_gf_dense(f), self.domain.mod, self.domain.dom))
def gf_diff(self, f):
return self.from_gf_dense(gf_diff(self.to_gf_dense(f), self.domain.mod, self.domain.dom))
def gf_eval(self, f, a):
return gf_eval(self.to_gf_dense(f), a, self.domain.mod, self.domain.dom)
def gf_multi_eval(self, f, A):
return gf_multi_eval(self.to_gf_dense(f), A, self.domain.mod, self.domain.dom)
def gf_compose(self, f, g):
return self.from_gf_dense(gf_compose(self.to_gf_dense(f), self.to_gf_dense(g), self.domain.mod, self.domain.dom))
def gf_compose_mod(self, g, h, f):
return self.from_gf_dense(gf_compose_mod(self.to_gf_dense(g), self.to_gf_dense(h), self.to_gf_dense(f), self.domain.mod, self.domain.dom))
def gf_trace_map(self, a, b, c, n, f):
a = self.to_gf_dense(a)
b = self.to_gf_dense(b)
c = self.to_gf_dense(c)
f = self.to_gf_dense(f)
U, V = gf_trace_map(a, b, c, n, f, self.domain.mod, self.domain.dom)
return self.from_gf_dense(U), self.from_gf_dense(V)
def gf_random(self, n):
return self.from_gf_dense(gf_random(n, self.domain.mod, self.domain.dom))
def gf_irreducible(self, n):
return self.from_gf_dense(gf_irreducible(n, self.domain.mod, self.domain.dom))
def gf_irred_p_ben_or(self, f):
return gf_irred_p_ben_or(self.to_gf_dense(f), self.domain.mod, self.domain.dom)
def gf_irred_p_rabin(self, f):
return gf_irred_p_rabin(self.to_gf_dense(f), self.domain.mod, self.domain.dom)
def gf_irreducible_p(self, f):
return gf_irreducible_p(self.to_gf_dense(f), self.domain.mod, self.domain.dom)
def gf_sqf_p(self, f):
return gf_sqf_p(self.to_gf_dense(f), self.domain.mod, self.domain.dom)
def gf_sqf_part(self, f):
return self.from_gf_dense(gf_sqf_part(self.to_gf_dense(f), self.domain.mod, self.domain.dom))
def gf_sqf_list(self, f, all=False):
        coeff, factors = gf_sqf_list(self.to_gf_dense(f), self.domain.mod, self.domain.dom, all=all)
return coeff, [ (self.from_gf_dense(g), k) for g, k in factors ]
def gf_Qmatrix(self, f):
return gf_Qmatrix(self.to_gf_dense(f), self.domain.mod, self.domain.dom)
def gf_berlekamp(self, f):
factors = gf_berlekamp(self.to_gf_dense(f), self.domain.mod, self.domain.dom)
return [ self.from_gf_dense(g) for g in factors ]
def gf_ddf_zassenhaus(self, f):
factors = gf_ddf_zassenhaus(self.to_gf_dense(f), self.domain.mod, self.domain.dom)
return [ (self.from_gf_dense(g), k) for g, k in factors ]
def gf_edf_zassenhaus(self, f, n):
        factors = gf_edf_zassenhaus(self.to_gf_dense(f), n, self.domain.mod, self.domain.dom)
return [ self.from_gf_dense(g) for g in factors ]
def gf_ddf_shoup(self, f):
factors = gf_ddf_shoup(self.to_gf_dense(f), self.domain.mod, self.domain.dom)
return [ (self.from_gf_dense(g), k) for g, k in factors ]
def gf_edf_shoup(self, f, n):
        factors = gf_edf_shoup(self.to_gf_dense(f), n, self.domain.mod, self.domain.dom)
return [ self.from_gf_dense(g) for g in factors ]
def gf_zassenhaus(self, f):
factors = gf_zassenhaus(self.to_gf_dense(f), self.domain.mod, self.domain.dom)
return [ self.from_gf_dense(g) for g in factors ]
def gf_shoup(self, f):
factors = gf_shoup(self.to_gf_dense(f), self.domain.mod, self.domain.dom)
return [ self.from_gf_dense(g) for g in factors ]
def gf_factor_sqf(self, f, method=None):
coeff, factors = gf_factor_sqf(self.to_gf_dense(f), self.domain.mod, self.domain.dom, method=method)
return coeff, [ self.from_gf_dense(g) for g in factors ]
def gf_factor(self, f):
coeff, factors = gf_factor(self.to_gf_dense(f), self.domain.mod, self.domain.dom)
return coeff, [ (self.from_gf_dense(g), k) for g, k in factors ]
| bsd-3-clause |
jimi-c/ansible | test/units/modules/conftest.py | 10 | 1154 | # Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
import json
from collections import MutableMapping
import pytest
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes
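# Illustrative usage (an assumption, not shown in this file): module tests request
# this fixture via indirect parametrization so AnsibleModule reads the patched args:
#   @pytest.mark.parametrize('patch_ansible_module',
#                            [{'name': 'foo', 'state': 'present'}],
#                            indirect=['patch_ansible_module'])
#   def test_my_module(patch_ansible_module):
#       ...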
@pytest.fixture
def patch_ansible_module(request, mocker):
if isinstance(request.param, string_types):
args = request.param
elif isinstance(request.param, MutableMapping):
if 'ANSIBLE_MODULE_ARGS' not in request.param:
request.param = {'ANSIBLE_MODULE_ARGS': request.param}
if '_ansible_remote_tmp' not in request.param['ANSIBLE_MODULE_ARGS']:
request.param['ANSIBLE_MODULE_ARGS']['_ansible_remote_tmp'] = '/tmp'
if '_ansible_keep_remote_files' not in request.param['ANSIBLE_MODULE_ARGS']:
            request.param['ANSIBLE_MODULE_ARGS']['_ansible_keep_remote_files'] = False
args = json.dumps(request.param)
else:
raise Exception('Malformed data to the patch_ansible_module pytest fixture')
mocker.patch('ansible.module_utils.basic._ANSIBLE_ARGS', to_bytes(args))
| gpl-3.0 |
raspilot/ardupilot | mk/PX4/Tools/genmsg/src/genmsg/base.py | 216 | 2414 | # Software License Agreement (BSD License)
#
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
import os, sys
SEP = '/'
MSG_DIR = 'msg'
SRV_DIR = 'srv'
EXT_MSG = '.msg'
EXT_SRV = '.srv'
## character that designates a constant assignment rather than a field
CONSTCHAR = '='
COMMENTCHAR = '#'
IODELIM = '---'
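# Illustrative .msg/.srv fragment that these markers refer to (not part of this file):
#   int32 MAX_RETRIES=3    # CONSTCHAR '=' introduces a constant, COMMENTCHAR '#' a comment
#   string request_id
#   ---                    # IODELIM separates the request and response halves of a .srv
#   bool success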
verbose = False
import inspect, pprint
def log_verbose(value):
global verbose
verbose = value
def log(*args):
global verbose
if verbose:
print("%s:%d" % inspect.stack()[1][1:3], file=sys.stderr)
print(' '.join([str(x) for x in args]), file=sys.stderr)
def plog(msg, obj):
if verbose:
print("%s:%d" % inspect.stack()[1][1:3], file=sys.stderr)
print(msg, " ", file=sys.stderr)
        pprint.pprint(obj, stream=sys.stderr)
class InvalidMsgSpec(Exception):
pass
class MsgGenerationException(Exception):
pass
| gpl-3.0 |
ninotoshi/tensorflow | tensorflow/contrib/learn/python/learn/estimators/tensor_signature_test.py | 2 | 4896 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for learn.estimators.tensor_signature."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.contrib.learn.python.learn.estimators import tensor_signature
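# Judging from the assertions below (an inference, not official documentation): a
# signature records a tensor's dtype, shape (dimensions of size None act as
# wildcards) and sparsity, and dict-valued inputs are matched key by key, with
# extra keys in the candidate tolerated.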
class TensorSignatureTest(tf.test.TestCase):
def testTensorSignatureCompatible(self):
placeholder_a = tf.placeholder(name='test',
shape=[None, 100],
dtype=tf.int32)
placeholder_b = tf.placeholder(name='another',
shape=[256, 100],
dtype=tf.int32)
placeholder_c = tf.placeholder(name='mismatch',
shape=[256, 100],
dtype=tf.float32)
placeholder_d = tf.placeholder(name='mismatch',
shape=[128, 100],
dtype=tf.int32)
signatures = tensor_signature.create_signatures(placeholder_a)
self.assertTrue(tensor_signature.tensors_compatible(placeholder_a,
signatures))
self.assertTrue(tensor_signature.tensors_compatible(placeholder_b,
signatures))
self.assertFalse(tensor_signature.tensors_compatible(placeholder_c,
signatures))
self.assertTrue(tensor_signature.tensors_compatible(placeholder_d,
signatures))
inputs = {'a': placeholder_a}
signatures = tensor_signature.create_signatures(inputs)
self.assertTrue(tensor_signature.tensors_compatible(inputs, signatures))
self.assertFalse(tensor_signature.tensors_compatible(placeholder_a,
signatures))
self.assertFalse(tensor_signature.tensors_compatible(placeholder_b,
signatures))
self.assertFalse(tensor_signature.tensors_compatible(
{'b': placeholder_b}, signatures))
self.assertTrue(tensor_signature.tensors_compatible(
{'a': placeholder_b,
'c': placeholder_c}, signatures))
self.assertFalse(tensor_signature.tensors_compatible(
{'a': placeholder_c}, signatures))
def testSparseTensorCompatible(self):
t = tf.SparseTensor(indices=[[0, 0], [1, 2]], values=[1, 2], shape=[3, 4])
signatures = tensor_signature.create_signatures(t)
self.assertTrue(tensor_signature.tensors_compatible(t, signatures))
def testTensorSignaturePlaceholders(self):
placeholder_a = tf.placeholder(name='test',
shape=[None, 100],
dtype=tf.int32)
signatures = tensor_signature.create_signatures(placeholder_a)
placeholder_out = tensor_signature.create_placeholders_from_signatures(
signatures)
self.assertEqual(placeholder_out.dtype, placeholder_a.dtype)
self.assertEqual(placeholder_out.get_shape(), placeholder_a.get_shape())
self.assertTrue(tensor_signature.tensors_compatible(placeholder_out,
signatures))
inputs = {'a': placeholder_a}
signatures = tensor_signature.create_signatures(inputs)
placeholders_out = tensor_signature.create_placeholders_from_signatures(
signatures)
self.assertEqual(placeholders_out['a'].dtype, placeholder_a.dtype)
self.assertEqual(placeholders_out['a'].get_shape(),
placeholder_a.get_shape())
self.assertTrue(tensor_signature.tensors_compatible(placeholders_out,
signatures))
def testSparseTensorSignaturePlaceholders(self):
tensor = tf.SparseTensor(values=[1.0, 2.0], indices=[[0, 2], [0, 3]],
shape=[5, 5])
signature = tensor_signature.create_signatures(tensor)
placeholder = tensor_signature.create_placeholders_from_signatures(
signature)
self.assertTrue(isinstance(placeholder, tf.SparseTensor))
self.assertEqual(placeholder.values.dtype, tensor.values.dtype)
if __name__ == '__main__':
tf.test.main()
| apache-2.0 |
aiyyoi/DevFest-MaxBond | MaxBond/env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py | 1724 | 14031 | """Module for supporting the lxml.etree library. The idea here is to use as much
of the native library as possible, without using fragile hacks like custom element
names that break between releases. The downside of this is that we cannot represent
all possible trees; specifically the following are known to cause problems:
Text or comments as siblings of the root element
Doctypes with no name
When any of these things occur, we emit a DataLossWarning
"""
from __future__ import absolute_import, division, unicode_literals
import warnings
import re
import sys
from . import _base
from ..constants import DataLossWarning
from .. import constants
from . import etree as etree_builders
from .. import ihatexml
import lxml.etree as etree
fullTree = True
tag_regexp = re.compile("{([^}]*)}(.*)")
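# etree.Comment("asd").tag evaluates to the etree.Comment factory itself (a callable,
# not a string), so comparing an element's .tag against comment_type below is how
# comment nodes are recognised.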
comment_type = etree.Comment("asd").tag
class DocumentType(object):
def __init__(self, name, publicId, systemId):
self.name = name
self.publicId = publicId
self.systemId = systemId
class Document(object):
def __init__(self):
self._elementTree = None
self._childNodes = []
def appendChild(self, element):
self._elementTree.getroot().addnext(element._element)
def _getChildNodes(self):
return self._childNodes
childNodes = property(_getChildNodes)
def testSerializer(element):
rv = []
finalText = None
infosetFilter = ihatexml.InfosetFilter()
def serializeElement(element, indent=0):
if not hasattr(element, "tag"):
if hasattr(element, "getroot"):
# Full tree case
rv.append("#document")
if element.docinfo.internalDTD:
if not (element.docinfo.public_id or
element.docinfo.system_url):
dtd_str = "<!DOCTYPE %s>" % element.docinfo.root_name
else:
dtd_str = """<!DOCTYPE %s "%s" "%s">""" % (
element.docinfo.root_name,
element.docinfo.public_id,
element.docinfo.system_url)
rv.append("|%s%s" % (' ' * (indent + 2), dtd_str))
next_element = element.getroot()
while next_element.getprevious() is not None:
next_element = next_element.getprevious()
while next_element is not None:
serializeElement(next_element, indent + 2)
next_element = next_element.getnext()
elif isinstance(element, str) or isinstance(element, bytes):
# Text in a fragment
assert isinstance(element, str) or sys.version_info.major == 2
rv.append("|%s\"%s\"" % (' ' * indent, element))
else:
# Fragment case
rv.append("#document-fragment")
for next_element in element:
serializeElement(next_element, indent + 2)
elif element.tag == comment_type:
rv.append("|%s<!-- %s -->" % (' ' * indent, element.text))
if hasattr(element, "tail") and element.tail:
rv.append("|%s\"%s\"" % (' ' * indent, element.tail))
else:
assert isinstance(element, etree._Element)
nsmatch = etree_builders.tag_regexp.match(element.tag)
if nsmatch is not None:
ns = nsmatch.group(1)
tag = nsmatch.group(2)
prefix = constants.prefixes[ns]
rv.append("|%s<%s %s>" % (' ' * indent, prefix,
infosetFilter.fromXmlName(tag)))
else:
rv.append("|%s<%s>" % (' ' * indent,
infosetFilter.fromXmlName(element.tag)))
if hasattr(element, "attrib"):
attributes = []
for name, value in element.attrib.items():
nsmatch = tag_regexp.match(name)
if nsmatch is not None:
ns, name = nsmatch.groups()
name = infosetFilter.fromXmlName(name)
prefix = constants.prefixes[ns]
attr_string = "%s %s" % (prefix, name)
else:
attr_string = infosetFilter.fromXmlName(name)
attributes.append((attr_string, value))
for name, value in sorted(attributes):
rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
if element.text:
rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
indent += 2
for child in element:
serializeElement(child, indent)
if hasattr(element, "tail") and element.tail:
rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail))
serializeElement(element, 0)
if finalText is not None:
rv.append("|%s\"%s\"" % (' ' * 2, finalText))
return "\n".join(rv)
def tostring(element):
"""Serialize an element and its child nodes to a string"""
rv = []
finalText = None
def serializeElement(element):
if not hasattr(element, "tag"):
if element.docinfo.internalDTD:
if element.docinfo.doctype:
dtd_str = element.docinfo.doctype
else:
dtd_str = "<!DOCTYPE %s>" % element.docinfo.root_name
rv.append(dtd_str)
serializeElement(element.getroot())
elif element.tag == comment_type:
rv.append("<!--%s-->" % (element.text,))
else:
# This is assumed to be an ordinary element
if not element.attrib:
rv.append("<%s>" % (element.tag,))
else:
attr = " ".join(["%s=\"%s\"" % (name, value)
for name, value in element.attrib.items()])
rv.append("<%s %s>" % (element.tag, attr))
if element.text:
rv.append(element.text)
for child in element:
serializeElement(child)
rv.append("</%s>" % (element.tag,))
if hasattr(element, "tail") and element.tail:
rv.append(element.tail)
serializeElement(element)
if finalText is not None:
        rv.append("%s\"%s\"" % (' ' * 2, finalText))
return "".join(rv)
class TreeBuilder(_base.TreeBuilder):
documentClass = Document
doctypeClass = DocumentType
elementClass = None
commentClass = None
fragmentClass = Document
implementation = etree
def __init__(self, namespaceHTMLElements, fullTree=False):
builder = etree_builders.getETreeModule(etree, fullTree=fullTree)
infosetFilter = self.infosetFilter = ihatexml.InfosetFilter()
self.namespaceHTMLElements = namespaceHTMLElements
class Attributes(dict):
def __init__(self, element, value={}):
self._element = element
dict.__init__(self, value)
for key, value in self.items():
if isinstance(key, tuple):
name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1]))
else:
name = infosetFilter.coerceAttribute(key)
self._element._element.attrib[name] = value
def __setitem__(self, key, value):
dict.__setitem__(self, key, value)
if isinstance(key, tuple):
name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1]))
else:
name = infosetFilter.coerceAttribute(key)
self._element._element.attrib[name] = value
class Element(builder.Element):
def __init__(self, name, namespace):
name = infosetFilter.coerceElement(name)
builder.Element.__init__(self, name, namespace=namespace)
self._attributes = Attributes(self)
def _setName(self, name):
self._name = infosetFilter.coerceElement(name)
self._element.tag = self._getETreeTag(
self._name, self._namespace)
def _getName(self):
return infosetFilter.fromXmlName(self._name)
name = property(_getName, _setName)
def _getAttributes(self):
return self._attributes
def _setAttributes(self, attributes):
self._attributes = Attributes(self, attributes)
attributes = property(_getAttributes, _setAttributes)
def insertText(self, data, insertBefore=None):
data = infosetFilter.coerceCharacters(data)
builder.Element.insertText(self, data, insertBefore)
def appendChild(self, child):
builder.Element.appendChild(self, child)
class Comment(builder.Comment):
def __init__(self, data):
data = infosetFilter.coerceComment(data)
builder.Comment.__init__(self, data)
def _setData(self, data):
data = infosetFilter.coerceComment(data)
self._element.text = data
def _getData(self):
return self._element.text
data = property(_getData, _setData)
self.elementClass = Element
self.commentClass = builder.Comment
# self.fragmentClass = builder.DocumentFragment
_base.TreeBuilder.__init__(self, namespaceHTMLElements)
def reset(self):
_base.TreeBuilder.reset(self)
self.insertComment = self.insertCommentInitial
self.initial_comments = []
self.doctype = None
def testSerializer(self, element):
return testSerializer(element)
def getDocument(self):
if fullTree:
return self.document._elementTree
else:
return self.document._elementTree.getroot()
def getFragment(self):
fragment = []
element = self.openElements[0]._element
if element.text:
fragment.append(element.text)
fragment.extend(list(element))
if element.tail:
fragment.append(element.tail)
return fragment
def insertDoctype(self, token):
name = token["name"]
publicId = token["publicId"]
systemId = token["systemId"]
if not name:
warnings.warn("lxml cannot represent empty doctype", DataLossWarning)
self.doctype = None
else:
coercedName = self.infosetFilter.coerceElement(name)
if coercedName != name:
warnings.warn("lxml cannot represent non-xml doctype", DataLossWarning)
doctype = self.doctypeClass(coercedName, publicId, systemId)
self.doctype = doctype
def insertCommentInitial(self, data, parent=None):
self.initial_comments.append(data)
def insertCommentMain(self, data, parent=None):
if (parent == self.document and
self.document._elementTree.getroot()[-1].tag == comment_type):
warnings.warn("lxml cannot represent adjacent comments beyond the root elements", DataLossWarning)
super(TreeBuilder, self).insertComment(data, parent)
def insertRoot(self, token):
"""Create the document root"""
# Because of the way libxml2 works, it doesn't seem to be possible to
# alter information like the doctype after the tree has been parsed.
        # Therefore we need to use the built-in parser to create our initial
# tree, after which we can add elements like normal
docStr = ""
if self.doctype:
assert self.doctype.name
docStr += "<!DOCTYPE %s" % self.doctype.name
if (self.doctype.publicId is not None or
self.doctype.systemId is not None):
docStr += (' PUBLIC "%s" ' %
(self.infosetFilter.coercePubid(self.doctype.publicId or "")))
if self.doctype.systemId:
sysid = self.doctype.systemId
if sysid.find("'") >= 0 and sysid.find('"') >= 0:
warnings.warn("DOCTYPE system cannot contain single and double quotes", DataLossWarning)
sysid = sysid.replace("'", 'U00027')
if sysid.find("'") >= 0:
docStr += '"%s"' % sysid
else:
docStr += "'%s'" % sysid
else:
docStr += "''"
docStr += ">"
if self.doctype.name != token["name"]:
warnings.warn("lxml cannot represent doctype with a different name to the root element", DataLossWarning)
docStr += "<THIS_SHOULD_NEVER_APPEAR_PUBLICLY/>"
root = etree.fromstring(docStr)
# Append the initial comments:
for comment_token in self.initial_comments:
root.addprevious(etree.Comment(comment_token["data"]))
# Create the root document and add the ElementTree to it
self.document = self.documentClass()
self.document._elementTree = root.getroottree()
# Give the root element the right name
name = token["name"]
namespace = token.get("namespace", self.defaultNamespace)
if namespace is None:
etree_tag = name
else:
etree_tag = "{%s}%s" % (namespace, name)
root.tag = etree_tag
# Add the root element to the internal child/open data structures
root_element = self.elementClass(name, namespace)
root_element._element = root
self.document._childNodes.append(root_element)
self.openElements.append(root_element)
# Reset to the default insert comment function
self.insertComment = self.insertCommentMain
| mit |
alfcrisci/httpie | tests/test_downloads.py | 49 | 4859 | import os
import time
import pytest
from requests.structures import CaseInsensitiveDict
from httpie.compat import urlopen
from httpie.downloads import (
parse_content_range, filename_from_content_disposition, filename_from_url,
get_unique_filename, ContentRangeError, Download,
)
from utils import http, TestEnvironment
class Response(object):
# noinspection PyDefaultArgument
def __init__(self, url, headers={}, status_code=200):
self.url = url
self.headers = CaseInsensitiveDict(headers)
self.status_code = status_code
class TestDownloadUtils:
def test_Content_Range_parsing(self):
parse = parse_content_range
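        # From the cases below: the second argument is the byte offset the download
        # resumed from, and a successful parse returns the total instance length
        # (200 here); inconsistent or malformed headers raise ContentRangeError.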
assert parse('bytes 100-199/200', 100) == 200
assert parse('bytes 100-199/*', 100) == 200
# missing
pytest.raises(ContentRangeError, parse, None, 100)
# syntax error
pytest.raises(ContentRangeError, parse, 'beers 100-199/*', 100)
# unexpected range
pytest.raises(ContentRangeError, parse, 'bytes 100-199/*', 99)
# invalid instance-length
pytest.raises(ContentRangeError, parse, 'bytes 100-199/199', 100)
# invalid byte-range-resp-spec
pytest.raises(ContentRangeError, parse, 'bytes 100-99/199', 100)
# invalid byte-range-resp-spec
pytest.raises(ContentRangeError, parse, 'bytes 100-100/*', 100)
@pytest.mark.parametrize('header, expected_filename', [
('attachment; filename=hello-WORLD_123.txt', 'hello-WORLD_123.txt'),
('attachment; filename=".hello-WORLD_123.txt"', 'hello-WORLD_123.txt'),
('attachment; filename="white space.txt"', 'white space.txt'),
(r'attachment; filename="\"quotes\".txt"', '"quotes".txt'),
('attachment; filename=/etc/hosts', 'hosts'),
('attachment; filename=', None)
])
def test_Content_Disposition_parsing(self, header, expected_filename):
assert filename_from_content_disposition(header) == expected_filename
def test_filename_from_url(self):
assert 'foo.txt' == filename_from_url(
url='http://example.org/foo',
content_type='text/plain'
)
assert 'foo.html' == filename_from_url(
url='http://example.org/foo',
content_type='text/html; charset=utf8'
)
assert 'foo' == filename_from_url(
url='http://example.org/foo',
content_type=None
)
assert 'foo' == filename_from_url(
url='http://example.org/foo',
content_type='x-foo/bar'
)
def test_unique_filename(self):
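        # attempts(n) builds a fake os.path.exists-style predicate: it reports every
        # candidate filename as taken for its first n calls and as free on the next,
        # so get_unique_filename is expected to settle on 'foo.bar' for n=0 and on
        # 'foo.bar-<n>' otherwise, as asserted below.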
def attempts(unique_on_attempt=0):
# noinspection PyUnresolvedReferences,PyUnusedLocal
def exists(filename):
if exists.attempt == unique_on_attempt:
return False
exists.attempt += 1
return True
exists.attempt = 0
return exists
assert 'foo.bar' == get_unique_filename('foo.bar', attempts(0))
assert 'foo.bar-1' == get_unique_filename('foo.bar', attempts(1))
assert 'foo.bar-10' == get_unique_filename('foo.bar', attempts(10))
class TestDownloads:
# TODO: more tests
def test_actual_download(self, httpbin):
url = httpbin.url + '/robots.txt'
body = urlopen(url).read().decode()
env = TestEnvironment(stdin_isatty=True, stdout_isatty=False)
r = http('--download', url, env=env)
assert 'Downloading' in r.stderr
assert '[K' in r.stderr
assert 'Done' in r.stderr
assert body == r
def test_download_with_Content_Length(self, httpbin):
devnull = open(os.devnull, 'w')
download = Download(output_file=devnull, progress_file=devnull)
download.start(Response(
url=httpbin.url + '/',
headers={'Content-Length': 10}
))
time.sleep(1.1)
download.chunk_downloaded(b'12345')
time.sleep(1.1)
download.chunk_downloaded(b'12345')
download.finish()
assert not download.interrupted
def test_download_no_Content_Length(self, httpbin):
devnull = open(os.devnull, 'w')
download = Download(output_file=devnull, progress_file=devnull)
download.start(Response(url=httpbin.url + '/'))
time.sleep(1.1)
download.chunk_downloaded(b'12345')
download.finish()
assert not download.interrupted
def test_download_interrupted(self, httpbin):
devnull = open(os.devnull, 'w')
download = Download(output_file=devnull, progress_file=devnull)
download.start(Response(
url=httpbin.url + '/',
headers={'Content-Length': 5}
))
download.chunk_downloaded(b'1234')
download.finish()
assert download.interrupted
| bsd-3-clause |
elvishknight1/Terranova | venv/Lib/site-packages/pip/_vendor/requests/packages/chardet/euckrfreq.py | 3121 | 45978 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Sampling from about 20M text materials including literature and computer technology
# 128 --> 0.79
# 256 --> 0.92
# 512 --> 0.986
# 1024 --> 0.99944
# 2048 --> 0.99999
#
# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
# Random Distribution Ration = 512 / (2350-512) = 0.279.
#
# Typical Distribution Ratio
EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
EUCKR_TABLE_SIZE = 2352
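# The table that follows maps an EUC-KR character's computed order to a frequency
# rank; chardet's character-distribution analyser treats ranks below 512 as
# "frequently used" when comparing the observed ratio against
# EUCKR_TYPICAL_DISTRIBUTION_RATIO above (a summary of how chardet consumes this
# table, not something stated in this file).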
# Char to FreqOrder table ,
EUCKRCharToFreqOrder = ( \
13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87,
1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734,
945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622,
708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750,
1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19,
1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885,
127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841,
1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910,
1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610,
269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939,
887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934,
1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7,
103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250,
423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824,
532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745,
619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61,
191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032,
2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075,
1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274,
962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721,
1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463,
2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285,
2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10,
2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350,
1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247,
1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259,
1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262,
2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273,
295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117,
432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312,
2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229,
2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315,
501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170,
1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485,
1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494,
434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544,
2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562,
2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431,
202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406,
2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256
#Everything below is of no interest for detection purpose
2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658,
2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674,
2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690,
2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 880,2700,2701,2702,2703,2704,
2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720,
2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734,
2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750,
2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765,
2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779,
2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793,
2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809,
2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824,
2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840,
2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856,
1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869,
2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883,
2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899,
2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915,
2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331,
2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945,
2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961,
2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976,
2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992,
2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008,
3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021,
3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037,
3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052,
3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066,
3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3076,1559,3077,3078,3079,3080,
3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095,
3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110,
3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124,
3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140,
3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156,
3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172,
3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187,
3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201,
3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217,
3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233,
3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248,
3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264,
3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279,
3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295,
3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311,
3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327,
3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343,
3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359,
3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374,
3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389,
3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405,
3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338,
3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432,
3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446,
3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3456,3457,3458,3459,1567,1191,
3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471,
3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486,
1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499,
1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513,
3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525,
3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541,
3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557,
3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573,
3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587,
3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603,
3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618,
3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632,
3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648,
3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663,
3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679,
3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695,
3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583,
1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722,
3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738,
3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753,
3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767,
3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782,
3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796,
3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810,
3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3820,3821,1590,3822,3823,1591,
1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836,
3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851,
3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866,
3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880,
3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895,
1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905,
3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921,
3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934,
3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603,
3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964,
3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978,
3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993,
3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009,
4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024,
4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040,
1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055,
4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069,
4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083,
4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098,
4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113,
4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610,
4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142,
4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157,
4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173,
4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4184,4185,4186,4187,4188,4189,
4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205,
4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220,
4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234,
4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249,
4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265,
4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279,
4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294,
4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310,
4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326,
4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341,
4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357,
4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371,
4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387,
4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403,
4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418,
4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432,
4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446,
4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461,
4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476,
4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491,
4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507,
4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623,
4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536,
4537,4538,4539,4540,4541,4542,4543, 975,4544,4545,4546,4547,4548,4549,4550,4551,
4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567,
4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581,
4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627,
4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611,
4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626,
4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642,
4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657,
4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672,
4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687,
1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700,
4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715,
4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731,
4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633,
4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758,
4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773,
4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788,
4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803,
4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817,
4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832,
4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847,
4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863,
4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879,
4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893,
4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909,
4910,4911,1642,4912,4913,4914,1364,4915,4916,4917,4918,4919,4920,4921,4922,4923,
4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938,
4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954,
4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970,
4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645,
4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999,
5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078,
5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028,
1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042,
5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056,
5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072,
5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087,
5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103,
5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118,
1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132,
5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,
5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161,
5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177,
5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192,
5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206,
1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218,
5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,
5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249,
5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262,
5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,
5279,5280,5281,5282,5283,1659,5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,
5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308,
5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323,
5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338,
5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353,
5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369,
5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385,
5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400,
5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415,
5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430,
5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445,
5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461,
5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477,
5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491,
5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507,
5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523,
5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539,
5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554,
5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570,
1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585,
5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600,
5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615,
5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,
5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,
5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,
1666,5661,5662,5663,5664,5665,5666,5667,5668,1667,5669,1668,5670,5671,5672,5673,
5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688,
5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703,
5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716,
5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729,
5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744,
1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758,
5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773,
1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786,
5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801,
5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815,
5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831,
5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847,
5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862,
5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876,
5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889,
5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,
5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,
5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687,
5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,
5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963,
5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,
5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993,
5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009,
6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025,
6026,6027,1265,6028,6029,1691,6030,6031,6032,6033,6034,6035,6036,6037,6038,6039,
6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055,
6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071,
6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086,
6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102,
6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118,
6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133,
6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147,
6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,
6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,
6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194,
6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,
6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225,
6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,
6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256,
6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271, #1024
6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287,
6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699,
6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317,
6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,
6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347,
6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,
6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,
6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,
6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411,
6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425,
6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440,
6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456,
6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,
6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488,
6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266,
6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,
6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535,
6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551,
1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565,
6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581,
6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597,
6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613,
6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629,
6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644,
1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,6659,
6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674,
1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689,
6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705,
6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721,
6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736,
1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748,
6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763,
6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779,
6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794,
6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711,
6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825,
6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840,
6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856,
6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872,
6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888,
6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903,
6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918,
6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934,
6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950,
6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,
6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981,
6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996,
6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011,
7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,
7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,7042,
7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,
7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074,
7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090,
7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106,
7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122,
7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138,
7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154,
7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170,
7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186,
7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202,
7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216,
7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232,
7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248,
7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264,
7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280,
7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296,
7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312,
7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327,
7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343,
7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359,
7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375,
7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391,
7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407,
7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423,
7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,7439,
7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455,
7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,
7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,
7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503,
7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,
7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,
7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551,
7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567,
7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583,
7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599,
7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615,
7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631,
7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647,
7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663,
7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679,
7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695,
7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711,
7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727,
7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743,
7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759,
7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,
7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,
7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807,
7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,
7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,
7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,
7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,
7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,
7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,
7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,
7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935,
7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951,
7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967,
7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983,
7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999,
8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,
8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031,
8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047,
8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,
8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,
8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,
8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111,
8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127,
8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,
8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,
8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,
8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,
8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,
8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,
8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,
8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,
8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,
8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,
8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,
8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,
8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,
8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,
8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,
8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,
8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,
8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,
8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,
8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,
8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,
8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,
8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,
8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,
8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,
8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,
8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,
8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,
8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,
8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,
8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,
8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,
8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,
8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,
8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,
8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,
8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,
8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,
8736,8737,8738,8739,8740,8741)
# flake8: noqa
| artistic-2.0 |